/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <lz4.h>
#include <lz4hc.h>
#include <sys/stat.h>
#include <zlib.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/globals.h"
#include "base/logging.h"  // For VLOG.
#include "base/stl_util.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "class_root-inl.h"
#include "compiled_method.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "driver/compiler_options.h"
#include "elf/elf_utils.h"
#include "elf_file.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/region_space.h"
#include "gc/space/space-inl.h"
#include "gc/verification.h"
#include "handle_scope-inl.h"
#include "image-inl.h"
#include "imt_conflict_table.h"
#include "indirect_reference_table-inl.h"
#include "intern_table-inl.h"
#include "jni/java_vm_ext-inl.h"
#include "jni/jni_internal.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "mirror/var_handle.h"
#include "nterp_helpers.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "optimizing/intrinsic_objects.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "subtype_check.h"
#include "well_known_classes.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {
namespace linker {

// The actual value of `kImageClassTableMinLoadFactor` is irrelevant because image class tables
// are never resized, but we still need to pass a reasonable value to the constructor.
constexpr double kImageClassTableMinLoadFactor = 0.5;
// We use `kImageClassTableMaxLoadFactor` to determine the buffer size for image class tables
// to make them full. We never insert additional elements to them, so we do not want to waste
// extra memory. And unlike runtime class tables, we do not want this to depend on runtime
// properties (see `Runtime::GetHashTableMaxLoadFactor()` checking for low memory mode).
constexpr double kImageClassTableMaxLoadFactor = 0.7;

// The actual value of `kImageInternTableMinLoadFactor` is irrelevant because image intern tables
// are never resized, but we still need to pass a reasonable value to the constructor.
constexpr double kImageInternTableMinLoadFactor = 0.5;
// We use `kImageInternTableMaxLoadFactor` to determine the buffer size for image intern tables
// to make them full. We never insert additional elements to them, so we do not want to waste
// extra memory. And unlike runtime intern tables, we do not want this to depend on runtime
// properties (see `Runtime::GetHashTableMaxLoadFactor()` checking for low memory mode).
constexpr double kImageInternTableMaxLoadFactor = 0.7;

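// Compress `source` into `storage` according to `image_storage_mode` (LZ4 or LZ4HC), or return
// `source` unchanged for uncompressed images. Debug builds decompress the result again and
// verify that it matches the input.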
static ArrayRef<const uint8_t> MaybeCompressData(ArrayRef<const uint8_t> source,
                                                 ImageHeader::StorageMode image_storage_mode,
                                                 /*out*/ dchecked_vector<uint8_t>* storage) {
  const uint64_t compress_start_time = NanoTime();

  switch (image_storage_mode) {
    case ImageHeader::kStorageModeLZ4: {
      storage->resize(LZ4_compressBound(source.size()));
      size_t data_size = LZ4_compress_default(
          reinterpret_cast<char*>(const_cast<uint8_t*>(source.data())),
          reinterpret_cast<char*>(storage->data()),
          source.size(),
          storage->size());
      storage->resize(data_size);
      break;
    }
    case ImageHeader::kStorageModeLZ4HC: {
      // Bound is the same as for non-HC.
      storage->resize(LZ4_compressBound(source.size()));
      size_t data_size = LZ4_compress_HC(
          reinterpret_cast<const char*>(const_cast<uint8_t*>(source.data())),
          reinterpret_cast<char*>(storage->data()),
          source.size(),
          storage->size(),
          LZ4HC_CLEVEL_MAX);
      storage->resize(data_size);
      break;
    }
    case ImageHeader::kStorageModeUncompressed: {
      return source;
    }
    default: {
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    }
  }

  DCHECK(image_storage_mode == ImageHeader::kStorageModeLZ4 ||
         image_storage_mode == ImageHeader::kStorageModeLZ4HC);
  VLOG(compiler) << "Compressed from " << source.size() << " to " << storage->size() << " in "
                 << PrettyDuration(NanoTime() - compress_start_time);
  if (kIsDebugBuild) {
    dchecked_vector<uint8_t> decompressed(source.size());
    const size_t decompressed_size = LZ4_decompress_safe(
        reinterpret_cast<char*>(storage->data()),
        reinterpret_cast<char*>(decompressed.data()),
        storage->size(),
        decompressed.size());
    CHECK_EQ(decompressed_size, decompressed.size());
    CHECK_EQ(memcmp(source.data(), decompressed.data(), source.size()), 0) << image_storage_mode;
  }
  return ArrayRef<const uint8_t>(*storage);
}

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

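// Allocate the array of objects that must stay live in the boot image even if otherwise
// unreferenced: the pre-allocated throwables, the cleared-JNI-weak sentinel and, if present,
// the Integer.valueOf() cache, in the order defined by ImageHeader::BootImageLiveObjects.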
static ObjPtr<mirror::ObjectArray<mirror::Object>> AllocateBootImageLiveObjects(
    Thread* self, Runtime* runtime) REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = runtime->GetClassLinker();
  // The objects used for the Integer.valueOf() intrinsic must remain live even if references
  // to them are removed using reflection. Image roots are not accessible through reflection,
  // so the array we construct here shall keep them alive.
  StackHandleScope<1> hs(self);
  Handle<mirror::ObjectArray<mirror::Object>> integer_cache =
      hs.NewHandle(IntrinsicObjects::LookupIntegerCache(self, class_linker));
  size_t live_objects_size =
      enum_cast<size_t>(ImageHeader::kIntrinsicObjectsStart) +
      ((integer_cache != nullptr) ? (/* cache */ 1u + integer_cache->GetLength()) : 0u);
  ObjPtr<mirror::ObjectArray<mirror::Object>> live_objects =
      mirror::ObjectArray<mirror::Object>::Alloc(
          self, GetClassRoot<mirror::ObjectArray<mirror::Object>>(class_linker), live_objects_size);
  if (live_objects == nullptr) {
    return nullptr;
  }
  int32_t index = 0u;
  auto set_entry = [&](ImageHeader::BootImageLiveObjects entry,
                       ObjPtr<mirror::Object> value) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(index, enum_cast<int32_t>(entry));
    live_objects->Set</*kTransactionActive=*/ false>(index, value);
    ++index;
  };
  set_entry(ImageHeader::kOomeWhenThrowingException,
            runtime->GetPreAllocatedOutOfMemoryErrorWhenThrowingException());
  set_entry(ImageHeader::kOomeWhenThrowingOome,
            runtime->GetPreAllocatedOutOfMemoryErrorWhenThrowingOOME());
  set_entry(ImageHeader::kOomeWhenHandlingStackOverflow,
            runtime->GetPreAllocatedOutOfMemoryErrorWhenHandlingStackOverflow());
  set_entry(ImageHeader::kNoClassDefFoundError, runtime->GetPreAllocatedNoClassDefFoundError());
  set_entry(ImageHeader::kClearedJniWeakSentinel, runtime->GetSentinel().Read());

  DCHECK_EQ(index, enum_cast<int32_t>(ImageHeader::kIntrinsicObjectsStart));
  if (integer_cache != nullptr) {
    live_objects->Set(index++, integer_cache.Get());
    for (int32_t i = 0, length = integer_cache->GetLength(); i != length; ++i) {
      live_objects->Set(index++, integer_cache->Get(i));
    }
  }
  CHECK_EQ(index, live_objects->GetLength());

  if (kIsDebugBuild && integer_cache != nullptr) {
    CHECK_EQ(integer_cache.Get(), IntrinsicObjects::GetIntegerValueOfCache(live_objects));
    for (int32_t i = 0, len = integer_cache->GetLength(); i != len; ++i) {
      CHECK_EQ(integer_cache->GetWithoutChecks(i),
               IntrinsicObjects::GetIntegerValueOfObject(live_objects, i));
    }
  }
  return live_objects;
}

template <typename MirrorType>
ObjPtr<MirrorType> ImageWriter::DecodeGlobalWithoutRB(JavaVMExt* vm, jobject obj) {
  DCHECK_EQ(IndirectReferenceTable::GetIndirectRefKind(obj), kGlobal);
  return ObjPtr<MirrorType>::DownCast(vm->globals_.Get<kWithoutReadBarrier>(obj));
}

template <typename MirrorType>
ObjPtr<MirrorType> ImageWriter::DecodeWeakGlobalWithoutRB(
    JavaVMExt* vm, Thread* self, jobject obj) {
  DCHECK_EQ(IndirectReferenceTable::GetIndirectRefKind(obj), kWeakGlobal);
  DCHECK(vm->MayAccessWeakGlobals(self));
  return ObjPtr<MirrorType>::DownCast(vm->weak_globals_.Get<kWithoutReadBarrier>(obj));
}

ObjPtr<mirror::ClassLoader> ImageWriter::GetAppClassLoader() const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return compiler_options_.IsAppImage()
      ? ObjPtr<mirror::ClassLoader>::DownCast(Thread::Current()->DecodeJObject(app_class_loader_))
      : nullptr;
}

bool ImageWriter::IsImageDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
  // For boot image, we keep all dex caches.
  if (compiler_options_.IsBootImage()) {
    return true;
  }
  // Dex caches already in the boot image do not belong to the image being written.
  if (IsInBootImage(dex_cache.Ptr())) {
    return false;
  }
  // Dex caches for the boot class path components that are not part of the boot image
  // cannot be garbage collected in PrepareImageAddressSpace() but we do not want to
  // include them in the app image.
  if (!ContainsElement(compiler_options_.GetDexFilesForOatFile(), dex_cache->GetDexFile())) {
    return false;
  }
  return true;
}

static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
  auto visitor = [](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(obj != nullptr);
    Class* klass = obj->GetClass();
    if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
      ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
      // Null out the cookie to enable determinism. b/34090128
      field->SetObject</*kTransactionActive*/false>(obj, nullptr);
    }
  };
  Runtime::Current()->GetHeap()->VisitObjects(visitor);
}

bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
  target_ptr_size_ = InstructionSetPointerSize(compiler_options_.GetInstructionSet());

  Thread* const self = Thread::Current();

  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(self);
    {
      TimingLogger::ScopedTiming t("PruneNonImageClasses", timings);
      PruneNonImageClasses();  // Remove junk
    }

    if (UNLIKELY(!CreateImageRoots())) {
      self->AssertPendingOOMException();
      self->ClearException();
      return false;
    }

    if (compiler_options_.IsAppImage()) {
      TimingLogger::ScopedTiming t("ClearDexFileCookies", timings);
      // Clear dex file cookies for app images to enable app image determinism. This is required
      // since the cookie field contains long pointers to DexFiles which are not deterministic.
      // b/34090128
      ClearDexFileCookies();
    }
  }

  {
    TimingLogger::ScopedTiming t("CollectGarbage", timings);
    heap->CollectGarbage(/* clear_soft_references */ false);  // Remove garbage.
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(self);
    CheckNonImageClassesRemoved();
  }

  // From this point on, there should be no GC, so we should not use unnecessary read barriers.
  ScopedDebugDisallowReadBarriers sddrb(self);

  {
    // All remaining weak interns are referenced. Promote them to strong interns. Whether a
    // string was strongly or weakly interned, we shall make it strongly interned in the image.
    TimingLogger::ScopedTiming t("PromoteInterns", timings);
    ScopedObjectAccess soa(self);
    PromoteWeakInternsToStrong(self);
  }

  {
    TimingLogger::ScopedTiming t("CalculateNewObjectOffsets", timings);
    ScopedObjectAccess soa(self);
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_
  // and bin size sums being calculated.
  TimingLogger::ScopedTiming t("AllocMemory", timings);
  return AllocMemory();
}

void ImageWriter::CopyMetadata() {
  DCHECK(compiler_options_.IsAppImage());
  CHECK_EQ(image_infos_.size(), 1u);

  const ImageInfo& image_info = image_infos_.back();
  dchecked_vector<ImageSection> image_sections = image_info.CreateImageSections().second;

  auto* sfo_section_base = reinterpret_cast<AppImageReferenceOffsetInfo*>(
      image_info.image_.Begin() +
      image_sections[ImageHeader::kSectionStringReferenceOffsets].Offset());

  std::copy(image_info.string_reference_offsets_.begin(),
            image_info.string_reference_offsets_.end(),
            sfo_section_base);
}

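// Check whether `str` is present in the strong intern table. Only strings with an initialized
// stored hash code can be interned.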
// NO_THREAD_SAFETY_ANALYSIS: Avoid locking the `Locks::intern_table_lock_` while single-threaded.
bool ImageWriter::IsStronglyInternedString(ObjPtr<mirror::String> str) NO_THREAD_SAFETY_ANALYSIS {
  uint32_t hash = static_cast<uint32_t>(str->GetStoredHashCode());
  if (hash == 0u && str->ComputeHashCode() != 0) {
    // A string with uninitialized hash code cannot be interned.
    return false;
  }
  InternTable* intern_table = Runtime::Current()->GetInternTable();
  for (InternTable::Table::InternalTable& table : intern_table->strong_interns_.tables_) {
    auto it = table.set_.FindWithHash(GcRoot<mirror::String>(str), hash);
    if (it != table.set_.end()) {
      return it->Read<kWithoutReadBarrier>() == str;
    }
  }
  return false;
}

bool ImageWriter::IsInternedAppImageStringReference(ObjPtr<mirror::Object> referred_obj) const {
  return referred_obj != nullptr &&
         !IsInBootImage(referred_obj.Ptr()) &&
         referred_obj->IsString() &&
         IsStronglyInternedString(referred_obj->AsString());
}

// Helper class that erases the image file if it isn't properly flushed and closed.
class ImageWriter::ImageFileGuard {
 public:
  ImageFileGuard() noexcept = default;
  ImageFileGuard(ImageFileGuard&& other) noexcept = default;
  ImageFileGuard& operator=(ImageFileGuard&& other) noexcept = default;

  ~ImageFileGuard() {
    if (image_file_ != nullptr) {
      // Failure, erase the image file.
      image_file_->Erase();
    }
  }

  void reset(File* image_file) {
    image_file_.reset(image_file);
  }

  bool operator==(std::nullptr_t) {
    return image_file_ == nullptr;
  }

  bool operator!=(std::nullptr_t) {
    return image_file_ != nullptr;
  }

  File* operator->() const {
    return image_file_.get();
  }

  bool WriteHeaderAndClose(const std::string& image_filename, const ImageHeader* image_header) {
    // The header is uncompressed since it contains whether the image is compressed or not.
    if (!image_file_->PwriteFully(image_header, sizeof(ImageHeader), 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      return false;
    }

    // FlushCloseOrErase() takes care of erasing, so the destructor does not need
    // to do that whether the FlushCloseOrErase() succeeds or fails.
    std::unique_ptr<File> image_file = std::move(image_file_);
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }

    return true;
  }

 private:
  std::unique_ptr<File> image_file_;
};

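// Write out all image files: create the headers, copy and fix up native data and objects,
// optionally compress the image data in blocks, and append the block metadata and the image
// bitmap. The primary image header is written last so that its checksum can incorporate the
// checksums of the secondary images.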
bool ImageWriter::Write(int image_fd,
                        const std::vector<std::string>& image_filenames,
                        size_t component_count) {
  // If image_fd or oat_fd are not File::kInvalidFd then we may have empty strings in
  // image_filenames or oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != File::kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  DCHECK(!oat_filenames_.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames_.size());

  Thread* const self = Thread::Current();
  ScopedDebugDisallowReadBarriers sddrb(self);
  {
    ScopedObjectAccess soa(self);
    for (size_t i = 0; i < oat_filenames_.size(); ++i) {
      CreateHeader(i, component_count);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(self);
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  if (compiler_options_.IsAppImage()) {
    CopyMetadata();
  }

  // Primary image header shall be written last for two reasons. First, this ensures
  // that we shall not end up with a valid primary image and invalid secondary image.
  // Second, its checksum shall include the checksums of the secondary images (XORed).
  // This way only the primary image checksum needs to be checked to determine whether
  // any of the images or oat files are out of date. (Oat file checksums are included
  // in the image checksum calculation.)
  ImageHeader* primary_header = reinterpret_cast<ImageHeader*>(image_infos_[0].image_.Begin());
  ImageFileGuard primary_image_file;
  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const std::string& image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    ImageFileGuard image_file;
    if (image_fd != File::kInvalidFd) {
      // Ignore image_filename, it is supplied only for better diagnostics.
      image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
      // Empty the file in case it already exists.
      if (image_file != nullptr) {
        TEMP_FAILURE_RETRY(image_file->SetLength(0));
        TEMP_FAILURE_RETRY(image_file->Flush());
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename.c_str()));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    // Make file world readable if we have created it, i.e. when not passed as file descriptor.
    if (image_fd == -1 && !compiler_options_.IsAppImage() && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      return false;
    }

    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_.Begin());

    // Block sources (from the image).
    const bool is_compressed = image_storage_mode_ != ImageHeader::kStorageModeUncompressed;
    dchecked_vector<std::pair<uint32_t, uint32_t>> block_sources;
    dchecked_vector<ImageHeader::Block> blocks;

    // Add a set of solid blocks such that no block is larger than the maximum size. A solid block
    // is a block that must be decompressed all at once.
    auto add_blocks = [&](uint32_t offset, uint32_t size) {
      while (size != 0u) {
        const uint32_t cur_size = std::min(size, compiler_options_.MaxImageBlockSize());
        block_sources.emplace_back(offset, cur_size);
        offset += cur_size;
        size -= cur_size;
      }
    };

    add_blocks(sizeof(ImageHeader), image_header->GetImageSize() - sizeof(ImageHeader));

    // Checksum of compressed image data and header.
    uint32_t image_checksum = adler32(0L, Z_NULL, 0);
    image_checksum = adler32(image_checksum,
                             reinterpret_cast<const uint8_t*>(image_header),
                             sizeof(ImageHeader));
    // Copy and compress blocks.
    size_t out_offset = sizeof(ImageHeader);
    for (const std::pair<uint32_t, uint32_t> block : block_sources) {
      ArrayRef<const uint8_t> raw_image_data(image_info.image_.Begin() + block.first,
                                             block.second);
      dchecked_vector<uint8_t> compressed_data;
      ArrayRef<const uint8_t> image_data =
          MaybeCompressData(raw_image_data, image_storage_mode_, &compressed_data);

      if (!is_compressed) {
        // For uncompressed, preserve alignment since the image will be directly mapped.
        out_offset = block.first;
      }

      // Fill in the compressed location of the block.
      blocks.emplace_back(ImageHeader::Block(
          image_storage_mode_,
          /*data_offset=*/ out_offset,
          /*data_size=*/ image_data.size(),
          /*image_offset=*/ block.first,
          /*image_size=*/ block.second));

      // Write out the image + fields + methods.
      if (!image_file->PwriteFully(image_data.data(), image_data.size(), out_offset)) {
        PLOG(ERROR) << "Failed to write image file data " << image_filename;
        image_file->Erase();
        return false;
      }
      out_offset += image_data.size();
      image_checksum = adler32(image_checksum, image_data.data(), image_data.size());
    }

    // Write the block metadata directly after the image sections.
    // Note: This is not part of the mapped image and is not preserved after decompressing; it is
    // only used for image loading. For this reason, only write it out for compressed images.
    if (is_compressed) {
      // Align up since the compressed data is not necessarily aligned.
      out_offset = RoundUp(out_offset, alignof(ImageHeader::Block));
      CHECK(!blocks.empty());
      const size_t blocks_bytes = blocks.size() * sizeof(blocks[0]);
      if (!image_file->PwriteFully(&blocks[0], blocks_bytes, out_offset)) {
        PLOG(ERROR) << "Failed to write image blocks " << image_filename;
        image_file->Erase();
        return false;
      }
      image_header->blocks_offset_ = out_offset;
      image_header->blocks_count_ = blocks.size();
      out_offset += blocks_bytes;
    }

    // Data size includes everything except the bitmap.
    image_header->data_size_ = out_offset - sizeof(ImageHeader);

    // Update and write the bitmap section. Note that the bitmap section is relative to the
    // possibly compressed image.
    ImageSection& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    out_offset = RoundUp(out_offset, kPageSize);
    bitmap_section = ImageSection(out_offset, bitmap_section.Size());

    if (!image_file->PwriteFully(image_info.image_bitmap_.Begin(),
                                 bitmap_section.Size(),
                                 bitmap_section.Offset())) {
      PLOG(ERROR) << "Failed to write image file bitmap " << image_filename;
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      return false;
    }

    // Calculate the image checksum of the remaining data.
    image_checksum = adler32(image_checksum,
                             reinterpret_cast<const uint8_t*>(image_info.image_bitmap_.Begin()),
                             bitmap_section.Size());
    image_header->SetImageChecksum(image_checksum);

    if (VLOG_IS_ON(compiler)) {
      const size_t separately_written_section_size = bitmap_section.Size();
      const size_t total_uncompressed_size = image_info.image_size_ +
          separately_written_section_size;
      const size_t total_compressed_size = out_offset + separately_written_section_size;

      VLOG(compiler) << "Dex2Oat:uncompressedImageSize = " << total_uncompressed_size;
      if (total_uncompressed_size != total_compressed_size) {
        VLOG(compiler) << "Dex2Oat:compressedImageSize = " << total_compressed_size;
      }
    }

    CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()))
        << "Bitmap should be at the end of the file";

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // Delay the writing of the primary image header until after writing secondary images.
    if (i == 0u) {
      primary_image_file = std::move(image_file);
    } else {
      if (!image_file.WriteHeaderAndClose(image_filename, image_header)) {
        return false;
      }
      // Update the primary image checksum with the secondary image checksum.
      primary_header->SetImageChecksum(primary_header->GetImageChecksum() ^ image_checksum);
    }
  }
  DCHECK(primary_image_file != nullptr);
  if (!primary_image_file.WriteHeaderAndClose(image_filenames[0], primary_header)) {
    return false;
  }

  return true;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object, size_t oat_index) const {
  BinSlot bin_slot = GetImageBinSlot(object, oat_index);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  size_t offset = image_info.GetBinSlotOffset(bin_slot.GetBin()) + bin_slot.GetOffset();
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

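// Stash the assigned bin slot in the object's lock word as a forwarding address. A hash code
// stored in the lock word is saved in `saved_hashcode_map_` so it can be restored later;
// locked objects are unexpected here and abort the compilation.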
void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked:
      FALLTHROUGH_INTENDED;
    case LockWord::kThinLocked: {
      std::ostringstream oss;
      bool thin = (lw.GetState() == LockWord::kThinLocked);
      oss << (thin ? "Thin" : "Fat")
          << " locked object " << object << "(" << object->PrettyTypeOf()
          << ") found during object copy";
      if (thin) {
        oss << ". Lock owner:" << lw.ThinLockOwner();
      }
      LOG(FATAL) << oss.str();
      UNREACHABLE();
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.insert(std::make_pair(object, lw.GetHashCode()));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()),
                      /*as_volatile=*/ false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

ImageWriter::Bin ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = Bin::kRegular;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    ObjPtr<mirror::Class> klass = object->GetClass<kVerifyNone, kWithoutReadBarrier>();
    if (klass->IsClassClass()) {
      bin = Bin::kClassVerified;
      ObjPtr<mirror::Class> as_klass = object->AsClass<kVerifyNone>();

      // Move known dirty objects into their own sections. This includes:
      //   - classes with dirty static fields.
      auto is_dirty = [&](ObjPtr<mirror::Class> k) REQUIRES_SHARED(Locks::mutator_lock_) {
        std::string temp;
        std::string_view descriptor = k->GetDescriptor(&temp);
        return dirty_image_objects_->find(descriptor) != dirty_image_objects_->end();
      };
      if (dirty_image_objects_ != nullptr && is_dirty(as_klass)) {
        bin = Bin::kKnownDirty;
      } else if (as_klass->IsVisiblyInitialized<kVerifyNone>()) {
        bin = Bin::kClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = as_klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = Bin::kClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = as_klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = Bin::kClassInitializedFinalStatics;
          }
        }
      }
    } else if (klass->IsStringClass<kVerifyNone>()) {
      bin = Bin::kString;  // Strings are almost always immutable (except for object header).
    } else if (!klass->HasSuperClass()) {
      // Only `j.l.Object` and primitive classes lack the superclass and
      // there are no instances of primitive classes.
      DCHECK(klass->IsObjectClass());
      // Instance of java.lang.Object, probably a lock object. This means it will be dirty when we
      // synchronize on it.
      bin = Bin::kMiscDirty;
    } else if (klass->IsDexCacheClass<kVerifyNone>()) {
      // Dex file field becomes dirty when the image is loaded.
      bin = Bin::kMiscDirty;
    }
    // else bin = kBinRegular
  }

  AssignImageBinSlot(object, oat_index, bin);
  return bin;
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index, Bin bin) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // Assign the oat index too.
  if (IsMultiImage()) {
    DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
    oat_index_map_.insert(std::make_pair(object, oat_index));
  } else {
    DCHECK(oat_index_map_.empty());
  }

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  // How many bytes the current bin is at (aligned).
  size_t current_offset = image_info.GetBinSlotSize(bin);
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.IncrementBinSlotSize(bin, offset_delta);

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  image_info.IncrementBinSlotCount(bin, 1u);

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClass<kWithoutReadBarrier>();
  // A method of an initialized class is highly unlikely to dirty since there are no entry points
  // to mutate.
  return declaring_class == nullptr ||
         declaring_class->GetStatus() != ClassStatus::kVisiblyInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetOffset(), image_info.GetBinSlotSize(bin_slot.GetBin()))
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object, size_t oat_index) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetOffset(), GetImageInfo(oat_index).GetBinSlotSize(bin_slot.GetBin()));

  return bin_slot;
}

void ImageWriter::UpdateImageBinSlotOffset(mirror::Object* object,
                                           size_t oat_index,
                                           size_t new_offset) {
  BinSlot old_bin_slot = GetImageBinSlot(object, oat_index);
  DCHECK_LT(new_offset, GetImageInfo(oat_index).GetBinSlotSize(old_bin_slot.GetBin()));
  BinSlot new_bin_slot(old_bin_slot.GetBin(), new_offset);
  object->SetLockWord(LockWord::FromForwardingAddress(new_bin_slot.Uint32Value()),
                      /*as_volatile=*/ false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

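// Allocate the anonymous memory for each image being written (sized from its image sections)
// and create the image bitmap covering the mirror object section.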
bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    const size_t length = RoundUp(image_info.CreateImageSections().first, kPageSize);

    std::string error_msg;
    image_info.image_ = MemMap::MapAnonymous("image writer image",
                                             length,
                                             PROT_READ | PROT_WRITE,
                                             /*low_4gb=*/ false,
                                             &error_msg);
    if (UNLIKELY(!image_info.image_.IsValid())) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_ = gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_.Begin(), RoundUp(image_info.image_end_, kPageSize));
    if (!image_info.image_bitmap_.IsValid()) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

// This visitor follows the references of an instance recursively and prunes the class
// if the type of any field is pruned.
class ImageWriter::PruneObjectReferenceVisitor {
 public:
  PruneObjectReferenceVisitor(ImageWriter* image_writer,
                              bool* early_exit,
                              HashSet<mirror::Object*>* visited,
                              bool* result)
      : image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {}

  ALWAYS_INLINE void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void VisitRoot(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
                                 MemberOffset offset,
                                 bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    if (ref == nullptr || visited_->find(ref) != visited_->end()) {
      return;
    }

    ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots =
        Runtime::Current()->GetClassLinker()->GetClassRoots();
    ObjPtr<mirror::Class> klass = ref->IsClass() ? ref->AsClass() : ref->GetClass();
    if (klass == GetClassRoot<mirror::Method>(class_roots) ||
        klass == GetClassRoot<mirror::Constructor>(class_roots)) {
      // Prune all classes using reflection because the content they hold will not be fixed up.
      *result_ = true;
    }

    if (ref->IsClass()) {
      *result_ = *result_ ||
          image_writer_->PruneImageClassInternal(ref->AsClass(), early_exit_, visited_);
    } else {
      // Record the object visited in case of circular reference.
      visited_->insert(ref);
      *result_ = *result_ ||
          image_writer_->PruneImageClassInternal(klass, early_exit_, visited_);
      ref->VisitReferences(*this, *this);
      // Clean up before exit for next call of this function.
      auto it = visited_->find(ref);
      DCHECK(it != visited_->end());
      visited_->erase(it);
    }
  }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                 ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

 private:
  ImageWriter* image_writer_;
  bool* early_exit_;
  HashSet<mirror::Object*>* visited_;
  bool* const result_;
};

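// Return true if `klass` must be pruned from an app image or boot image extension; see
// PruneImageClassInternal() for the criteria.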
bool ImageWriter::PruneImageClass(ObjPtr<mirror::Class> klass) {
  bool early_exit = false;
  HashSet<mirror::Object*> visited;
  return PruneImageClassInternal(klass, &early_exit, &visited);
}

bool ImageWriter::PruneImageClassInternal(
    ObjPtr<mirror::Class> klass,
    bool* early_exit,
    HashSet<mirror::Object*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compiler_options_.IsAppImage() || compiler_options_.IsBootImageExtension());
  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
    return false;
  }
  auto found = prune_class_memo_.find(klass.Ptr());
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependencies: return false but do not store the result in the memoization table.
  if (visited->find(klass.Ptr()) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->insert(klass.Ptr());
  bool result = klass->IsBootStrapClassLoaded();
  std::string temp;
  // Prune if not an image class; this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_options_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // app image.
  if (klass->IsErroneous()) {
    result = true;
  } else {
    ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
    CHECK(ext.IsNull() || ext->GetErroneousStateError() == nullptr) << klass->PrettyClass();
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    ObjPtr<mirror::IfTable> if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneImageClassInternal(if_table->GetInterface(i),
                                                 &my_early_exit,
                                                 visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneImageClassInternal(klass->GetComponentType(),
                                               &my_early_exit,
                                               visited);
  }
  // Check static fields and their classes.
  if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
    size_t num_static_fields = klass->NumReferenceStaticFields();
    // Presumably GC can happen when we are cross compiling; it should not cause performance
    // problems to do pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneImageClassInternal(ref->AsClass(), &my_early_exit, visited);
        } else {
          mirror::Class* type = ref->GetClass();
          result = result || PruneImageClassInternal(type, &my_early_exit, visited);
          if (!result) {
            // For the non-class case, also go through all the types mentioned by its fields'
            // references recursively to decide whether to keep this class.
            bool tmp = false;
            PruneObjectReferenceVisitor visitor(this, &my_early_exit, visited, &tmp);
            ref->VisitReferences(visitor, visitor);
            result = result || tmp;
          }
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneImageClassInternal(klass->GetSuperClass(), &my_early_exit, visited);
  // Remove the class if the dex file is not in the set of dex files. This happens for classes that
  // are from uses-library if there is no profile. b/30688277
  ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
  if (dex_cache != nullptr) {
    result = result ||
        dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
  }
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass.Ptr());
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in this case the cycle was in
  // a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_.Overwrite(klass.Ptr(), result);
  }
  *early_exit |= my_early_exit;
  return result;
}

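// Decide whether `klass` is kept in the image: classes already in the boot image are kept,
// non-image classes are dropped, and for app images PruneImageClass() is consulted as well.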
bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    return false;
  }
  if (IsInBootImage(klass.Ptr())) {
    // Already in boot image, return true.
    DCHECK(!compiler_options_.IsBootImage());
    return true;
  }
  std::string temp;
  if (!compiler_options_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compiler_options_.IsAppImage()) {
    // For app images, we need to prune classes that
    // are defined by the boot class path we're compiling against but not in
    // the boot image spaces since these may have already been loaded at
    // run time when this image is loaded. Keep classes in the boot image
    // spaces we're compiling against since we don't want to re-resolve these.
    return !PruneImageClass(klass);
  }
  return true;
}

class ImageWriter::PruneClassesVisitor : public ClassVisitor {
 public:
  PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
      : image_writer_(image_writer),
        class_loader_(class_loader),
        classes_to_prune_(),
        defined_class_count_(0u) { }

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass.Ptr())) {
      classes_to_prune_.insert(klass.Ptr());
      if (klass->GetClassLoader() == class_loader_) {
        ++defined_class_count_;
      }
    }
    return true;
  }

  size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
    WriterMutexLock mu(Thread::Current(), class_table->lock_);
    // App class loader class tables contain only one internal set. The boot class path class
    // table also contains class sets from boot images we're compiling against but we are not
    // pruning these boot image classes, so all classes to remove are in the last set.
    DCHECK(!class_table->classes_.empty());
    ClassTable::ClassSet& last_class_set = class_table->classes_.back();
    for (mirror::Class* klass : classes_to_prune_) {
      uint32_t hash = klass->DescriptorHash();
      auto it = last_class_set.FindWithHash(ClassTable::TableSlot(klass, hash), hash);
      DCHECK(it != last_class_set.end());
      last_class_set.erase(it);
      DCHECK(std::none_of(class_table->classes_.begin(),
                          class_table->classes_.end(),
                          [klass, hash](ClassTable::ClassSet& class_set) {
                            ClassTable::TableSlot slot(klass, hash);
                            return class_set.FindWithHash(slot, hash) != class_set.end();
                          }));
    }
    return defined_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  const ObjPtr<mirror::ClassLoader> class_loader_;
  HashSet<mirror::Class*> classes_to_prune_;
  size_t defined_class_count_;
};

class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
      : image_writer_(image_writer), removed_class_count_(0) {}

  void Visit(ObjPtr<mirror::ClassLoader> class_loader) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    PruneClassesVisitor classes_visitor(image_writer_, class_loader);
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
    class_table->Visit(classes_visitor);
    removed_class_count_ += classes_visitor.Prune();
  }

  size_t GetRemovedClassCount() const {
    return removed_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  size_t removed_class_count_;
};

void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  visitor->Visit(nullptr);  // Visit boot class loader.
  Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}

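// Remove classes that shall not end up in the image as well as the uses-library dex cache
// roots, and reset dex cache native arrays so that stale entries are not written out.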
void ImageWriter::PruneNonImageClasses() {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedAssertNoThreadSuspension sa(__FUNCTION__);

  // Prune uses-library dex caches. Only prune the uses-library dex caches since we want to make
  // sure the other ones don't get unloaded before the OatWriter runs.
  class_linker->VisitClassTables(
      [&](ClassTable* table) REQUIRES_SHARED(Locks::mutator_lock_) {
        table->RemoveStrongRoots(
            [&](GcRoot<mirror::Object> root) REQUIRES_SHARED(Locks::mutator_lock_) {
              ObjPtr<mirror::Object> obj = root.Read();
              if (obj->IsDexCache()) {
                // Return true if the dex file is not one of the ones in the map.
                return dex_file_oat_index_map_.find(obj->AsDexCache()->GetDexFile()) ==
                    dex_file_oat_index_map_.end();
              }
              // Return false to avoid removing.
              return false;
            });
      });

  // Remove the undesired classes from the class roots.
  {
    PruneClassLoaderClassesVisitor class_loader_visitor(this);
    VisitClassLoaders(&class_loader_visitor);
    VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
  }

  // Completely clear DexCaches.
  dchecked_vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self);
  for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
    dex_cache->ResetNativeArrays();
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();

  // Clear to save RAM.
  prune_class_memo_.clear();
}

dchecked_vector<ObjPtr<mirror::DexCache>> ImageWriter::FindDexCaches(Thread* self) {
  dchecked_vector<ObjPtr<mirror::DexCache>> dex_caches;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu2(self, *Locks::dex_lock_);
  dex_caches.reserve(class_linker->GetDexCachesData().size());
  for (const auto& entry : class_linker->GetDexCachesData()) {
    const ClassLinker::DexCacheData& data = entry.second;
    if (self->IsJWeakCleared(data.weak_root)) {
      continue;
    }
    dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
  }
  return dex_caches;
}

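// Debug check: after pruning and garbage collection, no class outside the boot image that
// fails KeepClass() should remain reachable on the heap.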
void ImageWriter::CheckNonImageClassesRemoved() {
  auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (obj->IsClass() && !IsInBootImage(obj)) {
      ObjPtr<Class> klass = obj->AsClass();
      if (!KeepClass(klass)) {
        DumpImageClasses();
        CHECK(KeepClass(klass))
            << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
      }
    }
  };
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->VisitObjects(visitor);
}

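// Promote all remaining weak interns to strong interns so that every string interned in the
// image is strongly interned.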
void ImageWriter::PromoteWeakInternsToStrong(Thread* self) {
  InternTable* intern_table = Runtime::Current()->GetInternTable();
  MutexLock mu(self, *Locks::intern_table_lock_);
  DCHECK_EQ(intern_table->weak_interns_.tables_.size(), 1u);
  for (GcRoot<mirror::String>& entry : intern_table->weak_interns_.tables_.front().set_) {
    ObjPtr<mirror::String> s = entry.Read<kWithoutReadBarrier>();
    DCHECK(!IsStronglyInternedString(s));
    uint32_t hash = static_cast<uint32_t>(s->GetStoredHashCode());
    intern_table->InsertStrong(s, hash);
  }
  intern_table->weak_interns_.tables_.front().set_.clear();
}

void ImageWriter::DumpImageClasses() {
  for (const std::string& image_class : compiler_options_.GetImageClasses()) {
    LOG(INFO) << " " << image_class;
  }
}

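// Create the image roots array for each oat file: the dex caches array (sorted by registration
// index), the class roots, and either the boot image live objects (boot image and extension)
// or the app class loader (app image).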
CreateImageRoots()1260 bool ImageWriter::CreateImageRoots() {
1261 Runtime* runtime = Runtime::Current();
1262 ClassLinker* class_linker = runtime->GetClassLinker();
1263 Thread* self = Thread::Current();
1264 VariableSizedHandleScope handles(self);
1265
1266 // Prepare boot image live objects if we're compiling a boot image or boot image extension.
1267 Handle<mirror::ObjectArray<mirror::Object>> boot_image_live_objects;
1268 if (compiler_options_.IsBootImage()) {
1269 boot_image_live_objects = handles.NewHandle(AllocateBootImageLiveObjects(self, runtime));
1270 if (boot_image_live_objects == nullptr) {
1271 return false;
1272 }
1273 } else if (compiler_options_.IsBootImageExtension()) {
1274 gc::Heap* heap = runtime->GetHeap();
1275 DCHECK(!heap->GetBootImageSpaces().empty());
1276 const ImageHeader& primary_header = heap->GetBootImageSpaces().front()->GetImageHeader();
1277 boot_image_live_objects = handles.NewHandle(ObjPtr<ObjectArray<Object>>::DownCast(
1278 primary_header.GetImageRoot<kWithReadBarrier>(ImageHeader::kBootImageLiveObjects)));
1279 DCHECK(boot_image_live_objects != nullptr);
1280 }
1281
1282 // Collect dex caches and the sizes of dex cache arrays.
1283 struct DexCacheRecord {
1284 uint64_t registration_index;
1285 Handle<mirror::DexCache> dex_cache;
1286 size_t oat_index;
1287 };
1288 size_t num_oat_files = oat_filenames_.size();
1289 dchecked_vector<size_t> dex_cache_counts(num_oat_files, 0u);
1290 dchecked_vector<DexCacheRecord> dex_cache_records;
1291 dex_cache_records.reserve(dex_file_oat_index_map_.size());
1292 {
1293 ReaderMutexLock mu(self, *Locks::dex_lock_);
1294 // Count number of dex caches not in the boot image.
1295 for (const auto& entry : class_linker->GetDexCachesData()) {
1296 const ClassLinker::DexCacheData& data = entry.second;
1297 ObjPtr<mirror::DexCache> dex_cache =
1298 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
1299 if (dex_cache == nullptr) {
1300 continue;
1301 }
1302 const DexFile* dex_file = dex_cache->GetDexFile();
1303 auto it = dex_file_oat_index_map_.find(dex_file);
1304 if (it != dex_file_oat_index_map_.end()) {
1305 size_t oat_index = it->second;
1306 DCHECK(IsImageDexCache(dex_cache));
1307 ++dex_cache_counts[oat_index];
1308 Handle<mirror::DexCache> h_dex_cache = handles.NewHandle(dex_cache);
1309 dex_cache_records.push_back({data.registration_index, h_dex_cache, oat_index});
1310 }
1311 }
1312 }
1313
1314 // Allocate dex cache arrays.
1315 dchecked_vector<Handle<ObjectArray<Object>>> dex_cache_arrays;
1316 dex_cache_arrays.reserve(num_oat_files);
1317 for (size_t oat_index = 0; oat_index != num_oat_files; ++oat_index) {
1318 ObjPtr<ObjectArray<Object>> dex_caches = ObjectArray<Object>::Alloc(
1319 self, GetClassRoot<ObjectArray<Object>>(class_linker), dex_cache_counts[oat_index]);
1320 if (dex_caches == nullptr) {
1321 return false;
1322 }
1323 dex_cache_counts[oat_index] = 0u; // Reset count for filling in dex caches below.
1324 dex_cache_arrays.push_back(handles.NewHandle(dex_caches));
1325 }
1326
1327 // Sort dex caches by registration index to make output deterministic.
1328 std::sort(dex_cache_records.begin(),
1329 dex_cache_records.end(),
1330 [](const DexCacheRecord& lhs, const DexCacheRecord&rhs) {
1331 return lhs.registration_index < rhs.registration_index;
1332 });
1333
1334 // Fill dex cache arrays.
1335 for (const DexCacheRecord& record : dex_cache_records) {
1336 ObjPtr<ObjectArray<Object>> dex_caches = dex_cache_arrays[record.oat_index].Get();
1337 dex_caches->SetWithoutChecks</*kTransactionActive=*/ false>(
1338 dex_cache_counts[record.oat_index], record.dex_cache.Get());
1339 ++dex_cache_counts[record.oat_index];
1340 }
1341
1342 // Create image roots with empty dex cache arrays.
1343 image_roots_.reserve(num_oat_files);
1344 JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
1345 for (size_t oat_index = 0; oat_index != num_oat_files; ++oat_index) {
1346 // Build an Object[] of the roots needed to restore the runtime.
1347 int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compiler_options_.IsAppImage());
1348 ObjPtr<ObjectArray<Object>> image_roots = ObjectArray<Object>::Alloc(
1349 self, GetClassRoot<ObjectArray<Object>>(class_linker), image_roots_size);
1350 if (image_roots == nullptr) {
1351 return false;
1352 }
1353 ObjPtr<ObjectArray<Object>> dex_caches = dex_cache_arrays[oat_index].Get();
1354 CHECK_EQ(dex_cache_counts[oat_index],
1355 dchecked_integral_cast<size_t>(dex_caches->GetLength<kVerifyNone>()))
1356 << "The number of non-image dex caches changed.";
1357 image_roots->SetWithoutChecks</*kTransactionActive=*/ false>(
1358 ImageHeader::kDexCaches, dex_caches);
1359 image_roots->SetWithoutChecks</*kTransactionActive=*/ false>(
1360 ImageHeader::kClassRoots, class_linker->GetClassRoots());
1361 if (!compiler_options_.IsAppImage()) {
1362 DCHECK(boot_image_live_objects != nullptr);
1363 image_roots->SetWithoutChecks</*kTransactionActive=*/ false>(
1364 ImageHeader::kBootImageLiveObjects, boot_image_live_objects.Get());
1365 } else {
1366 DCHECK(boot_image_live_objects.GetReference() == nullptr);
1367 image_roots->SetWithoutChecks</*kTransactionActive=*/ false>(
1368 ImageHeader::kAppImageClassLoader, GetAppClassLoader());
1369 }
1370 for (int32_t i = 0; i != image_roots_size; ++i) {
1371 CHECK(image_roots->Get(i) != nullptr);
1372 }
1373 image_roots_.push_back(vm->AddGlobalRef(self, image_roots));
1374 }
1375
1376 return true;
1377 }
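// For illustration, the per-oat-file image roots array built above looks roughly like
// this (slot order follows the ImageHeader::ImageRoot indices used in the code; the
// exact numeric indices are not spelled out here):
//
//   image_roots[ImageHeader::kDexCaches]            = Object[]   // this oat file's dex caches
//   image_roots[ImageHeader::kClassRoots]           = class_linker->GetClassRoots()
//   image_roots[ImageHeader::kBootImageLiveObjects] = Object[]   // boot image (extension) only
//   image_roots[ImageHeader::kAppImageClassLoader]  = GetAppClassLoader()  // app image only
//
// Every populated slot is CHECKed to be non-null before the array is pinned with a
// JNI global reference.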
1378
1379 void ImageWriter::RecordNativeRelocations(ObjPtr<mirror::Class> klass, size_t oat_index) {
1380 // Visit and assign offsets for fields and field arrays.
1381 DCHECK_EQ(oat_index, GetOatIndexForClass(klass));
1382 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
1383 if (compiler_options_.IsAppImage()) {
1384 // Extra consistency check: no boot loader classes should be left!
1385 CHECK(!klass->IsBootStrapClassLoaded()) << klass->PrettyClass();
1386 }
1387 LengthPrefixedArray<ArtField>* fields[] = {
1388 klass->GetSFieldsPtr(), klass->GetIFieldsPtr(),
1389 };
1390 ImageInfo& image_info = GetImageInfo(oat_index);
1391 for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
1392 // Total array length including header.
1393 if (cur_fields != nullptr) {
1394 // Forward the entire array at once.
1395 size_t offset = image_info.GetBinSlotSize(Bin::kArtField);
1396 DCHECK(!IsInBootImage(cur_fields));
1397 bool inserted =
1398 native_object_relocations_.insert(std::make_pair(
1399 cur_fields,
1400 NativeObjectRelocation{
1401 oat_index, offset, NativeObjectRelocationType::kArtFieldArray
1402 })).second;
1403 CHECK(inserted) << "Field array " << cur_fields << " already forwarded";
1404 const size_t size = LengthPrefixedArray<ArtField>::ComputeSize(cur_fields->size());
1405 offset += size;
1406 image_info.IncrementBinSlotSize(Bin::kArtField, size);
1407 DCHECK_EQ(offset, image_info.GetBinSlotSize(Bin::kArtField));
1408 }
1409 }
1410 // Visit and assign offsets for methods.
1411 size_t num_methods = klass->NumMethods();
1412 if (num_methods != 0) {
1413 bool any_dirty = false;
1414 for (auto& m : klass->GetMethods(target_ptr_size_)) {
1415 if (WillMethodBeDirty(&m)) {
1416 any_dirty = true;
1417 break;
1418 }
1419 }
1420 NativeObjectRelocationType type = any_dirty
1421 ? NativeObjectRelocationType::kArtMethodDirty
1422 : NativeObjectRelocationType::kArtMethodClean;
1423 Bin bin_type = BinTypeForNativeRelocationType(type);
1424 // Forward the entire array at once, but header first.
1425 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1426 const size_t method_size = ArtMethod::Size(target_ptr_size_);
1427 const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
1428 method_size,
1429 method_alignment);
1430 LengthPrefixedArray<ArtMethod>* array = klass->GetMethodsPtr();
1431 size_t offset = image_info.GetBinSlotSize(bin_type);
1432 DCHECK(!IsInBootImage(array));
1433 bool inserted =
1434 native_object_relocations_.insert(std::make_pair(
1435 array,
1436 NativeObjectRelocation{
1437 oat_index,
1438 offset,
1439 any_dirty ? NativeObjectRelocationType::kArtMethodArrayDirty
1440 : NativeObjectRelocationType::kArtMethodArrayClean
1441 })).second;
1442 CHECK(inserted) << "Method array " << array << " already forwarded";
1443 image_info.IncrementBinSlotSize(bin_type, header_size);
1444 for (auto& m : klass->GetMethods(target_ptr_size_)) {
1445 AssignMethodOffset(&m, type, oat_index);
1446 }
1447 (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
1448 }
1449 // Assign offsets for all runtime methods in the IMT since these may keep conflict
1450 // tables live.
1451 if (klass->ShouldHaveImt()) {
1452 ImTable* imt = klass->GetImt(target_ptr_size_);
1453 if (TryAssignImTableOffset(imt, oat_index)) {
1454 // Since IMTs can be shared, only do this the first time to avoid double-counting
1455 // IMT method fixups.
1456 for (size_t i = 0; i < ImTable::kSize; ++i) {
1457 ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
1458 DCHECK(imt_method != nullptr);
1459 if (imt_method->IsRuntimeMethod() &&
1460 !IsInBootImage(imt_method) &&
1461 !NativeRelocationAssigned(imt_method)) {
1462 AssignMethodOffset(imt_method, NativeObjectRelocationType::kRuntimeMethod, oat_index);
1463 }
1464 }
1465 }
1466 }
1467 }
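// A rough illustration of the bin bookkeeping performed above (names and sizes are made
// up): if Bin::kArtField currently holds 0x80 bytes and a class has an instance-field
// array whose LengthPrefixedArray<ArtField>::ComputeSize() is 0x30 bytes, then
//
//   size_t offset = image_info.GetBinSlotSize(Bin::kArtField);              // 0x80
//   native_object_relocations_.insert({ifields, {oat_index, offset, kArtFieldArray}});
//   image_info.IncrementBinSlotSize(Bin::kArtField, 0x30);                  // bin is now 0xB0
//
// Method arrays follow the same pattern, except that the LengthPrefixedArray header is
// accounted for first and each ArtMethod then receives its own offset in
// AssignMethodOffset().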
1468
1469 bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
1470 return native_object_relocations_.find(ptr) != native_object_relocations_.end();
1471 }
1472
1473 bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
1474 // No offset, or already assigned.
1475 if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
1476 return false;
1477 }
1478 // If the method is a conflict method we also want to assign the conflict table offset.
1479 ImageInfo& image_info = GetImageInfo(oat_index);
1480 const size_t size = ImTable::SizeInBytes(target_ptr_size_);
1481 native_object_relocations_.insert(std::make_pair(
1482 imt,
1483 NativeObjectRelocation{
1484 oat_index,
1485 image_info.GetBinSlotSize(Bin::kImTable),
1486 NativeObjectRelocationType::kIMTable
1487 }));
1488 image_info.IncrementBinSlotSize(Bin::kImTable, size);
1489 return true;
1490 }
1491
1492 void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
1493 // No offset, or already assigned.
1494 if (table == nullptr || NativeRelocationAssigned(table)) {
1495 return;
1496 }
1497 CHECK(!IsInBootImage(table));
1498 // If the method is a conflict method we also want to assign the conflict table offset.
1499 ImageInfo& image_info = GetImageInfo(oat_index);
1500 const size_t size = table->ComputeSize(target_ptr_size_);
1501 native_object_relocations_.insert(std::make_pair(
1502 table,
1503 NativeObjectRelocation{
1504 oat_index,
1505 image_info.GetBinSlotSize(Bin::kIMTConflictTable),
1506 NativeObjectRelocationType::kIMTConflictTable
1507 }));
1508 image_info.IncrementBinSlotSize(Bin::kIMTConflictTable, size);
1509 }
1510
1511 void ImageWriter::AssignMethodOffset(ArtMethod* method,
1512 NativeObjectRelocationType type,
1513 size_t oat_index) {
1514 DCHECK(!IsInBootImage(method));
1515 CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
1516 << ArtMethod::PrettyMethod(method);
1517 if (method->IsRuntimeMethod()) {
1518 TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
1519 }
1520 ImageInfo& image_info = GetImageInfo(oat_index);
1521 Bin bin_type = BinTypeForNativeRelocationType(type);
1522 size_t offset = image_info.GetBinSlotSize(bin_type);
1523 native_object_relocations_.insert(
1524 std::make_pair(method, NativeObjectRelocation{oat_index, offset, type}));
1525 image_info.IncrementBinSlotSize(bin_type, ArtMethod::Size(target_ptr_size_));
1526 }
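// Note that the offset recorded here is relative to the start of the method's bin in
// this image. It only becomes an image-relative offset at the end of
// CalculateNewObjectOffsets(), where every native relocation gets its bin's final
// position added:
//
//   relocation.offset += image_info.GetBinSlotOffset(BinTypeForNativeRelocationType(type));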
1527
1528 class ImageWriter::LayoutHelper {
1529 public:
1530 explicit LayoutHelper(ImageWriter* image_writer)
1531 : image_writer_(image_writer) {
1532 bin_objects_.resize(image_writer_->image_infos_.size());
1533 for (auto& inner : bin_objects_) {
1534 inner.resize(enum_cast<size_t>(Bin::kMirrorCount));
1535 }
1536 }
1537
1538 void ProcessDexFileObjects(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
1539 void ProcessRoots(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
1540 void FinalizeInternTables() REQUIRES_SHARED(Locks::mutator_lock_);
1541
1542 void VerifyImageBinSlotsAssigned() REQUIRES_SHARED(Locks::mutator_lock_);
1543
1544 void FinalizeBinSlotOffsets() REQUIRES_SHARED(Locks::mutator_lock_);
1545
1546 /*
1547 * Collects the string reference info necessary for loading app images.
1548 *
1549 * Because AppImages may contain interned strings that must be deduplicated
1550 * with previously interned strings when loading the app image, we need to
1551 * visit references to these strings and update them to point to the correct
1552 * string. To speed up the visiting of references at load time we include
1553 * a list of offsets to string references in the AppImage.
1554 */
1555 void CollectStringReferenceInfo() REQUIRES_SHARED(Locks::mutator_lock_);
1556
1557 private:
1558 class CollectClassesVisitor;
1559 class CollectStringReferenceVisitor;
1560 class VisitReferencesVisitor;
1561
1562 void ProcessInterns(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
1563 void ProcessWorkQueue() REQUIRES_SHARED(Locks::mutator_lock_);
1564
1565 using WorkQueue = std::deque<std::pair<ObjPtr<mirror::Object>, size_t>>;
1566
1567 void VisitReferences(ObjPtr<mirror::Object> obj, size_t oat_index)
1568 REQUIRES_SHARED(Locks::mutator_lock_);
1569 bool TryAssignBinSlot(ObjPtr<mirror::Object> obj, size_t oat_index)
1570 REQUIRES_SHARED(Locks::mutator_lock_);
1571 void AssignImageBinSlot(ObjPtr<mirror::Object> object, size_t oat_index, Bin bin)
1572 REQUIRES_SHARED(Locks::mutator_lock_);
1573
1574 ImageWriter* const image_writer_;
1575
1576 // Work list of <object, oat_index> for objects. Everything in the queue must already be
1577 // assigned a bin slot.
1578 WorkQueue work_queue_;
1579
1580 // Objects for individual bins. Indexed by `oat_index` and `bin`.
1581 // Cannot use ObjPtr<> because of invalidation in Heap::VisitObjects().
1582 dchecked_vector<dchecked_vector<dchecked_vector<mirror::Object*>>> bin_objects_;
1583
1584 // Interns that do not have a corresponding StringId in any of the input dex files.
1585 // These shall be assigned to individual images based on the `oat_index` that we
1586 // see as we visit them during the work queue processing.
1587 dchecked_vector<mirror::String*> non_dex_file_interns_;
1588 };
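// The central bookkeeping of the helper is `bin_objects_`, a per-oat-file, per-bin list
// of objects in bin slot order, e.g.:
//
//   bin_objects_[oat_index][enum_cast<size_t>(Bin::kString)]   // interned strings
//   bin_objects_[oat_index][enum_cast<size_t>(klass_bin)]      // classes, dex caches, ...
//
// Raw mirror::Object* pointers are stored rather than ObjPtr<> because bin slots are
// assigned while the heap is walked via Heap::VisitObjects(), which can invalidate
// ObjPtr<> handles (see the member comment above).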
1589
1590 class ImageWriter::LayoutHelper::CollectClassesVisitor {
1591 public:
1592 explicit CollectClassesVisitor(ImageWriter* image_writer)
1593 : image_writer_(image_writer),
1594 dex_files_(image_writer_->compiler_options_.GetDexFilesForOatFile()) {}
1595
1596 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1597 if (!image_writer_->IsInBootImage(klass.Ptr())) {
1598 ObjPtr<mirror::Class> component_type = klass;
1599 size_t dimension = 0u;
1600 while (component_type->IsArrayClass<kVerifyNone>()) {
1601 ++dimension;
1602 component_type = component_type->GetComponentType<kVerifyNone, kWithoutReadBarrier>();
1603 }
1604 DCHECK(!component_type->IsProxyClass());
1605 size_t dex_file_index;
1606 uint32_t class_def_index = 0u;
1607 if (UNLIKELY(component_type->IsPrimitive())) {
1608 DCHECK(image_writer_->compiler_options_.IsBootImage());
1609 dex_file_index = 0u;
1610 class_def_index = enum_cast<uint32_t>(component_type->GetPrimitiveType());
1611 } else {
1612 auto it = std::find(dex_files_.begin(), dex_files_.end(), &component_type->GetDexFile());
1613 DCHECK(it != dex_files_.end()) << klass->PrettyDescriptor();
1614 dex_file_index = std::distance(dex_files_.begin(), it) + 1u; // 0 is for primitive types.
1615 class_def_index = component_type->GetDexClassDefIndex();
1616 }
1617 klasses_.push_back({klass, dex_file_index, class_def_index, dimension});
1618 }
1619 return true;
1620 }
1621
1622 WorkQueue ProcessCollectedClasses(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
1623 std::sort(klasses_.begin(), klasses_.end());
1624
1625 ImageWriter* image_writer = image_writer_;
1626 WorkQueue work_queue;
1627 size_t last_dex_file_index = static_cast<size_t>(-1);
1628 size_t last_oat_index = static_cast<size_t>(-1);
1629 for (const ClassEntry& entry : klasses_) {
1630 if (last_dex_file_index != entry.dex_file_index) {
1631 if (UNLIKELY(entry.dex_file_index == 0u)) {
1632 last_oat_index = GetDefaultOatIndex(); // Primitive type.
1633 } else {
1634 uint32_t dex_file_index = entry.dex_file_index - 1u; // 0 is for primitive types.
1635 last_oat_index = image_writer->GetOatIndexForDexFile(dex_files_[dex_file_index]);
1636 }
1637 last_dex_file_index = entry.dex_file_index;
1638 }
1639 // Count the number of classes for class tables.
1640 image_writer->image_infos_[last_oat_index].class_table_size_ += 1u;
1641 work_queue.emplace_back(entry.klass, last_oat_index);
1642 }
1643 klasses_.clear();
1644
1645 // Prepare image class tables.
1646 dchecked_vector<mirror::Class*> boot_image_classes;
1647 if (image_writer->compiler_options_.IsAppImage()) {
1648 DCHECK_EQ(image_writer->image_infos_.size(), 1u);
1649 ImageInfo& image_info = image_writer->image_infos_[0];
1650 // Log the non-boot image class count for app image for debugging purposes.
1651 VLOG(compiler) << "Dex2Oat:AppImage:classCount = " << image_info.class_table_size_;
1652 // Collect boot image classes referenced by app class loader's class table.
1653 JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
1654 auto app_class_loader = DecodeGlobalWithoutRB<mirror::ClassLoader>(
1655 vm, image_writer->app_class_loader_);
1656 ClassTable* app_class_table = app_class_loader->GetClassTable();
1657 ReaderMutexLock lock(self, app_class_table->lock_);
1658 DCHECK_EQ(app_class_table->classes_.size(), 1u);
1659 const ClassTable::ClassSet& app_class_set = app_class_table->classes_[0];
1660 DCHECK_GE(app_class_set.size(), image_info.class_table_size_);
1661 boot_image_classes.reserve(app_class_set.size() - image_info.class_table_size_);
1662 for (const ClassTable::TableSlot& slot : app_class_set) {
1663 mirror::Class* klass = slot.Read<kWithoutReadBarrier>().Ptr();
1664 if (image_writer->IsInBootImage(klass)) {
1665 boot_image_classes.push_back(klass);
1666 }
1667 }
1668 DCHECK_EQ(app_class_set.size() - image_info.class_table_size_, boot_image_classes.size());
1669 // Increase the app class table size to include referenced boot image classes.
1670 image_info.class_table_size_ = app_class_set.size();
1671 }
1672 for (ImageInfo& image_info : image_writer->image_infos_) {
1673 if (image_info.class_table_size_ != 0u) {
1674 // Make sure the class table ends up full by allocating a buffer of exactly the right size.
1675 size_t buffer_size = static_cast<size_t>(
1676 ceil(image_info.class_table_size_ / kImageClassTableMaxLoadFactor));
1677 image_info.class_table_buffer_.reset(new ClassTable::TableSlot[buffer_size]);
1678 DCHECK(image_info.class_table_buffer_ != nullptr);
1679 image_info.class_table_.emplace(kImageClassTableMinLoadFactor,
1680 kImageClassTableMaxLoadFactor,
1681 image_info.class_table_buffer_.get(),
1682 buffer_size);
1683 }
1684 }
1685 for (const auto& pair : work_queue) {
1686 ObjPtr<mirror::Class> klass = pair.first->AsClass();
1687 size_t oat_index = pair.second;
1688 DCHECK(image_writer->image_infos_[oat_index].class_table_.has_value());
1689 ClassTable::ClassSet& class_table = *image_writer->image_infos_[oat_index].class_table_;
1690 uint32_t hash = klass->DescriptorHash();
1691 bool inserted = class_table.InsertWithHash(ClassTable::TableSlot(klass, hash), hash).second;
1692 DCHECK(inserted) << "Class " << klass->PrettyDescriptor()
1693 << " (" << klass.Ptr() << ") already inserted";
1694 }
1695 if (image_writer->compiler_options_.IsAppImage()) {
1696 DCHECK_EQ(image_writer->image_infos_.size(), 1u);
1697 ImageInfo& image_info = image_writer->image_infos_[0];
1698 if (image_info.class_table_size_ != 0u) {
1699 // Insert boot image class references to the app class table.
1700 // The order of insertion into the app class loader's ClassTable is non-deterministic,
1701 // so sort the boot image classes by their boot image address to get a deterministic table.
1702 std::sort(boot_image_classes.begin(), boot_image_classes.end());
1703 DCHECK(image_info.class_table_.has_value());
1704 ClassTable::ClassSet& table = *image_info.class_table_;
1705 for (mirror::Class* klass : boot_image_classes) {
1706 uint32_t hash = klass->DescriptorHash();
1707 bool inserted = table.InsertWithHash(ClassTable::TableSlot(klass, hash), hash).second;
1708 DCHECK(inserted) << "Boot image class " << klass->PrettyDescriptor()
1709 << " (" << klass << ") already inserted";
1710 }
1711 DCHECK_EQ(table.size(), image_info.class_table_size_);
1712 }
1713 }
1714 for (ImageInfo& image_info : image_writer->image_infos_) {
1715 DCHECK_EQ(image_info.class_table_bytes_, 0u);
1716 if (image_info.class_table_size_ != 0u) {
1717 DCHECK(image_info.class_table_.has_value());
1718 DCHECK_EQ(image_info.class_table_->size(), image_info.class_table_size_);
1719 image_info.class_table_bytes_ = image_info.class_table_->WriteToMemory(nullptr);
1720 DCHECK_NE(image_info.class_table_bytes_, 0u);
1721 } else {
1722 DCHECK(!image_info.class_table_.has_value());
1723 }
1724 }
1725
1726 return work_queue;
1727 }
1728
1729 private:
1730 struct ClassEntry {
1731 ObjPtr<mirror::Class> klass;
1732 // We shall sort classes by dex file, class def index and array dimension.
1733 size_t dex_file_index;
1734 uint32_t class_def_index;
1735 size_t dimension;
1736
1737 bool operator<(const ClassEntry& other) const {
1738 return std::tie(dex_file_index, class_def_index, dimension) <
1739 std::tie(other.dex_file_index, other.class_def_index, other.dimension);
1740 }
1741 };
1742
1743 ImageWriter* const image_writer_;
1744 const ArrayRef<const DexFile* const> dex_files_;
1745 std::deque<ClassEntry> klasses_;
1746 };
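// The sort key above makes the class order deterministic. For example (with hypothetical
// input dex files): primitive classes come first (dex_file_index 0, keyed by their
// Primitive::Type), then classes from the first input dex file in class_def_index order,
// then the second dex file, and so on; for a given component class, `Foo` sorts before
// `Foo[]`, which sorts before `Foo[][]`, because of the array `dimension` component of
// the key.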
1747
1748 class ImageWriter::LayoutHelper::CollectStringReferenceVisitor {
1749 public:
1750 explicit CollectStringReferenceVisitor(
1751 const ImageWriter* image_writer,
1752 size_t oat_index,
1753 dchecked_vector<AppImageReferenceOffsetInfo>* const string_reference_offsets,
1754 ObjPtr<mirror::Object> current_obj)
1755 : image_writer_(image_writer),
1756 oat_index_(oat_index),
1757 string_reference_offsets_(string_reference_offsets),
1758 current_obj_(current_obj) {}
1759
1760 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
1761 REQUIRES_SHARED(Locks::mutator_lock_) {
1762 if (!root->IsNull()) {
1763 VisitRoot(root);
1764 }
1765 }
1766
1767 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1768 REQUIRES_SHARED(Locks::mutator_lock_) {
1769 // Only dex caches have native String roots. These are collected separately.
1770 DCHECK((current_obj_->IsDexCache<kVerifyNone, kWithoutReadBarrier>()) ||
1771 !image_writer_->IsInternedAppImageStringReference(root->AsMirrorPtr()))
1772 << mirror::Object::PrettyTypeOf(current_obj_);
1773 }
1774
1775 // Collects info for managed fields that reference managed Strings.
1776 void operator() (ObjPtr<mirror::Object> obj,
1777 MemberOffset member_offset,
1778 bool is_static ATTRIBUTE_UNUSED) const
1779 REQUIRES_SHARED(Locks::mutator_lock_) {
1780 ObjPtr<mirror::Object> referred_obj =
1781 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(member_offset);
1782
1783 if (image_writer_->IsInternedAppImageStringReference(referred_obj)) {
1784 size_t base_offset = image_writer_->GetImageOffset(current_obj_.Ptr(), oat_index_);
1785 string_reference_offsets_->emplace_back(base_offset, member_offset.Uint32Value());
1786 }
1787 }
1788
1789 ALWAYS_INLINE
1790 void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1791 ObjPtr<mirror::Reference> ref) const
1792 REQUIRES_SHARED(Locks::mutator_lock_) {
1793 operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
1794 }
1795
1796 private:
1797 const ImageWriter* const image_writer_;
1798 const size_t oat_index_;
1799 dchecked_vector<AppImageReferenceOffsetInfo>* const string_reference_offsets_;
1800 const ObjPtr<mirror::Object> current_obj_;
1801 };
1802
1803 class ImageWriter::LayoutHelper::VisitReferencesVisitor {
1804 public:
1805 VisitReferencesVisitor(LayoutHelper* helper, size_t oat_index)
1806 : helper_(helper), oat_index_(oat_index) {}
1807
1808 // We do not visit native roots. These are handled with other logic.
1809 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1810 const {
1811 LOG(FATAL) << "UNREACHABLE";
1812 }
1813 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
1814 LOG(FATAL) << "UNREACHABLE";
1815 }
1816
1817 ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj,
1818 MemberOffset offset,
1819 bool is_static ATTRIBUTE_UNUSED) const
1820 REQUIRES_SHARED(Locks::mutator_lock_) {
1821 mirror::Object* ref =
1822 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1823 VisitReference(ref);
1824 }
1825
1826 ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1827 ObjPtr<mirror::Reference> ref) const
1828 REQUIRES_SHARED(Locks::mutator_lock_) {
1829 operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
1830 }
1831
1832 private:
1833 void VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
1834 if (helper_->TryAssignBinSlot(ref, oat_index_)) {
1835 // Remember how many objects we're adding at the front of the queue as we want
1836 // to reverse that range to process these references in the order of addition.
1837 helper_->work_queue_.emplace_front(ref, oat_index_);
1838 }
1839 if (ClassLinker::kAppImageMayContainStrings &&
1840 helper_->image_writer_->compiler_options_.IsAppImage() &&
1841 helper_->image_writer_->IsInternedAppImageStringReference(ref)) {
1842 helper_->image_writer_->image_infos_[oat_index_].num_string_references_ += 1u;
1843 }
1844 }
1845
1846 LayoutHelper* const helper_;
1847 const size_t oat_index_;
1848 };
1849
1850 // Visit method pointer arrays in `klass` that were not inherited from its superclass.
1851 template <typename Visitor>
1852 static void VisitNewMethodPointerArrays(ObjPtr<mirror::Class> klass, Visitor&& visitor)
1853 REQUIRES_SHARED(Locks::mutator_lock_) {
1854 ObjPtr<mirror::Class> super = klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>();
1855 ObjPtr<mirror::PointerArray> vtable = klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
1856 if (vtable != nullptr &&
1857 (super == nullptr || vtable != super->GetVTable<kVerifyNone, kWithoutReadBarrier>())) {
1858 visitor(vtable);
1859 }
1860 int32_t iftable_count = klass->GetIfTableCount();
1861 int32_t super_iftable_count = (super != nullptr) ? super->GetIfTableCount() : 0;
1862 ObjPtr<mirror::IfTable> iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
1863 ObjPtr<mirror::IfTable> super_iftable =
1864 (super != nullptr) ? super->GetIfTable<kVerifyNone, kWithoutReadBarrier>() : nullptr;
1865 for (int32_t i = 0; i < iftable_count; ++i) {
1866 ObjPtr<mirror::PointerArray> methods =
1867 iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i);
1868 ObjPtr<mirror::PointerArray> super_methods = (i < super_iftable_count)
1869 ? super_iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i)
1870 : nullptr;
1871 if (methods != super_methods) {
1872 DCHECK(methods != nullptr);
1873 if (i < super_iftable_count) {
1874 DCHECK(super_methods != nullptr);
1875 DCHECK_EQ(methods->GetLength(), super_methods->GetLength());
1876 }
1877 visitor(methods);
1878 }
1879 }
1880 }
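// In effect, a method pointer array (vtable or interface method array) is visited only
// for the class that introduced it: arrays merely inherited from the superclass are the
// same mirror::PointerArray object and are skipped here, while arrays the class defines
// or overrides are visited exactly once.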
1881
1882 void ImageWriter::LayoutHelper::ProcessDexFileObjects(Thread* self) {
1883 Runtime* runtime = Runtime::Current();
1884 ClassLinker* class_linker = runtime->GetClassLinker();
1885 const CompilerOptions& compiler_options = image_writer_->compiler_options_;
1886 JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
1887
1888 // To ensure deterministic output, populate the work queue with objects in a pre-defined order.
1889 // Note: If we decide to implement a profile-guided layout, this is the place to do so.
1890
1891 // Get initial work queue with the image classes and assign their bin slots.
1892 CollectClassesVisitor visitor(image_writer_);
1893 {
1894 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
1895 if (compiler_options.IsBootImage() || compiler_options.IsBootImageExtension()) {
1896 // No need to filter based on class loader, boot class table contains only
1897 // classes defined by the boot class loader.
1898 ClassTable* class_table = class_linker->boot_class_table_.get();
1899 class_table->Visit<kWithoutReadBarrier>(visitor);
1900 } else {
1901 // No need to visit boot class table as there are no classes there for the app image.
1902 for (const ClassLinker::ClassLoaderData& data : class_linker->class_loaders_) {
1903 auto class_loader =
1904 DecodeWeakGlobalWithoutRB<mirror::ClassLoader>(vm, self, data.weak_root);
1905 if (class_loader != nullptr) {
1906 ClassTable* class_table = class_loader->GetClassTable();
1907 if (class_table != nullptr) {
1908 // Visit only classes defined in this class loader (avoid visiting multiple times).
1909 auto filtering_visitor = [&visitor, class_loader](ObjPtr<mirror::Class> klass)
1910 REQUIRES_SHARED(Locks::mutator_lock_) {
1911 if (klass->GetClassLoader<kVerifyNone, kWithoutReadBarrier>() == class_loader) {
1912 visitor(klass);
1913 }
1914 return true;
1915 };
1916 class_table->Visit<kWithoutReadBarrier>(filtering_visitor);
1917 }
1918 }
1919 }
1920 }
1921 }
1922 DCHECK(work_queue_.empty());
1923 work_queue_ = visitor.ProcessCollectedClasses(self);
1924 for (const std::pair<ObjPtr<mirror::Object>, size_t>& entry : work_queue_) {
1925 DCHECK(entry.first != nullptr);
1926 ObjPtr<mirror::Class> klass = entry.first->AsClass();
1927 size_t oat_index = entry.second;
1928 DCHECK(!image_writer_->IsInBootImage(klass.Ptr()));
1929 DCHECK(!image_writer_->IsImageBinSlotAssigned(klass.Ptr()));
1930 image_writer_->RecordNativeRelocations(klass, oat_index);
1931 Bin klass_bin = image_writer_->AssignImageBinSlot(klass.Ptr(), oat_index);
1932 bin_objects_[oat_index][enum_cast<size_t>(klass_bin)].push_back(klass.Ptr());
1933
1934 auto method_pointer_array_visitor =
1935 [&](ObjPtr<mirror::PointerArray> pointer_array) REQUIRES_SHARED(Locks::mutator_lock_) {
1936 constexpr Bin bin = kBinObjects ? Bin::kInternalClean : Bin::kRegular;
1937 image_writer_->AssignImageBinSlot(pointer_array.Ptr(), oat_index, bin);
1938 bin_objects_[oat_index][enum_cast<size_t>(bin)].push_back(pointer_array.Ptr());
1939 // No need to add to the work queue. The class reference, if not in the boot image
1940 // (that is, when compiling the primary boot image), is already in the work queue.
1941 };
1942 VisitNewMethodPointerArrays(klass, method_pointer_array_visitor);
1943 }
1944
1945 // Assign bin slots to dex caches.
1946 {
1947 ReaderMutexLock mu(self, *Locks::dex_lock_);
1948 for (const DexFile* dex_file : compiler_options.GetDexFilesForOatFile()) {
1949 auto it = image_writer_->dex_file_oat_index_map_.find(dex_file);
1950 DCHECK(it != image_writer_->dex_file_oat_index_map_.end()) << dex_file->GetLocation();
1951 const size_t oat_index = it->second;
1952 // Assign bin slot to this file's dex cache and add it to the end of the work queue.
1953 auto dcd_it = class_linker->GetDexCachesData().find(dex_file);
1954 DCHECK(dcd_it != class_linker->GetDexCachesData().end()) << dex_file->GetLocation();
1955 auto dex_cache =
1956 DecodeWeakGlobalWithoutRB<mirror::DexCache>(vm, self, dcd_it->second.weak_root);
1957 DCHECK(dex_cache != nullptr);
1958 bool assigned = TryAssignBinSlot(dex_cache, oat_index);
1959 DCHECK(assigned);
1960 work_queue_.emplace_back(dex_cache, oat_index);
1961 }
1962 }
1963
1964 // Assign interns to images depending on the first dex file they appear in.
1965 // Record those that do not have a StringId in any dex file.
1966 ProcessInterns(self);
1967
1968 // Since classes and dex caches have been assigned to their bins, when we process a class
1969 // we do not follow through the class references or dex caches, so we correctly process
1970 // only objects actually belonging to that class before taking a new class from the queue.
1971 // If multiple class statics reference the same object (directly or indirectly), the object
1972 // is treated as belonging to the first encountered referencing class.
1973 ProcessWorkQueue();
1974 }
1975
1976 void ImageWriter::LayoutHelper::ProcessRoots(Thread* self) {
1977 // Assign bin slots to the image roots and boot image live objects, add them to the work queue
1978 // and process the work queue. These objects reference other objects needed for the image, for
1979 // example the array of dex cache references, or the pre-allocated exceptions for the boot image.
1980 DCHECK(work_queue_.empty());
1981
1982 constexpr Bin clean_bin = kBinObjects ? Bin::kInternalClean : Bin::kRegular;
1983 size_t num_oat_files = image_writer_->oat_filenames_.size();
1984 JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
1985 for (size_t oat_index = 0; oat_index != num_oat_files; ++oat_index) {
1986 // Put image roots and dex caches into `clean_bin`.
1987 auto image_roots = DecodeGlobalWithoutRB<mirror::ObjectArray<mirror::Object>>(
1988 vm, image_writer_->image_roots_[oat_index]);
1989 AssignImageBinSlot(image_roots, oat_index, clean_bin);
1990 work_queue_.emplace_back(image_roots, oat_index);
1991 // Do not rely on the `work_queue_` for dex cache arrays, it would assign a different bin.
1992 ObjPtr<ObjectArray<Object>> dex_caches = ObjPtr<ObjectArray<Object>>::DownCast(
1993 image_roots->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(ImageHeader::kDexCaches));
1994 AssignImageBinSlot(dex_caches, oat_index, clean_bin);
1995 work_queue_.emplace_back(dex_caches, oat_index);
1996 }
1997 // Do not rely on the `work_queue_` for boot image live objects, it would assign a different bin.
1998 if (image_writer_->compiler_options_.IsBootImage()) {
1999 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
2000 image_writer_->boot_image_live_objects_;
2001 AssignImageBinSlot(boot_image_live_objects, GetDefaultOatIndex(), clean_bin);
2002 work_queue_.emplace_back(boot_image_live_objects, GetDefaultOatIndex());
2003 }
2004
2005 ProcessWorkQueue();
2006 }
2007
2008 void ImageWriter::LayoutHelper::ProcessInterns(Thread* self) {
2009 // String bins are empty at this point.
2010 DCHECK(std::all_of(bin_objects_.begin(),
2011 bin_objects_.end(),
2012 [](const auto& bins) {
2013 return bins[enum_cast<size_t>(Bin::kString)].empty();
2014 }));
2015
2016 // There is only one non-boot image intern table and it's the last one.
2017 InternTable* const intern_table = Runtime::Current()->GetInternTable();
2018 MutexLock mu(self, *Locks::intern_table_lock_);
2019 DCHECK_EQ(std::count_if(intern_table->strong_interns_.tables_.begin(),
2020 intern_table->strong_interns_.tables_.end(),
2021 [](const InternTable::Table::InternalTable& table) {
2022 return !table.IsBootImage();
2023 }),
2024 1);
2025 DCHECK(!intern_table->strong_interns_.tables_.back().IsBootImage());
2026 const InternTable::UnorderedSet& intern_set = intern_table->strong_interns_.tables_.back().set_;
2027
2028 // Assign bin slots to all interns with a corresponding StringId in one of the input dex files.
2029 ImageWriter* image_writer = image_writer_;
2030 for (const DexFile* dex_file : image_writer->compiler_options_.GetDexFilesForOatFile()) {
2031 auto it = image_writer->dex_file_oat_index_map_.find(dex_file);
2032 DCHECK(it != image_writer->dex_file_oat_index_map_.end()) << dex_file->GetLocation();
2033 const size_t oat_index = it->second;
2034 // Assign bin slots for strings defined in this dex file in StringId (lexicographical) order.
2035 auto& string_bin_objects = bin_objects_[oat_index][enum_cast<size_t>(Bin::kString)];
2036 for (size_t i = 0, count = dex_file->NumStringIds(); i != count; ++i) {
2037 uint32_t utf16_length;
2038 const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
2039 &utf16_length);
2040 uint32_t hash = InternTable::Utf8String::Hash(utf16_length, utf8_data);
2041 auto intern_it =
2042 intern_set.FindWithHash(InternTable::Utf8String(utf16_length, utf8_data), hash);
2043 if (intern_it != intern_set.end()) {
2044 mirror::String* string = intern_it->Read<kWithoutReadBarrier>();
2045 DCHECK(string != nullptr);
2046 DCHECK(!image_writer->IsInBootImage(string));
2047 if (!image_writer->IsImageBinSlotAssigned(string)) {
2048 Bin bin = image_writer->AssignImageBinSlot(string, oat_index);
2049 DCHECK_EQ(bin, kBinObjects ? Bin::kString : Bin::kRegular);
2050 string_bin_objects.push_back(string);
2051 } else {
2052 // We have already seen this string in a previous dex file.
2053 DCHECK(dex_file != image_writer->compiler_options_.GetDexFilesForOatFile().front());
2054 }
2055 }
2056 }
2057 }
2058
2059 // String bins have been filled with dex file interns. Record their numbers in image infos.
2060 DCHECK_EQ(bin_objects_.size(), image_writer_->image_infos_.size());
2061 size_t total_dex_file_interns = 0u;
2062 for (size_t oat_index = 0, size = bin_objects_.size(); oat_index != size; ++oat_index) {
2063 size_t num_dex_file_interns = bin_objects_[oat_index][enum_cast<size_t>(Bin::kString)].size();
2064 ImageInfo& image_info = image_writer_->GetImageInfo(oat_index);
2065 DCHECK_EQ(image_info.intern_table_size_, 0u);
2066 image_info.intern_table_size_ = num_dex_file_interns;
2067 total_dex_file_interns += num_dex_file_interns;
2068 }
2069
2070 // Collect interns that do not have a corresponding StringId in any of the input dex files.
2071 non_dex_file_interns_.reserve(intern_set.size() - total_dex_file_interns);
2072 for (const GcRoot<mirror::String>& root : intern_set) {
2073 mirror::String* string = root.Read<kWithoutReadBarrier>();
2074 if (!image_writer->IsImageBinSlotAssigned(string)) {
2075 non_dex_file_interns_.push_back(string);
2076 }
2077 }
2078 DCHECK_EQ(intern_set.size(), total_dex_file_interns + non_dex_file_interns_.size());
2079 }
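// Example of the assignment policy above (dex file names are hypothetical): if the
// string "foo" has a StringId in both base.dex and split.dex, its intern gets a bin
// slot while the earlier of the two dex files is processed, and the later lookup takes
// the "already seen" branch. An intern with no StringId in any input dex file (for
// example a method name interned via reflection) lands in `non_dex_file_interns_` and
// is assigned to an image later, based on the oat_index observed during work queue
// processing.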
2080
2081 void ImageWriter::LayoutHelper::FinalizeInternTables() {
2082 // Remove interns that do not have a bin slot assigned. These correspond
2083 // to the DexCache locations excluded in VerifyImageBinSlotsAssigned().
2084 ImageWriter* image_writer = image_writer_;
2085 auto retained_end = std::remove_if(
2086 non_dex_file_interns_.begin(),
2087 non_dex_file_interns_.end(),
2088 [=](mirror::String* string) REQUIRES_SHARED(Locks::mutator_lock_) {
2089 return !image_writer->IsImageBinSlotAssigned(string);
2090 });
2091 non_dex_file_interns_.resize(std::distance(non_dex_file_interns_.begin(), retained_end));
2092
2093 // Sort `non_dex_file_interns_` based on oat index and bin offset.
2094 ArrayRef<mirror::String*> non_dex_file_interns(non_dex_file_interns_);
2095 std::sort(non_dex_file_interns.begin(),
2096 non_dex_file_interns.end(),
2097 [=](mirror::String* lhs, mirror::String* rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
2098 size_t lhs_oat_index = image_writer->GetOatIndex(lhs);
2099 size_t rhs_oat_index = image_writer->GetOatIndex(rhs);
2100 if (lhs_oat_index != rhs_oat_index) {
2101 return lhs_oat_index < rhs_oat_index;
2102 }
2103 BinSlot lhs_bin_slot = image_writer->GetImageBinSlot(lhs, lhs_oat_index);
2104 BinSlot rhs_bin_slot = image_writer->GetImageBinSlot(rhs, rhs_oat_index);
2105 return lhs_bin_slot < rhs_bin_slot;
2106 });
2107
2108 // Allocate and fill intern tables.
2109 size_t ndfi_index = 0u;
2110 DCHECK_EQ(bin_objects_.size(), image_writer->image_infos_.size());
2111 for (size_t oat_index = 0, size = bin_objects_.size(); oat_index != size; ++oat_index) {
2112 // Find the end of `non_dex_file_interns` for this oat file.
2113 size_t ndfi_end = ndfi_index;
2114 while (ndfi_end != non_dex_file_interns.size() &&
2115 image_writer->GetOatIndex(non_dex_file_interns[ndfi_end]) == oat_index) {
2116 ++ndfi_end;
2117 }
2118
2119 // Calculate final intern table size.
2120 ImageInfo& image_info = image_writer->GetImageInfo(oat_index);
2121 DCHECK_EQ(image_info.intern_table_bytes_, 0u);
2122 size_t num_dex_file_interns = image_info.intern_table_size_;
2123 size_t num_non_dex_file_interns = ndfi_end - ndfi_index;
2124 image_info.intern_table_size_ = num_dex_file_interns + num_non_dex_file_interns;
2125 if (image_info.intern_table_size_ != 0u) {
2126 // Make sure the intern table ends up full by allocating a buffer of exactly the right size.
2127 size_t buffer_size = static_cast<size_t>(
2128 ceil(image_info.intern_table_size_ / kImageInternTableMaxLoadFactor));
2129 image_info.intern_table_buffer_.reset(new GcRoot<mirror::String>[buffer_size]);
2130 DCHECK(image_info.intern_table_buffer_ != nullptr);
2131 image_info.intern_table_.emplace(kImageInternTableMinLoadFactor,
2132 kImageInternTableMaxLoadFactor,
2133 image_info.intern_table_buffer_.get(),
2134 buffer_size);
2135
2136 // Fill the intern table. Dex file interns are at the start of the bin_objects[.][kString].
2137 InternTable::UnorderedSet& table = *image_info.intern_table_;
2138 const auto& oat_file_strings = bin_objects_[oat_index][enum_cast<size_t>(Bin::kString)];
2139 DCHECK_LE(num_dex_file_interns, oat_file_strings.size());
2140 ArrayRef<mirror::Object* const> dex_file_interns(
2141 oat_file_strings.data(), num_dex_file_interns);
2142 for (mirror::Object* string : dex_file_interns) {
2143 bool inserted = table.insert(GcRoot<mirror::String>(string->AsString())).second;
2144 DCHECK(inserted) << "String already inserted: " << string->AsString()->ToModifiedUtf8();
2145 }
2146 ArrayRef<mirror::String*> current_non_dex_file_interns =
2147 non_dex_file_interns.SubArray(ndfi_index, num_non_dex_file_interns);
2148 for (mirror::String* string : current_non_dex_file_interns) {
2149 bool inserted = table.insert(GcRoot<mirror::String>(string)).second;
2150 DCHECK(inserted) << "String already inserted: " << string->ToModifiedUtf8();
2151 }
2152
2153 // Record the intern table size in bytes.
2154 image_info.intern_table_bytes_ = table.WriteToMemory(nullptr);
2155 }
2156
2157 ndfi_index = ndfi_end;
2158 }
2159 }
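// Sizing sketch for the table allocation above (the load factor constants are defined
// near the top of this file; the numbers below are purely illustrative): with 1000
// interns and a maximum load factor of 0.7, the buffer would hold
// ceil(1000 / 0.7) = 1429 slots, so after inserting every intern the table is full by
// its own load-factor rules and no extra memory is wasted:
//
//   size_t buffer_size = static_cast<size_t>(ceil(1000 / 0.7));   // 1429
//   intern_table_buffer_.reset(new GcRoot<mirror::String>[buffer_size]);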
2160
2161 void ImageWriter::LayoutHelper::ProcessWorkQueue() {
2162 while (!work_queue_.empty()) {
2163 std::pair<ObjPtr<mirror::Object>, size_t> pair = work_queue_.front();
2164 work_queue_.pop_front();
2165 VisitReferences(/*obj=*/ pair.first, /*oat_index=*/ pair.second);
2166 }
2167 }
2168
2169 void ImageWriter::LayoutHelper::VerifyImageBinSlotsAssigned() {
2170 dchecked_vector<mirror::Object*> carveout;
2171 JavaVMExt* vm = nullptr;
2172 if (image_writer_->compiler_options_.IsAppImage()) {
2173 // Exclude boot class path dex caches that are not part of the boot image.
2174 // Also exclude their locations if they have not been visited through another path.
2175 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2176 Thread* self = Thread::Current();
2177 vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
2178 ReaderMutexLock mu(self, *Locks::dex_lock_);
2179 for (const auto& entry : class_linker->GetDexCachesData()) {
2180 const ClassLinker::DexCacheData& data = entry.second;
2181 auto dex_cache = DecodeWeakGlobalWithoutRB<mirror::DexCache>(vm, self, data.weak_root);
2182 if (dex_cache == nullptr ||
2183 image_writer_->IsInBootImage(dex_cache.Ptr()) ||
2184 ContainsElement(image_writer_->compiler_options_.GetDexFilesForOatFile(),
2185 dex_cache->GetDexFile())) {
2186 continue;
2187 }
2188 CHECK(!image_writer_->IsImageBinSlotAssigned(dex_cache.Ptr()));
2189 carveout.push_back(dex_cache.Ptr());
2190 ObjPtr<mirror::String> location = dex_cache->GetLocation<kVerifyNone, kWithoutReadBarrier>();
2191 if (!image_writer_->IsImageBinSlotAssigned(location.Ptr())) {
2192 carveout.push_back(location.Ptr());
2193 }
2194 }
2195 }
2196
2197 dchecked_vector<mirror::Object*> missed_objects;
2198 auto ensure_bin_slots_assigned = [&](mirror::Object* obj)
2199 REQUIRES_SHARED(Locks::mutator_lock_) {
2200 if (!image_writer_->IsInBootImage(obj)) {
2201 if (!UNLIKELY(image_writer_->IsImageBinSlotAssigned(obj))) {
2202 // Ignore the `carveout` objects.
2203 if (ContainsElement(carveout, obj)) {
2204 return;
2205 }
2206 // Ignore finalizer references for the dalvik.system.DexFile objects referenced by
2207 // the app class loader.
2208 ObjPtr<mirror::Class> klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
2209 if (klass->IsFinalizerReferenceClass<kVerifyNone>()) {
2210 ObjPtr<mirror::Class> reference_class =
2211 klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>();
2212 DCHECK(reference_class->DescriptorEquals("Ljava/lang/ref/Reference;"));
2213 ArtField* ref_field = reference_class->FindDeclaredInstanceField(
2214 "referent", "Ljava/lang/Object;");
2215 CHECK(ref_field != nullptr);
2216 ObjPtr<mirror::Object> ref = ref_field->GetObject<kWithoutReadBarrier>(obj);
2217 CHECK(ref != nullptr);
2218 CHECK(image_writer_->IsImageBinSlotAssigned(ref.Ptr()));
2219 ObjPtr<mirror::Class> ref_klass = ref->GetClass<kVerifyNone, kWithoutReadBarrier>();
2220 CHECK(ref_klass ==
2221 DecodeGlobalWithoutRB<mirror::Class>(vm, WellKnownClasses::dalvik_system_DexFile));
2222 // Note: The app class loader is used only for checking against the runtime
2223 // class loader; the dex file cookie is cleared, so we do not need
2224 // to run the finalizer even if we implement app image object collection.
2225 ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
2226 CHECK(field->GetObject<kWithoutReadBarrier>(ref) == nullptr);
2227 return;
2228 }
2229 if (klass->IsStringClass()) {
2230 // Ignore interned strings. These may come from reflection interning method names.
2231 // TODO: Make dex file strings weak interns and GC them before writing the image.
2232 if (IsStronglyInternedString(obj->AsString())) {
2233 return;
2234 }
2235 }
2236 missed_objects.push_back(obj);
2237 }
2238 }
2239 };
2240 Runtime::Current()->GetHeap()->VisitObjects(ensure_bin_slots_assigned);
2241 if (!missed_objects.empty()) {
2242 const gc::Verification* v = Runtime::Current()->GetHeap()->GetVerification();
2243 size_t num_missed_objects = missed_objects.size();
2244 size_t num_paths = std::min<size_t>(num_missed_objects, 5u); // Do not flood the output.
2245 ArrayRef<mirror::Object*> missed_objects_head =
2246 ArrayRef<mirror::Object*>(missed_objects).SubArray(/*pos=*/ 0u, /*length=*/ num_paths);
2247 for (mirror::Object* obj : missed_objects_head) {
2248 LOG(ERROR) << "Image object without assigned bin slot: "
2249 << mirror::Object::PrettyTypeOf(obj) << " " << obj
2250 << " " << v->FirstPathFromRootSet(obj);
2251 }
2252 LOG(FATAL) << "Found " << num_missed_objects << " objects without assigned bin slots.";
2253 }
2254 }
2255
2256 void ImageWriter::LayoutHelper::FinalizeBinSlotOffsets() {
2257 // Calculate bin slot offsets and adjust for region padding if needed.
2258 const size_t region_size = image_writer_->region_size_;
2259 const size_t num_image_infos = image_writer_->image_infos_.size();
2260 for (size_t oat_index = 0; oat_index != num_image_infos; ++oat_index) {
2261 ImageInfo& image_info = image_writer_->image_infos_[oat_index];
2262 size_t bin_offset = image_writer_->image_objects_offset_begin_;
2263
2264 for (size_t i = 0; i != kNumberOfBins; ++i) {
2265 Bin bin = enum_cast<Bin>(i);
2266 switch (bin) {
2267 case Bin::kArtMethodClean:
2268 case Bin::kArtMethodDirty: {
2269 bin_offset = RoundUp(bin_offset, ArtMethod::Alignment(image_writer_->target_ptr_size_));
2270 break;
2271 }
2272 case Bin::kImTable:
2273 case Bin::kIMTConflictTable: {
2274 bin_offset = RoundUp(bin_offset, static_cast<size_t>(image_writer_->target_ptr_size_));
2275 break;
2276 }
2277 default: {
2278 // Normal alignment.
2279 }
2280 }
2281 image_info.bin_slot_offsets_[i] = bin_offset;
2282
2283 // If the bin is for mirror objects, we may need to add region padding and update offsets.
2284 if (i < enum_cast<size_t>(Bin::kMirrorCount) && region_size != 0u) {
2285 const size_t offset_after_header = bin_offset - sizeof(ImageHeader);
2286 size_t remaining_space =
2287 RoundUp(offset_after_header + 1u, region_size) - offset_after_header;
2288 // Exercise the loop below in debug builds to get coverage.
2289 if (kIsDebugBuild || remaining_space < image_info.bin_slot_sizes_[i]) {
2290 // The bin crosses a region boundary. Add padding if needed.
2291 size_t object_offset = 0u;
2292 size_t padding = 0u;
2293 for (mirror::Object* object : bin_objects_[oat_index][i]) {
2294 BinSlot bin_slot = image_writer_->GetImageBinSlot(object, oat_index);
2295 DCHECK_EQ(enum_cast<size_t>(bin_slot.GetBin()), i);
2296 DCHECK_EQ(bin_slot.GetOffset() + padding, object_offset);
2297 size_t object_size = RoundUp(object->SizeOf<kVerifyNone>(), kObjectAlignment);
2298
2299 auto add_padding = [&](bool tail_region) {
2300 DCHECK_NE(remaining_space, 0u);
2301 DCHECK_LT(remaining_space, region_size);
2302 DCHECK_ALIGNED(remaining_space, kObjectAlignment);
2303 // TODO When copying to heap regions, leave the tail region padding zero-filled.
2304 if (!tail_region || true) {
2305 image_info.padding_offsets_.push_back(bin_offset + object_offset);
2306 }
2307 image_info.bin_slot_sizes_[i] += remaining_space;
2308 padding += remaining_space;
2309 object_offset += remaining_space;
2310 remaining_space = region_size;
2311 };
2312 if (object_size > remaining_space) {
2313 // Padding is needed if we're not at a region boundary (with a multi-region object).
2314 if (remaining_space != region_size) {
2315 // TODO: Instead of adding padding, we should consider reordering the bins
2316 // or objects to reduce wasted space.
2317 add_padding(/*tail_region=*/ false);
2318 }
2319 DCHECK_EQ(remaining_space, region_size);
2320 // For huge objects, adjust the remaining space to hold the object and some more.
2321 if (object_size > region_size) {
2322 remaining_space = RoundUp(object_size + 1u, region_size);
2323 }
2324 } else if (remaining_space == object_size) {
2325 // Move to the next region, no padding needed.
2326 remaining_space += region_size;
2327 }
2328 DCHECK_GT(remaining_space, object_size);
2329 remaining_space -= object_size;
2330 image_writer_->UpdateImageBinSlotOffset(object, oat_index, object_offset);
2331 object_offset += object_size;
2332 // Add padding to the tail region of huge objects if not region-aligned.
2333 if (object_size > region_size && remaining_space != region_size) {
2334 DCHECK(!IsAlignedParam(object_size, region_size));
2335 add_padding(/*tail_region=*/ true);
2336 }
2337 }
2338 image_writer_->region_alignment_wasted_ += padding;
2339 image_info.image_end_ += padding;
2340 }
2341 }
2342 bin_offset += image_info.bin_slot_sizes_[i];
2343 }
2344 // NOTE: There may be additional padding between the bin slots and the intern table.
2345 DCHECK_EQ(
2346 image_info.image_end_,
2347 image_info.GetBinSizeSum(Bin::kMirrorCount) + image_writer_->image_objects_offset_begin_);
2348 }
2349
2350 VLOG(image) << "Space wasted for region alignment " << image_writer_->region_alignment_wasted_;
2351 }
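// Worked example of the region padding logic above (all numbers are illustrative): with
// a 256 KiB region and 4 KiB of space remaining before the next region boundary, a
// 6 KiB object does not fit, so 4 KiB of padding is recorded in `padding_offsets_`, the
// bin size and `image_end_` grow by that padding, and the object starts at the next
// region boundary. An object larger than a whole region additionally gets tail padding
// (unless its size happens to be region-aligned) so that the following object again
// starts at a region boundary.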
2352
2353 void ImageWriter::LayoutHelper::CollectStringReferenceInfo() {
2354 size_t total_string_refs = 0u;
2355
2356 const size_t num_image_infos = image_writer_->image_infos_.size();
2357 for (size_t oat_index = 0; oat_index != num_image_infos; ++oat_index) {
2358 ImageInfo& image_info = image_writer_->image_infos_[oat_index];
2359 DCHECK(image_info.string_reference_offsets_.empty());
2360 image_info.string_reference_offsets_.reserve(image_info.num_string_references_);
2361
2362 for (size_t i = 0; i < enum_cast<size_t>(Bin::kMirrorCount); ++i) {
2363 for (mirror::Object* obj : bin_objects_[oat_index][i]) {
2364 CollectStringReferenceVisitor visitor(image_writer_,
2365 oat_index,
2366 &image_info.string_reference_offsets_,
2367 obj);
2368 /*
2369 * References to managed strings can occur either in the managed heap or in
2370 * native memory regions. Information about managed references is collected
2371 * by the CollectStringReferenceVisitor and directly added to the image info.
2372 *
2373 * Native references to managed strings can only occur through DexCache
2374 * objects. This is verified by the visitor in debug mode and the references
2375 * are collected separately below.
2376 */
2377 obj->VisitReferences</*kVisitNativeRoots=*/ kIsDebugBuild,
2378 kVerifyNone,
2379 kWithoutReadBarrier>(visitor, visitor);
2380 }
2381 }
2382
2383 total_string_refs += image_info.string_reference_offsets_.size();
2384
2385 // Check that we collected the same number of string references as we saw in the previous pass.
2386 CHECK_EQ(image_info.string_reference_offsets_.size(), image_info.num_string_references_);
2387 }
2388
2389 VLOG(compiler) << "Dex2Oat:AppImage:stringReferences = " << total_string_refs;
2390 }
2391
2392 void ImageWriter::LayoutHelper::VisitReferences(ObjPtr<mirror::Object> obj, size_t oat_index) {
2393 size_t old_work_queue_size = work_queue_.size();
2394 VisitReferencesVisitor visitor(this, oat_index);
2395 // Walk references and assign bin slots for them.
2396 obj->VisitReferences</*kVisitNativeRoots=*/ false, kVerifyNone, kWithoutReadBarrier>(
2397 visitor,
2398 visitor);
2399 // Put the added references in the queue in the order in which they were added.
2400 // The visitor just pushes them to the front as it visits them.
2401 DCHECK_LE(old_work_queue_size, work_queue_.size());
2402 size_t num_added = work_queue_.size() - old_work_queue_size;
2403 std::reverse(work_queue_.begin(), work_queue_.begin() + num_added);
2404 }
2405
2406 bool ImageWriter::LayoutHelper::TryAssignBinSlot(ObjPtr<mirror::Object> obj, size_t oat_index) {
2407 if (obj == nullptr || image_writer_->IsInBootImage(obj.Ptr())) {
2408 // Object is null or already in the image, there is no work to do.
2409 return false;
2410 }
2411 bool assigned = false;
2412 if (!image_writer_->IsImageBinSlotAssigned(obj.Ptr())) {
2413 Bin bin = image_writer_->AssignImageBinSlot(obj.Ptr(), oat_index);
2414 bin_objects_[oat_index][enum_cast<size_t>(bin)].push_back(obj.Ptr());
2415 assigned = true;
2416 }
2417 return assigned;
2418 }
2419
2420 void ImageWriter::LayoutHelper::AssignImageBinSlot(
2421 ObjPtr<mirror::Object> object, size_t oat_index, Bin bin) {
2422 DCHECK(object != nullptr);
2423 DCHECK(!image_writer_->IsInBootImage(object.Ptr()));
2424 DCHECK(!image_writer_->IsImageBinSlotAssigned(object.Ptr()));
2425 image_writer_->AssignImageBinSlot(object.Ptr(), oat_index, bin);
2426 bin_objects_[oat_index][enum_cast<size_t>(bin)].push_back(object.Ptr());
2427 }
2428
2429 void ImageWriter::CalculateNewObjectOffsets() {
2430 Thread* const self = Thread::Current();
2431 Runtime* const runtime = Runtime::Current();
2432 gc::Heap* const heap = runtime->GetHeap();
2433
2434 // Leave space for the header, but do not write it yet; we need to
2435 // know where image_roots is going to end up.
2436 image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit-alignment
2437
2438 // Write the image runtime methods.
2439 image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
2440 image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
2441 image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
2442 image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
2443 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves);
2444 image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
2445 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly);
2446 image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
2447 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
2448 image_methods_[ImageHeader::kSaveEverythingMethod] =
2449 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything);
2450 image_methods_[ImageHeader::kSaveEverythingMethodForClinit] =
2451 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit);
2452 image_methods_[ImageHeader::kSaveEverythingMethodForSuspendCheck] =
2453 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck);
2454 // Visit image methods first to have the main runtime methods in the first image.
2455 for (auto* m : image_methods_) {
2456 CHECK(m != nullptr);
2457 CHECK(m->IsRuntimeMethod());
2458 DCHECK_EQ(!compiler_options_.IsBootImage(), IsInBootImage(m))
2459 << "Trampolines should be in boot image";
2460 if (!IsInBootImage(m)) {
2461 AssignMethodOffset(m, NativeObjectRelocationType::kRuntimeMethod, GetDefaultOatIndex());
2462 }
2463 }
2464
2465 // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
2466 // this lock while holding other locks may cause lock order violations.
2467 {
2468 auto deflate_monitor = [](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
2469 Monitor::Deflate(Thread::Current(), obj);
2470 };
2471 heap->VisitObjects(deflate_monitor);
2472 }
2473
2474 // From this point on, there shall be no GC anymore and no objects shall be allocated.
2475 // We can now assign a BinSlot to each object and store it in its lock word.
2476
2477 JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
2478 if (compiler_options_.IsBootImage() || compiler_options_.IsBootImageExtension()) {
2479 // Record the address of boot image live objects.
2480 auto image_roots = DecodeGlobalWithoutRB<mirror::ObjectArray<mirror::Object>>(
2481 vm, image_roots_[0]);
2482 boot_image_live_objects_ = ObjPtr<ObjectArray<Object>>::DownCast(
2483 image_roots->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(
2484 ImageHeader::kBootImageLiveObjects)).Ptr();
2485 }
2486
2487 LayoutHelper layout_helper(this);
2488 layout_helper.ProcessDexFileObjects(self);
2489 layout_helper.ProcessRoots(self);
2490 layout_helper.FinalizeInternTables();
2491
2492 // Verify that all objects have assigned image bin slots.
2493 layout_helper.VerifyImageBinSlotsAssigned();
2494
2495 // Finalize bin slot offsets. This may add padding for regions.
2496 layout_helper.FinalizeBinSlotOffsets();
2497
2498 // Collect string reference info for app images.
2499 if (ClassLinker::kAppImageMayContainStrings && compiler_options_.IsAppImage()) {
2500 layout_helper.CollectStringReferenceInfo();
2501 }
2502
2503 // Calculate image offsets.
2504 size_t image_offset = 0;
2505 for (ImageInfo& image_info : image_infos_) {
2506 image_info.image_begin_ = global_image_begin_ + image_offset;
2507 image_info.image_offset_ = image_offset;
2508 image_info.image_size_ = RoundUp(image_info.CreateImageSections().first, kPageSize);
2509 // There should be no gaps until the next image.
2510 image_offset += image_info.image_size_;
2511 }
2512
2513 size_t oat_index = 0;
2514 for (ImageInfo& image_info : image_infos_) {
2515 auto image_roots = DecodeGlobalWithoutRB<mirror::ObjectArray<mirror::Object>>(
2516 vm, image_roots_[oat_index]);
2517 image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Ptr()));
2518 ++oat_index;
2519 }
2520
2521 // Update the native relocations by adding their bin sums.
2522 for (auto& pair : native_object_relocations_) {
2523 NativeObjectRelocation& relocation = pair.second;
2524 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
2525 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
2526 relocation.offset += image_info.GetBinSlotOffset(bin_type);
2527 }
2528 }
2529
2530 std::pair<size_t, dchecked_vector<ImageSection>>
2531 ImageWriter::ImageInfo::CreateImageSections() const {
2532 dchecked_vector<ImageSection> sections(ImageHeader::kSectionCount);
2533
2534 // Do not round up any sections here that are represented by the bins, since doing so
2535 // would break the bin slot offsets.
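// The first six sections below map directly onto the layout bins; the interned strings,
// class table, string reference offsets and metadata sections are then appended with
// explicit alignment (a summary of the code below, for orientation).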
2536
2537 /*
2538 * Objects section
2539 */
2540 sections[ImageHeader::kSectionObjects] =
2541 ImageSection(0u, image_end_);
2542
2543 /*
2544 * Field section
2545 */
2546 sections[ImageHeader::kSectionArtFields] =
2547 ImageSection(GetBinSlotOffset(Bin::kArtField), GetBinSlotSize(Bin::kArtField));
2548
2549 /*
2550 * Method section
2551 */
2552 sections[ImageHeader::kSectionArtMethods] =
2553 ImageSection(GetBinSlotOffset(Bin::kArtMethodClean),
2554 GetBinSlotSize(Bin::kArtMethodClean) +
2555 GetBinSlotSize(Bin::kArtMethodDirty));
2556
2557 /*
2558 * IMT section
2559 */
2560 sections[ImageHeader::kSectionImTables] =
2561 ImageSection(GetBinSlotOffset(Bin::kImTable), GetBinSlotSize(Bin::kImTable));
2562
2563 /*
2564 * Conflict Tables section
2565 */
2566 sections[ImageHeader::kSectionIMTConflictTables] =
2567 ImageSection(GetBinSlotOffset(Bin::kIMTConflictTable), GetBinSlotSize(Bin::kIMTConflictTable));
2568
2569 /*
2570 * Runtime Methods section
2571 */
2572 sections[ImageHeader::kSectionRuntimeMethods] =
2573 ImageSection(GetBinSlotOffset(Bin::kRuntimeMethod), GetBinSlotSize(Bin::kRuntimeMethod));
2574
2575 /*
2576 * Interned Strings section
2577 */
2578
2579 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
2580 size_t cur_pos = RoundUp(sections[ImageHeader::kSectionRuntimeMethods].End(), sizeof(uint64_t));
2581
2582 const ImageSection& interned_strings_section =
2583 sections[ImageHeader::kSectionInternedStrings] =
2584 ImageSection(cur_pos, intern_table_bytes_);
2585
2586 /*
2587 * Class Table section
2588 */
2589
2590 // Obtain the new position and round it up to the appropriate alignment.
2591 cur_pos = RoundUp(interned_strings_section.End(), sizeof(uint64_t));
2592
2593 const ImageSection& class_table_section =
2594 sections[ImageHeader::kSectionClassTable] =
2595 ImageSection(cur_pos, class_table_bytes_);
2596
2597 /*
2598 * String Field Offsets section
2599 */
2600
2601 // Round up to the alignment of the offsets we are going to store.
2602 cur_pos = RoundUp(class_table_section.End(), sizeof(uint32_t));
2603
2604 // The size of string_reference_offsets_ can't be used here because it hasn't
2605 // been filled with AppImageReferenceOffsetInfo objects yet. The
2606 // num_string_references_ value is calculated separately, before we can
2607 // compute the actual offsets.
2608 const ImageSection& string_reference_offsets =
2609 sections[ImageHeader::kSectionStringReferenceOffsets] =
2610 ImageSection(cur_pos, sizeof(string_reference_offsets_[0]) * num_string_references_);
2611
2612 /*
2613 * Metadata section.
2614 */
2615
2616 // Round up to the alignment of the offsets we are going to store.
2617 cur_pos = RoundUp(string_reference_offsets.End(), sizeof(uint32_t));
2618
2619 const ImageSection& metadata_section =
2620 sections[ImageHeader::kSectionMetadata] =
2621 ImageSection(cur_pos, GetBinSlotSize(Bin::kMetadata));
2622
2623 // Return the number of bytes described by these sections, and the sections
2624 // themselves.
2625 return make_pair(metadata_section.End(), std::move(sections));
2626 }
2627
2628 void ImageWriter::CreateHeader(size_t oat_index, size_t component_count) {
2629 ImageInfo& image_info = GetImageInfo(oat_index);
2630 const uint8_t* oat_file_begin = image_info.oat_file_begin_;
2631 const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
2632 const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
2633
2634 uint32_t image_reservation_size = image_info.image_size_;
2635 DCHECK_ALIGNED(image_reservation_size, kPageSize);
2636 uint32_t current_component_count = 1u;
2637 if (compiler_options_.IsAppImage()) {
2638 DCHECK_EQ(oat_index, 0u);
2639 DCHECK_EQ(component_count, current_component_count);
2640 } else {
2641 DCHECK(image_infos_.size() == 1u || image_infos_.size() == component_count)
2642 << image_infos_.size() << " " << component_count;
2643 if (oat_index == 0u) {
2644 const ImageInfo& last_info = image_infos_.back();
2645 const uint8_t* end = last_info.oat_file_begin_ + last_info.oat_loaded_size_;
2646 DCHECK_ALIGNED(image_info.image_begin_, kPageSize);
2647 image_reservation_size =
2648 dchecked_integral_cast<uint32_t>(RoundUp(end - image_info.image_begin_, kPageSize));
2649 current_component_count = component_count;
2650 } else {
2651 image_reservation_size = 0u;
2652 current_component_count = 0u;
2653 }
2654 }
2655
2656 // Compute boot image checksums for the primary component; leave them as 0 otherwise.
2657 uint32_t boot_image_components = 0u;
2658 uint32_t boot_image_checksums = 0u;
2659 if (oat_index == 0u) {
2660 const std::vector<gc::space::ImageSpace*>& image_spaces =
2661 Runtime::Current()->GetHeap()->GetBootImageSpaces();
2662 DCHECK_EQ(image_spaces.empty(), compiler_options_.IsBootImage());
2663 for (size_t i = 0u, size = image_spaces.size(); i != size; ) {
2664 const ImageHeader& header = image_spaces[i]->GetImageHeader();
2665 boot_image_components += header.GetComponentCount();
2666 boot_image_checksums ^= header.GetImageChecksum();
2667 DCHECK_LE(header.GetImageSpaceCount(), size - i);
2668 i += header.GetImageSpaceCount();
2669 }
2670 }
2671
2672 // Create the image sections.
2673 auto section_info_pair = image_info.CreateImageSections();
2674 const size_t image_end = section_info_pair.first;
2675 dchecked_vector<ImageSection>& sections = section_info_pair.second;
2676
2677 // Finally, the bitmap section.
2678 const size_t bitmap_bytes = image_info.image_bitmap_.Size();
2679 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
2680 *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
2681 if (VLOG_IS_ON(compiler)) {
2682 LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
2683 size_t idx = 0;
2684 for (const ImageSection& section : sections) {
2685 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
2686 ++idx;
2687 }
2688 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
2689 LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
2690 LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
2691 << " Image offset=" << image_info.image_offset_ << std::dec;
2692 LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
2693 << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
2694 << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
2695 << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
2696 }
2697
2698 // Create the header; leave 0 for the data size since we will fill this in as we are writing
2699 // the image.
2700 new (image_info.image_.Begin()) ImageHeader(
2701 image_reservation_size,
2702 current_component_count,
2703 PointerToLowMemUInt32(image_info.image_begin_),
2704 image_end,
2705 sections.data(),
2706 image_info.image_roots_address_,
2707 image_info.oat_checksum_,
2708 PointerToLowMemUInt32(oat_file_begin),
2709 PointerToLowMemUInt32(image_info.oat_data_begin_),
2710 PointerToLowMemUInt32(oat_data_end),
2711 PointerToLowMemUInt32(oat_file_end),
2712 boot_image_begin_,
2713 boot_image_size_,
2714 boot_image_components,
2715 boot_image_checksums,
2716 static_cast<uint32_t>(target_ptr_size_));
2717 }
2718
2719 ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
2720 NativeObjectRelocation relocation = GetNativeRelocation(method);
2721 const ImageInfo& image_info = GetImageInfo(relocation.oat_index);
2722 CHECK_GE(relocation.offset, image_info.image_end_) << "ArtMethods should be after Objects";
2723 return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + relocation.offset);
2724 }
2725
2726 const void* ImageWriter::GetIntrinsicReferenceAddress(uint32_t intrinsic_data) {
2727 DCHECK(compiler_options_.IsBootImage());
2728 switch (IntrinsicObjects::DecodePatchType(intrinsic_data)) {
2729 case IntrinsicObjects::PatchType::kIntegerValueOfArray: {
2730 const uint8_t* base_address =
2731 reinterpret_cast<const uint8_t*>(GetImageAddress(boot_image_live_objects_));
2732 MemberOffset data_offset =
2733 IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects_);
2734 return base_address + data_offset.Uint32Value();
2735 }
2736 case IntrinsicObjects::PatchType::kIntegerValueOfObject: {
2737 uint32_t index = IntrinsicObjects::DecodePatchIndex(intrinsic_data);
2738 ObjPtr<mirror::Object> value =
2739 IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects_, index);
2740 return GetImageAddress(value.Ptr());
2741 }
2742 }
2743 LOG(FATAL) << "UNREACHABLE";
2744 UNREACHABLE();
2745 }
2746
2747
2748 class ImageWriter::FixupRootVisitor : public RootVisitor {
2749 public:
2750 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
2751 }
2752
2753 void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
2754 size_t count ATTRIBUTE_UNUSED,
2755 const RootInfo& info ATTRIBUTE_UNUSED)
2756 override REQUIRES_SHARED(Locks::mutator_lock_) {
2757 LOG(FATAL) << "Unsupported";
2758 }
2759
2760 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
2761 size_t count,
2762 const RootInfo& info ATTRIBUTE_UNUSED)
2763 override REQUIRES_SHARED(Locks::mutator_lock_) {
2764 for (size_t i = 0; i < count; ++i) {
2765 // Copy the reference. Since we do not have the address for recording the relocation,
2766 // it needs to be recorded explicitly by the user of FixupRootVisitor.
2767 ObjPtr<mirror::Object> old_ptr = roots[i]->AsMirrorPtr();
2768 roots[i]->Assign(image_writer_->GetImageAddress(old_ptr.Ptr()));
2769 }
2770 }
2771
2772 private:
2773 ImageWriter* const image_writer_;
2774 };
2775
2776 void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
2777 for (size_t i = 0; i < ImTable::kSize; ++i) {
2778 ArtMethod* method = orig->Get(i, target_ptr_size_);
2779 void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
2780 CopyAndFixupPointer(address, method);
2781 DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
2782 }
2783 }
2784
2785 void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
2786 const size_t count = orig->NumEntries(target_ptr_size_);
2787 for (size_t i = 0; i < count; ++i) {
2788 ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
2789 ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
2790 CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
2791 CopyAndFixupPointer(
2792 copy->AddressOfImplementationMethod(i, target_ptr_size_), implementation_method);
2793 DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
2794 NativeLocationInImage(interface_method));
2795 DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
2796 NativeLocationInImage(implementation_method));
2797 }
2798 }
2799
2800 void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
2801 const ImageInfo& image_info = GetImageInfo(oat_index);
2802 // Copy ArtFields and methods to their locations and update the array for convenience.
2803 for (auto& pair : native_object_relocations_) {
2804 NativeObjectRelocation& relocation = pair.second;
2805 // Only work with fields and methods that are in the current oat file.
2806 if (relocation.oat_index != oat_index) {
2807 continue;
2808 }
2809 auto* dest = image_info.image_.Begin() + relocation.offset;
2810 DCHECK_GE(dest, image_info.image_.Begin() + image_info.image_end_);
2811 DCHECK(!IsInBootImage(pair.first));
2812 switch (relocation.type) {
2813 case NativeObjectRelocationType::kRuntimeMethod:
2814 case NativeObjectRelocationType::kArtMethodClean:
2815 case NativeObjectRelocationType::kArtMethodDirty: {
2816 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
2817 reinterpret_cast<ArtMethod*>(dest),
2818 oat_index);
2819 break;
2820 }
2821 case NativeObjectRelocationType::kArtFieldArray: {
2822 // Copy and fix up the entire field array.
2823 auto* src_array = reinterpret_cast<LengthPrefixedArray<ArtField>*>(pair.first);
2824 auto* dest_array = reinterpret_cast<LengthPrefixedArray<ArtField>*>(dest);
2825 size_t size = src_array->size();
2826 memcpy(dest_array, src_array, LengthPrefixedArray<ArtField>::ComputeSize(size));
2827 for (size_t i = 0; i != size; ++i) {
2828 CopyAndFixupReference(
2829 dest_array->At(i).GetDeclaringClassAddressWithoutBarrier(),
2830 src_array->At(i).GetDeclaringClass<kWithoutReadBarrier>());
2831 }
2832 break;
2833 }
2834 case NativeObjectRelocationType::kArtMethodArrayClean:
2835 case NativeObjectRelocationType::kArtMethodArrayDirty: {
2836 // For method arrays, copy just the header since the elements will
2837 // get copied by their corresponding relocations.
2838 size_t size = ArtMethod::Size(target_ptr_size_);
2839 size_t alignment = ArtMethod::Alignment(target_ptr_size_);
2840 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
2841 // Clear padding to avoid non-deterministic data in the image.
2842 // Historical note: We also did that to placate Valgrind.
2843 reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
2844 break;
2845 }
2846 case NativeObjectRelocationType::kIMTable: {
2847 ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
2848 ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
2849 CopyAndFixupImTable(orig_imt, dest_imt);
2850 break;
2851 }
2852 case NativeObjectRelocationType::kIMTConflictTable: {
2853 auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
2854 CopyAndFixupImtConflictTable(
2855 orig_table,
2856 new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
2857 break;
2858 }
2859 case NativeObjectRelocationType::kGcRootPointer: {
2860 auto* orig_pointer = reinterpret_cast<GcRoot<mirror::Object>*>(pair.first);
2861 auto* dest_pointer = reinterpret_cast<GcRoot<mirror::Object>*>(dest);
2862 CopyAndFixupReference(dest_pointer->AddressWithoutBarrier(), orig_pointer->Read());
2863 break;
2864 }
2865 }
2866 }
2867 // Fixup the image method roots.
2868 auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_.Begin());
2869 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
2870 ArtMethod* method = image_methods_[i];
2871 CHECK(method != nullptr);
2872 CopyAndFixupPointer(
2873 reinterpret_cast<void**>(&image_header->image_methods_[i]), method, PointerSize::k32);
2874 }
2875 FixupRootVisitor root_visitor(this);
2876
2877 // Write the intern table into the image.
2878 if (image_info.intern_table_bytes_ > 0) {
2879 const ImageSection& intern_table_section = image_header->GetInternedStringsSection();
2880 DCHECK(image_info.intern_table_.has_value());
2881 const InternTable::UnorderedSet& intern_table = *image_info.intern_table_;
2882 uint8_t* const intern_table_memory_ptr =
2883 image_info.image_.Begin() + intern_table_section.Offset();
2884 const size_t intern_table_bytes = intern_table.WriteToMemory(intern_table_memory_ptr);
2885 CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
2886 // Fixup the pointers in the newly written intern table to contain image addresses.
2887 InternTable temp_intern_table;
2888 // Note that we require that ReadFromMemory does not make an internal copy of the elements so
2889 // that the VisitRoots() will update the memory directly rather than the copies.
2890 // This also relies on visit roots not doing any verification which could fail after we update
2891 // the roots to be the image addresses.
2892 temp_intern_table.AddTableFromMemory(intern_table_memory_ptr,
2893 VoidFunctor(),
2894 /*is_boot_image=*/ false);
2895 CHECK_EQ(temp_intern_table.Size(), intern_table.size());
2896 temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
2897
2898 if (kIsDebugBuild) {
2899 MutexLock lock(Thread::Current(), *Locks::intern_table_lock_);
2900 CHECK(!temp_intern_table.strong_interns_.tables_.empty());
2901 // The UnorderedSet was inserted at the beginning.
2902 CHECK_EQ(temp_intern_table.strong_interns_.tables_[0].Size(), intern_table.size());
2903 }
2904 }
2905
2906 // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
2907 // class loaders. Writing multiple class tables into the image is currently unsupported.
2908 if (image_info.class_table_bytes_ > 0u) {
2909 const ImageSection& class_table_section = image_header->GetClassTableSection();
2910 uint8_t* const class_table_memory_ptr =
2911 image_info.image_.Begin() + class_table_section.Offset();
2912
2913 DCHECK(image_info.class_table_.has_value());
2914 const ClassTable::ClassSet& table = *image_info.class_table_;
2915 CHECK_EQ(table.size(), image_info.class_table_size_);
2916 const size_t class_table_bytes = table.WriteToMemory(class_table_memory_ptr);
2917 CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
2918
2919 // Fixup the pointers in the newly written class table to contain image addresses. See
2920 // above comment for intern tables.
2921 ClassTable temp_class_table;
2922 temp_class_table.ReadFromMemory(class_table_memory_ptr);
2923 CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(), table.size());
2924 UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
2925 temp_class_table.VisitRoots(visitor);
2926
2927 if (kIsDebugBuild) {
2928 ReaderMutexLock lock(Thread::Current(), temp_class_table.lock_);
2929 CHECK(!temp_class_table.classes_.empty());
2930 // The ClassSet was inserted at the beginning.
2931 CHECK_EQ(temp_class_table.classes_[0].size(), table.size());
2932 }
2933 }
2934 }
2935
2936 void ImageWriter::CopyAndFixupMethodPointerArray(mirror::PointerArray* arr) {
2937 // Pointer arrays are processed early and each is visited just once.
2938 // Therefore we know that this array has not been copied yet.
2939 mirror::Object* dst = CopyObject</*kCheckIfDone=*/ false>(arr);
2940 DCHECK(dst != nullptr);
2941 DCHECK(arr->IsIntArray() || arr->IsLongArray())
2942 << arr->GetClass<kVerifyNone, kWithoutReadBarrier>()->PrettyClass() << " " << arr;
2943 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
2944 const size_t num_elements = arr->GetLength();
2945 CopyAndFixupReference(dst->GetFieldObjectReferenceAddr<kVerifyNone>(Class::ClassOffset()),
2946 arr->GetClass<kVerifyNone, kWithoutReadBarrier>());
2947 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
2948 for (size_t i = 0, count = num_elements; i < count; ++i) {
2949 void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
2950 if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
2951 auto it = native_object_relocations_.find(elem);
2952 if (UNLIKELY(it == native_object_relocations_.end())) {
2953 auto* method = reinterpret_cast<ArtMethod*>(elem);
2954 LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
2955 << method << " idx=" << i << "/" << num_elements << " with declaring class "
2956 << Class::PrettyClass(method->GetDeclaringClass<kWithoutReadBarrier>());
2957 UNREACHABLE();
2958 }
2959 }
2960 CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
2961 }
2962 }
2963
2964 void ImageWriter::CopyAndFixupObject(Object* obj) {
2965 if (!IsImageBinSlotAssigned(obj)) {
2966 return;
2967 }
2968 // Some objects (such as method pointer arrays) may have been processed before.
2969 mirror::Object* dst = CopyObject</*kCheckIfDone=*/ true>(obj);
2970 if (dst != nullptr) {
2971 FixupObject(obj, dst);
2972 }
2973 }
2974
2975 template <bool kCheckIfDone>
2976 inline Object* ImageWriter::CopyObject(Object* obj) {
2977 size_t oat_index = GetOatIndex(obj);
2978 size_t offset = GetImageOffset(obj, oat_index);
2979 ImageInfo& image_info = GetImageInfo(oat_index);
2980 auto* dst = reinterpret_cast<Object*>(image_info.image_.Begin() + offset);
2981 DCHECK_LT(offset, image_info.image_end_);
2982 const auto* src = reinterpret_cast<const uint8_t*>(obj);
2983
2984 bool done = image_info.image_bitmap_.Set(dst); // Mark the obj as live.
2985 // Check if the object was already copied, unless the caller indicated that it was not.
2986 if (kCheckIfDone && done) {
2987 return nullptr;
2988 }
2989 DCHECK(!done);
2990
2991 const size_t n = obj->SizeOf();
2992
2993 if (kIsDebugBuild && region_size_ != 0u) {
2994 const size_t offset_after_header = offset - sizeof(ImageHeader);
2995 const size_t next_region = RoundUp(offset_after_header, region_size_);
2996 if (offset_after_header != next_region) {
2997 // If the object is not on a region boundary, it must not cross a region boundary.
2998 CHECK_LT(offset_after_header, next_region)
2999 << "offset_after_header=" << offset_after_header << " size=" << n;
3000 CHECK_LE(offset_after_header + n, next_region)
3001 << "offset_after_header=" << offset_after_header << " size=" << n;
3002 }
3003 }
3004 DCHECK_LE(offset + n, image_info.image_.Size());
3005 memcpy(dst, src, n);
3006
3007 // Write in the hash code for objects which have inflated monitors or a hash code in their monitor
3008 // word.
3009 const auto it = saved_hashcode_map_.find(obj);
3010 dst->SetLockWord(it != saved_hashcode_map_.end() ?
3011 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
3012 if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
3013 // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
3014 // safe since we mark all of the objects that may reference non immune objects as gray.
3015 CHECK(dst->AtomicSetMarkBit(0, 1));
3016 }
3017 return dst;
3018 }
3019
3020 // Rewrite all the references in the copied object to point to their image address equivalents.
3021 class ImageWriter::FixupVisitor {
3022 public:
3023 FixupVisitor(ImageWriter* image_writer, Object* copy)
3024 : image_writer_(image_writer), copy_(copy) {
3025 }
3026
3027 // We do not visit native roots. These are handled with other logic.
3028 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
3029 const {
3030 LOG(FATAL) << "UNREACHABLE";
3031 }
3032 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
3033 LOG(FATAL) << "UNREACHABLE";
3034 }
3035
3036 void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
3037 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
3038 ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone, kWithoutReadBarrier>(offset);
3039 // Copy the reference and record the fixup if necessary.
3040 image_writer_->CopyAndFixupReference(
3041 copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset), ref);
3042 }
3043
3044 // java.lang.ref.Reference visitor.
3045 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
3046 ObjPtr<mirror::Reference> ref) const
3047 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
3048 operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
3049 }
3050
3051 protected:
3052 ImageWriter* const image_writer_;
3053 mirror::Object* const copy_;
3054 };
3055
3056 void ImageWriter::CopyAndFixupObjects() {
3057 // Copy and fix up pointer arrays first as they require special treatment.
3058 auto method_pointer_array_visitor =
3059 [&](ObjPtr<mirror::PointerArray> pointer_array) REQUIRES_SHARED(Locks::mutator_lock_) {
3060 CopyAndFixupMethodPointerArray(pointer_array.Ptr());
3061 };
3062 for (ImageInfo& image_info : image_infos_) {
3063 if (image_info.class_table_size_ != 0u) {
3064 DCHECK(image_info.class_table_.has_value());
3065 for (const ClassTable::TableSlot& slot : *image_info.class_table_) {
3066 ObjPtr<mirror::Class> klass = slot.Read<kWithoutReadBarrier>();
3067 DCHECK(klass != nullptr);
3068 // Do not process boot image classes present in app image class table.
3069 DCHECK(!IsInBootImage(klass.Ptr()) || compiler_options_.IsAppImage());
3070 if (!IsInBootImage(klass.Ptr())) {
3071 // Do not fix up method pointer arrays inherited from superclass. If they are part
3072 // of the current image, they were or shall be copied when visiting the superclass.
3073 VisitNewMethodPointerArrays(klass, method_pointer_array_visitor);
3074 }
3075 }
3076 }
3077 }
3078
3079 auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
3080 DCHECK(obj != nullptr);
3081 CopyAndFixupObject(obj);
3082 };
3083 Runtime::Current()->GetHeap()->VisitObjects(visitor);
3084
3085 // Fill the padding objects since they are required for in-order traversal of the image space.
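// Image spaces are walked object by object, so every byte up to the next region boundary
// must parse as a valid object; the loop below therefore fills each gap with plain
// java.lang.Object instances and marks them live (reasoning inferred from the code below).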
3086 for (ImageInfo& image_info : image_infos_) {
3087 for (const size_t start_offset : image_info.padding_offsets_) {
3088 const size_t offset_after_header = start_offset - sizeof(ImageHeader);
3089 size_t remaining_space =
3090 RoundUp(offset_after_header + 1u, region_size_) - offset_after_header;
3091 DCHECK_NE(remaining_space, 0u);
3092 DCHECK_LT(remaining_space, region_size_);
3093 Object* dst = reinterpret_cast<Object*>(image_info.image_.Begin() + start_offset);
3094 ObjPtr<Class> object_class = GetClassRoot<mirror::Object, kWithoutReadBarrier>();
3095 DCHECK_ALIGNED_PARAM(remaining_space, object_class->GetObjectSize());
3096 Object* end = dst + remaining_space / object_class->GetObjectSize();
3097 Class* image_object_class = GetImageAddress(object_class.Ptr());
3098 while (dst != end) {
3099 dst->SetClass<kVerifyNone>(image_object_class);
3100 dst->SetLockWord<kVerifyNone>(LockWord::Default(), /*as_volatile=*/ false);
3101 image_info.image_bitmap_.Set(dst); // Mark the obj as live.
3102 ++dst;
3103 }
3104 }
3105 }
3106
3107 // We no longer need the hashcode map, values have already been copied to target objects.
3108 saved_hashcode_map_.clear();
3109 }
3110
3111 class ImageWriter::FixupClassVisitor final : public FixupVisitor {
3112 public:
3113 FixupClassVisitor(ImageWriter* image_writer, Object* copy)
3114 : FixupVisitor(image_writer, copy) {}
3115
3116 void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
3117 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
3118 DCHECK(obj->IsClass());
3119 FixupVisitor::operator()(obj, offset, /*is_static*/false);
3120 }
3121
3122 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
3123 ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
3124 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
3125 LOG(FATAL) << "Reference not expected here.";
3126 }
3127 };
3128
3129 ImageWriter::NativeObjectRelocation ImageWriter::GetNativeRelocation(void* obj) {
3130 DCHECK(obj != nullptr);
3131 DCHECK(!IsInBootImage(obj));
3132 auto it = native_object_relocations_.find(obj);
3133 CHECK(it != native_object_relocations_.end()) << obj << " spaces "
3134 << Runtime::Current()->GetHeap()->DumpSpaces();
3135 return it->second;
3136 }
3137
3138 template <typename T>
3139 std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
3140 std::ostringstream oss;
3141 oss << ptr;
3142 return oss.str();
3143 }
3144
3145 template <>
3146 std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
3147 return ArtMethod::PrettyMethod(method);
3148 }
3149
3150 template <typename T>
3151 T* ImageWriter::NativeLocationInImage(T* obj) {
3152 if (obj == nullptr || IsInBootImage(obj)) {
3153 return obj;
3154 } else {
3155 NativeObjectRelocation relocation = GetNativeRelocation(obj);
3156 const ImageInfo& image_info = GetImageInfo(relocation.oat_index);
3157 return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
3158 }
3159 }
3160
3161 ArtField* ImageWriter::NativeLocationInImage(ArtField* src_field) {
3162 // Fields are not individually stored in the native relocation map. Use the field array.
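// (Illustrative) an ArtField located D bytes from the start of its source field array is
// found D bytes from the start of the relocated array in the image, which is exactly the
// pointer arithmetic computed below.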
3163 ObjPtr<mirror::Class> declaring_class = src_field->GetDeclaringClass<kWithoutReadBarrier>();
3164 LengthPrefixedArray<ArtField>* src_fields =
3165 src_field->IsStatic() ? declaring_class->GetSFieldsPtr() : declaring_class->GetIFieldsPtr();
3166 DCHECK(src_fields != nullptr);
3167 LengthPrefixedArray<ArtField>* dst_fields = NativeLocationInImage(src_fields);
3168 DCHECK(dst_fields != nullptr);
3169 size_t field_offset =
3170 reinterpret_cast<uint8_t*>(src_field) - reinterpret_cast<uint8_t*>(src_fields);
3171 return reinterpret_cast<ArtField*>(reinterpret_cast<uint8_t*>(dst_fields) + field_offset);
3172 }
3173
3174 class ImageWriter::NativeLocationVisitor {
3175 public:
3176 explicit NativeLocationVisitor(ImageWriter* image_writer)
3177 : image_writer_(image_writer) {}
3178
3179 template <typename T>
3180 T* operator()(T* ptr, void** dest_addr) const REQUIRES_SHARED(Locks::mutator_lock_) {
3181 if (ptr != nullptr) {
3182 image_writer_->CopyAndFixupPointer(dest_addr, ptr);
3183 }
3184 // TODO: The caller shall overwrite the value stored by CopyAndFixupPointer()
3185 // with the value we return here. We should try to avoid the duplicate work.
3186 return image_writer_->NativeLocationInImage(ptr);
3187 }
3188
3189 private:
3190 ImageWriter* const image_writer_;
3191 };
3192
3193 void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
3194 orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
3195 FixupClassVisitor visitor(this, copy);
3196 ObjPtr<mirror::Object>(orig)->VisitReferences<
3197 /*kVisitNativeRoots=*/ false, kVerifyNone, kWithoutReadBarrier>(visitor, visitor);
3198
3199 if (kBitstringSubtypeCheckEnabled && !compiler_options_.IsBootImage()) {
3200 // When we call SubtypeCheck::EnsureInitialize, it assigns new bitstring
3201 // values to the parent of that class.
3202 //
3203 // Every time this happens, the parent class has to mutate to increment
3204 // the "Next" value.
3205 //
3206 // If any of these parents are in the boot image, the changes [in the parents]
3207 // would be lost when the app image is reloaded.
3208 //
3209 // To prevent newly loaded classes (not in the app image) from being reassigned
3210 // the same bitstring value as an existing app image class, uninitialize
3211 // all the classes in the app image.
3212 //
3213 // On startup, the class linker will then re-initialize all the app
3214 // image bitstrings. See also ClassLinker::AddImageSpace.
3215 //
3216 // FIXME: Deal with boot image extensions.
3217 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
3218 // Lock every time to prevent a dcheck failure when we suspend with the lock held.
3219 SubtypeCheck<mirror::Class*>::ForceUninitialize(copy);
3220 }
3221
3222 // Remove the clinitThreadId. This is required for image determinism.
3223 copy->SetClinitThreadId(static_cast<pid_t>(0));
3224 // We never emit kRetryVerificationAtRuntime; instead we mark the class as
3225 // resolved, and the class will therefore be re-verified at runtime.
3226 if (orig->ShouldVerifyAtRuntime()) {
3227 copy->SetStatusInternal(ClassStatus::kResolved);
3228 }
3229 }
3230
3231 void ImageWriter::FixupObject(Object* orig, Object* copy) {
3232 DCHECK(orig != nullptr);
3233 DCHECK(copy != nullptr);
3234 if (kUseBakerReadBarrier) {
3235 orig->AssertReadBarrierState();
3236 }
3237 ObjPtr<mirror::Class> klass = orig->GetClass<kVerifyNone, kWithoutReadBarrier>();
3238 if (klass->IsClassClass()) {
3239 FixupClass(orig->AsClass<kVerifyNone>().Ptr(), down_cast<mirror::Class*>(copy));
3240 } else {
3241 ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots =
3242 Runtime::Current()->GetClassLinker()->GetClassRoots<kWithoutReadBarrier>();
3243 if (klass == GetClassRoot<mirror::String, kWithoutReadBarrier>(class_roots)) {
3244 // Make sure all image strings have the hash code calculated, even if they are not interned.
3245 down_cast<mirror::String*>(copy)->GetHashCode();
3246 } else if (klass == GetClassRoot<mirror::Method, kWithoutReadBarrier>(class_roots) ||
3247 klass == GetClassRoot<mirror::Constructor, kWithoutReadBarrier>(class_roots)) {
3248 // Need to update the ArtMethod.
3249 auto* dest = down_cast<mirror::Executable*>(copy);
3250 auto* src = down_cast<mirror::Executable*>(orig);
3251 ArtMethod* src_method = src->GetArtMethod();
3252 CopyAndFixupPointer(dest, mirror::Executable::ArtMethodOffset(), src_method);
3253 } else if (klass == GetClassRoot<mirror::FieldVarHandle, kWithoutReadBarrier>(class_roots) ||
3254 klass == GetClassRoot<mirror::StaticFieldVarHandle, kWithoutReadBarrier>(class_roots)) {
3255 // Need to update the ArtField.
3256 auto* dest = down_cast<mirror::FieldVarHandle*>(copy);
3257 auto* src = down_cast<mirror::FieldVarHandle*>(orig);
3258 ArtField* src_field = src->GetArtField();
3259 CopyAndFixupPointer(dest, mirror::FieldVarHandle::ArtFieldOffset(), src_field);
3260 } else if (klass == GetClassRoot<mirror::DexCache, kWithoutReadBarrier>(class_roots)) {
3261 down_cast<mirror::DexCache*>(copy)->SetDexFile(nullptr);
3262 down_cast<mirror::DexCache*>(copy)->ResetNativeArrays();
3263 } else if (klass->IsClassLoaderClass()) {
3264 mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
3265 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
3266 // ClassLinker.
3267 copy_loader->SetClassTable(nullptr);
3268 // Also set allocator to null to be safe. The allocator is created when we create the class
3269 // table. We also never expect to unload things in the image since they are held live as
3270 // roots.
3271 copy_loader->SetAllocator(nullptr);
3272 }
3273 FixupVisitor visitor(this, copy);
3274 orig->VisitReferences</*kVisitNativeRoots=*/ false, kVerifyNone, kWithoutReadBarrier>(
3275 visitor, visitor);
3276 }
3277 }
3278
3279 const uint8_t* ImageWriter::GetOatAddress(StubType type) const {
3280 DCHECK_LE(type, StubType::kLast);
3281 // If we are compiling a boot image extension or app image,
3282 // we need to use the stubs of the primary boot image.
3283 if (!compiler_options_.IsBootImage()) {
3284 // Use the current image pointers.
3285 const std::vector<gc::space::ImageSpace*>& image_spaces =
3286 Runtime::Current()->GetHeap()->GetBootImageSpaces();
3287 DCHECK(!image_spaces.empty());
3288 const OatFile* oat_file = image_spaces[0]->GetOatFile();
3289 CHECK(oat_file != nullptr);
3290 const OatHeader& header = oat_file->GetOatHeader();
3291 switch (type) {
3292 // TODO: We could maybe clean this up if we stored them in an array in the oat header.
3293 case StubType::kQuickGenericJNITrampoline:
3294 return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
3295 case StubType::kJNIDlsymLookupTrampoline:
3296 return static_cast<const uint8_t*>(header.GetJniDlsymLookupTrampoline());
3297 case StubType::kJNIDlsymLookupCriticalTrampoline:
3298 return static_cast<const uint8_t*>(header.GetJniDlsymLookupCriticalTrampoline());
3299 case StubType::kQuickIMTConflictTrampoline:
3300 return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
3301 case StubType::kQuickResolutionTrampoline:
3302 return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
3303 case StubType::kQuickToInterpreterBridge:
3304 return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
3305 case StubType::kNterpTrampoline:
3306 return static_cast<const uint8_t*>(header.GetNterpTrampoline());
3307 default:
3308 UNREACHABLE();
3309 }
3310 }
3311 const ImageInfo& primary_image_info = GetImageInfo(0);
3312 return GetOatAddressForOffset(primary_image_info.GetStubOffset(type), primary_image_info);
3313 }
3314
3315 const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, const ImageInfo& image_info) {
3316 DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
3317 DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
3318 DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
3319 DCHECK(method->IsInvokable()) << method->PrettyMethod();
3320 DCHECK(!IsInBootImage(method)) << method->PrettyMethod();
3321
3322 // Use original code if it exists. Otherwise, set the code pointer to the resolution
3323 // trampoline.
3324
3325 // Quick entrypoint:
3326 const void* quick_oat_entry_point =
3327 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
3328 const uint8_t* quick_code;
3329
3330 if (UNLIKELY(IsInBootImage(method->GetDeclaringClass<kWithoutReadBarrier>().Ptr()))) {
3331 DCHECK(method->IsCopied());
3332 // If the code is not in the oat file corresponding to this image (e.g. default methods), use the entry point as is.
3333 quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
3334 } else {
3335 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
3336 quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
3337 }
3338
3339 bool needs_clinit_check = NeedsClinitCheckBeforeCall(method) &&
3340 !method->GetDeclaringClass<kWithoutReadBarrier>()->IsVisiblyInitialized();
3341
3342 if (quick_code == nullptr) {
3343 // If we don't have code, use generic jni / interpreter.
3344 if (method->IsNative()) {
3345 // The generic JNI trampoline performs the class initialization check if needed.
3346 quick_code = GetOatAddress(StubType::kQuickGenericJNITrampoline);
3347 } else if (CanMethodUseNterp(method, compiler_options_.GetInstructionSet())) {
3348 // The nterp trampoline doesn't do initialization checks, so install the
3349 // resolution stub if needed.
3350 if (needs_clinit_check) {
3351 quick_code = GetOatAddress(StubType::kQuickResolutionTrampoline);
3352 } else {
3353 quick_code = GetOatAddress(StubType::kNterpTrampoline);
3354 }
3355 } else {
3356 // The interpreter bridge performs the class initialization check if needed.
3357 quick_code = GetOatAddress(StubType::kQuickToInterpreterBridge);
3358 }
3359 } else if (needs_clinit_check) {
3360 // If we do have code but the method needs a class initialization check before calling
3361 // that code, install the resolution stub that will perform the check.
3362 quick_code = GetOatAddress(StubType::kQuickResolutionTrampoline);
3363 }
3364 return quick_code;
3365 }
3366
3367 void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
3368 ArtMethod* copy,
3369 size_t oat_index) {
3370 if (orig->IsAbstract()) {
3371 // Ignore the single-implementation info for abstract method.
3372 // Do this on orig instead of copy, otherwise there is a crash because methods
3373 // are copied before classes.
3374 // TODO: handle fixup of single-implementation method for abstract method.
3375 orig->SetHasSingleImplementation(false);
3376 orig->SetSingleImplementation(
3377 nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
3378 }
3379
3380 if (!orig->IsRuntimeMethod() &&
3381 (compiler_options_.IsBootImage() || compiler_options_.IsBootImageExtension())) {
3382 orig->SetMemorySharedMethod();
3383 }
3384
3385 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
3386
3387 CopyAndFixupReference(copy->GetDeclaringClassAddressWithoutBarrier(),
3388 orig->GetDeclaringClassUnchecked<kWithoutReadBarrier>());
3389
3390 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
3391 // oat_begin_.
3392
3393 // The resolution method has a special trampoline to call.
3394 Runtime* runtime = Runtime::Current();
3395 const void* quick_code;
3396 if (orig->IsRuntimeMethod()) {
3397 ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
3398 if (orig_table != nullptr) {
3399 // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
3400 quick_code = GetOatAddress(StubType::kQuickIMTConflictTrampoline);
3401 CopyAndFixupPointer(copy, ArtMethod::DataOffset(target_ptr_size_), orig_table);
3402 } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
3403 quick_code = GetOatAddress(StubType::kQuickResolutionTrampoline);
3404 // Set the JNI entrypoint for resolving @CriticalNative methods called from compiled code.
3405 const void* jni_code = GetOatAddress(StubType::kJNIDlsymLookupCriticalTrampoline);
3406 copy->SetEntryPointFromJniPtrSize(jni_code, target_ptr_size_);
3407 } else {
3408 bool found_one = false;
3409 for (size_t i = 0; i < static_cast<size_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
3410 auto idx = static_cast<CalleeSaveType>(i);
3411 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
3412 found_one = true;
3413 break;
3414 }
3415 }
3416 CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
3417 CHECK(copy->IsRuntimeMethod());
3418 CHECK(copy->GetEntryPointFromQuickCompiledCode() == nullptr);
3419 quick_code = nullptr;
3420 }
3421 } else {
3422 // We assume all methods have code. If they don't currently, then we set them to use the
3423 // resolution trampoline. Abstract methods never have code and so we need to make sure their
3424 // use results in an AbstractMethodError. We use the interpreter to achieve this.
3425 if (UNLIKELY(!orig->IsInvokable())) {
3426 quick_code = GetOatAddress(StubType::kQuickToInterpreterBridge);
3427 } else {
3428 const ImageInfo& image_info = image_infos_[oat_index];
3429 quick_code = GetQuickCode(orig, image_info);
3430
3431 // JNI entrypoint:
3432 if (orig->IsNative()) {
3433 // The native method's pointer is set to a stub to look up via dlsym.
3434 // Note this is not the code_ pointer; that is handled above.
3435 StubType stub_type = orig->IsCriticalNative() ? StubType::kJNIDlsymLookupCriticalTrampoline
3436 : StubType::kJNIDlsymLookupTrampoline;
3437 copy->SetEntryPointFromJniPtrSize(GetOatAddress(stub_type), target_ptr_size_);
3438 } else if (!orig->HasCodeItem()) {
3439 CHECK(copy->GetDataPtrSize(target_ptr_size_) == nullptr);
3440 } else {
3441 CHECK(copy->GetDataPtrSize(target_ptr_size_) != nullptr);
3442 }
3443 }
3444 }
3445 if (quick_code != nullptr) {
3446 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
3447 }
3448 }
3449
3450 size_t ImageWriter::ImageInfo::GetBinSizeSum(Bin up_to) const {
3451 DCHECK_LE(static_cast<size_t>(up_to), kNumberOfBins);
3452 return std::accumulate(&bin_slot_sizes_[0],
3453 &bin_slot_sizes_[0] + static_cast<size_t>(up_to),
3454 /*init*/ static_cast<size_t>(0));
3455 }
3456
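// Lock word encoding used during layout (a sketch based on the static_asserts below):
//   lockword = (static_cast<uint32_t>(bin) << kBinShift) | offset
// e.g. (illustrative values) bin 2 with an 8-byte-aligned offset of 0x40 encodes as
//   (2u << 27) | 0x40 = 0x10000040.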
3457 ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
3458 // These values may need to be updated if more bins are added to the enum Bin.
3459 static_assert(kBinBits == 3, "wrong number of bin bits");
3460 static_assert(kBinShift == 27, "wrong number of shift");
3461 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
3462
3463 DCHECK_LT(GetBin(), Bin::kMirrorCount);
3464 DCHECK_ALIGNED(GetOffset(), kObjectAlignment);
3465 }
3466
3467 ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
3468 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
3469 DCHECK_EQ(index, GetOffset());
3470 }
3471
3472 ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
3473 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
3474 }
3475
3476 uint32_t ImageWriter::BinSlot::GetOffset() const {
3477 return lockword_ & ~kBinMask;
3478 }
3479
3480 ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
3481 switch (type) {
3482 case NativeObjectRelocationType::kArtFieldArray:
3483 return Bin::kArtField;
3484 case NativeObjectRelocationType::kArtMethodClean:
3485 case NativeObjectRelocationType::kArtMethodArrayClean:
3486 return Bin::kArtMethodClean;
3487 case NativeObjectRelocationType::kArtMethodDirty:
3488 case NativeObjectRelocationType::kArtMethodArrayDirty:
3489 return Bin::kArtMethodDirty;
3490 case NativeObjectRelocationType::kRuntimeMethod:
3491 return Bin::kRuntimeMethod;
3492 case NativeObjectRelocationType::kIMTable:
3493 return Bin::kImTable;
3494 case NativeObjectRelocationType::kIMTConflictTable:
3495 return Bin::kIMTConflictTable;
3496 case NativeObjectRelocationType::kGcRootPointer:
3497 return Bin::kMetadata;
3498 }
3499 UNREACHABLE();
3500 }
3501
3502 size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
3503 if (!IsMultiImage()) {
3504 DCHECK(oat_index_map_.empty());
3505 return GetDefaultOatIndex();
3506 }
3507 auto it = oat_index_map_.find(obj);
3508 DCHECK(it != oat_index_map_.end()) << obj;
3509 return it->second;
3510 }
3511
3512 size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
3513 if (!IsMultiImage()) {
3514 return GetDefaultOatIndex();
3515 }
3516 auto it = dex_file_oat_index_map_.find(dex_file);
3517 DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
3518 return it->second;
3519 }
3520
3521 size_t ImageWriter::GetOatIndexForClass(ObjPtr<mirror::Class> klass) const {
3522 while (klass->IsArrayClass()) {
3523 klass = klass->GetComponentType<kVerifyNone, kWithoutReadBarrier>();
3524 }
3525 if (UNLIKELY(klass->IsPrimitive())) {
3526 DCHECK((klass->GetDexCache<kVerifyNone, kWithoutReadBarrier>()) == nullptr);
3527 return GetDefaultOatIndex();
3528 } else {
3529 DCHECK((klass->GetDexCache<kVerifyNone, kWithoutReadBarrier>()) != nullptr);
3530 return GetOatIndexForDexFile(&klass->GetDexFile());
3531 }
3532 }
3533
3534 void ImageWriter::UpdateOatFileLayout(size_t oat_index,
3535 size_t oat_loaded_size,
3536 size_t oat_data_offset,
3537 size_t oat_data_size) {
3538 DCHECK_GE(oat_loaded_size, oat_data_offset);
3539 DCHECK_GE(oat_loaded_size - oat_data_offset, oat_data_size);
3540
3541 const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
3542 DCHECK(images_end != nullptr); // Image space must be ready.
3543 for (const ImageInfo& info : image_infos_) {
3544 DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
3545 }
3546
3547 ImageInfo& cur_image_info = GetImageInfo(oat_index);
3548 cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
3549 cur_image_info.oat_loaded_size_ = oat_loaded_size;
3550 cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
3551 cur_image_info.oat_size_ = oat_data_size;
3552
3553 if (compiler_options_.IsAppImage()) {
3554 CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
3555 return;
3556 }
3557
3558 // Update the oat_offset of the next image info.
3559 if (oat_index + 1u != oat_filenames_.size()) {
3560 // There is a following one.
3561 ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
3562 next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
3563 }
3564 }
3565
3566 void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
3567 ImageInfo& cur_image_info = GetImageInfo(oat_index);
3568 cur_image_info.oat_checksum_ = oat_header.GetChecksum();
3569
3570 if (oat_index == GetDefaultOatIndex()) {
3571 // Primary oat file; read the trampolines.
3572 cur_image_info.SetStubOffset(StubType::kJNIDlsymLookupTrampoline,
3573 oat_header.GetJniDlsymLookupTrampolineOffset());
3574 cur_image_info.SetStubOffset(StubType::kJNIDlsymLookupCriticalTrampoline,
3575 oat_header.GetJniDlsymLookupCriticalTrampolineOffset());
3576 cur_image_info.SetStubOffset(StubType::kQuickGenericJNITrampoline,
3577 oat_header.GetQuickGenericJniTrampolineOffset());
3578 cur_image_info.SetStubOffset(StubType::kQuickIMTConflictTrampoline,
3579 oat_header.GetQuickImtConflictTrampolineOffset());
3580 cur_image_info.SetStubOffset(StubType::kQuickResolutionTrampoline,
3581 oat_header.GetQuickResolutionTrampolineOffset());
3582 cur_image_info.SetStubOffset(StubType::kQuickToInterpreterBridge,
3583 oat_header.GetQuickToInterpreterBridgeOffset());
3584 cur_image_info.SetStubOffset(StubType::kNterpTrampoline,
3585 oat_header.GetNterpTrampolineOffset());
3586 }
3587 }
3588
3589 ImageWriter::ImageWriter(
3590 const CompilerOptions& compiler_options,
3591 uintptr_t image_begin,
3592 ImageHeader::StorageMode image_storage_mode,
3593 const std::vector<std::string>& oat_filenames,
3594 const HashMap<const DexFile*, size_t>& dex_file_oat_index_map,
3595 jobject class_loader,
3596 const HashSet<std::string>* dirty_image_objects)
3597 : compiler_options_(compiler_options),
3598 boot_image_begin_(Runtime::Current()->GetHeap()->GetBootImagesStartAddress()),
3599 boot_image_size_(Runtime::Current()->GetHeap()->GetBootImagesSize()),
3600 global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
3601 image_objects_offset_begin_(0),
3602 target_ptr_size_(InstructionSetPointerSize(compiler_options.GetInstructionSet())),
3603 image_infos_(oat_filenames.size()),
3604 dirty_methods_(0u),
3605 clean_methods_(0u),
3606 app_class_loader_(class_loader),
3607 boot_image_live_objects_(nullptr),
3608 image_roots_(),
3609 image_storage_mode_(image_storage_mode),
3610 oat_filenames_(oat_filenames),
3611 dex_file_oat_index_map_(dex_file_oat_index_map),
3612 dirty_image_objects_(dirty_image_objects) {
3613 DCHECK(compiler_options.IsBootImage() ||
3614 compiler_options.IsBootImageExtension() ||
3615 compiler_options.IsAppImage());
3616 DCHECK_EQ(compiler_options.IsBootImage(), boot_image_begin_ == 0u);
3617 DCHECK_EQ(compiler_options.IsBootImage(), boot_image_size_ == 0u);
3618 CHECK_NE(image_begin, 0U);
3619 std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
3620 CHECK_EQ(compiler_options.IsBootImage(),
3621 Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
3622 << "Compiling a boot image should occur iff there are no boot image spaces loaded";
3623 if (compiler_options_.IsAppImage()) {
3624 // Make sure objects are not crossing region boundaries for app images.
3625 region_size_ = gc::space::RegionSpace::kRegionSize;
3626 }
3627 }
3628
3629 ImageWriter::~ImageWriter() {
3630 if (!image_roots_.empty()) {
3631 Thread* self = Thread::Current();
3632 JavaVMExt* vm = down_cast<JNIEnvExt*>(self->GetJniEnv())->GetVm();
3633 for (jobject image_roots : image_roots_) {
3634 vm->DeleteGlobalRef(self, image_roots);
3635 }
3636 }
3637 }
3638
3639 ImageWriter::ImageInfo::ImageInfo()
3640 : intern_table_(),
3641 class_table_() {}
3642
3643 template <typename DestType>
3644 void ImageWriter::CopyAndFixupReference(DestType* dest, ObjPtr<mirror::Object> src) {
3645 static_assert(std::is_same<DestType, mirror::CompressedReference<mirror::Object>>::value ||
3646 std::is_same<DestType, mirror::HeapReference<mirror::Object>>::value,
3647 "DestType must be a Compressed-/HeapReference<Object>.");
3648 dest->Assign(GetImageAddress(src.Ptr()));
3649 }
3650
3651 template <typename ValueType>
3652 void ImageWriter::CopyAndFixupPointer(
3653 void** target, ValueType src_value, PointerSize pointer_size) {
3654 DCHECK(src_value != nullptr);
3655 void* new_value = NativeLocationInImage(src_value);
3656 DCHECK(new_value != nullptr);
3657 if (pointer_size == PointerSize::k32) {
3658 *reinterpret_cast<uint32_t*>(target) = reinterpret_cast32<uint32_t>(new_value);
3659 } else {
3660 *reinterpret_cast<uint64_t*>(target) = reinterpret_cast64<uint64_t>(new_value);
3661 }
3662 }
3663
3664 template <typename ValueType>
3665 void ImageWriter::CopyAndFixupPointer(void** target, ValueType src_value)
3666 REQUIRES_SHARED(Locks::mutator_lock_) {
3667 CopyAndFixupPointer(target, src_value, target_ptr_size_);
3668 }
3669
3670 template <typename ValueType>
3671 void ImageWriter::CopyAndFixupPointer(
3672 void* object, MemberOffset offset, ValueType src_value, PointerSize pointer_size) {
3673 void** target =
3674 reinterpret_cast<void**>(reinterpret_cast<uint8_t*>(object) + offset.Uint32Value());
3675 return CopyAndFixupPointer(target, src_value, pointer_size);
3676 }
3677
3678 template <typename ValueType>
3679 void ImageWriter::CopyAndFixupPointer(void* object, MemberOffset offset, ValueType src_value) {
3680 return CopyAndFixupPointer(object, offset, src_value, target_ptr_size_);
3681 }
3682
3683 } // namespace linker
3684 } // namespace art
3685