• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "class_linker.h"
18 
19 #include <unistd.h>
20 
21 #include <algorithm>
22 #include <deque>
23 #include <forward_list>
24 #include <iostream>
25 #include <iterator>
26 #include <map>
27 #include <memory>
28 #include <queue>
29 #include <string>
30 #include <string_view>
31 #include <tuple>
32 #include <utility>
33 #include <vector>
34 
35 #include "android-base/macros.h"
36 #include "android-base/stringprintf.h"
37 #include "android-base/strings.h"
38 #include "art_field-inl.h"
39 #include "art_method-inl.h"
40 #include "barrier.h"
41 #include "base/arena_allocator.h"
42 #include "base/arena_bit_vector.h"
43 #include "base/casts.h"
44 #include "base/file_utils.h"
45 #include "base/hash_map.h"
46 #include "base/hash_set.h"
47 #include "base/leb128.h"
48 #include "base/logging.h"
49 #include "base/mem_map_arena_pool.h"
50 #include "base/membarrier.h"
51 #include "base/metrics/metrics.h"
52 #include "base/mutex-inl.h"
53 #include "base/os.h"
54 #include "base/pointer_size.h"
55 #include "base/quasi_atomic.h"
56 #include "base/scoped_arena_containers.h"
57 #include "base/scoped_flock.h"
58 #include "base/stl_util.h"
59 #include "base/systrace.h"
60 #include "base/time_utils.h"
61 #include "base/unix_file/fd_file.h"
62 #include "base/utils.h"
63 #include "base/value_object.h"
64 #include "cha.h"
65 #include "class_linker-inl.h"
66 #include "class_loader_utils.h"
67 #include "class_root-inl.h"
68 #include "class_table-inl.h"
69 #include "common_throws.h"
70 #include "compiler_callbacks.h"
71 #include "debug_print.h"
72 #include "debugger.h"
73 #include "dex/class_accessor-inl.h"
74 #include "dex/descriptors_names.h"
75 #include "dex/dex_file-inl.h"
76 #include "dex/dex_file.h"
77 #include "dex/dex_file_annotations.h"
78 #include "dex/dex_file_exception_helpers.h"
79 #include "dex/dex_file_loader.h"
80 #include "dex/modifiers.h"
81 #include "dex/signature-inl.h"
82 #include "dex/utf.h"
83 #include "entrypoints/entrypoint_utils-inl.h"
84 #include "entrypoints/runtime_asm_entrypoints.h"
85 #include "experimental_flags.h"
86 #include "gc/accounting/card_table-inl.h"
87 #include "gc/accounting/heap_bitmap-inl.h"
88 #include "gc/accounting/space_bitmap-inl.h"
89 #include "gc/heap-visit-objects-inl.h"
90 #include "gc/heap.h"
91 #include "gc/scoped_gc_critical_section.h"
92 #include "gc/space/image_space.h"
93 #include "gc/space/space-inl.h"
94 #include "gc_root-inl.h"
95 #include "handle.h"
96 #include "handle_scope-inl.h"
97 #include "hidden_api.h"
98 #include "imt_conflict_table.h"
99 #include "imtable-inl.h"
100 #include "instrumentation-inl.h"
101 #include "intern_table-inl.h"
102 #include "intern_table.h"
103 #include "interpreter/interpreter.h"
104 #include "interpreter/mterp/nterp.h"
105 #include "jit/debugger_interface.h"
106 #include "jit/jit.h"
107 #include "jit/jit_code_cache.h"
108 #include "jni/java_vm_ext.h"
109 #include "jni/jni_internal.h"
110 #include "linear_alloc-inl.h"
111 #include "mirror/array-alloc-inl.h"
112 #include "mirror/array-inl.h"
113 #include "mirror/call_site.h"
114 #include "mirror/class-alloc-inl.h"
115 #include "mirror/class-inl.h"
116 #include "mirror/class.h"
117 #include "mirror/class_ext.h"
118 #include "mirror/class_loader.h"
119 #include "mirror/dex_cache-inl.h"
120 #include "mirror/dex_cache.h"
121 #include "mirror/emulated_stack_frame.h"
122 #include "mirror/field.h"
123 #include "mirror/iftable-inl.h"
124 #include "mirror/method.h"
125 #include "mirror/method_handle_impl.h"
126 #include "mirror/method_handles_lookup.h"
127 #include "mirror/method_type-inl.h"
128 #include "mirror/object-inl.h"
129 #include "mirror/object-refvisitor-inl.h"
130 #include "mirror/object.h"
131 #include "mirror/object_array-alloc-inl.h"
132 #include "mirror/object_array-inl.h"
133 #include "mirror/object_array.h"
134 #include "mirror/object_reference-inl.h"
135 #include "mirror/object_reference.h"
136 #include "mirror/proxy.h"
137 #include "mirror/reference-inl.h"
138 #include "mirror/stack_trace_element.h"
139 #include "mirror/string-inl.h"
140 #include "mirror/throwable.h"
141 #include "mirror/var_handle.h"
142 #include "native/dalvik_system_DexFile.h"
143 #include "nativehelper/scoped_local_ref.h"
144 #include "nterp_helpers-inl.h"
145 #include "nterp_helpers.h"
146 #include "oat/image-inl.h"
147 #include "oat/jni_stub_hash_map-inl.h"
148 #include "oat/oat.h"
149 #include "oat/oat_file-inl.h"
150 #include "oat/oat_file.h"
151 #include "oat/oat_file_assistant.h"
152 #include "oat/oat_file_manager.h"
153 #include "object_lock.h"
154 #include "profile/profile_compilation_info.h"
155 #include "runtime.h"
156 #include "runtime_callbacks.h"
157 #include "scoped_assert_no_transaction_checks.h"
158 #include "scoped_thread_state_change-inl.h"
159 #include "startup_completed_task.h"
160 #include "thread-inl.h"
161 #include "thread.h"
162 #include "thread_list.h"
163 #include "trace.h"
164 #include "vdex_file.h"
165 #include "verifier/class_verifier.h"
166 #include "verifier/verifier_deps.h"
167 #include "well_known_classes.h"
168 
169 namespace art HIDDEN {
170 
171 using android::base::StringPrintf;
172 
// Debug-build-only verification toggles: when true (debug builds), extra checks are run
// on image objects and on the declaring classes recorded in ArtMethods.
static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
175 
176 static void ThrowNoClassDefFoundError(const char* fmt, ...)
177     __attribute__((__format__(__printf__, 1, 2)))
178     REQUIRES_SHARED(Locks::mutator_lock_);
ThrowNoClassDefFoundError(const char * fmt,...)179 static void ThrowNoClassDefFoundError(const char* fmt, ...) {
180   va_list args;
181   va_start(args, fmt);
182   Thread* self = Thread::Current();
183   self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
184   va_end(args);
185 }
186 
GetErroneousStateError(ObjPtr<mirror::Class> c)187 static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
188     REQUIRES_SHARED(Locks::mutator_lock_) {
189   ObjPtr<mirror::ClassExt> ext(c->GetExtData());
190   if (ext == nullptr) {
191     return nullptr;
192   } else {
193     return ext->GetErroneousStateError();
194   }
195 }
196 
IsVerifyError(ObjPtr<mirror::Object> obj)197 static bool IsVerifyError(ObjPtr<mirror::Object> obj)
198     REQUIRES_SHARED(Locks::mutator_lock_) {
199   // This is slow, but we only use it for rethrowing an error, and for DCHECK.
200   return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
201 }
202 
203 // Helper for ThrowEarlierClassFailure. Throws the stored error.
HandleEarlierErroneousStateError(Thread * self,ClassLinker * class_linker,ObjPtr<mirror::Class> c)204 static void HandleEarlierErroneousStateError(Thread* self,
205                                              ClassLinker* class_linker,
206                                              ObjPtr<mirror::Class> c)
207     REQUIRES_SHARED(Locks::mutator_lock_) {
208   ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
209   DCHECK(obj != nullptr);
210   self->AssertNoPendingException();
211   DCHECK(!obj->IsClass());
212   ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
213   ObjPtr<mirror::Class> error_class = obj->GetClass();
214   CHECK(throwable_class->IsAssignableFrom(error_class));
215   self->SetException(obj->AsThrowable());
216   self->AssertPendingException();
217 }
218 
UpdateClassAfterVerification(Handle<mirror::Class> klass,PointerSize pointer_size,verifier::FailureKind failure_kind)219 static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
220                                          PointerSize pointer_size,
221                                          verifier::FailureKind failure_kind)
222     REQUIRES_SHARED(Locks::mutator_lock_) {
223   Runtime* runtime = Runtime::Current();
224   ClassLinker* class_linker = runtime->GetClassLinker();
225   if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
226     klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
227   }
228 
229   // Now that the class has passed verification, try to set nterp entrypoints
230   // to methods that currently use the switch interpreter.
231   if (interpreter::CanRuntimeUseNterp()) {
232     for (ArtMethod& m : klass->GetMethods(pointer_size)) {
233       if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
234         runtime->GetInstrumentation()->ReinitializeMethodsCode(&m);
235       }
236     }
237   }
238 }
239 
// Callback responsible for making a batch of classes visibly initialized after ensuring
// visibility for all threads, either by using `membarrier()` or by running a checkpoint.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  // Returns true if no classes have been added to this batch yet.
  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  // Returns true if the batch holds `kMaxClasses` classes and cannot accept more.
  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  // Adds an initialized (but not yet visibly initialized) class to the batch.
  // The class is held through a weak global reference so that it does not keep
  // an otherwise unreachable class (and its loader) alive.
  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  // Registers a barrier that shall be `Pass()`ed when this callback completes.
  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  // Takes ownership of the registered barriers, leaving the internal list empty.
  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  // Ensures the initialized status of the batched classes is visible to all threads
  // and then marks them visibly initialized. Uses `membarrier()` when available
  // (per the flag computed at ClassLinker construction), otherwise falls back to
  // running a checkpoint on all threads.
  void MakeVisible(Thread* self) {
    if (class_linker_->visibly_initialize_classes_with_membarier_) {
      // If the associated register command succeeded, this command should never fail.
      int membarrier_result = art::membarrier(MembarrierCommand::kPrivateExpedited);
      CHECK_EQ(membarrier_result, 0) << strerror(errno);
      MarkVisiblyInitialized(self);
    } else {
      DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
      size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
      AdjustThreadVisibilityCounter(self, count);
    }
  }

  // Checkpoint body, run once on each thread; decrements the visibility counter.
  void Run(Thread* self) override {
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  // Adds `adjustment` to the counter; whichever call brings it to zero (the
  // requesting thread's increment or the last checkpoint's decrement) proceeds
  // to mark the classes visibly initialized.
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      MarkVisiblyInitialized(self);
    }
  }

  // Promotes every still-live class in the batch to kVisiblyInitialized, fixes up
  // its static trampolines, releases the weak global references, and notifies the
  // ClassLinker that this callback is done.
  void MarkVisiblyInitialized(Thread* self) {
    {
      ScopedObjectAccess soa(self);
      StackHandleScope<1u> hs(self);
      MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
      JavaVMExt* vm = self->GetJniEnv()->GetVm();
      for (size_t i = 0, num = num_classes_; i != num; ++i) {
        klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
        vm->DeleteWeakGlobalRef(self, classes_[i]);
        if (klass != nullptr) {
          mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
          class_linker_->FixupStaticTrampolines(self, klass.Get());
        }
      }
      num_classes_ = 0u;
    }
    class_linker_->VisiblyInitializedCallbackDone(self, this);
  }

  // Making classes initialized in bigger batches helps with app startup for apps
  // that initialize a lot of classes by running fewer synchronization functions.
  // (On the other hand, bigger batches make class initialization checks more
  // likely to take a slow path but that is mitigated by making partially
  // filled buffers visibly initialized if we take the slow path many times.
  // See `Thread::kMakeVisiblyInitializedCounterTriggerCount`.)
  static constexpr size_t kMaxClasses = 48;

  ClassLinker* const class_linker_;
  // Number of valid entries in `classes_`.
  size_t num_classes_;
  // Weak global references to the batched classes.
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  // Used only if the preferred `membarrier()` command is unsupported.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
349 
// Flushes the pending batch of initialized classes, promoting them to visibly
// initialized. If `wait` is true, blocks until all currently running callbacks
// (including the one flushed here) have completed.
void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    Locks::mutator_lock_->AssertNotHeld(self);
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    // Detach the pending (non-empty) callback, if any, and move it to the running list.
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      // Register the barrier with every running callback; each shall `Pass()` it when done.
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  // Run the synchronization work outside the lock.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}
384 
// Called by a completed `VisiblyInitializedCallback`: passes any waiters' barriers,
// unlinks the callback from the running list, and either recycles it as the new
// pending callback or destroys it.
void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  // Linear predecessor search; `erase_after()` on the singly-linked intrusive
  // list requires the element before the one being removed.
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}
409 
ForceClassInitialized(Thread * self,Handle<mirror::Class> klass)410 void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
411   ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
412   if (cb != nullptr) {
413     cb->MakeVisible(self);
414   }
415   ScopedThreadSuspension sts(self, ThreadState::kSuspended);
416   MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
417 }
418 
// Convenience overload: looks up a boot image JNI stub keyed by `method`.
const void* ClassLinker::FindBootJniStub(ArtMethod* method) {
  return FindBootJniStub(JniStubKey(method));
}
422 
// Convenience overload: looks up a boot image JNI stub keyed by access flags and shorty.
const void* ClassLinker::FindBootJniStub(uint32_t flags, std::string_view shorty) {
  return FindBootJniStub(JniStubKey(flags, shorty));
}
426 
FindBootJniStub(JniStubKey key)427 const void* ClassLinker::FindBootJniStub(JniStubKey key) {
428   auto it = boot_image_jni_stubs_.find(key);
429   if (it == boot_image_jni_stubs_.end()) {
430     return nullptr;
431   } else {
432     return it->second;
433   }
434 }
435 
// Records that `klass` finished initialization. On x86/x86-64 and inside transactions
// the class is immediately marked visibly initialized and null is returned; otherwise
// the class is added to the pending batch and, when the batch becomes full, the
// detached callback is returned for the caller to run `MakeVisible()` on.
ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Lazily (re)create the pending batch and add the class to it.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    // Full batch: detach it, queue it as running and hand it to the caller.
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
468 
// Registers `native_method` as the implementation of the native `method`.
// Runtime callbacks may substitute a different implementation (e.g. for
// instrumentation); the pointer actually registered is returned. For
// @CriticalNative methods whose declaring class is not yet visibly initialized,
// the entrypoint is stashed in `critical_native_code_with_clinit_check_` and
// applied later (see the visibly-initialized fixup path) to ensure correct
// memory visibility of the initialized class state.
const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
497 
UnregisterNative(Thread * self,ArtMethod * method)498 void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
499   CHECK(method->IsNative()) << method->PrettyMethod();
500   // Restore stub to lookup native pointer via dlsym.
501   if (method->IsCriticalNative()) {
502     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
503     auto it = critical_native_code_with_clinit_check_.find(method);
504     if (it != critical_native_code_with_clinit_check_.end()) {
505       critical_native_code_with_clinit_check_.erase(it);
506     }
507     method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
508   } else {
509     method->SetEntryPointFromJni(GetJniDlsymLookupStub());
510   }
511 }
512 
GetRegisteredNative(Thread * self,ArtMethod * method)513 const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
514   if (method->IsCriticalNative()) {
515     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
516     auto it = critical_native_code_with_clinit_check_.find(method);
517     if (it != critical_native_code_with_clinit_check_.end()) {
518       return it->second;
519     }
520     const void* native_code = method->GetEntryPointFromJni();
521     return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
522   } else {
523     const void* native_code = method->GetEntryPointFromJni();
524     return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
525   }
526 }
527 
// Throws the appropriate exception for a class that already failed a previous
// initialization or verification attempt: the stored error is rethrown, and/or a
// NoClassDefFoundError is thrown (possibly wrapping the stored error as its cause).
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5).  The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}
574 
VlogClassInitializationFailure(Handle<mirror::Class> klass)575 static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
576     REQUIRES_SHARED(Locks::mutator_lock_) {
577   if (VLOG_IS_ON(class_linker)) {
578     std::string temp;
579     LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
580               << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
581   }
582 }
583 
// Wraps the pending exception from a failed <clinit> in an ExceptionInInitializerError
// (unless the cause is already an Error, which is rethrown as-is, per the JLS).
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();

  ObjPtr<mirror::Throwable> cause = self->GetException();
  CHECK(cause != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
    // make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!cause->IsError()) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}
610 
RegisterMemBarrierForClassInitialization()611 static bool RegisterMemBarrierForClassInitialization() {
612   if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
613     // Thanks to the x86 memory model, classes skip the initialized status, so there is no need
614     // to use `membarrier()` or other synchronization for marking classes visibly initialized.
615     return false;
616   }
617   int membarrier_result = art::membarrier(MembarrierCommand::kRegisterPrivateExpedited);
618   return membarrier_result == 0;
619 }
620 
// Constructs the ClassLinker. All trampolines start null and are filled in later;
// `membarrier()` registration is attempted eagerly so the visibly-initialized
// machinery knows which synchronization strategy to use.
ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      running_visibly_initialized_callbacks_(),
      visibly_initialize_classes_with_membarier_(RegisterMemBarrierForClassInitialization()),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      boot_image_jni_stubs_(JniStubKeyHash(Runtime::Current()->GetInstructionSet()),
                            JniStubKeyEquals(Runtime::Current()->GetInstructionSet())),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA disabled during Aot, see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  // Clear the array-class lookup cache.
  for (size_t i = 0; i < kFindArrayCacheSize; i++) {
    find_array_class_cache_[i].store(GcRoot<mirror::Class>(nullptr), std::memory_order_relaxed);
  }
}
656 
CheckSystemClass(Thread * self,Handle<mirror::Class> c1,const char * descriptor)657 void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
658   ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
659   if (c2 == nullptr) {
660     LOG(FATAL) << "Could not find class " << descriptor;
661     UNREACHABLE();
662   }
663   if (c1.Get() != c2) {
664     std::ostringstream os1, os2;
665     c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
666     c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
667     LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
668                << ". This is most likely the result of a broken build. Make sure that "
669                << "libcore and art projects match.\n\n"
670                << os1.str() << "\n\n" << os2.str();
671     UNREACHABLE();
672   }
673 }
674 
AllocIfTable(Thread * self,size_t ifcount,ObjPtr<mirror::Class> iftable_class)675 ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
676                                      size_t ifcount,
677                                      ObjPtr<mirror::Class> iftable_class)
678     REQUIRES_SHARED(Locks::mutator_lock_) {
679   DCHECK(iftable_class->IsArrayClass());
680   DCHECK(iftable_class->GetComponentType()->IsObjectClass());
681   return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
682       mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
683 }
684 
// Bootstraps the ClassLinker directly from boot class path dex files, without
// a boot image. The core classes (Class, Object, String, Reference, ...) are
// constructed by hand first, because they are needed before FindSystemClass
// can work at all; once dex files are registered, the hand-built classes are
// re-resolved through the normal class loading machinery to finish their
// initialization, and the remaining class roots are loaded normally.
// Returns false (with `*error_msg` set) only for an empty boot class path or
// a null dex file; any later inconsistency is fatal.
bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
                                   std::string* error_msg) {
  VLOG(startup) << "ClassLinker::Init";

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
  CHECK(!init_done_);

  // Use the pointer size from the runtime since we are probably creating the image.
  image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());

  // java_lang_Class comes first, it's needed for AllocClass
  // The GC can't handle an object with a null class since we can't get the size of this object.
  heap->IncrementDisableMovingGC(self);
  StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
  auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
  // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
  // the incorrect result when comparing to-space vs from-space.
  Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
      heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
  CHECK(java_lang_Class != nullptr);
  java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
  // Class is its own class: close the self-referential loop by hand.
  java_lang_Class->SetClass(java_lang_Class.Get());
  if (kUseBakerReadBarrier) {
    java_lang_Class->AssertReadBarrierState();
  }
  java_lang_Class->SetClassSize(class_class_size);
  java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
  heap->DecrementDisableMovingGC(self);
  // AllocClass(ObjPtr<mirror::Class>) can now be used

  // Class[] is used for reflection support.
  auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
  Handle<mirror::Class> class_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
  class_array_class->SetComponentType(java_lang_Class.Get());

  // java_lang_Object comes next so that object_array_class can be created.
  Handle<mirror::Class> java_lang_Object(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
  CHECK(java_lang_Object != nullptr);
  // backfill Object as the super class of Class.
  java_lang_Class->SetSuperClass(java_lang_Object.Get());
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);

  java_lang_Object->SetObjectSize(sizeof(mirror::Object));
  // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
  // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
  runtime->SetSentinel(heap->AllocNonMovableObject(self,
                                                   java_lang_Object.Get(),
                                                   java_lang_Object->GetObjectSize(),
                                                   VoidFunctor()));

  // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
  if (kBitstringSubtypeCheckEnabled) {
    // It might seem the lock here is unnecessary, however all the SubtypeCheck
    // functions are annotated to require locks all the way down.
    //
    // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
  }

  // Object[] next to hold class roots.
  Handle<mirror::Class> object_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
  object_array_class->SetComponentType(java_lang_Object.Get());

  // Setup java.lang.String.
  //
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  Handle<mirror::Class> java_lang_String(hs.NewHandle(
      AllocClass</* kMovable= */ false>(
          self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
  java_lang_String->SetStringClass();
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);

  // Setup java.lang.ref.Reference.
  Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
  java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);

  // Create storage for root classes, save away our work so far (requires descriptors).
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                object_array_class.Get(),
                                                static_cast<int32_t>(ClassRoot::kMax)));
  CHECK(!class_roots_.IsNull());
  SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
  SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
  SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
  SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
  SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());

  // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
  java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));

  // Create array interface entries to populate once we can load system classes.
  // The two slots are filled with Cloneable and Serializable further below.
  object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
  DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());

  // Setup the primitive type classes.
  CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
  CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
  CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
  CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
  CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
  CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
  CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
  CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
  CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);

  // Allocate the primitive array classes. We need only the native pointer
  // array at this point (int[] or long[], depending on architecture) but
  // we shall perform the same setup steps for all primitive array classes.
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);

  // now that these are registered, we can use AllocClass() and AllocObjectArray

  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
  Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
  java_lang_DexCache->SetDexCacheClass();
  java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);


  // Setup dalvik.system.ClassExt
  Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);

  // Set up array classes for string, field, method
  Handle<mirror::Class> object_array_string(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
  object_array_string->SetComponentType(java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());

  LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
  // Create runtime resolution and imt conflict methods.
  runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
  runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
  runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));

  // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
  // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
  // these roots.
  if (boot_class_path.empty()) {
    *error_msg = "Boot classpath is empty.";
    return false;
  }
  for (auto& dex_file : boot_class_path) {
    if (dex_file == nullptr) {
      *error_msg = "Null dex file.";
      return false;
    }
    AppendToBootClassPath(self, dex_file.get());
    // Take ownership of the dex file; `dex_file` is null after this move.
    boot_dex_files_.push_back(std::move(dex_file));
  }

  // now we can use FindSystemClass

  // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
  // we do not need friend classes or a publicly exposed setter.
  quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
  if (!runtime->IsAotCompiler()) {
    // We need to set up the generic trampolines since we don't have an image.
    jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
    jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
    quick_resolution_trampoline_ = GetQuickResolutionStub();
    quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
    quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
    quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
    nterp_trampoline_ = interpreter::GetNterpEntryPoint();
  }

  // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
  // We also need to immediately clear the finalizable flag for Object so that other classes are
  // not erroneously marked as finalizable. (Object defines an empty finalizer, so that other
  // classes can override it but it is not itself finalizable.)
  // Each class is reset to kNotReady so FindSystemClass performs the full load.
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
  CHECK(java_lang_Object->IsFinalizable());
  java_lang_Object->ClearFinalizable();
  CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
  CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
  CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
  CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());

  // Run Class through FindSystemClass. This initializes the dex_cache_ fields and register it
  // in class_table_.
  CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");

  // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
  // arrays - can't be done until Object has a vtable and component classes are loaded.
  FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);

  // Setup the single, global copy of "iftable".
  auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
  CHECK(java_lang_Cloneable != nullptr);
  auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
  CHECK(java_io_Serializable != nullptr);
  // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
  // crawl up and explicitly list all of the supers as well.
  object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
  object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());

  // Check Class[] and Object[]'s interfaces.
  CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
  CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));

  CHECK_EQ(object_array_string.Get(),
           FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));

  // The Enum class declares a "final" finalize() method to prevent subclasses from introducing
  // a finalizer but it is not itself considered finalizable. Load the Enum class now and clear
  // the finalizable flag to prevent subclasses from being marked as finalizable.
  CHECK_EQ(LookupClass(self, "Ljava/lang/Enum;", /*class_loader=*/ nullptr), nullptr);
  Handle<mirror::Class> java_lang_Enum = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Enum;"));
  CHECK(java_lang_Enum->IsFinalizable());
  java_lang_Enum->ClearFinalizable();

  // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.

  // Create java.lang.reflect.Proxy root.
  SetClassRoot(ClassRoot::kJavaLangReflectProxy,
               FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));

  // Create java.lang.reflect.Field.class root.
  ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);

  // Create java.lang.reflect.Field array root.
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);

  // Create java.lang.reflect.Constructor.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);

  // Create java.lang.reflect.Method.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);

  // Create java.lang.invoke.CallSite.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);

  // Create java.lang.invoke.MethodType.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);

  // Create java.lang.invoke.MethodHandleImpl.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
  // The MethodHandle root is the superclass of MethodHandleImpl.
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());

  // Create java.lang.invoke.MethodHandles.Lookup.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);

  // Create java.lang.invoke.VarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);

  // Create java.lang.invoke.FieldVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);

  // Create java.lang.invoke.StaticFieldVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);

  // Create java.lang.invoke.ArrayElementVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);

  // Create java.lang.invoke.ByteArrayViewVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);

  // Create java.lang.invoke.ByteBufferViewVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);

  class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);

  // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
  // finish initializing Reference class
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
  CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
  CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
           mirror::Reference::ClassSize(image_pointer_size_));
  class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);

  // Setup the ClassLoader, verifying the object_size_.
  class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
  class_root->SetClassLoaderClass();
  CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
  SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);

  // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
  // java.lang.StackTraceElement as a convenience.
  SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
  SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
               FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
               FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
               FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
               FindSystemClass(self, "[Ljava/lang/ClassLoader;"));

  // Create conflict tables that depend on the class linker.
  runtime->FixupConflictTables();

  FinishInit(self);

  // NOTE(review): the log message says "InitFromCompiler" while the method is
  // named InitWithoutImage -- presumably the method's former name; confirm
  // before changing, as log scrapers may depend on it.
  VLOG(startup) << "ClassLinker::InitFromCompiler exiting";

  return true;
}
1080 
CreateStringInitBindings(Thread * self,ClassLinker * class_linker)1081 static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1082     REQUIRES_SHARED(Locks::mutator_lock_) {
1083   // Find String.<init> -> StringFactory bindings.
1084   ObjPtr<mirror::Class> string_factory_class =
1085       class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1086   CHECK(string_factory_class != nullptr);
1087   ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
1088   WellKnownClasses::InitStringInit(string_class, string_factory_class);
1089   // Update the primordial thread.
1090   self->InitStringEntryPoints();
1091 }
1092 
FinishInit(Thread * self)1093 void ClassLinker::FinishInit(Thread* self) {
1094   VLOG(startup) << "ClassLinker::FinishInit entering";
1095 
1096   CreateStringInitBindings(self, this);
1097 
1098   // ensure all class_roots_ are initialized
1099   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1100     ClassRoot class_root = static_cast<ClassRoot>(i);
1101     ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
1102     CHECK(klass != nullptr);
1103     DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
1104     // note SetClassRoot does additional validation.
1105     // if possible add new checks there to catch errors early
1106   }
1107 
1108   CHECK(GetArrayIfTable() != nullptr);
1109 
1110   // disable the slow paths in FindClass and CreatePrimitiveClass now
1111   // that Object, Class, and Object[] are setup
1112   init_done_ = true;
1113 
1114   // Under sanitization, the small carve-out to handle stack overflow might not be enough to
1115   // initialize the StackOverflowError class (as it might require running the verifier). Instead,
1116   // ensure that the class will be initialized.
1117   if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
1118     ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
1119     StackHandleScope<1> hs(self);
1120     if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
1121       // Strange, but don't crash.
1122       LOG(WARNING) << "Could not prepare StackOverflowError.";
1123       self->ClearException();
1124     }
1125   }
1126 
1127   VLOG(startup) << "ClassLinker::FinishInit exiting";
1128 }
1129 
EnsureRootInitialized(ClassLinker * class_linker,Thread * self,ObjPtr<mirror::Class> klass)1130 static void EnsureRootInitialized(ClassLinker* class_linker,
1131                                   Thread* self,
1132                                   ObjPtr<mirror::Class> klass)
1133     REQUIRES_SHARED(Locks::mutator_lock_) {
1134   if (!klass->IsVisiblyInitialized()) {
1135     DCHECK(!klass->IsArrayClass());
1136     DCHECK(!klass->IsPrimitive());
1137     StackHandleScope<1> hs(self);
1138     Handle<mirror::Class> h_class(hs.NewHandle(klass));
1139     if (!class_linker->EnsureInitialized(
1140              self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true)) {
1141       LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1142           << ": " << self->GetException()->Dump();
1143     }
1144   }
1145 }
1146 
// Eagerly initializes the small set of classes required very early during
// runtime startup, before the full RunRootClinits() pass can run: the Class,
// String and Field roots, the well-known classes cache, and FinalizerReference.
void ClassLinker::RunEarlyRootClinits(Thread* self) {
  StackHandleScope<1u> hs(self);
  Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
  EnsureRootInitialized(this, self, GetClassRoot<mirror::Class>(class_roots.Get()));
  EnsureRootInitialized(this, self, GetClassRoot<mirror::String>(class_roots.Get()));
  // `Field` class is needed for register_java_net_InetAddress in libcore, b/28153851.
  EnsureRootInitialized(this, self, GetClassRoot<mirror::Field>(class_roots.Get()));

  // Populate the WellKnownClasses cache; required below to reach the
  // FinalizerReference.add method.
  WellKnownClasses::Init(self->GetJniEnv());

  // `FinalizerReference` class is needed for initialization of `java.net.InetAddress`.
  // (Indirectly by constructing a `ObjectStreamField` which uses a `StringBuilder`
  // and, when resizing, initializes the `System` class for `System.arraycopy()`
  // and `System.<clinit>` creates a finalizable object.)
  EnsureRootInitialized(
      this, self, WellKnownClasses::java_lang_ref_FinalizerReference_add->GetDeclaringClass());
}
1164 
// Eagerly initializes every class root plus a curated list of well-known
// classes (identified via their well-known methods/fields) so that their
// initialization checks do not have to happen later at runtime.
void ClassLinker::RunRootClinits(Thread* self) {
  StackHandleScope<1u> hs(self);
  Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
  // First pass: every class root.
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
    EnsureRootInitialized(this, self, GetClassRoot(ClassRoot(i), class_roots.Get()));
  }

  // Make sure certain well-known classes are initialized. Note that well-known
  // classes are always in the boot image, so this code is primarily intended
  // for running without boot image but may be needed for boot image if the
  // AOT-initialization fails due to introduction of new code to `<clinit>`.
  ArtMethod* methods_of_classes_to_initialize[] = {
      // Initialize primitive boxing classes (avoid check at runtime).
      WellKnownClasses::java_lang_Boolean_valueOf,
      WellKnownClasses::java_lang_Byte_valueOf,
      WellKnownClasses::java_lang_Character_valueOf,
      WellKnownClasses::java_lang_Double_valueOf,
      WellKnownClasses::java_lang_Float_valueOf,
      WellKnownClasses::java_lang_Integer_valueOf,
      WellKnownClasses::java_lang_Long_valueOf,
      WellKnownClasses::java_lang_Short_valueOf,
      // Initialize `StackOverflowError`.
      WellKnownClasses::java_lang_StackOverflowError_init,
      // Ensure class loader classes are initialized (avoid check at runtime).
      // Superclass `ClassLoader` is a class root and already initialized above.
      // Superclass `BaseDexClassLoader` is initialized implicitly.
      WellKnownClasses::dalvik_system_DelegateLastClassLoader_init,
      WellKnownClasses::dalvik_system_DexClassLoader_init,
      WellKnownClasses::dalvik_system_InMemoryDexClassLoader_init,
      WellKnownClasses::dalvik_system_PathClassLoader_init,
      WellKnownClasses::java_lang_BootClassLoader_init,
      // Ensure `Daemons` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_Daemons_start,
      // Ensure `Thread` and `ThreadGroup` classes are initialized (avoid check at runtime).
      WellKnownClasses::java_lang_Thread_init,
      WellKnownClasses::java_lang_ThreadGroup_add,
      // Ensure reference classes are initialized (avoid check at runtime).
      // The `FinalizerReference` class was initialized in `RunEarlyRootClinits()`.
      WellKnownClasses::java_lang_ref_ReferenceQueue_add,
      // Ensure `InvocationTargetException` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_reflect_InvocationTargetException_init,
      // Ensure `Parameter` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_reflect_Parameter_init,
      // Ensure `MethodHandles` and `MethodType` classes are initialized (avoid check at runtime).
      WellKnownClasses::java_lang_invoke_MethodHandles_lookup,
      WellKnownClasses::java_lang_invoke_MethodType_makeImpl,
      // Ensure `DirectByteBuffer` class is initialized (avoid check at runtime).
      WellKnownClasses::java_nio_DirectByteBuffer_init,
      // Ensure `FloatingDecimal` class is initialized (avoid check at runtime).
      WellKnownClasses::jdk_internal_math_FloatingDecimal_getBinaryToASCIIConverter_D,
      // Ensure reflection annotation classes are initialized (avoid check at runtime).
      WellKnownClasses::libcore_reflect_AnnotationFactory_createAnnotation,
      WellKnownClasses::libcore_reflect_AnnotationMember_init,
      // We're suppressing exceptions from `DdmServer` and we do not want to repeatedly
      // suppress class initialization error (say, due to OOM), so initialize it early.
      WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch,
  };
  // Initialize the declaring class of each listed well-known method.
  for (ArtMethod* method : methods_of_classes_to_initialize) {
    EnsureRootInitialized(this, self, method->GetDeclaringClass());
  }
  ArtField* fields_of_classes_to_initialize[] = {
      // Ensure classes used by class loaders are initialized (avoid check at runtime).
      WellKnownClasses::dalvik_system_DexFile_cookie,
      WellKnownClasses::dalvik_system_DexPathList_dexElements,
      WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
      // Ensure `VMRuntime` is initialized (avoid check at runtime).
      WellKnownClasses::dalvik_system_VMRuntime_nonSdkApiUsageConsumer,
      // Initialize empty arrays needed by `StackOverflowError`.
      WellKnownClasses::java_util_Collections_EMPTY_LIST,
      WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT,
      // Initialize boxing caches needed by the compiler.
      WellKnownClasses::java_lang_Byte_ByteCache_cache,
      WellKnownClasses::java_lang_Character_CharacterCache_cache,
      WellKnownClasses::java_lang_Integer_IntegerCache_cache,
      WellKnownClasses::java_lang_Long_LongCache_cache,
      WellKnownClasses::java_lang_Short_ShortCache_cache,
  };
  // Initialize the declaring class of each listed well-known field.
  for (ArtField* field : fields_of_classes_to_initialize) {
    EnsureRootInitialized(this, self, field->GetDeclaringClass());
  }
}
1246 
1247 ALWAYS_INLINE
ComputeMethodHash(ArtMethod * method)1248 static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1249   DCHECK(!method->IsRuntimeMethod());
1250   DCHECK(!method->IsProxyMethod());
1251   DCHECK(!method->IsObsolete());
1252   // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1253   // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1254   const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1255   const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1256   std::string_view name = dex_file.GetMethodNameView(method_id);
1257   return ComputeModifiedUtf8Hash(name);
1258 }
1259 
1260 ALWAYS_INLINE
MethodSignatureEquals(ArtMethod * lhs,ArtMethod * rhs)1261 static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1262     REQUIRES_SHARED(Locks::mutator_lock_) {
1263   DCHECK(!lhs->IsRuntimeMethod());
1264   DCHECK(!lhs->IsProxyMethod());
1265   DCHECK(!lhs->IsObsolete());
1266   DCHECK(!rhs->IsRuntimeMethod());
1267   DCHECK(!rhs->IsProxyMethod());
1268   DCHECK(!rhs->IsObsolete());
1269   // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1270   // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1271   const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1272   const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1273   const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1274   const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
1275   if (&lhs_dex_file == &rhs_dex_file) {
1276     return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1277            lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1278   } else {
1279     return
1280         lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1281         lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1282   }
1283 }
1284 
InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,PointerSize pointer_size,ArrayRef<uint32_t> virtual_method_hashes)1285 static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1286                                                 PointerSize pointer_size,
1287                                                 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1288     REQUIRES_SHARED(Locks::mutator_lock_) {
1289   ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1290   DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1291   for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
1292     virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
1293   }
1294 }
1295 
// Scratch data for the debug-only check in `ClassLinker::InitFromBootImage` that
// scans a secondary boot image for methods whose entrypoints are the trampolines
// of a different oat file.
struct TrampolineCheckData {
  const void* quick_resolution_trampoline;
  const void* quick_imt_conflict_trampoline;
  const void* quick_generic_jni_trampoline;
  const void* quick_to_interpreter_bridge_trampoline;
  const void* nterp_trampoline;
  PointerSize pointer_size;  // Image pointer size used to iterate class methods.
  ArtMethod* m;              // First offending method found; only valid when `error` is true.
  bool error;                // Set when a method with a mismatched trampoline entrypoint is found.
};
1306 
// Initializes the class linker from the boot image spaces: validates the image
// pointer size, installs the special runtime methods, caches oat trampolines,
// sets up class roots and the sentinel, and registers the boot dex files.
// Returns false and sets `*error_msg` on failure.
bool ClassLinker::InitFromBootImage(std::string* error_msg) {
  VLOG(startup) << __FUNCTION__ << " entering";
  CHECK(!init_done_);

  Runtime* const runtime = Runtime::Current();
  Thread* const self = Thread::Current();
  gc::Heap* const heap = runtime->GetHeap();
  std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
  CHECK(!spaces.empty());
  // Validate the pointer size recorded in the primary boot image header.
  const ImageHeader& image_header = spaces[0]->GetImageHeader();
  image_pointer_size_ = image_header.GetPointerSize();
  if (UNLIKELY(image_pointer_size_ != PointerSize::k32 &&
               image_pointer_size_ != PointerSize::k64)) {
    *error_msg =
        StringPrintf("Invalid image pointer size: %u", static_cast<uint32_t>(image_pointer_size_));
    return false;
  }
  if (!runtime->IsAotCompiler()) {
    // Only the Aot compiler supports having an image with a different pointer size than the
    // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
    // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
    if (image_pointer_size_ != kRuntimePointerSize) {
      *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
                                static_cast<size_t>(image_pointer_size_),
                                sizeof(void*));
      return false;
    }
  }
  // Install the special runtime methods (resolution, IMT conflict/unimplemented and
  // all callee-save frame methods) stored in the primary image header.
  DCHECK(!runtime->HasResolutionMethod());
  runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
  runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
  runtime->SetImtUnimplementedMethod(
      image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
      CalleeSaveType::kSaveAllCalleeSaves);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
      CalleeSaveType::kSaveRefsOnly);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
      CalleeSaveType::kSaveRefsAndArgs);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
      CalleeSaveType::kSaveEverything);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
      CalleeSaveType::kSaveEverythingForClinit);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
      CalleeSaveType::kSaveEverythingForSuspendCheck);

  // Cache the trampoline entrypoints from the primary oat file; the class linker
  // works with a single set of trampolines (see the debug check below).
  std::vector<const OatFile*> oat_files =
      runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
  DCHECK(!oat_files.empty());
  const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
  jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
  jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
  quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
  quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
  quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
  quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
  nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
  if (kIsDebugBuild) {
    // Check that the other images use the same trampoline.
    for (size_t i = 1; i < oat_files.size(); ++i) {
      const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
      const void* ith_jni_dlsym_lookup_trampoline_ =
          ith_oat_header.GetJniDlsymLookupTrampoline();
      const void* ith_jni_dlsym_lookup_critical_trampoline_ =
          ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
      const void* ith_quick_resolution_trampoline =
          ith_oat_header.GetQuickResolutionTrampoline();
      const void* ith_quick_imt_conflict_trampoline =
          ith_oat_header.GetQuickImtConflictTrampoline();
      const void* ith_quick_generic_jni_trampoline =
          ith_oat_header.GetQuickGenericJniTrampoline();
      const void* ith_quick_to_interpreter_bridge_trampoline =
          ith_oat_header.GetQuickToInterpreterBridge();
      const void* ith_nterp_trampoline =
          ith_oat_header.GetNterpTrampoline();
      if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
          ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
          ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
          ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
          ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
          ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
          ith_nterp_trampoline != nterp_trampoline_) {
        // Make sure that all methods in this image do not contain those trampolines as
        // entrypoints. Otherwise the class-linker won't be able to work with a single set.
        TrampolineCheckData data;
        data.error = false;
        data.pointer_size = GetImagePointerSize();
        data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
        data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
        data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
        data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
        data.nterp_trampoline = ith_nterp_trampoline;
        ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
        // Walk all live objects of the i-th space and flag the first method whose
        // quick entrypoint matches one of the i-th oat file's trampolines.
        auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
          if (obj->IsClass()) {
            ObjPtr<mirror::Class> klass = obj->AsClass();
            for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
              const void* entrypoint =
                  m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
              if (entrypoint == data.quick_resolution_trampoline ||
                  entrypoint == data.quick_imt_conflict_trampoline ||
                  entrypoint == data.quick_generic_jni_trampoline ||
                  entrypoint == data.quick_to_interpreter_bridge_trampoline) {
                data.m = &m;
                data.error = true;
                return;
              }
            }
          }
        };
        spaces[i]->GetLiveBitmap()->Walk(visitor);
        if (data.error) {
          ArtMethod* m = data.m;
          LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
          *error_msg = "Found an ArtMethod with a bad entrypoint";
          return false;
        }
      }
    }
  }

  // Set up the class roots array from the primary image.
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kClassRoots)));
  DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);

  DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
  // Install the sentinel object used for cleared JNI weak references.
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
  runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
  DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));

  // Boot class loader, use a null handle.
  if (!AddImageSpaces(ArrayRef<gc::space::ImageSpace*>(spaces),
                      ScopedNullHandle<mirror::ClassLoader>(),
                      /*context=*/nullptr,
                      &boot_dex_files_,
                      error_msg)) {
    return false;
  }
  // We never use AOT code for debuggable.
  if (!runtime->IsJavaDebuggable()) {
    // Record the boot image JNI stubs so they can be reused later.
    for (gc::space::ImageSpace* space : spaces) {
      const ImageHeader& header = space->GetImageHeader();
      header.VisitJniStubMethods([&](ArtMethod* method)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        const void* stub = method->GetOatMethodQuickCode(image_pointer_size_);
        boot_image_jni_stubs_.Put(std::make_pair(JniStubKey(method), stub));
        return method;
      }, space->Begin(), image_pointer_size_);
    }
  }

  // Precompute the name hashes of java.lang.Object's virtual methods.
  InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
                                      image_pointer_size_,
                                      ArrayRef<uint32_t>(object_virtual_method_hashes_));
  FinishInit(self);

  VLOG(startup) << __FUNCTION__ << " exiting";
  return true;
}
1475 
AddExtraBootDexFiles(Thread * self,std::vector<std::unique_ptr<const DexFile>> && additional_dex_files)1476 void ClassLinker::AddExtraBootDexFiles(
1477     Thread* self,
1478     std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1479   for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1480     AppendToBootClassPath(self, dex_file.get());
1481     if (kIsDebugBuild) {
1482       for (const auto& boot_dex_file : boot_dex_files_) {
1483         DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1484       }
1485     }
1486     boot_dex_files_.push_back(std::move(dex_file));
1487   }
1488 }
1489 
IsBootClassLoader(ObjPtr<mirror::Object> class_loader)1490 bool ClassLinker::IsBootClassLoader(ObjPtr<mirror::Object> class_loader) {
1491   return class_loader == nullptr ||
1492          WellKnownClasses::java_lang_BootClassLoader == class_loader->GetClass();
1493 }
1494 
// Visitor run over the classes of a class loader that is being unloaded; it tells
// CHA (Class Hierarchy Analysis) to reset single-implementation information rooted
// in each doomed class.
class CHAOnDeleteUpdateClassVisitor {
 public:
  explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
      : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
        pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
        self_(Thread::Current()) {}

  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    // This class is going to be unloaded. Tell CHA about it.
    cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
    return true;
  }
 private:
  const LinearAlloc* allocator_;       // Allocator backing the class loader being unloaded.
  const ClassHierarchyAnalysis* cha_;  // CHA instance owned by the class linker.
  const PointerSize pointer_size_;     // Image pointer size, needed for CHA updates.
  const Thread* self_;                 // NOTE(review): stored but never read here — confirm whether it is still needed.
};
1513 
/*
 * A class used to ensure that all references to strings interned in an AppImage have been
 * properly recorded in the interned references list, and is only ever run in debug mode.
 */
class CountInternedStringReferencesVisitor {
 public:
  CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
                                       const InternTable::UnorderedSet& image_interns)
      : space_(space),
        image_interns_(image_interns),
        count_(0u) {}

  // Counts `referred_obj` if it is a string located in `space_` that is present
  // in the image intern set.
  void TestObject(ObjPtr<mirror::Object> referred_obj) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (referred_obj != nullptr &&
        space_.HasAddress(referred_obj.Ptr()) &&
        referred_obj->IsString()) {
      ObjPtr<mirror::String> referred_str = referred_obj->AsString();
      uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
      // All image strings have the hash code calculated, even if they are not interned.
      DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
      auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
      // Count only if the intern set entry refers to this exact string object.
      if (it != image_interns_.end() && it->Read() == referred_str) {
        ++count_;
      }
    }
  }

  // Visit a (possibly null) GC root, e.g. a native root from a dex cache.
  void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    TestObject(root->AsMirrorPtr());
  }

  // Visit Class Fields
  void operator()(ObjPtr<mirror::Object> obj,
                  MemberOffset offset,
                  [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
    // References within image or across images don't need a read barrier.
    ObjPtr<mirror::Object> referred_obj =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    TestObject(referred_obj);
  }

  // Visit the referent field of a reference object.
  void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
  }

  // Number of references counted so far.
  size_t GetCount() const {
    return count_;
  }

 private:
  const gc::space::ImageSpace& space_;
  const InternTable::UnorderedSet& image_interns_;
  mutable size_t count_;  // Modified from the `const` callbacks.
};
1579 
1580 /*
1581  * This function counts references to strings interned in the AppImage.
1582  * This is used in debug build to check against the number of the recorded references.
1583  */
CountInternedStringReferences(gc::space::ImageSpace & space,const InternTable::UnorderedSet & image_interns)1584 size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1585                                      const InternTable::UnorderedSet& image_interns)
1586     REQUIRES_SHARED(Locks::mutator_lock_) {
1587   const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1588   const ImageHeader& image_header = space.GetImageHeader();
1589   const uint8_t* target_base = space.GetMemMap()->Begin();
1590   const ImageSection& objects_section = image_header.GetObjectsSection();
1591 
1592   auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1593   auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
1594 
1595   CountInternedStringReferencesVisitor visitor(space, image_interns);
1596   bitmap->VisitMarkedRange(objects_begin,
1597                            objects_end,
1598                            [&space, &visitor](mirror::Object* obj)
1599     REQUIRES_SHARED(Locks::mutator_lock_) {
1600     if (space.HasAddress(obj)) {
1601       if (obj->IsDexCache()) {
1602         obj->VisitReferences</* kVisitNativeRoots= */ true,
1603                              kVerifyNone,
1604                              kWithoutReadBarrier>(visitor, visitor);
1605       } else {
1606         // Don't visit native roots for non-dex-cache as they can't contain
1607         // native references to strings.  This is verified during compilation
1608         // by ImageWriter::VerifyNativeGCRootInvariants.
1609         obj->VisitReferences</* kVisitNativeRoots= */ false,
1610                              kVerifyNone,
1611                              kWithoutReadBarrier>(visitor, visitor);
1612       }
1613     }
1614   });
1615   return visitor.GetCount();
1616 }
1617 
// Applies `visitor` to every string reference recorded in the image's string
// reference offsets section, storing back the visitor's result when it returns a
// different string. Each record is a (base_offset, member_offset) pair identifying
// the referring object and the field (or dex cache string index) within it.
template <typename Visitor>
static void VisitInternedStringReferences(
    gc::space::ImageSpace* space,
    const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
  const uint8_t* target_base = space->Begin();
  const ImageSection& sro_section =
      space->GetImageHeader().GetImageStringReferenceOffsetsSection();
  const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);

  VLOG(image)
      << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
      << num_string_offsets;

  const auto* sro_base =
      reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());

  for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
    // Offset of the referring object from the start of the image.
    uint32_t base_offset = sro_base[offset_index].first;

    uint32_t raw_member_offset = sro_base[offset_index].second;
    DCHECK_ALIGNED(base_offset, 2);

    ObjPtr<mirror::Object> obj_ptr =
        reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
    if (obj_ptr->IsDexCache() && raw_member_offset >= sizeof(mirror::DexCache)) {
      // Special case for strings referenced from dex cache array: the offset is
      // actually decoded as an index into the dex cache string array.
      uint32_t index = raw_member_offset - sizeof(mirror::DexCache);
      mirror::GcRootArray<mirror::String>* array = obj_ptr->AsDexCache()->GetStringsArray();
      // The array could be concurrently set to null. See `StartupCompletedTask`.
      if (array != nullptr) {
        ObjPtr<mirror::String> referred_string = array->Get(index);
        DCHECK(referred_string != nullptr);
        ObjPtr<mirror::String> visited = visitor(referred_string);
        if (visited != referred_string) {
          // The visitor replaced the string; write the replacement back.
          array->Set(index, visited.Ptr());
        }
      }
    } else {
      // Regular case: the offset is a field offset within the referring object.
      DCHECK_ALIGNED(raw_member_offset, 2);
      MemberOffset member_offset(raw_member_offset);
      ObjPtr<mirror::String> referred_string =
          obj_ptr->GetFieldObject<mirror::String,
                                  kVerifyNone,
                                  kWithoutReadBarrier,
                                  /* kIsVolatile= */ false>(member_offset);
      DCHECK(referred_string != nullptr);

      ObjPtr<mirror::String> visited = visitor(referred_string);
      if (visited != referred_string) {
        // The visitor replaced the string; write the replacement back.
        obj_ptr->SetFieldObject</* kTransactionActive= */ false,
                                /* kCheckTransaction= */ false,
                                kVerifyNone,
                                /* kIsVolatile= */ false>(member_offset, visited);
      }
    }
  }
}
1676 
VerifyInternedStringReferences(gc::space::ImageSpace * space)1677 static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1678     REQUIRES_SHARED(Locks::mutator_lock_) {
1679   InternTable::UnorderedSet image_interns;
1680   const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1681   if (section.Size() > 0) {
1682     size_t read_count;
1683     const uint8_t* data = space->Begin() + section.Offset();
1684     InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1685     image_set.swap(image_interns);
1686   }
1687   size_t num_recorded_refs = 0u;
1688   VisitInternedStringReferences(
1689       space,
1690       [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1691           REQUIRES_SHARED(Locks::mutator_lock_) {
1692         auto it = image_interns.find(GcRoot<mirror::String>(str));
1693         CHECK(it != image_interns.end());
1694         CHECK(it->Read() == str);
1695         ++num_recorded_refs;
1696         return str;
1697       });
1698   size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1699   CHECK_EQ(num_recorded_refs, num_found_refs);
1700 }
1701 
PatchDexCacheLocations(Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,InternTable * intern_table,std::string * error_msg)1702 static bool PatchDexCacheLocations(Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
1703                                    InternTable* intern_table,
1704                                    std::string* error_msg) REQUIRES_SHARED(Locks::mutator_lock_) {
1705   // Replace the location in the dex cache in the app image (the `--dex-location` passed to
1706   // dex2oat) with the actual location if needed.
1707   // The actual location is computed by the logic in `OatFileBase::Setup`.
1708   // This is needed when the location on device is unknown at compile-time, typically during
1709   // Cloud Compilation because the compilation is done on the server and the apk is later
1710   // installed on device into `/data/app/<random_string>`.
1711   // This is not needed during dexpreopt because the location on device is known to be a certain
1712   // location in /system, /product, etc.
1713   Thread* self = Thread::Current();
1714   StackHandleScope<1> hs(self);
1715   MutableHandle<mirror::DexCache> dex_cache = hs.NewHandle<mirror::DexCache>(nullptr);
1716   for (auto dex_cache_ptr : dex_caches.Iterate<mirror::DexCache>()) {
1717     dex_cache.Assign(dex_cache_ptr);
1718     std::string dex_file_location =
1719         dex_cache->GetLocation(/*allow_location_mismatch=*/true)->ToModifiedUtf8();
1720     const DexFile* dex_file = dex_cache->GetDexFile();
1721     if (dex_file_location != dex_file->GetLocation()) {
1722       ObjPtr<mirror::String> location = intern_table->InternWeak(dex_file->GetLocation().c_str());
1723       if (location == nullptr) {
1724         self->AssertPendingOOMException();
1725         *error_msg = "Failed to intern string for dex cache location";
1726         return false;
1727       }
1728       dex_cache->SetLocation(location);
1729     }
1730   }
1731   return true;
1732 }
1733 
// new_class_set is the set of classes that were read from the class table section in the image.
// If there was no class table section, it is null.
// Note: using a class here to avoid having to make ClassLinker internals public.
class AppImageLoadingHelper {
 public:
  // Registers the app image's dex caches with `class_linker` under `class_loader`,
  // patches dex cache locations and handles app-image strings. Returns false and
  // sets `*error_msg` on failure.
  static bool Update(
      ClassLinker* class_linker,
      gc::space::ImageSpace* space,
      Handle<mirror::ClassLoader> class_loader,
      Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
      InternTable* intern_table,
      std::string* error_msg)
      REQUIRES(!Locks::dex_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Interns the strings referenced from the app image, remapping references whose
  // string already exists in the runtime intern table.
  static void HandleAppImageStrings(gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
1752 
// Integrates a freshly loaded app image into the runtime: verifies interned string
// references (debug only), patches dex cache locations, registers the image's dex
// caches with the class linker, and interns app-image strings.
bool AppImageLoadingHelper::Update(
    ClassLinker* class_linker,
    gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
    InternTable* intern_table,
    std::string* error_msg)
    REQUIRES(!Locks::dex_lock_)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedTrace app_image_timing("AppImage:Updating");

  if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
    // In debug build, verify the string references before applying
    // the Runtime::LoadAppImageStartupCache() option.
    VerifyInternedStringReferences(space);
  }
  if (!PatchDexCacheLocations(dex_caches, intern_table, error_msg)) {
    return false;
  }
  DCHECK(class_loader.Get() != nullptr);
  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  const ImageHeader& header = space->GetImageHeader();
  int32_t number_of_dex_cache_arrays_cleared = 0;
  {
    // Register dex caches with the class loader.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
      const DexFile* const dex_file = dex_cache->GetDexFile();
      {
        // The dex file must not have been registered before.
        WriterMutexLock mu2(self, *Locks::dex_lock_);
        CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
        if (runtime->GetStartupCompleted()) {
          number_of_dex_cache_arrays_cleared++;
          // Free up dex cache arrays that we would only allocate at startup.
          // We do this here before registering and within the lock to be
          // consistent with `StartupCompletedTask`.
          dex_cache->UnlinkStartupCaches();
        }
        VLOG(image) << "App image registers dex file " << dex_file->GetLocation();
        class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
      }
    }
  }
  if (number_of_dex_cache_arrays_cleared == dex_caches->GetLength()) {
    // Free up dex cache arrays that we would only allocate at startup.
    // If `number_of_dex_cache_arrays_cleared` isn't the number of dex caches in
    // the image, then there is a race with the `StartupCompletedTask`, which
    // will release the space instead.
    space->ReleaseMetadata();
  }

  if (ClassLinker::kAppImageMayContainStrings) {
    HandleAppImageStrings(space);
  }

  if (kVerifyArtMethodDeclaringClasses) {
    // Debug-only: every image method's declaring class must be marked live.
    ScopedTrace timing("AppImage:VerifyDeclaringClasses");
    ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
    gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
    header.VisitPackedArtMethods([&](ArtMethod& method)
        REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
      ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
      if (klass != nullptr) {
        CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
      }
    }, space->Begin(), kRuntimePointerSize);
  }

  return true;
}
1825 
// Interns the strings referenced by an app image: the image's intern set is
// merged into the runtime's intern table, and any image string that already
// has an equivalent entry in the runtime table is remapped so that the image's
// string references point at the pre-existing (canonical) string object.
void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
  // Iterate over the string reference offsets stored in the image and intern
  // the strings they point to.
  ScopedTrace timing("AppImage:InternString");

  Runtime* const runtime = Runtime::Current();
  InternTable* const intern_table = runtime->GetInternTable();

  // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
  // for faster lookup.
  // TODO: Optimize with a bitmap or bloom filter
  SafeMap<mirror::String*, mirror::String*> intern_remap;
  // This callback is invoked by `AddImageStringsToTable` while holding the
  // intern table lock (see REQUIRES below). It removes from `interns` every
  // image string that already exists in the runtime table, recording
  // image-string -> existing-string pairs in `intern_remap`; whatever remains
  // in `interns` is then added to the table by the caller.
  auto func = [&](InternTable::UnorderedSet& interns)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::intern_table_lock_) {
    const size_t non_boot_image_strings = intern_table->CountInterns(
        /*visit_boot_images=*/false,
        /*visit_non_boot_images=*/true);
    VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
    VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
    // Visit the smaller of the two sets to compute the intersection.
    if (interns.size() < non_boot_image_strings) {
      // Image set is smaller: probe the runtime table (weak, then strong)
      // for each image string.
      for (auto it = interns.begin(); it != interns.end(); ) {
        ObjPtr<mirror::String> string = it->Read();
        ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
        if (existing == nullptr) {
          existing = intern_table->LookupStrongLocked(string);
        }
        if (existing != nullptr) {
          // Conflict: keep the runtime's string, drop the image's copy.
          intern_remap.Put(string.Ptr(), existing.Ptr());
          it = interns.erase(it);
        } else {
          ++it;
        }
      }
    } else {
      // Runtime's non-boot-image set is smaller: walk it and probe `interns`.
      intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
          REQUIRES_SHARED(Locks::mutator_lock_)
          REQUIRES(Locks::intern_table_lock_) {
        auto it = interns.find(root);
        if (it != interns.end()) {
          ObjPtr<mirror::String> existing = root.Read();
          intern_remap.Put(it->Read(), existing.Ptr());
          it = interns.erase(it);
        }
      }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
    }
    // Consistency check to ensure correctness: every string still in `interns`
    // must be absent from the runtime table (both weak and strong sets).
    if (kIsDebugBuild) {
      for (GcRoot<mirror::String>& root : interns) {
        ObjPtr<mirror::String> string = root.Read();
        CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
        CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
      }
    }
  };
  intern_table->AddImageStringsToTable(space, func);
  if (!intern_remap.empty()) {
    // Rewrite the image's interned-string references so that conflicting
    // entries point at the canonical runtime strings recorded above.
    VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
    VisitInternedStringReferences(
        space,
        [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
          auto it = intern_remap.find(str.Ptr());
          if (it != intern_remap.end()) {
            return ObjPtr<mirror::String>(it->second);
          }
          return str;
        });
  }
}
1896 
OpenOatDexFile(const OatFile * oat_file,const char * location,std::string * error_msg)1897 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1898                                                      const char* location,
1899                                                      std::string* error_msg)
1900     REQUIRES_SHARED(Locks::mutator_lock_) {
1901   DCHECK(error_msg != nullptr);
1902   std::unique_ptr<const DexFile> dex_file;
1903   const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, error_msg);
1904   if (oat_dex_file == nullptr) {
1905     return std::unique_ptr<const DexFile>();
1906   }
1907   std::string inner_error_msg;
1908   dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1909   if (dex_file == nullptr) {
1910     *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1911                               location,
1912                               oat_file->GetLocation().c_str(),
1913                               inner_error_msg.c_str());
1914     return std::unique_ptr<const DexFile>();
1915   }
1916 
1917   if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1918     CHECK(dex_file->GetSha1() != oat_dex_file->GetSha1());
1919     *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1920                               location,
1921                               dex_file->GetLocationChecksum(),
1922                               oat_dex_file->GetDexFileLocationChecksum());
1923     return std::unique_ptr<const DexFile>();
1924   }
1925   CHECK(dex_file->GetSha1() == oat_dex_file->GetSha1());
1926   return dex_file;
1927 }
1928 
OpenImageDexFiles(gc::space::ImageSpace * space,std::vector<std::unique_ptr<const DexFile>> * out_dex_files,std::string * error_msg)1929 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1930                                     std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1931                                     std::string* error_msg) {
1932   ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1933   const ImageHeader& header = space->GetImageHeader();
1934   ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1935   DCHECK(dex_caches_object != nullptr);
1936   ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1937       dex_caches_object->AsObjectArray<mirror::DexCache>();
1938   const OatFile* oat_file = space->GetOatFile();
1939   for (auto dex_cache : dex_caches->Iterate()) {
1940     std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1941     std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1942                                                              dex_file_location.c_str(),
1943                                                              error_msg);
1944     if (dex_file == nullptr) {
1945       return false;
1946     }
1947     dex_cache->SetDexFile(dex_file.get());
1948     out_dex_files->push_back(std::move(dex_file));
1949   }
1950   return true;
1951 }
1952 
// Opens the dex files of `space` from its oat file and initializes the
// image's dex caches with them. For boot images (null `class_loader`), the
// dex files are also appended to the boot class path. On success the opened
// dex files are appended to `out_dex_files`; on failure, returns false with
// `*error_msg` set.
bool ClassLinker::OpenAndInitImageDexFiles(
    const gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
    std::string* error_msg) {
  DCHECK(out_dex_files != nullptr);
  // A null class loader means this is a boot image.
  const bool app_image = class_loader != nullptr;
  const ImageHeader& header = space->GetImageHeader();
  ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
  DCHECK(dex_caches_object != nullptr);
  Thread* const self = Thread::Current();
  // NOTE(review): only one handle is created below; scope size 3 appears
  // larger than needed — confirm against the history of this function.
  StackHandleScope<3> hs(self);
  Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
      hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
  const OatFile* oat_file = space->GetOatFile();
  // The image must have exactly one dex cache per dex file in the oat file.
  if (oat_file->GetOatHeader().GetDexFileCount() !=
      static_cast<uint32_t>(dex_caches->GetLength())) {
    *error_msg =
        "Dex cache count and dex file count mismatch while trying to initialize from image";
    return false;
  }

  for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
    std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
    // At this point, the location in the dex cache (from `--dex-location` passed to dex2oat) is not
    // necessarily the actual dex location on device. `OpenOatDexFile` uses the table
    // `OatFile::oat_dex_files_` to find the dex file. For each dex file, the table contains two
    // keys corresponding to it, one from the oat header (from `--dex-location` passed to dex2oat)
    // and the other being the actual dex location on device, unless they are the same. The lookup
    // is based on the former key. Later, `PatchDexCacheLocations` will replace the location in the
    // dex cache with the actual dex location, which is the latter key in the table.
    std::unique_ptr<const DexFile> dex_file =
        OpenOatDexFile(oat_file, dex_file_location.c_str(), error_msg);
    if (dex_file == nullptr) {
      return false;
    }

    {
      // Native fields are all null.  Initialize them.
      WriterMutexLock mu(self, *Locks::dex_lock_);
      dex_cache->Initialize(dex_file.get(), class_loader.Get());
    }
    if (!app_image) {
      // Register dex files, keep track of existing ones that are conflicts.
      AppendToBootClassPath(dex_file.get(), dex_cache);
    }
    out_dex_files->push_back(std::move(dex_file));
  }
  return true;
}
2003 
// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
// together and caches some intermediate results. Instantiated per pointer size so the
// checks can use the image's `kPointerSize` at compile time.
template <PointerSize kPointerSize>
class ImageChecker final {
 public:
  // Walks every marked object in the objects section of `space` and, for each
  // class object, verifies that its fields/methods/vtable/IMT/iftable entries
  // all point back at the class (where expected) and live inside a boot image
  // methods section.
  static void CheckObjects(gc::Heap* heap, gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // There can be no GC during boot image initialization, so we do not need read barriers.
    ScopedDebugDisallowReadBarriers sddrb(Thread::Current());

    CHECK_EQ(kPointerSize, space->GetImageHeader().GetPointerSize());
    const ImageSection& objects_section = space->GetImageHeader().GetObjectsSection();
    uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
    uintptr_t objects_begin = space_begin + objects_section.Offset();
    uintptr_t objects_end = objects_begin + objects_section.Size();
    ImageChecker ic(heap);
    auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(obj != nullptr);
      mirror::Class* obj_klass = obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
      CHECK(obj_klass != nullptr) << "Null class in object " << obj;
      mirror::Class* class_class = obj_klass->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
      CHECK(class_class != nullptr) << "Null class class " << obj;
      // Only class objects (objects whose class is java.lang.Class) get the
      // deep method/field checks below.
      if (obj_klass == class_class) {
        auto klass = obj->AsClass();
        // Every declared field must name this class as its declaring class.
        for (ArtField& field : klass->GetFields()) {
          CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
        }
        for (ArtMethod& m : klass->GetMethods(kPointerSize)) {
          ic.CheckArtMethod(&m, klass);
        }
        ObjPtr<mirror::PointerArray> vtable =
            klass->GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
        if (vtable != nullptr) {
          ic.CheckArtMethodPointerArray(vtable);
        }
        if (klass->ShouldHaveImt()) {
          ImTable* imt = klass->GetImt(kPointerSize);
          for (size_t i = 0; i < ImTable::kSize; ++i) {
            // IMT entries may come from other classes; no expected declaring class.
            ic.CheckArtMethod(imt->Get(i, kPointerSize), /*expected_class=*/ nullptr);
          }
        }
        if (klass->ShouldHaveEmbeddedVTable()) {
          for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
            ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, kPointerSize),
                              /*expected_class=*/ nullptr);
          }
        }
        ObjPtr<mirror::IfTable> iftable =
            klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
        int32_t iftable_count = (iftable != nullptr) ? iftable->Count() : 0;
        for (int32_t i = 0; i < iftable_count; ++i) {
          ObjPtr<mirror::PointerArray> method_array =
              iftable->GetMethodArrayOrNull<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
          if (method_array != nullptr) {
            ic.CheckArtMethodPointerArray(method_array);
          }
        }
      }
    };
    space->GetLiveBitmap()->VisitMarkedRange(objects_begin, objects_end, visitor);
  }

 private:
  // Caches the begin address of every boot image space, so that
  // `CheckArtMethod` can test section membership without re-querying the heap.
  explicit ImageChecker(gc::Heap* heap) {
    ArrayRef<gc::space::ImageSpace* const> spaces(heap->GetBootImageSpaces());
    space_begin_.reserve(spaces.size());
    for (gc::space::ImageSpace* space : spaces) {
      // The image header is the first thing in each space.
      CHECK_EQ(static_cast<const void*>(space->Begin()), &space->GetImageHeader());
      space_begin_.push_back(space->Begin());
    }
  }

  // Checks that `m` has the expected declaring class (null for runtime
  // methods, non-null for copied methods, `expected_class` when given) and
  // that it lives in the methods or runtime-methods section of some boot
  // image space.
  void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
    if (m->IsRuntimeMethod()) {
      CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
    } else if (m->IsCopied()) {
      CHECK(declaring_class != nullptr) << m->PrettyMethod();
    } else if (expected_class != nullptr) {
      CHECK_EQ(declaring_class, expected_class) << m->PrettyMethod();
    }
    bool contains = false;
    for (const uint8_t* begin : space_begin_) {
      const size_t offset = reinterpret_cast<uint8_t*>(m) - begin;
      const ImageHeader* header = reinterpret_cast<const ImageHeader*>(begin);
      if (header->GetMethodsSection().Contains(offset) ||
          header->GetRuntimeMethodsSection().Contains(offset)) {
        contains = true;
        break;
      }
    }
    CHECK(contains) << m << " not found";
  }

  // Applies `CheckArtMethod` (with no expected class) to every non-null
  // method pointer in `arr`.
  void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(arr != nullptr);
    for (int32_t j = 0; j < arr->GetLength(); ++j) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(j, kPointerSize);
      CHECK(method != nullptr);
      CheckArtMethod(method, /*expected_class=*/ nullptr);
    }
  }

  // Begin addresses of all boot image spaces (see constructor).
  std::vector<const uint8_t*> space_begin_;
};
2111 
VerifyAppImage(const ImageHeader & header,const Handle<mirror::ClassLoader> & class_loader,ClassTable * class_table,gc::space::ImageSpace * space)2112 static void VerifyAppImage(const ImageHeader& header,
2113                            const Handle<mirror::ClassLoader>& class_loader,
2114                            ClassTable* class_table,
2115                            gc::space::ImageSpace* space)
2116     REQUIRES_SHARED(Locks::mutator_lock_) {
2117   header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2118     ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
2119     if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2120       CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
2121           << mirror::Class::PrettyClass(klass);
2122     }
2123   }, space->Begin(), kRuntimePointerSize);
2124   {
2125     // Verify that all direct interfaces of classes in the class table are also resolved.
2126     std::vector<ObjPtr<mirror::Class>> classes;
2127     auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
2128         REQUIRES_SHARED(Locks::mutator_lock_) {
2129       if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
2130         classes.push_back(klass);
2131       }
2132       return true;
2133     };
2134     class_table->Visit(verify_direct_interfaces_in_table);
2135     for (ObjPtr<mirror::Class> klass : classes) {
2136       for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
2137         CHECK(klass->GetDirectInterface(i) != nullptr)
2138             << klass->PrettyDescriptor() << " iface #" << i;
2139       }
2140     }
2141   }
2142 }
2143 
// Incorporates a loaded image space (boot image if `class_loader` is null,
// app image otherwise) into the class linker: validates the image against
// the runtime, fixes up ArtMethod entry points and code items, merges the
// image's class table into the loader's table, and (for app images) updates
// class loaders, intern tables and subtype-check state.
// Returns false with `*error_msg` set on any validation failure.
bool ClassLinker::AddImageSpace(gc::space::ImageSpace* space,
                                Handle<mirror::ClassLoader> class_loader,
                                ClassLoaderContext* context,
                                const std::vector<std::unique_ptr<const DexFile>>& dex_files,
                                std::string* error_msg) {
  DCHECK(error_msg != nullptr);
  const uint64_t start_time = NanoTime();
  const bool app_image = class_loader != nullptr;
  const ImageHeader& header = space->GetImageHeader();
  ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
  DCHECK(dex_caches_object != nullptr);
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  Thread* const self = Thread::Current();
  // Check that the image is what we are expecting.
  if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
    *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
                              static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
                              static_cast<size_t>(image_pointer_size_));
    return false;
  }
  // App images carry one extra root (the special root checked below).
  size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
  if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
    *error_msg = StringPrintf("Expected %zu image roots but got %d",
                              expected_image_roots,
                              header.GetImageRoots()->GetLength());
    return false;
  }
  StackHandleScope<3> hs(self);
  Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
      hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
  Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
      header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
  MutableHandle<mirror::Object> special_root(hs.NewHandle(
      app_image ? header.GetImageRoot(ImageHeader::kSpecialRoots) : nullptr));
  DCHECK(class_roots != nullptr);
  if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
    *error_msg = StringPrintf("Expected %d class roots but got %d",
                              class_roots->GetLength(),
                              static_cast<int32_t>(ClassRoot::kMax));
    return false;
  }
  // Check against existing class roots to make sure they match the ones in the boot image.
  ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
      *error_msg = "App image class roots must have pointer equality with runtime ones.";
      return false;
    }
  }
  const OatFile* oat_file = space->GetOatFile();

  if (app_image) {
    ScopedAssertNoThreadSuspension sants("Checking app image");
    if (special_root == nullptr) {
      *error_msg = "Unexpected null special root in app image";
      return false;
    } else if (special_root->IsByteArray()) {
      // The special root is a serialized oat header (runtime-generated app
      // image): validate it against the runtime and the loaded dex files.
      OatHeader* oat_header = reinterpret_cast<OatHeader*>(special_root->AsByteArray()->GetData());
      if (!oat_header->IsValid()) {
        *error_msg = "Invalid oat header in special root";
        return false;
      }
      if (oat_file->GetVdexFile()->GetNumberOfDexFiles() != oat_header->GetDexFileCount()) {
        *error_msg = "Checksums count does not match";
        return false;
      }
      // The image must have been generated for the same GC configuration.
      if (oat_header->IsConcurrentCopying() != gUseReadBarrier) {
        *error_msg = "GCs do not match";
        return false;
      }

      // Check if the dex checksums match the dex files that we just loaded.
      // The checksums are laid out right after the oat header.
      uint32_t* checksums = reinterpret_cast<uint32_t*>(
          reinterpret_cast<uint8_t*>(oat_header) + oat_header->GetHeaderSize());
      for (uint32_t i = 0; i  < oat_header->GetDexFileCount(); ++i) {
        uint32_t dex_checksum = dex_files.at(i)->GetHeader().checksum_;
        if (checksums[i] != dex_checksum) {
          *error_msg = StringPrintf(
              "Image and dex file checksums did not match for %s: image has %d, dex file has %d",
              dex_files.at(i)->GetLocation().c_str(),
              checksums[i],
              dex_checksum);
          return false;
        }
      }

      // Validate the class loader context.
      const char* stored_context = oat_header->GetStoreValueByKey(OatHeader::kClassPathKey);
      if (stored_context == nullptr) {
        *error_msg = "Missing class loader context in special root";
        return false;
      }
      if (context->VerifyClassLoaderContextMatch(stored_context) ==
              ClassLoaderContext::VerificationResult::kMismatch) {
        *error_msg = StringPrintf("Class loader contexts don't match: %s", stored_context);
        return false;
      }

      const char* oat_apex_versions =
          oat_header->GetStoreValueByKeyUnsafe(OatHeader::kApexVersionsKey);
      if (oat_apex_versions == nullptr) {
        *error_msg = StringPrintf("Missing apex versions in special root in runtime image '%s'",
                                  space->GetImageLocation().c_str());
        return false;
      }

      // Validate the apex versions.
      if (!gc::space::ImageSpace::ValidateApexVersions(oat_apex_versions,
                                                       runtime->GetApexVersions(),
                                                       space->GetImageLocation(),
                                                       error_msg)) {
        return false;
      }

      // Validate the boot classpath.
      const char* bcp = oat_header->GetStoreValueByKey(OatHeader::kBootClassPathKey);
      if (bcp == nullptr) {
        *error_msg = "Missing boot classpath in special root";
        return false;
      }
      std::string runtime_bcp = android::base::Join(runtime->GetBootClassPathLocations(), ':');
      if (strcmp(bcp, runtime_bcp.c_str()) != 0) {
        *error_msg = StringPrintf("Mismatch boot classpath: image has %s, runtime has %s",
                                  bcp,
                                  runtime_bcp.c_str());
        return false;
      }

      // Validate the dex checksums of the boot classpath.
      const char* bcp_checksums =
          oat_header->GetStoreValueByKey(OatHeader::kBootClassPathChecksumsKey);
      if (bcp_checksums == nullptr) {
        *error_msg = "Missing boot classpath checksums in special root";
        return false;
      }
      if (strcmp(bcp_checksums, runtime->GetBootClassPathChecksums().c_str()) != 0) {
        *error_msg = StringPrintf("Mismatch boot classpath checksums: image has %s, runtime has %s",
                                  bcp_checksums,
                                  runtime->GetBootClassPathChecksums().c_str());
        return false;
      }
    } else if (IsBootClassLoader(special_root.Get())) {
      *error_msg = "Unexpected BootClassLoader in app image";
      return false;
    } else if (!special_root->IsClassLoader()) {
      *error_msg = "Unexpected special root in app image";
      return false;
    }
  }

  // Expensive structural checks of the image's objects, boot images only.
  if (kCheckImageObjects) {
    if (!app_image) {
      if (image_pointer_size_ == PointerSize::k64) {
        ImageChecker<PointerSize::k64>::CheckObjects(heap, space);
      } else {
        ImageChecker<PointerSize::k32>::CheckObjects(heap, space);
      }
    }
  }

  // Set entry point to interpreter if in InterpretOnly mode.
  if (!runtime->IsAotCompiler() &&
      (runtime->GetInstrumentation()->InterpretOnly() ||
       runtime->IsJavaDebuggable())) {
    // Set image methods' entry point to interpreter.
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!method.IsRuntimeMethod()) {
        DCHECK(method.GetDeclaringClass() != nullptr);
        if (!method.IsNative() && !method.IsResolutionMethod()) {
          method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
                                                            image_pointer_size_);
        }
      }
    }, space->Begin(), image_pointer_size_);
  }

  if (!runtime->IsAotCompiler()) {
    // If the boot image is not loaded by the zygote, we don't need the shared
    // memory optimization.
    // If we are profiling the boot classpath, we disable the shared memory
    // optimization to make sure boot classpath methods all get properly
    // profiled.
    // For debuggable runtimes we don't use AOT code, so don't use shared memory
    // optimization so the methods can be JITed better.
    //
    // We need to disable the flag before doing ResetCounter below, as counters
    // of shared memory method always hold the "hot" value.
    if (!runtime->IsZygote() ||
        runtime->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath() ||
        runtime->IsJavaDebuggable()) {
      header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
        method.ClearMemorySharedMethod();
      }, space->Begin(), image_pointer_size_);
    }

    // Compact dex is no longer supported in images.
    for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
      CHECK(!dex_cache->GetDexFile()->IsCompactDexFile());
    }

    ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
    bool can_use_nterp = interpreter::CanRuntimeUseNterp();
    uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      // In the image, the `data` pointer field of the ArtMethod contains the code
      // item offset. Change this to the actual pointer to the code item.
      if (method.HasCodeItem()) {
        const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
            reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
        method.SetCodeItem(code_item);
        // The hotness counter may have changed since we compiled the image, so
        // reset it with the runtime value.
        method.ResetCounter(hotness_threshold);
      }
      if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
        if (can_use_nterp) {
          // Set image methods' entry point that point to the nterp trampoline to the
          // nterp entry point. This allows taking the fast path when doing a
          // nterp->nterp call.
          DCHECK(!method.StillNeedsClinitCheck());
          method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
        } else {
          method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
        }
      }
    }, space->Begin(), image_pointer_size_);
  }

  // Under soft-fail verification, methods must go back through access checks.
  if (runtime->IsVerificationSoftFail()) {
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (method.IsManagedAndInvokable()) {
        method.ClearSkipAccessChecks();
      }
    }, space->Begin(), image_pointer_size_);
  }

  // Find or create the class table for this loader.
  ClassTable* class_table = nullptr;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    class_table = InsertClassTableForClassLoader(class_loader.Get());
  }
  // If we have a class table section, read it and use it for verification in
  // UpdateAppImageClassLoadersAndDexCaches.
  ClassTable::ClassSet temp_set;
  const ImageSection& class_table_section = header.GetClassTableSection();
  const bool added_class_table = class_table_section.Size() > 0u;
  if (added_class_table) {
    const uint64_t start_time2 = NanoTime();
    size_t read_count = 0;
    // Read the class set in place from the image (no copy).
    temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
                                    /*make copy*/false,
                                    &read_count);
    VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
  }
  if (app_image) {
    // Fix up dex caches, intern strings, etc. for the app image.
    if (!AppImageLoadingHelper::Update(
            this, space, class_loader, dex_caches, intern_table_, error_msg)) {
      return false;
    }

    {
      ScopedTrace trace("AppImage:UpdateClassLoaders");
      // Update class loader and resolved strings. If added_class_table is false, the resolved
      // strings were forwarded in UpdateAppImageClassLoadersAndDexCaches.
      ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
      for (const ClassTable::TableSlot& root : temp_set) {
        // Note: We probably don't need the read barrier unless we copy the app image objects into
        // the region space.
        ObjPtr<mirror::Class> klass(root.Read());
        // Do not update class loader for boot image classes where the app image
        // class loader is only the initiating loader but not the defining loader.
        if (space->HasAddress(klass.Ptr())) {
          klass->SetClassLoader(loader);
        } else {
          DCHECK(klass->IsBootStrapClassLoaded());
          DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass.Ptr()));
        }
      }
    }

    if (kBitstringSubtypeCheckEnabled) {
      // Every class in the app image has initially SubtypeCheckInfo in the
      // Uninitialized state.
      //
      // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
      // after class initialization is complete. The app image ClassStatus as-is
      // are almost all ClassStatus::Initialized, and being in the
      // SubtypeCheckInfo::kUninitialized state is violating that invariant.
      //
      // Force every app image class's SubtypeCheck to be at least kInitialized.
      //
      // See also ImageWriter::FixupClass.
      ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
      MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
      for (const ClassTable::TableSlot& root : temp_set) {
        SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
      }
    }
  }
  if (!oat_file->GetBssGcRoots().empty()) {
    // Insert oat file to class table for visiting .bss GC roots.
    class_table->InsertOatFile(oat_file);
  }

  // Publish the image's classes into the loader's class table.
  if (added_class_table) {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    class_table->AddClassSet(std::move(temp_set));
  }

  if (kIsDebugBuild && app_image) {
    // This verification needs to happen after the classes have been added to the class loader.
    // Since it ensures classes are in the class table.
    ScopedTrace trace("AppImage:Verify");
    VerifyAppImage(header, class_loader, class_table, space);
  }

  VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
  return true;
}
2463 
AddImageSpaces(ArrayRef<gc::space::ImageSpace * > spaces,Handle<mirror::ClassLoader> class_loader,ClassLoaderContext * context,std::vector<std::unique_ptr<const DexFile>> * dex_files,std::string * error_msg)2464 bool ClassLinker::AddImageSpaces(ArrayRef<gc::space::ImageSpace*> spaces,
2465                                  Handle<mirror::ClassLoader> class_loader,
2466                                  ClassLoaderContext* context,
2467                                  /*out*/ std::vector<std::unique_ptr<const DexFile>>* dex_files,
2468                                  /*out*/ std::string* error_msg) {
2469   std::vector<std::vector<std::unique_ptr<const DexFile>>> dex_files_by_space_index;
2470   for (const gc::space::ImageSpace* space : spaces) {
2471     std::vector<std::unique_ptr<const DexFile>> space_dex_files;
2472     if (!OpenAndInitImageDexFiles(space, class_loader, /*out*/ &space_dex_files, error_msg)) {
2473       return false;
2474     }
2475     dex_files_by_space_index.push_back(std::move(space_dex_files));
2476   }
2477   // This must be done in a separate loop after all dex files are initialized because there can be
2478   // references from an image space to another image space that comes after it.
2479   for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
2480     std::vector<std::unique_ptr<const DexFile>>& space_dex_files = dex_files_by_space_index[i];
2481     if (!AddImageSpace(spaces[i], class_loader, context, space_dex_files, error_msg)) {
2482       return false;
2483     }
2484     // Append opened dex files at the end.
2485     std::move(space_dex_files.begin(), space_dex_files.end(), std::back_inserter(*dex_files));
2486   }
2487   return true;
2488 }
2489 
// Visits the GC roots held in the class tables (boot table and each class
// loader's table), plus any roots logged while new-root logging was enabled.
// Which subsets are visited is controlled by `flags` (see VisitRootFlags).
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
  // enabling tracing requires the mutator lock, there are no race conditions here.
  const bool tracing_enabled = Trace::IsTracingEnabled();
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  if (gUseReadBarrier) {
    // We do not track new roots for CC.
    DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
                          kVisitRootFlagClearRootLog |
                          kVisitRootFlagStartLoggingNewRoots |
                          kVisitRootFlagStopLoggingNewRoots));
  }
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    // Argument for how root visiting deals with ArtField and ArtMethod roots.
    // There are 3 GC cases to handle:
    // Non moving concurrent:
    // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
    // live by the class and class roots.
    //
    // Moving non-concurrent:
    // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move.
    // To prevent missing roots, this case needs to ensure that there is no
    // suspend points between the point which we allocate ArtMethod arrays and place them in a
    // class which is in the class table.
    //
    // Moving concurrent:
    // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
    // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
    //
    // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
    // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
    // these objects.
    UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
    boot_class_table_->VisitRoots(root_visitor);
    // If tracing is enabled, then mark all the class loaders to prevent unloading.
    if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
      for (const ClassLoaderData& data : class_loaders_) {
        GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
        root.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
      }
    }
  } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
    // Visit only the roots recorded since logging of new roots was started.
    for (auto& root : new_roots_) {
      ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootStickyClass));
      ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
      // Concurrent moving GC marked new roots through the to-space invariant.
      DCHECK_EQ(new_ref, old_ref);
    }
    for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
      for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
        ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
        if (old_ref != nullptr) {
          DCHECK(old_ref->IsClass() || old_ref->IsString());
          root.VisitRoot(visitor, RootInfo(kRootStickyClass));
          ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
          // Concurrent moving GC marked new roots through the to-space invariant.
          DCHECK_EQ(new_ref, old_ref);
        }
      }
    }
  }
  if (!gUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
    new_roots_.clear();
    new_bss_roots_boot_oat_files_.clear();
  }
  if (!gUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if (!gUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // We deliberately ignore the class roots in the image since we
  // handle image roots by using the MS/CMS rescanning of dirty cards.
}
2565 
// Keep in sync with InitCallback. Anything we visit, we need to
// reinit references to when reinitializing a ClassLinker from a
// mapped image.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags, bool visit_class_roots) {
  // The class roots array is reported as a strong VM-internal root.
  class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  if (visit_class_roots) {
    VisitClassRoots(visitor, flags);
  }
  // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
  // unloading if we are marking roots.
  DropFindArrayClassCache();
}
2578 
// ClassLoaderVisitor that forwards each class *defined* by a visited class
// loader to the wrapped ClassVisitor, stopping all further visiting once the
// wrapped visitor returns false.
class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
      : visitor_(visitor),
        done_(false) {}

  void Visit(ObjPtr<mirror::ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
    ClassTable* const class_table = class_loader->GetClassTable();
    if (!done_ && class_table != nullptr) {
      DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
      if (!class_table->Visit(visitor)) {
        // If the visitor ClassTable returns false it means that we don't need to continue.
        done_ = true;
      }
    }
  }

 private:
  // Class visitor that limits the class visits from a ClassTable to the classes with
  // the provided defining class loader. This filter is used to avoid multiple visits
  // of the same class which can be recorded for multiple initiating class loaders.
  class DefiningClassLoaderFilterVisitor : public ClassVisitor {
   public:
    DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
                                     ClassVisitor* visitor)
        : defining_class_loader_(defining_class_loader), visitor_(visitor) { }

    bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
      // Skip classes for which this loader is only an initiating loader.
      if (klass->GetClassLoader() != defining_class_loader_) {
        return true;
      }
      return (*visitor_)(klass);
    }

    const ObjPtr<mirror::ClassLoader> defining_class_loader_;
    ClassVisitor* const visitor_;
  };

  ClassVisitor* const visitor_;
  // If done is true then we don't need to do any more visiting.
  bool done_;
};
2622 
VisitClassesInternal(ClassVisitor * visitor)2623 void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
2624   if (boot_class_table_->Visit(*visitor)) {
2625     VisitClassLoaderClassesVisitor loader_visitor(visitor);
2626     VisitClassLoaders(&loader_visitor);
2627   }
2628 }
2629 
// Visits all classes while holding the class linker lock; the visitor must
// not suspend while a thread is attached.
void ClassLinker::VisitClasses(ClassVisitor* visitor) {
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
  // Not safe to have thread suspension when we are holding a lock.
  if (self != nullptr) {
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    VisitClassesInternal(visitor);
  } else {
    // NOTE(review): `self` can apparently be null here (presumably a caller
    // without an attached thread), in which case the no-suspension assertion
    // scope cannot be installed — confirm against callers.
    VisitClassesInternal(visitor);
  }
}
2641 
// Class visitor that accumulates every visited class into a vector. Only
// used on the !kMovingClasses path of VisitClassesWithoutClassesLock, since
// the raw ObjPtrs are held across the whole visit.
class GetClassesInToVector : public ClassVisitor {
 public:
  bool operator()(ObjPtr<mirror::Class> klass) override {
    classes_.push_back(klass);
    return true;  // Never stop the visit early.
  }
  std::vector<ObjPtr<mirror::Class>> classes_;
};
2650 
// Class visitor that copies visited classes into a fixed-size ObjectArray.
// Stops the visit (returns false) once more classes are seen than fit.
class GetClassInToObjectArray : public ClassVisitor {
 public:
  explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
      : arr_(arr), index_(0) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
    ++index_;
    if (index_ <= arr_->GetLength()) {
      arr_->Set(index_ - 1, klass);
      return true;
    }
    return false;
  }

  // True iff every visited class fit into the array.
  bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return index_ <= arr_->GetLength();
  }

 private:
  mirror::ObjectArray<mirror::Class>* const arr_;
  // Number of classes seen so far; the most recent one went to slot index_ - 1.
  int32_t index_;
};
2673 
// Visits all classes without holding Locks::classlinker_classes_lock_ across
// the visitor callbacks: the classes are first snapshotted into secondary
// storage (a vector when classes cannot move, an ObjectArray otherwise) and
// the snapshot is then visited lock-free.
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
  // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
  // is avoiding duplicates.
  if (!kMovingClasses) {
    // Raw ObjPtrs are safe to hold here since classes cannot move.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    GetClassesInToVector accumulator;
    VisitClasses(&accumulator);
    for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
      if (!visitor->operator()(klass)) {
        return;
      }
    }
  } else {
    Thread* const self = Thread::Current();
    StackHandleScope<1> hs(self);
    auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
    // We size the array assuming classes won't be added to the class table during the visit.
    // If this assumption fails we iterate again.
    while (true) {
      size_t class_table_size;
      {
        ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
        // Add 100 in case new classes get loaded when we are filling in the object array.
        class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
      }
      ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
      classes.Assign(
          mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
      CHECK(classes != nullptr);  // OOME.
      GetClassInToObjectArray accumulator(classes.Get());
      VisitClasses(&accumulator);
      if (accumulator.Succeeded()) {
        break;
      }
    }
    for (int32_t i = 0; i < classes->GetLength(); ++i) {
      // If the class table shrank during creation of the classes array we expect null elements. If
      // the class table grew then the loop repeats. If classes are created after the loop has
      // finished then we don't visit.
      ObjPtr<mirror::Class> klass = classes->Get(i);
      if (klass != nullptr && !visitor->operator()(klass)) {
        return;
      }
    }
  }
}
2720 
ClassLinker::~ClassLinker() {
  Thread* const self = Thread::Current();
  // First pass: release weak roots and JIT/CHA state while the per-loader
  // allocators and class tables are still alive.
  for (const ClassLoaderData& data : class_loaders_) {
    // CHA unloading analysis is not needed. No negative consequences are expected because
    // all the classloaders are deleted at the same time.
    PrepareToDeleteClassLoader(self, data, /*cleanup_cha=*/false);
  }
  // Second pass: free the per-loader allocators and class tables themselves.
  for (const ClassLoaderData& data : class_loaders_) {
    delete data.allocator;
    delete data.class_table;
  }
  class_loaders_.clear();
  // Drain any still-queued callbacks: the unique_ptr takes ownership of the
  // front element so it is deleted after being unlinked by pop_front().
  while (!running_visibly_initialized_callbacks_.empty()) {
    std::unique_ptr<VisiblyInitializedCallback> callback(
        std::addressof(running_visibly_initialized_callbacks_.front()));
    running_visibly_initialized_callbacks_.pop_front();
  }
}
2739 
// Releases per-class-loader state (weak global root, JIT code, CHA
// dependencies, cached critical-native entries) ahead of deleting the
// loader's allocator and class table.
void ClassLinker::PrepareToDeleteClassLoader(Thread* self,
                                             const ClassLoaderData& data,
                                             bool cleanup_cha) {
  Runtime* const runtime = Runtime::Current();
  JavaVMExt* const vm = runtime->GetJavaVM();
  vm->DeleteWeakGlobalRef(self, data.weak_root);
  // Notify the JIT that we need to remove the methods and/or profiling info.
  if (runtime->GetJit() != nullptr) {
    jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
    if (code_cache != nullptr) {
      // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
      code_cache->RemoveMethodsIn(self, *data.allocator);
    }
  } else if (cha_ != nullptr) {
    // If we don't have a JIT, we need to manually remove the CHA dependencies.
    cha_->RemoveDependenciesForLinearAlloc(self, data.allocator);
  }
  // Cleanup references to single implementation ArtMethods that will be deleted.
  if (cleanup_cha) {
    CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
    data.class_table->Visit<kWithoutReadBarrier>(visitor);
  }
  {
    // Drop cached entries whose ArtMethod lives in the allocator being deleted.
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto end = critical_native_code_with_clinit_check_.end();
    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
      if (data.allocator->ContainsUnsafe(it->first)) {
        it = critical_native_code_with_clinit_check_.erase(it);
      } else {
        ++it;
      }
    }
  }
}
2774 
AllocPointerArray(Thread * self,size_t length)2775 ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2776   return ObjPtr<mirror::PointerArray>::DownCast(
2777       image_pointer_size_ == PointerSize::k64
2778           ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2779           : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
2780 }
2781 
// Allocates a new DexCache object and sets its location string from
// `dex_file`. Returns null with a pending OOME on allocation failure.
ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
  StackHandleScope<1> hs(self);
  auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
      GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
  if (dex_cache == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Use InternWeak() so that the location String can be collected when the ClassLoader
  // with this DexCache is collected.
  ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
  if (location == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  dex_cache->SetLocation(location);
  return dex_cache.Get();
}
2800 
// Allocates a DexCache (see AllocDexCache) and initializes it with
// `dex_file` and `class_loader` under the dex lock. Returns null with a
// pending OOME on allocation failure.
ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
    Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
  StackHandleScope<1> hs(self);
  // Keep the loader safely reachable across the allocation.
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
  if (dex_cache != nullptr) {
    WriterMutexLock mu(self, *Locks::dex_lock_);
    dex_cache->Initialize(&dex_file, h_class_loader.Get());
  }
  return dex_cache;
}
2812 
// Allocates storage for a Class object of `class_size` bytes, running
// `pre_fence_visitor` to initialize it before the object is published.
// A non-movable allocation is used when either classes cannot move globally
// or the caller requested kMovable == false. Returns null with a pending
// OOME on failure.
template <bool kMovable, typename PreFenceVisitor>
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
                                              ObjPtr<mirror::Class> java_lang_Class,
                                              uint32_t class_size,
                                              const PreFenceVisitor& pre_fence_visitor) {
  DCHECK_GE(class_size, sizeof(mirror::Class));
  gc::Heap* heap = Runtime::Current()->GetHeap();
  ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
      heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
      heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
  if (UNLIKELY(k == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  return k->AsClass();
}
2829 
// Convenience overload using the default InitializeClassVisitor as the
// pre-fence initializer.
template <bool kMovable>
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
                                              ObjPtr<mirror::Class> java_lang_Class,
                                              uint32_t class_size) {
  mirror::Class::InitializeClassVisitor visitor(class_size);
  return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
}
2837 
// Convenience overload: allocates a class whose java.lang.Class is the
// java.lang.Class class root.
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
  return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
}
2841 
// Allocates the array class for the primitive type `primitive_root` and
// registers it as the class root `array_root`. Only the component type is
// set here; the remaining setup is completed later (see
// FinishArrayClassSetup).
void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
                                           ClassRoot primitive_root,
                                           ClassRoot array_root) {
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
      self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
  ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
  DCHECK(component_type->IsPrimitive());
  array_class->SetComponentType(component_type);
  SetClassRoot(array_root, array_class);
}
2858 
// Completes setup of an array class: superclass, vtable/IMT shared with
// java.lang.Object, class flags derived from the component type, the shared
// array iftable, access flags, and finally marks the class visibly
// initialized.
void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
  ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
  array_class->SetSuperClass(java_lang_Object);
  array_class->SetVTable(java_lang_Object->GetVTable());
  array_class->SetPrimitiveType(Primitive::kPrimNot);
  ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
  DCHECK_LT(component_type->GetPrimitiveTypeSizeShift(), 4u);
  // Encode the component size shift and the array kind in the class flags.
  uint32_t class_flags =
      component_type->GetPrimitiveTypeSizeShift() << mirror::kArrayComponentSizeShiftShift;
  class_flags |= component_type->IsPrimitive()
                     ? (mirror::kClassFlagNoReferenceFields | mirror::kClassFlagPrimitiveArray)
                     : mirror::kClassFlagObjectArray;
  array_class->SetClassFlags(class_flags);
  array_class->SetClassLoader(component_type->GetClassLoader());
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
  array_class->PopulateEmbeddedVTable(image_pointer_size_);
  ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
  array_class->SetImt(object_imt, image_pointer_size_);
  DCHECK_EQ(array_class->NumMethods(), 0u);

  // don't need to set new_class->SetObjectSize(..)
  // because Object::SizeOf delegates to Array::SizeOf

  // All arrays have java/lang/Cloneable and java/io/Serializable as
  // interfaces.  We need to set that up here, so that stuff like
  // "instanceof" works right.

  // Use the single, global copies of "interfaces" and "iftable"
  // (remember not to free them for arrays).
  {
    ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
    CHECK(array_iftable != nullptr);
    array_class->SetIfTable(array_iftable);
  }

  // Inherit access flags from the component type.
  int access_flags = component_type->GetAccessFlags();
  // Lose any implementation detail flags; in particular, arrays aren't finalizable.
  access_flags &= kAccJavaFlagsMask;
  // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
  // and remove "interface".
  access_flags |= kAccAbstract | kAccFinal;
  access_flags &= ~kAccInterface;

  array_class->SetAccessFlagsDuringLinking(access_flags);

  // Array classes are fully initialized either during single threaded startup,
  // or from a pre-fence visitor, so visibly initialized.
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
}
2909 
// Finishes setup of a core (boot) array class identified by its class root
// and inserts it into the class table; insertion is CHECKed to be the first
// for this descriptor.
void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
  // Do not hold lock on the array class object, the initialization of
  // core array classes is done while the process is still single threaded.
  ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
  FinishArrayClassSetup(array_class);

  std::string descriptor;
  const char* raw_descriptor = array_class->GetDescriptor(&descriptor);
  DCHECK(raw_descriptor == descriptor.c_str());
  size_t hash = ComputeModifiedUtf8Hash(descriptor);
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
  CHECK(existing == nullptr);
}
2923 
// Allocates an ObjectArray<StackTraceElement> of the given length.
ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
    Thread* self,
    size_t length) {
  return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
      self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
}
2930 
// Waits until `klass` becomes resolved (or erroneous) and returns the
// resolved class, or null with a pending exception on failure. Temporary
// classes are handled by waiting for retirement and re-looking up the final
// class; a resolution cycle on this thread throws ClassCircularityError.
ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
                                                  std::string_view descriptor,
                                                  ObjPtr<mirror::Class> klass) {
  DCHECK(klass != nullptr);
  if (kIsDebugBuild) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
    Thread::PoisonObjectPointersIfDebug();
  }

  // For temporary classes we must wait for them to be retired.
  if (init_done_ && klass->IsTemp()) {
    CHECK(!klass->IsResolved());
    if (klass->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(klass);
      return nullptr;
    }
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    ObjectLock<mirror::Class> lock(self, h_class);
    // Loop and wait for the resolving thread to retire this class.
    while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
      lock.WaitIgnoringInterrupts();
    }
    if (h_class->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(h_class.Get());
      return nullptr;
    }
    CHECK(h_class->IsRetired());
    // Get the updated class from class table.
    klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
  }

  // Wait for the class if it has not already been linked.
  size_t index = 0;
  // Maximum number of yield iterations until we start sleeping.
  static const size_t kNumYieldIterations = 1000;
  // How long each sleep is in us.
  static const size_t kSleepDurationUS = 1000;  // 1 ms.
  while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
    {
      ObjectTryLock<mirror::Class> lock(self, h_class);
      // Can not use a monitor wait here since it may block when returning and deadlock if another
      // thread has locked klass.
      if (lock.Acquired()) {
        // Check for circular dependencies between classes, the lock is required for SetStatus.
        if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
          ThrowClassCircularityError(h_class.Get());
          mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
          return nullptr;
        }
      }
    }
    {
      // Handle wrapper deals with klass moving.
      ScopedThreadSuspension sts(self, ThreadState::kSuspended);
      if (index < kNumYieldIterations) {
        sched_yield();
      } else {
        usleep(kSleepDurationUS);
      }
    }
    ++index;
  }

  if (klass->IsErroneousUnresolved()) {
    ThrowEarlierClassFailure(klass);
    return nullptr;
  }
  // Return the loaded class.  No exceptions should be pending.
  CHECK(klass->IsResolved()) << klass->PrettyClass();
  self->AssertNoPendingException();
  return klass;
}
3007 
3008 using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
3009 
3010 // Search a collection of DexFiles for a descriptor
FindInClassPath(std::string_view descriptor,size_t hash,const std::vector<const DexFile * > & class_path)3011 ClassPathEntry FindInClassPath(std::string_view descriptor,
3012                                size_t hash,
3013                                const std::vector<const DexFile*>& class_path) {
3014   for (const DexFile* dex_file : class_path) {
3015     DCHECK(dex_file != nullptr);
3016     const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
3017     if (dex_class_def != nullptr) {
3018       return ClassPathEntry(dex_file, dex_class_def);
3019     }
3020   }
3021   return ClassPathEntry(nullptr, nullptr);
3022 }
3023 
// Helper macro to make sure each class loader lookup call handles the case the
// class loader is not recognized, or the lookup threw an exception.
//
// Expands to a statement that returns from the *enclosing* function:
//  - false when the lookup call itself failed (unrecognized class loader),
//  - true  when the lookup produced a non-null result,
//  - false when the lookup left an exception pending on the thread.
// Otherwise (class simply not found, no error) control falls through to the
// statement after the macro so the caller can try the next loader.
#define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
do {                                                                          \
  auto local_call = call_;                                                    \
  if (!local_call) {                                                          \
    return false;                                                             \
  }                                                                           \
  auto local_result = result_;                                                \
  if (local_result != nullptr) {                                              \
    return true;                                                              \
  }                                                                           \
  auto local_thread = thread_;                                                \
  if (local_thread->IsExceptionPending()) {                                   \
    /* Pending exception means there was an error other than */               \
    /* ClassNotFound that must be returned to the caller. */                  \
    return false;                                                             \
  }                                                                           \
} while (0)
3043 
// Looks up `descriptor` in the shared library loaders of `class_loader`
// (BaseDexClassLoader.sharedLibraryLoaders). Returns false on error (an
// exception may be pending); on success `*result` is non-null iff found.
bool ClassLinker::FindClassInSharedLibraries(Thread* self,
                                             const char* descriptor,
                                             size_t descriptor_length,
                                             size_t hash,
                                             Handle<mirror::ClassLoader> class_loader,
                                             /*out*/ ObjPtr<mirror::Class>* result) {
  ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
  return FindClassInSharedLibrariesHelper(
      self, descriptor, descriptor_length, hash, class_loader, field, result);
}
3054 
// Shared implementation for looking up `descriptor` in an array of class
// loaders read from `field` on `class_loader`. Returns true when the search
// completed (found or cleanly not found); false on error, in which case an
// exception may be pending. On success `*result` holds the class if any of
// the loaders resolved it.
bool ClassLinker::FindClassInSharedLibrariesHelper(Thread* self,
                                                   const char* descriptor,
                                                   size_t descriptor_length,
                                                   size_t hash,
                                                   Handle<mirror::ClassLoader> class_loader,
                                                   ArtField* field,
                                                   /*out*/ ObjPtr<mirror::Class>* result) {
  ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
  if (raw_shared_libraries == nullptr) {
    // No shared libraries configured: trivially "not found" without error.
    return true;
  }

  StackHandleScope<2> hs(self);
  Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
      hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
  MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
  for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
    temp_loader.Assign(loader);
    // Returns from this function early on unrecognized loader, a found class,
    // or a pending exception; otherwise continues with the next loader.
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(
            self, descriptor, descriptor_length, hash, temp_loader, result),
        *result,
        self);
  }
  return true;
}
3081 
FindClassInSharedLibrariesAfter(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)3082 bool ClassLinker::FindClassInSharedLibrariesAfter(Thread* self,
3083                                                   const char* descriptor,
3084                                                   size_t descriptor_length,
3085                                                   size_t hash,
3086                                                   Handle<mirror::ClassLoader> class_loader,
3087                                                   /*out*/ ObjPtr<mirror::Class>* result) {
3088   ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
3089   return FindClassInSharedLibrariesHelper(
3090       self, descriptor, descriptor_length, hash, class_loader, field, result);
3091 }
3092 
// Recursively searches `class_loader` and its delegation chain for the class
// named by `descriptor`, without running any managed (Java) code.
// Returns true when every loader in the chain was a recognized type
// (boot / Path / Dex / InMemoryDex / DelegateLast); `*result` then holds the
// class, or null if it was genuinely not found. Returns false when the chain
// contains an unsupported loader, or when a pending exception other than
// ClassNotFound must be propagated to the caller.
bool ClassLinker::FindClassInBaseDexClassLoader(Thread* self,
                                                const char* descriptor,
                                                size_t descriptor_length,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                /*out*/ ObjPtr<mirror::Class>* result) {
  // Termination case: boot class loader.
  if (IsBootClassLoader(class_loader.Get())) {
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, descriptor_length, hash, result),
        *result,
        self);
    return true;
  }

  if (IsPathOrDexClassLoader(class_loader) || IsInMemoryDexClassLoader(class_loader)) {
    // For regular path or dex class loader the search order is:
    //    - parent
    //    - shared libraries
    //    - class loader dex files

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, descriptor_length, hash, h_parent, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  if (IsDelegateLastClassLoader(class_loader)) {
    // For delegate last, the search order is:
    //    - boot class path
    //    - shared libraries
    //    - class loader dex files
    //    - parent
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, descriptor_length, hash, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, descriptor_length, hash, h_parent, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  // Unsupported class loader.
  *result = nullptr;
  return false;
}
3181 
3182 #undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION
3183 
3184 namespace {
3185 
3186 // Matches exceptions caught in DexFile.defineClass.
MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,ClassLinker * class_linker)3187 ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
3188                                                   ClassLinker* class_linker)
3189     REQUIRES_SHARED(Locks::mutator_lock_) {
3190   return
3191       // ClassNotFoundException.
3192       throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
3193                                          class_linker))
3194       ||
3195       // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
3196       throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
3197 }
3198 
3199 // Clear exceptions caught in DexFile.defineClass.
FilterDexFileCaughtExceptions(Thread * self,ClassLinker * class_linker)3200 ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
3201     REQUIRES_SHARED(Locks::mutator_lock_) {
3202   if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
3203     self->ClearException();
3204   }
3205 }
3206 
3207 }  // namespace
3208 
3209 // Finds the class in the boot class loader.
3210 // If the class is found the method returns the resolved class. Otherwise it returns null.
FindClassInBootClassLoaderClassPath(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,ObjPtr<mirror::Class> * result)3211 bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
3212                                                       const char* descriptor,
3213                                                       size_t descriptor_length,
3214                                                       size_t hash,
3215                                                       /*out*/ ObjPtr<mirror::Class>* result) {
3216   std::string_view sv_descriptor(descriptor, descriptor_length);
3217   ClassPathEntry pair = FindInClassPath(sv_descriptor, hash, boot_class_path_);
3218   if (pair.second != nullptr) {
3219     ObjPtr<mirror::Class> klass = LookupClass(self, sv_descriptor, hash, nullptr);
3220     if (klass != nullptr) {
3221       *result = EnsureResolved(self, sv_descriptor, klass);
3222     } else {
3223       *result = DefineClass(self,
3224                             descriptor,
3225                             descriptor_length,
3226                             hash,
3227                             ScopedNullHandle<mirror::ClassLoader>(),
3228                             *pair.first,
3229                             *pair.second);
3230     }
3231     if (*result == nullptr) {
3232       CHECK(self->IsExceptionPending()) << descriptor;
3233       FilterDexFileCaughtExceptions(self, this);
3234     }
3235   }
3236   // The boot classloader is always a known lookup.
3237   return true;
3238 }
3239 
FindClassInBaseDexClassLoaderClassPath(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)3240 bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
3241     Thread* self,
3242     const char* descriptor,
3243     size_t descriptor_length,
3244     size_t hash,
3245     Handle<mirror::ClassLoader> class_loader,
3246     /*out*/ ObjPtr<mirror::Class>* result) {
3247   DCHECK(IsPathOrDexClassLoader(class_loader) ||
3248          IsInMemoryDexClassLoader(class_loader) ||
3249          IsDelegateLastClassLoader(class_loader))
3250       << "Unexpected class loader for descriptor " << descriptor;
3251 
3252   std::string_view sv_descriptor(descriptor, descriptor_length);
3253   const DexFile* dex_file = nullptr;
3254   const dex::ClassDef* class_def = nullptr;
3255   ObjPtr<mirror::Class> ret;
3256   auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
3257     const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, sv_descriptor, hash);
3258     if (cp_class_def != nullptr) {
3259       dex_file = cp_dex_file;
3260       class_def = cp_class_def;
3261       return false;  // Found a class definition, stop visit.
3262     }
3263     return true;  // Continue with the next DexFile.
3264   };
3265   VisitClassLoaderDexFiles(self, class_loader, find_class_def);
3266 
3267   if (class_def != nullptr) {
3268     *result =
3269         DefineClass(self, descriptor, descriptor_length, hash, class_loader, *dex_file, *class_def);
3270     if (UNLIKELY(*result == nullptr)) {
3271       CHECK(self->IsExceptionPending()) << descriptor;
3272       FilterDexFileCaughtExceptions(self, this);
3273     } else {
3274       DCHECK(!self->IsExceptionPending());
3275     }
3276   }
3277   // A BaseDexClassLoader is always a known lookup.
3278   return true;
3279 }
3280 
FindClass(Thread * self,const DexFile & dex_file,dex::TypeIndex type_index,Handle<mirror::ClassLoader> class_loader)3281 ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
3282                                              const DexFile& dex_file,
3283                                              dex::TypeIndex type_index,
3284                                              Handle<mirror::ClassLoader> class_loader) {
3285   dex::StringIndex descriptor_idx = dex_file.GetTypeId(type_index).descriptor_idx_;
3286   uint32_t utf16_length;
3287   const char* descriptor = dex_file.GetStringDataAndUtf16Length(descriptor_idx, &utf16_length);
3288   size_t descriptor_length = DexFile::Utf8Length(descriptor, utf16_length);
3289   return FindClass(self, descriptor, descriptor_length, class_loader);
3290 }
3291 
// Finds and returns the class named by `descriptor` in `class_loader`
// (null loader means the boot class loader), loading and defining it if
// necessary. On failure returns null with an exception pending on `self`.
// This may call into managed code (ClassLoader.loadClass) for unrecognized
// class loader chains, so it must not be called from a runtime thread.
ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
                                             const char* descriptor,
                                             size_t descriptor_length,
                                             Handle<mirror::ClassLoader> class_loader) {
  DCHECK_EQ(strlen(descriptor), descriptor_length);
  DCHECK_NE(descriptor_length, 0u) << "descriptor is empty string";
  DCHECK(self != nullptr);
  self->AssertNoPendingException();
  self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
  if (descriptor_length == 1u) {
    // only the descriptors of primitive types should be 1 character long, also avoid class lookup
    // for primitive classes that aren't backed by dex files.
    return FindPrimitiveClass(descriptor[0]);
  }
  const std::string_view sv_descriptor(descriptor, descriptor_length);
  const size_t hash = ComputeModifiedUtf8Hash(sv_descriptor);
  // Find the class in the loaded classes table.
  ObjPtr<mirror::Class> klass = LookupClass(self, sv_descriptor, hash, class_loader.Get());
  if (klass != nullptr) {
    return EnsureResolved(self, sv_descriptor, klass);
  }
  // Class is not yet loaded.
  if (descriptor[0] != '[' && class_loader == nullptr) {
    // Non-array class and the boot class loader, search the boot class path.
    ClassPathEntry pair = FindInClassPath(sv_descriptor, hash, boot_class_path_);
    if (pair.second != nullptr) {
      return DefineClass(self,
                         descriptor,
                         descriptor_length,
                         hash,
                         ScopedNullHandle<mirror::ClassLoader>(),
                         *pair.first,
                         *pair.second);
    } else {
      // The boot class loader is searched ahead of the application class loader, failures are
      // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
      // trigger the chaining with a proper stack trace.
      ObjPtr<mirror::Throwable> pre_allocated =
          Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
      self->SetException(pre_allocated);
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> result_ptr;
  bool descriptor_equals;
  if (descriptor[0] == '[') {
    // Array classes are created directly; the component type drives the
    // loader, so no managed code runs here.
    result_ptr = CreateArrayClass(self, descriptor, descriptor_length, hash, class_loader);
    DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
    DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(sv_descriptor));
    descriptor_equals = true;
  } else {
    ScopedObjectAccessUnchecked soa(self);
    // First try the fast, native-only walk over recognized loader chains.
    bool known_hierarchy = FindClassInBaseDexClassLoader(
        self, descriptor, descriptor_length, hash, class_loader, &result_ptr);
    if (result_ptr != nullptr) {
      // The chain was understood and we found the class. We still need to add the class to
      // the class table to protect from racy programs that can try and redefine the path list
      // which would change the Class<?> returned for subsequent evaluation of const-class.
      DCHECK(known_hierarchy);
      DCHECK(result_ptr->DescriptorEquals(sv_descriptor));
      descriptor_equals = true;
    } else if (!self->IsExceptionPending()) {
      // Either the chain wasn't understood or the class wasn't found.
      // If there is a pending exception we didn't clear, it is a not a ClassNotFoundException and
      // we should return it instead of silently clearing and retrying.
      //
      // If the chain was understood but we did not find the class, let the Java-side
      // rediscover all this and throw the exception with the right stack trace. Note that
      // the Java-side could still succeed for racy programs if another thread is actively
      // modifying the class loader's path list.

      // The runtime is not allowed to call into java from a runtime-thread so just abort.
      if (self->IsRuntimeThread()) {
        // Oops, we can't call into java so we can't run actual class-loader code.
        // This is true for e.g. for the compiler (jit or aot).
        ObjPtr<mirror::Throwable> pre_allocated =
            Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
        self->SetException(pre_allocated);
        return nullptr;
      }

      // Inlined DescriptorToDot(descriptor) with extra validation.
      //
      // Throw NoClassDefFoundError early rather than potentially load a class only to fail
      // the DescriptorEquals() check below and give a confusing error message. For example,
      // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
      // instead of "Ljava/lang/String;", the message below using the "dot" names would be
      // "class loader [...] returned class java.lang.String instead of java.lang.String".
      if (UNLIKELY(descriptor[0] != 'L') ||
          UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
          UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
        ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
        return nullptr;
      }

      // Convert "Lfoo/Bar;" to "foo.Bar" for the managed loadClass call.
      std::string class_name_string(sv_descriptor.substr(1u, descriptor_length - 2u));
      std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
      if (known_hierarchy &&
          fast_class_not_found_exceptions_ &&
          !Runtime::Current()->IsJavaDebuggable()) {
        // For known hierarchy, we know that the class is going to throw an exception. If we aren't
        // debuggable, optimize this path by throwing directly here without going back to Java
        // language. This reduces how many ClassNotFoundExceptions happen.
        self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
                                 "%s",
                                 class_name_string.c_str());
      } else {
        // Fall back to calling ClassLoader.loadClass in managed code.
        StackHandleScope<1u> hs(self);
        Handle<mirror::String> class_name_object = hs.NewHandle(
            mirror::String::AllocFromModifiedUtf8(self, class_name_string.c_str()));
        if (class_name_object == nullptr) {
          DCHECK(self->IsExceptionPending());  // OOME.
          return nullptr;
        }
        DCHECK(class_loader != nullptr);
        result_ptr = ObjPtr<mirror::Class>::DownCast(
            WellKnownClasses::java_lang_ClassLoader_loadClass->InvokeVirtual<'L', 'L'>(
                self, class_loader.Get(), class_name_object.Get()));
        if (result_ptr == nullptr && !self->IsExceptionPending()) {
          // broken loader - throw NPE to be compatible with Dalvik
          ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
                                                 class_name_string.c_str()).c_str());
          return nullptr;
        }
        // Check the name of the returned class.
        descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(sv_descriptor);
      }
    } else {
      DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
    }
  }

  if (self->IsExceptionPending()) {
    // If the ClassLoader threw or array class allocation failed, pass that exception up.
    // However, to comply with the RI behavior, first check if another thread succeeded.
    result_ptr = LookupClass(self, sv_descriptor, hash, class_loader.Get());
    if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
      self->ClearException();
      return EnsureResolved(self, sv_descriptor, result_ptr);
    }
    return nullptr;
  }

  // Try to insert the class to the class table, checking for mismatch.
  ObjPtr<mirror::Class> old;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
    old = class_table->Lookup(sv_descriptor, hash);
    if (old == nullptr) {
      old = result_ptr;  // For the comparison below, after releasing the lock.
      if (descriptor_equals) {
        class_table->InsertWithHash(result_ptr, hash);
        WriteBarrier::ForEveryFieldWrite(class_loader.Get());
      }  // else throw below, after releasing the lock.
    }
  }
  if (UNLIKELY(old != result_ptr)) {
    // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
    // capable class loaders.  (All class loaders are considered parallel capable on Android.)
    ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
    const char* loader_class_name =
        loader_class->GetDexFile().GetTypeDescriptor(loader_class->GetDexTypeIndex());
    LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
        << " is not well-behaved; it returned a different Class for racing loadClass(\""
        << DescriptorToDot(descriptor) << "\").";
    return EnsureResolved(self, sv_descriptor, old);
  }
  if (UNLIKELY(!descriptor_equals)) {
    // The loader returned a class under a different name than requested.
    std::string result_storage;
    const char* result_name = result_ptr->GetDescriptor(&result_storage);
    std::string loader_storage;
    const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
    ThrowNoClassDefFoundError(
        "Initiating class loader of type %s returned class %s instead of %s.",
        DescriptorToDot(loader_class_name).c_str(),
        DescriptorToDot(result_name).c_str(),
        DescriptorToDot(descriptor).c_str());
    return nullptr;
  }
  // Success.
  return result_ptr;
}
3475 
3476 // Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
3477 // define-class and how many recursive DefineClasses we are at in order to allow for doing  things
3478 // like pausing class definition.
3479 struct ScopedDefiningClass {
3480  public:
REQUIRES_SHAREDart::ScopedDefiningClass3481   explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
3482       : self_(self), returned_(false) {
3483     Locks::mutator_lock_->AssertSharedHeld(self_);
3484     Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
3485     self_->IncrDefineClassCount();
3486   }
REQUIRES_SHAREDart::ScopedDefiningClass3487   ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
3488     Locks::mutator_lock_->AssertSharedHeld(self_);
3489     CHECK(returned_);
3490   }
3491 
Finishart::ScopedDefiningClass3492   ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
3493       REQUIRES_SHARED(Locks::mutator_lock_) {
3494     CHECK(!returned_);
3495     self_->DecrDefineClassCount();
3496     Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
3497     Thread::PoisonObjectPointersIfDebug();
3498     returned_ = true;
3499     return h_klass.Get();
3500   }
3501 
Finishart::ScopedDefiningClass3502   ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
3503       REQUIRES_SHARED(Locks::mutator_lock_) {
3504     StackHandleScope<1> hs(self_);
3505     Handle<mirror::Class> h_klass(hs.NewHandle(klass));
3506     return Finish(h_klass);
3507   }
3508 
Finishart::ScopedDefiningClass3509   ObjPtr<mirror::Class> Finish([[maybe_unused]] nullptr_t np)
3510       REQUIRES_SHARED(Locks::mutator_lock_) {
3511     ScopedNullHandle<mirror::Class> snh;
3512     return Finish(snh);
3513   }
3514 
3515  private:
3516   Thread* self_;
3517   bool returned_;
3518 };
3519 
DefineClass(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader,const DexFile & dex_file,const dex::ClassDef & dex_class_def)3520 ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
3521                                                const char* descriptor,
3522                                                size_t descriptor_length,
3523                                                size_t hash,
3524                                                Handle<mirror::ClassLoader> class_loader,
3525                                                const DexFile& dex_file,
3526                                                const dex::ClassDef& dex_class_def) {
3527   std::string_view sv_descriptor(descriptor, descriptor_length);
3528   ScopedDefiningClass sdc(self);
3529   StackHandleScope<3> hs(self);
3530   metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
3531   metrics::AutoTimer timeDelta{GetMetrics()->ClassLoadingTotalTimeDelta()};
3532   auto klass = hs.NewHandle<mirror::Class>(nullptr);
3533 
3534   // Load the class from the dex file.
3535   if (UNLIKELY(!init_done_)) {
3536     // finish up init of hand crafted class_roots_
3537     if (sv_descriptor == "Ljava/lang/Object;") {
3538       klass.Assign(GetClassRoot<mirror::Object>(this));
3539     } else if (sv_descriptor == "Ljava/lang/Class;") {
3540       klass.Assign(GetClassRoot<mirror::Class>(this));
3541     } else if (sv_descriptor == "Ljava/lang/String;") {
3542       klass.Assign(GetClassRoot<mirror::String>(this));
3543     } else if (sv_descriptor == "Ljava/lang/ref/Reference;") {
3544       klass.Assign(GetClassRoot<mirror::Reference>(this));
3545     } else if (sv_descriptor == "Ljava/lang/DexCache;") {
3546       klass.Assign(GetClassRoot<mirror::DexCache>(this));
3547     } else if (sv_descriptor == "Ldalvik/system/ClassExt;") {
3548       klass.Assign(GetClassRoot<mirror::ClassExt>(this));
3549     }
3550   }
3551 
3552   // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
3553   // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
3554   // public class path then we prevent the definition of the class.
3555   //
3556   // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
3557   // classpath is not checked.
3558   if (class_loader == nullptr &&
3559       Runtime::Current()->IsAotCompiler() &&
3560       DenyAccessBasedOnPublicSdk(descriptor)) {
3561     ObjPtr<mirror::Throwable> pre_allocated =
3562         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3563     self->SetException(pre_allocated);
3564     return sdc.Finish(nullptr);
3565   }
3566 
3567   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
3568   // code to be executed. We put it up here so we can avoid all the allocations associated with
3569   // creating the class. This can happen with (eg) jit threads.
3570   if (!self->CanLoadClasses()) {
3571     // Make sure we don't try to load anything, potentially causing an infinite loop.
3572     ObjPtr<mirror::Throwable> pre_allocated =
3573         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3574     self->SetException(pre_allocated);
3575     return sdc.Finish(nullptr);
3576   }
3577 
3578   ScopedTrace trace(descriptor);
3579   if (klass == nullptr) {
3580     // Allocate a class with the status of not ready.
3581     // Interface object should get the right size here. Regular class will
3582     // figure out the right size later and be replaced with one of the right
3583     // size when the class becomes resolved.
3584     if (CanAllocClass()) {
3585       klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
3586     } else {
3587       return sdc.Finish(nullptr);
3588     }
3589   }
3590   if (UNLIKELY(klass == nullptr)) {
3591     self->AssertPendingOOMException();
3592     return sdc.Finish(nullptr);
3593   }
3594   // Get the real dex file. This will return the input if there aren't any callbacks or they do
3595   // nothing.
3596   DexFile const* new_dex_file = nullptr;
3597   dex::ClassDef const* new_class_def = nullptr;
3598   // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
3599   // will only be called once.
3600   Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
3601                                                             klass,
3602                                                             class_loader,
3603                                                             dex_file,
3604                                                             dex_class_def,
3605                                                             &new_dex_file,
3606                                                             &new_class_def);
3607   // Check to see if an exception happened during runtime callbacks. Return if so.
3608   if (self->IsExceptionPending()) {
3609     return sdc.Finish(nullptr);
3610   }
3611   ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
3612   if (dex_cache == nullptr) {
3613     self->AssertPendingException();
3614     return sdc.Finish(nullptr);
3615   }
3616   klass->SetDexCache(dex_cache);
3617   SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
3618 
3619   // Mark the string class by setting its access flag.
3620   if (UNLIKELY(!init_done_)) {
3621     if (sv_descriptor == "Ljava/lang/String;") {
3622       klass->SetStringClass();
3623     }
3624   }
3625 
3626   ObjectLock<mirror::Class> lock(self, klass);
3627   klass->SetClinitThreadId(self->GetTid());
3628   // Make sure we have a valid empty iftable even if there are errors.
3629   klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
3630 
3631   // Add the newly loaded class to the loaded classes table.
3632   ObjPtr<mirror::Class> existing = InsertClass(sv_descriptor, klass.Get(), hash);
3633   if (existing != nullptr) {
3634     // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
3635     // this thread to block.
3636     return sdc.Finish(EnsureResolved(self, sv_descriptor, existing));
3637   }
3638 
3639   // Load the fields and other things after we are inserted in the table. This is so that we don't
3640   // end up allocating unfree-able linear alloc resources and then lose the race condition. The
3641   // other reason is that the field roots are only visited from the class table. So we need to be
3642   // inserted before we allocate / fill in these fields.
3643   LoadClass(self, *new_dex_file, *new_class_def, klass);
3644   if (self->IsExceptionPending()) {
3645     VLOG(class_linker) << self->GetException()->Dump();
3646     // An exception occured during load, set status to erroneous while holding klass' lock in case
3647     // notification is necessary.
3648     if (!klass->IsErroneous()) {
3649       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3650     }
3651     return sdc.Finish(nullptr);
3652   }
3653 
3654   // Finish loading (if necessary) by finding parents
3655   CHECK(!klass->IsLoaded());
3656   if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
3657     // Loading failed.
3658     if (!klass->IsErroneous()) {
3659       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3660     }
3661     return sdc.Finish(nullptr);
3662   }
3663   CHECK(klass->IsLoaded());
3664 
3665   // At this point the class is loaded. Publish a ClassLoad event.
3666   // Note: this may be a temporary class. It is a listener's responsibility to handle this.
3667   Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);
3668 
3669   // Link the class (if necessary)
3670   CHECK(!klass->IsResolved());
3671   // TODO: Use fast jobjects?
3672   auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
3673 
3674   MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
3675   if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
3676     // Linking failed.
3677     if (!klass->IsErroneous()) {
3678       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3679     }
3680     return sdc.Finish(nullptr);
3681   }
3682   self->AssertNoPendingException();
3683   CHECK(h_new_class != nullptr) << descriptor;
3684   CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();
3685 
3686   // Instrumentation may have updated entrypoints for all methods of all
3687   // classes. However it could not update methods of this class while we
3688   // were loading it. Now the class is resolved, we can update entrypoints
3689   // as required by instrumentation.
3690   if (Runtime::Current()->GetInstrumentation()->EntryExitStubsInstalled()) {
3691     // We must be in the kRunnable state to prevent instrumentation from
3692     // suspending all threads to update entrypoints while we are doing it
3693     // for this class.
3694     DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
3695     Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
3696   }
3697 
3698   /*
3699    * We send CLASS_PREPARE events to the debugger from here.  The
3700    * definition of "preparation" is creating the static fields for a
3701    * class and initializing them to the standard default values, but not
3702    * executing any code (that comes later, during "initialization").
3703    *
3704    * We did the static preparation in LinkClass.
3705    *
3706    * The class has been prepared and resolved but possibly not yet verified
3707    * at this point.
3708    */
3709   Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);
3710 
3711   // Notify native debugger of the new class and its layout.
3712   jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
3713 
3714   return sdc.Finish(h_new_class);
3715 }
3716 
SizeOfClassWithoutEmbeddedTables(const DexFile & dex_file,const dex::ClassDef & dex_class_def)3717 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
3718                                                        const dex::ClassDef& dex_class_def) {
3719   size_t num_ref = 0;
3720   size_t num_8 = 0;
3721   size_t num_16 = 0;
3722   size_t num_32 = 0;
3723   size_t num_64 = 0;
3724   ClassAccessor accessor(dex_file, dex_class_def);
3725   // We allow duplicate definitions of the same field in a class_data_item
3726   // but ignore the repeated indexes here, b/21868015.
3727   uint32_t last_field_idx = dex::kDexNoIndex;
3728   for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3729     uint32_t field_idx = field.GetIndex();
3730     // Ordering enforced by DexFileVerifier.
3731     DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3732     if (UNLIKELY(field_idx == last_field_idx)) {
3733       continue;
3734     }
3735     last_field_idx = field_idx;
3736     const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
3737     const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3738     char c = descriptor[0];
3739     switch (c) {
3740       case 'L':
3741       case '[':
3742         num_ref++;
3743         break;
3744       case 'J':
3745       case 'D':
3746         num_64++;
3747         break;
3748       case 'I':
3749       case 'F':
3750         num_32++;
3751         break;
3752       case 'S':
3753       case 'C':
3754         num_16++;
3755         break;
3756       case 'B':
3757       case 'Z':
3758         num_8++;
3759         break;
3760       default:
3761         LOG(FATAL) << "Unknown descriptor: " << c;
3762         UNREACHABLE();
3763     }
3764   }
3765   return mirror::Class::ComputeClassSize(/*has_embedded_vtable=*/false,
3766                                          /*num_vtable_entries=*/0,
3767                                          num_8,
3768                                          num_16,
3769                                          num_32,
3770                                          num_64,
3771                                          num_ref,
3772                                          /*num_ref_bitmap_entries=*/0,
3773                                          image_pointer_size_);
3774 }
3775 
// Called once `klass` is visibly initialized: updates the entrypoints of its
// direct methods that previously needed a class-initialization check so that
// future calls skip that check.
void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
  size_t num_direct_methods = klass->NumDirectMethods();
  if (num_direct_methods == 0) {
    return;  // No direct methods => no static methods.
  }
  if (UNLIKELY(klass->IsProxyClass())) {
    // Proxy classes have no code of their own to fix up.
    return;
  }
  PointerSize pointer_size = image_pointer_size_;
  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
                  klass->GetDirectMethods(pointer_size).end(),
                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
    // Drain the [first_method, last_method] range of the pending map under
    // the lock, installing each recorded JNI entrypoint as we erase it.
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
      lb->first->SetEntryPointFromJni(lb->second);
      lb = critical_native_code_with_clinit_check_.erase(lb);
    }
  }
  Runtime* runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // We should not update entrypoints when running the transactional
    // interpreter.
    return;
  }

  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
  // Boot JNI stubs are only usable when the runtime is not Java-debuggable.
  bool enable_boot_jni_stub = !runtime->IsJavaDebuggable();
  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
    if (method->NeedsClinitCheckBeforeCall()) {
      const void* quick_code = instrumentation->GetCodeForInvoke(method);
      if (method->IsNative() && IsQuickGenericJniStub(quick_code) && enable_boot_jni_stub) {
        const void* boot_jni_stub = FindBootJniStub(method);
        if (boot_jni_stub != nullptr) {
          // Use boot JNI stub if found.
          quick_code = boot_jni_stub;
        }
      }
      instrumentation->UpdateMethodsCode(method, quick_code);
    }
  }
  // Ignore virtual methods on the iterator.
}
3826 
3827 // Does anything needed to make sure that the compiler will not generate a direct invoke to this
3828 // method. Should only be called on non-invokable methods.
EnsureThrowsInvocationError(ClassLinker * class_linker,ArtMethod * method)3829 inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3830     REQUIRES_SHARED(Locks::mutator_lock_) {
3831   DCHECK(method != nullptr);
3832   DCHECK(!method->IsInvokable());
3833   method->SetEntryPointFromQuickCompiledCodePtrSize(
3834       class_linker->GetQuickToInterpreterBridgeTrampoline(),
3835       class_linker->GetImagePointerSize());
3836 }
3837 
3838 class ClassLinker::OatClassCodeIterator {
3839  public:
OatClassCodeIterator(const OatFile::OatClass & oat_class)3840   explicit OatClassCodeIterator(const OatFile::OatClass& oat_class)
3841       : begin_(oat_class.methods_pointer_ != nullptr && oat_class.oat_file_->IsExecutable()
3842                    ? oat_class.oat_file_->Begin()
3843                    : nullptr),
3844         bitmap_(oat_class.bitmap_),
3845         current_(oat_class.methods_pointer_ != nullptr && oat_class.oat_file_->IsExecutable()
3846                      ? oat_class.methods_pointer_
3847                      : nullptr),
3848         method_index_(0u),
3849         num_methods_(oat_class.num_methods_) {
3850     DCHECK_EQ(bitmap_ != nullptr, oat_class.GetType() == OatClassType::kSomeCompiled);
3851   }
3852 
GetAndAdvance(uint32_t method_index)3853   const void* GetAndAdvance(uint32_t method_index) {
3854     if (kIsDebugBuild) {
3855       CHECK_EQ(method_index, method_index_);
3856       ++method_index_;
3857     }
3858     if (current_ == nullptr) {
3859       // We may not have a valid `num_methods_` to perform the next `DCHECK()`.
3860       return nullptr;
3861     }
3862     DCHECK_LT(method_index, num_methods_);
3863     DCHECK(begin_ != nullptr);
3864     if (bitmap_ == nullptr || BitVector::IsBitSet(bitmap_, method_index)) {
3865       DCHECK_NE(current_->code_offset_, 0u);
3866       const void* result = begin_ + current_->code_offset_;
3867       ++current_;
3868       return result;
3869     } else {
3870       return nullptr;
3871     }
3872   }
3873 
SkipAbstract(uint32_t method_index)3874   void SkipAbstract(uint32_t method_index) {
3875     if (kIsDebugBuild) {
3876       CHECK_EQ(method_index, method_index_);
3877       ++method_index_;
3878       if (current_ != nullptr) {
3879         CHECK_LT(method_index, num_methods_);
3880         CHECK(bitmap_ != nullptr);
3881         CHECK(!BitVector::IsBitSet(bitmap_, method_index));
3882       }
3883     }
3884   }
3885 
3886  private:
3887   const uint8_t* const begin_;
3888   const uint32_t* const bitmap_;
3889   const OatMethodOffsets* current_;
3890 
3891   // Debug mode members.
3892   uint32_t method_index_;
3893   const uint32_t num_methods_;
3894 };
3895 
SetupClass(const DexFile & dex_file,const dex::ClassDef & dex_class_def,Handle<mirror::Class> klass,ObjPtr<mirror::ClassLoader> class_loader)3896 void ClassLinker::SetupClass(const DexFile& dex_file,
3897                              const dex::ClassDef& dex_class_def,
3898                              Handle<mirror::Class> klass,
3899                              ObjPtr<mirror::ClassLoader> class_loader) {
3900   CHECK(klass != nullptr);
3901   CHECK(klass->GetDexCache() != nullptr);
3902   CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
3903   const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
3904   CHECK(descriptor != nullptr);
3905 
3906   klass->SetClass(GetClassRoot<mirror::Class>(this));
3907   uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
3908   CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
3909   klass->SetAccessFlagsDuringLinking(access_flags);
3910   klass->SetClassLoader(class_loader);
3911   DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
3912   mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
3913 
3914   klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
3915   klass->SetDexTypeIndex(dex_class_def.class_idx_);
3916 }
3917 
AllocArtFieldArray(Thread * self,LinearAlloc * allocator,size_t length)3918 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3919                                                                LinearAlloc* allocator,
3920                                                                size_t length) {
3921   if (length == 0) {
3922     return nullptr;
3923   }
3924   // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3925   static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3926   size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3927   void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtFieldArray);
3928   auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3929   CHECK(ret != nullptr);
3930   std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3931   return ret;
3932 }
3933 
AllocArtMethodArray(Thread * self,LinearAlloc * allocator,size_t length)3934 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3935                                                                  LinearAlloc* allocator,
3936                                                                  size_t length) {
3937   if (length == 0) {
3938     return nullptr;
3939   }
3940   const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3941   const size_t method_size = ArtMethod::Size(image_pointer_size_);
3942   const size_t storage_size =
3943       LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3944   void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtMethodArray);
3945   auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3946   CHECK(ret != nullptr);
3947   for (size_t i = 0; i < length; ++i) {
3948     new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3949   }
3950   return ret;
3951 }
3952 
GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)3953 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3954   if (class_loader == nullptr) {
3955     return Runtime::Current()->GetLinearAlloc();
3956   }
3957   LinearAlloc* allocator = class_loader->GetAllocator();
3958   DCHECK(allocator != nullptr);
3959   return allocator;
3960 }
3961 
GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)3962 LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3963   if (class_loader == nullptr) {
3964     return Runtime::Current()->GetLinearAlloc();
3965   }
3966   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3967   LinearAlloc* allocator = class_loader->GetAllocator();
3968   if (allocator == nullptr) {
3969     RegisterClassLoader(class_loader);
3970     allocator = class_loader->GetAllocator();
3971     CHECK(allocator != nullptr);
3972   }
3973   return allocator;
3974 }
3975 
3976 // Helper class for iterating over method annotations, using their ordering in the dex file.
3977 // Since direct and virtual methods are separated (but each section is ordered), we shall use
3978 // separate iterators for loading direct and virtual methods.
3979 class ClassLinker::MethodAnnotationsIterator {
3980  public:
MethodAnnotationsIterator(const DexFile & dex_file,const dex::AnnotationsDirectoryItem * annotations_dir)3981   MethodAnnotationsIterator(const DexFile& dex_file,
3982                             const dex::AnnotationsDirectoryItem* annotations_dir)
3983       : current_((annotations_dir != nullptr) ? dex_file.GetMethodAnnotations(annotations_dir)
3984                                               : nullptr),
3985         end_((annotations_dir != nullptr) ? current_ + annotations_dir->methods_size_ : nullptr) {}
3986 
AdvanceTo(uint32_t method_idx)3987   const dex::MethodAnnotationsItem* AdvanceTo(uint32_t method_idx) {
3988     while (current_ != end_ && current_->method_idx_ < method_idx) {
3989       ++current_;
3990     }
3991     return (current_ != end_ && current_->method_idx_ == method_idx) ? current_ : nullptr;
3992   }
3993 
3994  private:
3995   const dex::MethodAnnotationsItem* current_;
3996   const dex::MethodAnnotationsItem* const end_;
3997 };
3998 
// Helper for loading a class's fields and methods. `Load()` parses everything
// into plain arena-allocated data (and can run without risking suspend-request
// timeouts); `Commit()` then writes that data into the runtime's
// ArtField/ArtMethod arrays while uninterruptible.
class ClassLinker::LoadClassHelper {
 public:
  LoadClassHelper(
      Runtime* runtime, const DexFile& dex_file, bool is_interface)
      : runtime_(runtime),
        dex_file_(dex_file),
        hotness_count_(runtime->GetJITOptions()->GetWarmupThreshold()),
        is_aot_compiler_(runtime->IsAotCompiler()),
        is_interface_(is_interface),
        stack_(runtime->GetArenaPool()),
        allocator_(&stack_),
        num_direct_methods_(0u),
        has_finalizer_(false) {}

  // Note: This function can take a long time and therefore it should not be called while holding
  // the mutator lock. Otherwise we can experience an occasional suspend request timeout.
  void Load(const ClassAccessor& accessor,
            const dex::ClassDef& dex_class_def,
            const OatFile::OatClass& oat_class);

  // Writes the data gathered by `Load()` into the given field/method arrays
  // for `klass`. Must not be interrupted (definition not shown here).
  void Commit(Handle<mirror::Class> klass,
              PointerSize pointer_size,
              LengthPrefixedArray<ArtField>* fields,
              LengthPrefixedArray<ArtMethod>* methods)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Roles::uninterruptible_);

  // Number of unique fields parsed by `Load()` (duplicates skipped).
  uint32_t NumFields() const {
    return dchecked_integral_cast<uint32_t>(fields_.size());
  }

  // Number of methods parsed by `Load()`.
  uint32_t NumMethods() const {
    return dchecked_integral_cast<uint32_t>(methods_.size());
  }

 private:
  // Plain data later copied into an `ArtField` by `FillFields()`.
  struct ArtFieldData {
    uint32_t access_flags;
    uint32_t dex_field_index;
  };

  // Plain data later copied into an `ArtMethod` by `FillMethods()`.
  struct ArtMethodData {
    uint32_t access_flags;
    uint32_t dex_method_index;
    uint16_t method_index;
    uint16_t imt_index_or_hotness_count;
    const void* data;
    const void* entrypoint;
  };

  ALWAYS_INLINE
  static void LoadField(const ClassAccessor::Field& field, /*out*/ ArtFieldData* dst);

  ALWAYS_INLINE
  void LoadMethod(const ClassAccessor::Method& method,
                  /*inout*/ MethodAnnotationsIterator* mai,
                  /*out*/ ArtMethodData* dst);

  ALWAYS_INLINE
  void LinkCode(ArtMethodData* method,
                uint32_t class_def_method_index,
                /*inout*/ OatClassCodeIterator* occi);

  ALWAYS_INLINE
  void FillFields(ObjPtr<mirror::Class> klass, /*out*/ LengthPrefixedArray<ArtField>* fields)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <PointerSize kPointerSize>
  ALWAYS_INLINE
  void FillMethods(ObjPtr<mirror::Class> klass, /*out*/ LengthPrefixedArray<ArtMethod>* methods)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Roles::uninterruptible_);

  Runtime* const runtime_;
  const DexFile& dex_file_;
  const uint16_t hotness_count_;  // Initial hotness count (JIT warmup threshold).
  const bool is_aot_compiler_;
  const bool is_interface_;

  // Arena backing the temporary field/method data below.
  ArenaStack stack_;
  ScopedArenaAllocator allocator_;

  ArrayRef<ArtFieldData> fields_;    // Filled by `Load()`; sorted by dex field index.
  ArrayRef<ArtMethodData> methods_;  // Filled by `Load()`; direct methods first.
  uint32_t num_direct_methods_;
  bool has_finalizer_;  // Set by `LoadMethod()` when a `void finalize()` is seen.
};
4084 
LoadField(const ClassAccessor::Field & field,ArtFieldData * dst)4085 inline void ClassLinker::LoadClassHelper::LoadField(const ClassAccessor::Field& field,
4086                                                     /*out*/ ArtFieldData* dst) {
4087   dst->dex_field_index = field.GetIndex();
4088   // Get access flags from the DexFile and set hiddenapi runtime access flags.
4089   dst->access_flags = field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field);
4090 }
4091 
LoadMethod(const ClassAccessor::Method & method,MethodAnnotationsIterator * mai,ArtMethodData * dst)4092 inline void ClassLinker::LoadClassHelper::LoadMethod(const ClassAccessor::Method& method,
4093                                                      /*inout*/ MethodAnnotationsIterator* mai,
4094                                                      /*out*/ ArtMethodData* dst) {
4095   const uint32_t dex_method_index = method.GetIndex();
4096   const dex::MethodId& method_id = dex_file_.GetMethodId(dex_method_index);
4097   uint32_t name_utf16_length;
4098   const char* method_name = dex_file_.GetStringDataAndUtf16Length(method_id.name_idx_,
4099                                                                   &name_utf16_length);
4100   std::string_view shorty = dex_file_.GetShortyView(dex_file_.GetProtoId(method_id.proto_idx_));
4101 
4102   // Get access flags from the DexFile and set hiddenapi runtime access flags.
4103   uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
4104 
4105   auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
4106                                                          size_t length) ALWAYS_INLINE {
4107     DCHECK_EQ(strlen(ascii_name), length);
4108     return length == name_utf16_length &&
4109            method_name[length] == 0 &&  // Is `method_name` an ASCII string?
4110            memcmp(ascii_name, method_name, length) == 0;
4111   };
4112   if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
4113     // Set finalizable flag on declaring class if the method has the right signature.
4114     // When initializing without a boot image, `Object` and `Enum` shall have the finalizable
4115     // flag cleared immediately after loading these classes, see  `InitWithoutImage()`.
4116     if (shorty == "V") {
4117       has_finalizer_ = true;
4118     }
4119   } else if (method_name[0] == '<') {
4120     // Fix broken access flags for initializers. Bug 11157540.
4121     // `DexFileVerifier` rejects method names starting with '<' other than constructors.
4122     DCHECK(has_ascii_name("<init>", sizeof("<init>") - 1u) ||
4123            has_ascii_name("<clinit>", sizeof("<clinit>") - 1u)) << method_name;
4124     if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
4125       LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
4126           << PrettyDescriptor(dex_file_.GetMethodDeclaringClassDescriptor(dex_method_index))
4127           << " in dex file " << dex_file_.GetLocation();
4128       access_flags |= kAccConstructor;
4129     }
4130   }
4131 
4132   access_flags |= GetNterpFastPathFlags(shorty, access_flags, kRuntimeQuickCodeISA);
4133 
4134   uint16_t imt_index_or_hotness_count = hotness_count_;
4135   const void* data = nullptr;
4136   if (UNLIKELY((access_flags & kAccNative) != 0u)) {
4137     // Check if the native method is annotated with @FastNative or @CriticalNative.
4138     const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_index);
4139     if (method_annotations != nullptr) {
4140       access_flags |=
4141           annotations::GetNativeMethodAnnotationAccessFlags(dex_file_, *method_annotations);
4142     }
4143     DCHECK(!ArtMethod::IsAbstract(access_flags));
4144     DCHECK(!ArtMethod::NeedsCodeItem(access_flags));
4145     DCHECK_EQ(method.GetCodeItemOffset(), 0u);
4146     DCHECK(data == nullptr);  // JNI stub/trampoline not linked yet.
4147   } else if ((access_flags & kAccAbstract) != 0u) {
4148     DCHECK(ArtMethod::IsAbstract(access_flags));
4149     imt_index_or_hotness_count = is_interface_
4150         ? ImTable::GetImtIndexForAbstractMethod(dex_file_, dex_method_index)
4151         : /* unused */ 0u;
4152     DCHECK(!ArtMethod::NeedsCodeItem(access_flags));
4153     DCHECK_EQ(method.GetCodeItemOffset(), 0u);
4154     DCHECK(data == nullptr);  // Single implementation not set yet.
4155   } else {
4156     const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_index);
4157     if (method_annotations != nullptr &&
4158         annotations::MethodIsNeverCompile(dex_file_, *method_annotations)) {
4159       access_flags |= kAccCompileDontBother;
4160     }
4161     DCHECK(!ArtMethod::IsAbstract(access_flags));
4162     DCHECK(ArtMethod::NeedsCodeItem(access_flags));
4163     uint32_t code_item_offset = method.GetCodeItemOffset();
4164     DCHECK_NE(code_item_offset, 0u);
4165     if (is_aot_compiler_) {
4166       data = reinterpret_cast32<void*>(code_item_offset);
4167     } else {
4168       data = dex_file_.GetCodeItem(code_item_offset);
4169     }
4170   }
4171 
4172   if ((access_flags & kAccAbstract) == 0u &&
4173       Runtime::Current()->IsZygote() &&
4174       !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
4175     DCHECK(!ArtMethod::IsAbstract(access_flags));
4176     DCHECK(!ArtMethod::IsIntrinsic(access_flags));
4177     access_flags = ArtMethod::SetMemorySharedMethod(access_flags);
4178     imt_index_or_hotness_count = 0u;  // Mark the method as hot.
4179   }
4180 
4181   dst->access_flags = access_flags;
4182   dst->dex_method_index = dex_method_index;
4183   dst->method_index = 0u;
4184   dst->imt_index_or_hotness_count  = imt_index_or_hotness_count;
4185   dst->data = data;
4186   dst->entrypoint = nullptr;
4187 }
4188 
// Chooses the initial entrypoint (and, for native methods, the JNI `data`
// stub) for a freshly parsed method, consuming the corresponding entry of the
// oat class code iterator. No-op for the AOT compiler.
inline void ClassLinker::LoadClassHelper::LinkCode(ArtMethodData* method,
                                                   uint32_t class_def_method_index,
                                                   /*inout*/ OatClassCodeIterator* occi) {
  if (is_aot_compiler_) {
    // The following code only applies to a non-compiler runtime.
    return;
  }

  // Method shouldn't have already been linked.
  DCHECK_EQ(method->entrypoint, nullptr);

  uint32_t access_flags = method->access_flags;
  if (!ArtMethod::IsInvokable(access_flags)) {
    // Non-invokable methods go to the interpreter bridge. Still advance `occi`
    // (it must stay in lock-step with the class-def method index).
    method->entrypoint = GetQuickToInterpreterBridge();
    occi->SkipAbstract(class_def_method_index);
    return;
  }

  const void* quick_code = occi->GetAndAdvance(class_def_method_index);
  if (ArtMethod::IsNative(access_flags) && quick_code == nullptr) {
    // Native method without compiled code: try to reuse a boot image JNI stub
    // with a matching shorty instead of the generic JNI trampoline.
    std::string_view shorty = dex_file_.GetMethodShortyView(method->dex_method_index);
    const void* boot_jni_stub = runtime_->GetClassLinker()->FindBootJniStub(access_flags, shorty);
    if (boot_jni_stub != nullptr) {
      // Use boot JNI stub if found.
      quick_code = boot_jni_stub;
    }
  }
  method->entrypoint =
      runtime_->GetInstrumentation()->GetInitialEntrypoint(access_flags, quick_code);

  if (ArtMethod::IsNative(access_flags)) {
    // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
    // as the extra processing for @CriticalNative is not needed yet.
    method->data = ArtMethod::IsCriticalNative(access_flags)
        ? GetJniDlsymLookupCriticalStub()
        : GetJniDlsymLookupStub();
  }
}
4227 
// Parses all fields and methods of the class into the arena-backed
// `fields_`/`methods_` arrays, de-duplicating repeated field definitions and
// linking each method's initial entrypoint from `oat_class` as it goes.
void ClassLinker::LoadClassHelper::Load(const ClassAccessor& accessor,
                                        const dex::ClassDef& dex_class_def,
                                        const OatFile::OatClass& oat_class) {
  // `Load()` must be called at most once per helper instance.
  DCHECK(fields_.empty());
  DCHECK(methods_.empty());
  DCHECK_EQ(num_direct_methods_, 0u);
  DCHECK(!has_finalizer_);

  size_t num_fields = accessor.NumFields();
  size_t num_methods = accessor.NumMethods();
  ArrayRef<ArtFieldData> fields(allocator_.AllocArray<ArtFieldData>(num_fields), num_fields);
  ArrayRef<ArtMethodData> methods(allocator_.AllocArray<ArtMethodData>(num_methods), num_methods);

  size_t num_loaded_fields = 0u;
  size_t num_sfields = 0u;
  size_t num_ifields = 0u;
  uint32_t last_static_field_idx = 0u;
  uint32_t last_instance_field_idx = 0u;

  OatClassCodeIterator occi(oat_class);
  size_t class_def_method_index = 0;
  uint32_t last_dex_method_index = dex::kDexNoIndex;
  size_t last_class_def_method_index = 0;

  // Initialize separate `MethodAnnotationsIterator`s for direct and virtual methods.
  MethodAnnotationsIterator mai_direct(dex_file_, dex_file_.GetAnnotationsDirectory(dex_class_def));
  MethodAnnotationsIterator mai_virtual = mai_direct;

  // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the
  // methods needs to decode all of the fields.
  accessor.VisitFieldsAndMethods([&](
      // We allow duplicate definitions of the same field in a class_data_item
      // but ignore the repeated indexes here, b/21868015.
      const ClassAccessor::Field& field) {
        // Static field visitor.
        uint32_t field_idx = field.GetIndex();
        DCHECK_GE(field_idx, last_static_field_idx);  // Ordering enforced by DexFileVerifier.
        if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
          LoadField(field, &fields[num_loaded_fields]);
          ++num_loaded_fields;
          ++num_sfields;
          last_static_field_idx = field_idx;
        }
      }, [&](const ClassAccessor::Field& field) {
        // Instance field visitor; same duplicate-skipping as static fields.
        uint32_t field_idx = field.GetIndex();
        DCHECK_GE(field_idx, last_instance_field_idx);  // Ordering enforced by DexFileVerifier.
        if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
          LoadField(field, &fields[num_loaded_fields]);
          ++num_loaded_fields;
          ++num_ifields;
          last_instance_field_idx = field_idx;
        }
      }, [&](const ClassAccessor::Method& method) {
        // Direct method visitor.
        ArtMethodData* method_data = &methods[class_def_method_index];
        LoadMethod(method, &mai_direct, method_data);
        LinkCode(method_data, class_def_method_index, &occi);
        uint32_t it_method_index = method.GetIndex();
        if (last_dex_method_index == it_method_index) {
          // duplicate case: share the `method_index` of the first occurrence.
          method_data->method_index = last_class_def_method_index;
        } else {
          method_data->method_index = class_def_method_index;
          last_dex_method_index = it_method_index;
          last_class_def_method_index = class_def_method_index;
        }
        ++class_def_method_index;
      }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
        // Virtual method visitor.
        ArtMethodData* method_data = &methods[class_def_method_index];
        LoadMethod(method, &mai_virtual, method_data);
        LinkCode(method_data, class_def_method_index, &occi);
        DCHECK_EQ(method_data->method_index, 0u);  // Shall be updated in `LinkMethods()`.
        ++class_def_method_index;
      });

  if (UNLIKELY(num_loaded_fields != num_fields)) {
    // Some field definitions were duplicates; shrink the array to the unique ones.
    LOG(WARNING) << "Duplicate fields in class "
        << PrettyDescriptor(dex_file_.GetFieldDeclaringClassDescriptor(fields[0].dex_field_index))
        << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
        << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
        << ")";
    DCHECK_LT(num_loaded_fields, num_fields);
    fields = fields.SubArray(/*pos=*/ 0u, num_loaded_fields);
  }

  // Sort the fields by dex field index to facilitate fast lookups.
  std::sort(fields.begin(),
            fields.end(),
            [](ArtFieldData& lhs, ArtFieldData& rhs) {
              return lhs.dex_field_index < rhs.dex_field_index;
            });

  fields_ = fields;
  methods_ = methods;
  num_direct_methods_ = accessor.NumDirectMethods();
}
4322 
FillFields(ObjPtr<mirror::Class> klass,LengthPrefixedArray<ArtField> * fields)4323 void ClassLinker::LoadClassHelper::FillFields(ObjPtr<mirror::Class> klass,
4324                                               LengthPrefixedArray<ArtField>* fields) {
4325   DCHECK_IMPLIES(!fields_.empty(), fields != nullptr);
4326   DCHECK_EQ(fields_.size(), (fields != nullptr) ? fields->size() : 0u);
4327   for (size_t i = 0, size = fields_.size(); i != size; ++i) {
4328     const ArtFieldData& src = fields_[i];
4329     ArtField* dst = &fields->At(i);
4330     dst->SetDeclaringClass(klass);
4331     dst->SetAccessFlags(src.access_flags);
4332     dst->SetDexFieldIndex(src.dex_field_index);
4333     // The `ArtField::offset_` shall be set in `LinkFields()`.
4334   }
4335 }
4336 
// Copies the previously loaded method data (`methods_`) into the runtime `ArtMethod`
// array, attributing every method to `klass`, and initializes each method's
// entrypoint through the instrumentation (skipped for the AOT compiler, which
// does not execute code).
template <PointerSize kPointerSize>
void ClassLinker::LoadClassHelper::FillMethods(ObjPtr<mirror::Class> klass,
                                               LengthPrefixedArray<ArtMethod>* methods) {
  DCHECK_IMPLIES(!methods_.empty(), methods != nullptr);
  DCHECK_EQ(methods_.size(), (methods != nullptr) ? methods->size() : 0u);
  static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
  static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);
  // Entrypoints are initialized only when actually running code; the AOT
  // compiler leaves `instr` null and skips entrypoint initialization below.
  instrumentation::Instrumentation* instr = nullptr;
  bool use_stubs = false;
  if (!is_aot_compiler_) {
    instr = runtime_->GetInstrumentation();
    use_stubs = instr->InitialEntrypointNeedsInstrumentationStubs();
  }
  for (size_t i = 0, size = methods_.size(); i != size; ++i) {
    const ArtMethodData& src = methods_[i];
    ArtMethod* dst = &methods->At(i, kMethodSize, kMethodAlignment);
    dst->SetDeclaringClass(klass);
    uint32_t access_flags = src.access_flags;
    dst->SetAccessFlags(access_flags);
    dst->SetDexMethodIndex(src.dex_method_index);
    dst->SetMethodIndex(src.method_index);
    // Note: We set the appropriate field of the union (`imt_index_` or `hotness_count_`)
    // as required by the C++ standard but we expect the C++ compiler to optimize away
    // the condition and just copy the `imt_index_or_hotness_count` directly.
    if (ArtMethod::IsInvokable(access_flags)) {
      dst->SetHotnessCount(src.imt_index_or_hotness_count);
    } else {
      // For abstract non-interface methods, the value shall not be used.
      DCHECK_IMPLIES(!is_interface_, src.imt_index_or_hotness_count == 0u);
      dst->SetImtIndex(src.imt_index_or_hotness_count);
    }
    DCHECK_IMPLIES(dst->IsMemorySharedMethod(), !dst->IsAbstract());
    DCHECK_IMPLIES(dst->IsMemorySharedMethod(), dst->CounterIsHot());
    DCHECK_IMPLIES(!dst->IsAbstract() && !dst->IsMemorySharedMethod(),
                   dst->GetCounter() == hotness_count_);
    dst->SetDataPtrSize(src.data, kPointerSize);
    if (instr != nullptr) {
      // For native methods, the JNI entrypoint (stored via the data pointer above)
      // must already match `src.data`.
      DCHECK_IMPLIES(dst->IsNative(), dst->GetEntryPointFromJniPtrSize(kPointerSize) == src.data);
      const void* entrypoint = src.entrypoint;
      if (UNLIKELY(use_stubs)) {
        // Instrumentation requires stub entrypoints instead of the loaded code.
        bool is_native = ArtMethod::IsNative(access_flags);
        entrypoint = is_native ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
      }
      instr->InitializeMethodsCode(dst, entrypoint, kPointerSize);
    }
  }
}
4384 
Commit(Handle<mirror::Class> klass,PointerSize pointer_size,LengthPrefixedArray<ArtField> * fields,LengthPrefixedArray<ArtMethod> * methods)4385 void ClassLinker::LoadClassHelper::Commit(Handle<mirror::Class> klass,
4386                                           PointerSize pointer_size,
4387                                           LengthPrefixedArray<ArtField>* fields,
4388                                           LengthPrefixedArray<ArtMethod>* methods) {
4389   FillFields(klass.Get(), fields);
4390   if (pointer_size == PointerSize::k64) {
4391     FillMethods<PointerSize::k64>(klass.Get(), methods);
4392   } else {
4393     FillMethods<PointerSize::k32>(klass.Get(), methods);
4394   }
4395   klass->SetFieldsPtr(fields);
4396   klass->SetMethodsPtr(methods, num_direct_methods_, methods_.size() - num_direct_methods_);
4397   if (has_finalizer_) {
4398     klass->SetFinalizable();
4399   }
4400 }
4401 
// Loads the fields and methods of `dex_class_def` into `klass`: parses the class
// data (in native state, allowing other threads to suspend this one), then
// allocates and commits the ArtField/ArtMethod arrays without any suspension point.
void ClassLinker::LoadClass(Thread* self,
                            const DexFile& dex_file,
                            const dex::ClassDef& dex_class_def,
                            Handle<mirror::Class> klass) {
  CHECK(!dex_file.IsCompactDexFile());
  // Hidden API class data is only parsed for boot classpath classes.
  ClassAccessor accessor(dex_file,
                         dex_class_def,
                         /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
  if (!accessor.HasClassData()) {
    return;
  }
  Runtime* const runtime = Runtime::Current();
  {
    // Look up the OatClass only when running (not during AOT compilation);
    // it provides compiled-code information for `LinkCode()`.
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    LoadClassHelper helper(runtime, dex_file, klass->IsInterface());
    {
      // Parsing does not touch managed objects; run in native state so GC and
      // other suspend requests are not blocked by this work.
      ScopedThreadSuspension sts(self, ThreadState::kNative);
      helper.Load(accessor, dex_class_def, oat_class);
    }

    // Note: We cannot have thread suspension until the field and method arrays are setup or else
    // Class::VisitFieldRoots may miss some fields or methods.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);

    LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
    LengthPrefixedArray<ArtField>* fields =
        AllocArtFieldArray(self, allocator, helper.NumFields());
    LengthPrefixedArray<ArtMethod>* methods =
        AllocArtMethodArray(self, allocator, helper.NumMethods());
    helper.Commit(klass, image_pointer_size_, fields, methods);
  }
  // Ensure that the card is marked so that remembered sets pick up native roots.
  WriteBarrier::ForEveryFieldWrite(klass.Get());
  self->AllowThreadSuspension();
}
4440 
AppendToBootClassPath(Thread * self,const DexFile * dex_file)4441 void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
4442   ObjPtr<mirror::DexCache> dex_cache =
4443       AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
4444   CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
4445   AppendToBootClassPath(dex_file, dex_cache);
4446   WriteBarrierOnClassLoader(self, /*class_loader=*/nullptr, dex_cache);
4447 }
4448 
// Appends an already-initialized (dex file, dex cache) pair to the boot class
// path and registers it under the boot (null) class loader.
void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache) {
  CHECK(dex_file != nullptr);
  CHECK(dex_cache != nullptr) << dex_file->GetLocation();
  // The dex cache must already be bound to this exact dex file.
  CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
  boot_class_path_.push_back(dex_file);
  // Registration requires exclusive ownership of the dex lock.
  WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
}
4458 
// Registers `dex_cache` for `dex_file` under `class_loader` in `dex_caches_`.
// Caller must hold `Locks::dex_lock_` exclusively. Also initializes the owning
// oat file's relocation data on first registration and assigns a hiddenapi domain.
void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache,
                                        ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* const self = Thread::Current();
  Locks::dex_lock_->AssertExclusiveHeld(self);
  CHECK(dex_cache != nullptr) << dex_file.GetLocation();
  CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
  // For app images, the dex cache location may be a suffix of the dex file location since the
  // dex file location is an absolute path.
  const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
  const size_t dex_cache_length = dex_cache_location.length();
  CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
  std::string dex_file_location = dex_file.GetLocation();
  // The following paths checks don't work on preopt when using boot dex files, where the dex
  // cache location is the one on device, and the dex_file's location is the one on host.
  Runtime* runtime = Runtime::Current();
  if (!(runtime->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
    CHECK_GE(dex_file_location.length(), dex_cache_length)
        << dex_cache_location << " " << dex_file.GetLocation();
    const std::string dex_file_suffix = dex_file_location.substr(
        dex_file_location.length() - dex_cache_length,
        dex_cache_length);
    // Example dex_cache location is SettingsProvider.apk and
    // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
    CHECK_EQ(dex_cache_location, dex_file_suffix);
  }

  // Check if we need to initialize OatFile data (.data.img.rel.ro and .bss
  // sections) needed for code execution and register the oat code range.
  const OatFile* oat_file =
      (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
  bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
  if (initialize_oat_file_data) {
    // Skip initialization if any live registered dex file already shares this oat file.
    for (const auto& entry : dex_caches_) {
      if (!self->IsJWeakCleared(entry.second.weak_root) &&
          entry.first->GetOatDexFile() != nullptr &&
          entry.first->GetOatDexFile()->GetOatFile() == oat_file) {
        initialize_oat_file_data = false;  // Already initialized.
        break;
      }
    }
  }
  if (initialize_oat_file_data) {
    oat_file->InitializeRelocations();
    // Notify the fault handler about the new executable code range if needed.
    size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
    DCHECK_LE(exec_offset, oat_file->Size());
    size_t exec_size = oat_file->Size() - exec_offset;
    if (exec_size != 0u) {
      runtime->AddGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
    }
  }

  // Let hiddenapi assign a domain to the newly registered dex file.
  hiddenapi::InitializeDexFileDomain(dex_file, class_loader);

  // The dex cache is referenced via a weak global so that unused caches can be collected.
  jweak dex_cache_jweak = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, dex_cache);
  DexCacheData data;
  data.weak_root = dex_cache_jweak;
  data.class_table = ClassTableForClassLoader(class_loader);
  AddNativeDebugInfoForDex(self, &dex_file);
  DCHECK(data.class_table != nullptr);
  // Make sure to hold the dex cache live in the class table. This case happens for the boot class
  // path dex caches without an image.
  data.class_table->InsertStrongRoot(dex_cache);
  // Make sure that the dex cache holds the classloader live.
  dex_cache->SetClassLoader(class_loader);
  if (class_loader != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
  // A dex file must be registered at most once; double registration is a logic error.
  bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
  CHECK(inserted);
}
4534 
DecodeDexCacheLocked(Thread * self,const DexCacheData * data)4535 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
4536   return data != nullptr
4537       ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
4538       : nullptr;
4539 }
4540 
IsSameClassLoader(ObjPtr<mirror::DexCache> dex_cache,const DexCacheData * data,ObjPtr<mirror::ClassLoader> class_loader)4541 bool ClassLinker::IsSameClassLoader(
4542     ObjPtr<mirror::DexCache> dex_cache,
4543     const DexCacheData* data,
4544     ObjPtr<mirror::ClassLoader> class_loader) {
4545   CHECK(data != nullptr);
4546   DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
4547   return data->class_table == ClassTableForClassLoader(class_loader);
4548 }
4549 
// Registers an already-allocated `dex_cache` (e.g. from an app image) with
// `class_loader`. The dex file must not have been registered before.
void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader) {
  SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  const DexFile* dex_file = dex_cache->GetDexFile();
  DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
  if (kIsDebugBuild) {
    // Debug-only check: this entry point must not be used for re-registration.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
    ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
    DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache thats already "
                                   << "been registered on dex file " << dex_file->GetLocation();
  }
  ClassTable* table;
  {
    // The class table is created (if needed) under the class-linker classes lock,
    // which must not be held while taking the dex lock below.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(h_class_loader.Get());
  }
  // Avoid a deadlock between a garbage collecting thread running a checkpoint,
  // a thread holding the dex lock and blocking on a condition variable regarding
  // weak references access, and a thread blocking on the dex lock.
  gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
  WriterMutexLock mu(self, *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
}
4584 
ThrowDexFileAlreadyRegisteredError(Thread * self,const DexFile & dex_file)4585 static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4586     REQUIRES_SHARED(Locks::mutator_lock_) {
4587   self->ThrowNewExceptionF("Ljava/lang/InternalError;",
4588                            "Attempt to register dex file %s with multiple class loaders",
4589                            dex_file.GetLocation().c_str());
4590 }
4591 
WriteBarrierOnClassLoaderLocked(ObjPtr<mirror::ClassLoader> class_loader,ObjPtr<mirror::Object> root)4592 void ClassLinker::WriteBarrierOnClassLoaderLocked(ObjPtr<mirror::ClassLoader> class_loader,
4593                                                   ObjPtr<mirror::Object> root) {
4594   if (class_loader != nullptr) {
4595     // Since we added a strong root to the class table, do the write barrier as required for
4596     // remembered sets and generational GCs.
4597     WriteBarrier::ForEveryFieldWrite(class_loader);
4598   } else if (log_new_roots_) {
4599     new_roots_.push_back(GcRoot<mirror::Object>(root));
4600   }
4601 }
4602 
WriteBarrierOnClassLoader(Thread * self,ObjPtr<mirror::ClassLoader> class_loader,ObjPtr<mirror::Object> root)4603 void ClassLinker::WriteBarrierOnClassLoader(Thread* self,
4604                                             ObjPtr<mirror::ClassLoader> class_loader,
4605                                             ObjPtr<mirror::Object> root) {
4606   if (class_loader != nullptr) {
4607     // Since we added a strong root to the class table, do the write barrier as required for
4608     // remembered sets and generational GCs.
4609     WriteBarrier::ForEveryFieldWrite(class_loader);
4610   } else {
4611     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4612     if (log_new_roots_) {
4613       new_roots_.push_back(GcRoot<mirror::Object>(root));
4614     }
4615   }
4616 }
4617 
// Returns the dex cache registered for `dex_file` under `class_loader`,
// creating and registering one if needed. Throws InternalError if the dex file
// is already registered with a different class loader; returns null on failure
// (with an exception pending).
ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
                                                      ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* self = Thread::Current();
  ObjPtr<mirror::DexCache> old_dex_cache;
  bool registered_with_another_class_loader = false;
  {
    // Fast path: check for an existing registration under the reader lock.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache != nullptr) {
      if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
        return old_dex_cache;
      } else {
        // TODO This is not very clean looking. Should maybe try to make a way to request exceptions
        // be thrown when it's safe to do so to simplify this.
        registered_with_another_class_loader = true;
      }
    }
  }
  // We need to have released the dex_lock_ to allocate safely.
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
  LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
  DCHECK(linear_alloc != nullptr);
  ClassTable* table;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(class_loader);
  }
  // Don't alloc while holding the lock, since allocation may need to
  // suspend all threads and another thread may need the dex_lock_ to
  // get to a suspend point.
  StackHandleScope<3> hs(self);
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
  {
    // Avoid a deadlock between a garbage collecting thread running a checkpoint,
    // a thread holding the dex lock and blocking on a condition variable regarding
    // weak references access, and a thread blocking on the dex lock.
    gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
    WriterMutexLock mu(self, *Locks::dex_lock_);
    // Re-check under the writer lock: another thread may have registered meanwhile.
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
      // Do Initialize while holding dex lock to make sure two threads don't call it
      // at the same time with the same dex cache. Since the .bss is shared this can cause failing
      // DCHECK that the arrays are null.
      h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
      RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
    }
    if (old_dex_cache != nullptr) {
      // Another thread managed to initialize the dex cache faster, so use that DexCache.
      // If this thread encountered OOME, ignore it.
      DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
      self->ClearException();
      // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
      // dex_lock_.
      if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
        return old_dex_cache;
      } else {
        registered_with_another_class_loader = true;
      }
    }
  }
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  if (h_dex_cache == nullptr) {
    // Allocation of the dex cache failed; propagate the OOME.
    self->AssertPendingOOMException();
    return nullptr;
  }
  if (table->InsertStrongRoot(h_dex_cache.Get())) {
    WriteBarrierOnClassLoader(self, h_class_loader.Get(), h_dex_cache.Get());
  } else {
    // Write-barrier not required if strong-root isn't inserted.
  }
  VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
  PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
  return h_dex_cache.Get();
}
4702 
IsDexFileRegistered(Thread * self,const DexFile & dex_file)4703 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4704   ReaderMutexLock mu(self, *Locks::dex_lock_);
4705   return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4706 }
4707 
FindDexCache(Thread * self,const DexFile & dex_file)4708 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4709   ReaderMutexLock mu(self, *Locks::dex_lock_);
4710   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4711   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4712   if (dex_cache != nullptr) {
4713     return dex_cache;
4714   }
4715   // Failure, dump diagnostic and abort.
4716   for (const auto& entry : dex_caches_) {
4717     const DexCacheData& data = entry.second;
4718     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4719       LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4720     }
4721   }
4722   LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4723              << " " << &dex_file;
4724   UNREACHABLE();
4725 }
4726 
FindDexCache(Thread * self,const OatDexFile & oat_dex_file)4727 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
4728   ReaderMutexLock mu(self, *Locks::dex_lock_);
4729   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4730   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4731   if (dex_cache != nullptr) {
4732     return dex_cache;
4733   }
4734   // Failure, dump diagnostic and abort.
4735   if (dex_cache_data == nullptr) {
4736     LOG(FATAL_WITHOUT_ABORT) << "NULL dex_cache_data";
4737   } else {
4738     LOG(FATAL_WITHOUT_ABORT)
4739         << "dex_cache_data=" << dex_cache_data
4740         << " weak_root=" << dex_cache_data->weak_root
4741         << " decoded_weak_root=" << self->DecodeJObject(dex_cache_data->weak_root);
4742   }
4743   for (const auto& entry : dex_caches_) {
4744     const DexCacheData& data = entry.second;
4745     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4746       const OatDexFile* other_oat_dex_file = entry.first->GetOatDexFile();
4747       const OatFile* oat_file =
4748           (other_oat_dex_file == nullptr) ? nullptr : other_oat_dex_file->GetOatFile();
4749       LOG(FATAL_WITHOUT_ABORT)
4750           << "Registered dex file " << entry.first->GetLocation()
4751           << " oat_dex_file=" << other_oat_dex_file
4752           << " oat_file=" << oat_file
4753           << " oat_location=" << (oat_file == nullptr ? "null" : oat_file->GetLocation())
4754           << " dex_file=" << &entry.first
4755           << " weak_root=" << data.weak_root
4756           << " decoded_weak_root=" << self->DecodeJObject(data.weak_root)
4757           << " dex_cache_data=" << &data;
4758     }
4759   }
4760   LOG(FATAL) << "Failed to find DexCache for OatDexFile "
4761              << oat_dex_file.GetDexFileLocation()
4762              << " oat_dex_file=" << &oat_dex_file
4763              << " oat_file=" << oat_dex_file.GetOatFile()
4764              << " oat_location=" << oat_dex_file.GetOatFile()->GetLocation();
4765   UNREACHABLE();
4766 }
4767 
FindClassTable(Thread * self,ObjPtr<mirror::DexCache> dex_cache)4768 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4769   const DexFile* dex_file = dex_cache->GetDexFile();
4770   DCHECK(dex_file != nullptr);
4771   ReaderMutexLock mu(self, *Locks::dex_lock_);
4772   auto it = dex_caches_.find(dex_file);
4773   if (it != dex_caches_.end()) {
4774     const DexCacheData& data = it->second;
4775     ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4776     if (registered_dex_cache != nullptr) {
4777       CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4778       return data.class_table;
4779     }
4780   }
4781   return nullptr;
4782 }
4783 
FindDexCacheDataLocked(const OatDexFile & oat_dex_file)4784 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
4785     const OatDexFile& oat_dex_file) {
4786   auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4787     return entry.first->GetOatDexFile() == &oat_dex_file;
4788   });
4789   return it != dex_caches_.end() ? &it->second : nullptr;
4790 }
4791 
FindDexCacheDataLocked(const DexFile & dex_file)4792 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4793   auto it = dex_caches_.find(&dex_file);
4794   return it != dex_caches_.end() ? &it->second : nullptr;
4795 }
4796 
CreatePrimitiveClass(Thread * self,Primitive::Type type,ClassRoot primitive_root)4797 void ClassLinker::CreatePrimitiveClass(Thread* self,
4798                                        Primitive::Type type,
4799                                        ClassRoot primitive_root) {
4800   ObjPtr<mirror::Class> primitive_class =
4801       AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
4802   CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4803   // Do not hold lock on the primitive class object, the initialization of
4804   // primitive classes is done while the process is still single threaded.
4805   primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
4806   primitive_class->SetPrimitiveType(type);
4807   primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4808   DCHECK_EQ(primitive_class->NumMethods(), 0u);
4809   // Primitive classes are initialized during single threaded startup, so visibly initialized.
4810   primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
4811   std::string_view descriptor(Primitive::Descriptor(type));
4812   ObjPtr<mirror::Class> existing = InsertClass(descriptor,
4813                                                primitive_class,
4814                                                ComputeModifiedUtf8Hash(descriptor));
4815   CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
4816   SetClassRoot(primitive_root, primitive_class);
4817 }
4818 
GetArrayIfTable()4819 inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4820   return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4821 }
4822 
4823 // Create an array class (i.e. the class object for the array, not the
4824 // array itself).  "descriptor" looks like "[C" or "[[[[B" or
4825 // "[Ljava/lang/String;".
4826 //
4827 // If "descriptor" refers to an array of primitives, look up the
4828 // primitive type's internally-generated class object.
4829 //
4830 // "class_loader" is the class loader of the class that's referring to
4831 // us.  It's used to ensure that we're looking for the element type in
4832 // the right context.  It does NOT become the class loader for the
4833 // array class; that always comes from the base element class.
4834 //
4835 // Returns null with an exception raised on failure.
CreateArrayClass(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader)4836 ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4837                                                     const char* descriptor,
4838                                                     size_t descriptor_length,
4839                                                     size_t hash,
4840                                                     Handle<mirror::ClassLoader> class_loader) {
4841   // Identify the underlying component type
4842   CHECK_EQ('[', descriptor[0]);
4843   std::string_view sv_descriptor(descriptor, descriptor_length);
4844   StackHandleScope<2> hs(self);
4845 
4846   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4847   // code to be executed. We put it up here so we can avoid all the allocations associated with
4848   // creating the class. This can happen with (eg) jit threads.
4849   if (!self->CanLoadClasses()) {
4850     // Make sure we don't try to load anything, potentially causing an infinite loop.
4851     ObjPtr<mirror::Throwable> pre_allocated =
4852         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4853     self->SetException(pre_allocated);
4854     return nullptr;
4855   }
4856 
4857   MutableHandle<mirror::Class> component_type =
4858       hs.NewHandle(FindClass(self, descriptor + 1, descriptor_length - 1, class_loader));
4859   if (component_type == nullptr) {
4860     DCHECK(self->IsExceptionPending());
4861     // We need to accept erroneous classes as component types. Under AOT, we
4862     // don't accept them as we cannot encode the erroneous class in an image.
4863     std::string_view component_descriptor = sv_descriptor.substr(1u);
4864     const size_t component_hash = ComputeModifiedUtf8Hash(component_descriptor);
4865     component_type.Assign(
4866         LookupClass(self, component_descriptor, component_hash, class_loader.Get()));
4867     if (component_type == nullptr || Runtime::Current()->IsAotCompiler()) {
4868       DCHECK(self->IsExceptionPending());
4869       return nullptr;
4870     } else {
4871       self->ClearException();
4872     }
4873   }
4874   if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4875     ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4876     return nullptr;
4877   }
4878   // See if the component type is already loaded.  Array classes are
4879   // always associated with the class loader of their underlying
4880   // element type -- an array of Strings goes with the loader for
4881   // java/lang/String -- so we need to look for it there.  (The
4882   // caller should have checked for the existence of the class
4883   // before calling here, but they did so with *their* class loader,
4884   // not the component type's loader.)
4885   //
4886   // If we find it, the caller adds "loader" to the class' initiating
4887   // loader list, which should prevent us from going through this again.
4888   //
4889   // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
4890   // are the same, because our caller (FindClass) just did the
4891   // lookup.  (Even if we get this wrong we still have correct behavior,
4892   // because we effectively do this lookup again when we add the new
4893   // class to the hash table --- necessary because of possible races with
4894   // other threads.)
4895   if (class_loader.Get() != component_type->GetClassLoader()) {
4896     ObjPtr<mirror::Class> new_class =
4897         LookupClass(self, sv_descriptor, hash, component_type->GetClassLoader());
4898     if (new_class != nullptr) {
4899       return new_class;
4900     }
4901   }
4902   // Core array classes, i.e. Object[], Class[], String[] and primitive
4903   // arrays, have special initialization and they should be found above.
4904   DCHECK_IMPLIES(component_type->IsObjectClass(),
4905                  // Guard from false positives for errors before setting superclass.
4906                  component_type->IsErroneousUnresolved());
4907   DCHECK(!component_type->IsStringClass());
4908   DCHECK(!component_type->IsClassClass());
4909   DCHECK(!component_type->IsPrimitive());
4910 
4911   // Fill out the fields in the Class.
4912   //
4913   // It is possible to execute some methods against arrays, because
4914   // all arrays are subclasses of java_lang_Object_, so we need to set
4915   // up a vtable.  We can just point at the one in java_lang_Object_.
4916   //
4917   // Array classes are simple enough that we don't need to do a full
4918   // link step.
4919   size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4920   auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4921                                                           size_t usable_size)
4922       REQUIRES_SHARED(Locks::mutator_lock_) {
4923     ScopedAssertNoTransactionChecks santc("CreateArrayClass");
4924     mirror::Class::InitializeClassVisitor init_class(array_class_size);
4925     init_class(obj, usable_size);
4926     ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4927     klass->SetComponentType(component_type.Get());
4928     // Do not hold lock for initialization, the fence issued after the visitor
4929     // returns ensures memory visibility together with the implicit consume
4930     // semantics (for all supported architectures) for any thread that loads
4931     // the array class reference from any memory locations afterwards.
4932     FinishArrayClassSetup(klass);
4933   };
4934   auto new_class = hs.NewHandle<mirror::Class>(
4935       AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
4936   if (new_class == nullptr) {
4937     self->AssertPendingOOMException();
4938     return nullptr;
4939   }
4940 
4941   ObjPtr<mirror::Class> existing = InsertClass(sv_descriptor, new_class.Get(), hash);
4942   if (existing == nullptr) {
4943     // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4944     // duplicate events in case of races. Array classes don't really follow dedicated
4945     // load and prepare, anyways.
4946     Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4947     Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4948 
4949     jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
4950     return new_class.Get();
4951   }
4952   // Another thread must have loaded the class after we
4953   // started but before we finished.  Abandon what we've
4954   // done.
4955   //
4956   // (Yes, this happens.)
4957 
4958   return existing;
4959 }
4960 
LookupPrimitiveClass(char type)4961 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4962   ClassRoot class_root;
4963   switch (type) {
4964     case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4965     case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4966     case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4967     case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4968     case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4969     case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4970     case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4971     case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4972     case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4973     default:
4974       return nullptr;
4975   }
4976   return GetClassRoot(class_root, this);
4977 }
4978 
FindPrimitiveClass(char type)4979 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4980   ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4981   if (UNLIKELY(result == nullptr)) {
4982     std::string printable_type(PrintableChar(type));
4983     ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4984   }
4985   return result;
4986 }
4987 
ObjPtr<mirror::Class> ClassLinker::InsertClass(std::string_view descriptor,
                                               ObjPtr<mirror::Class> klass,
                                               size_t hash) {
  // Insert `klass` into the class table of its defining class loader, keyed
  // by `descriptor` and its precomputed `hash`. Returns null on successful
  // insertion. If another thread raced us and already inserted a class with
  // the same descriptor, the table is left unchanged and that existing class
  // is returned instead.
  DCHECK(Thread::Current()->CanLoadClasses());
  if (VLOG_IS_ON(class_linker)) {
    ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
    std::string source;
    if (dex_cache != nullptr) {
      source += " from ";
      source += dex_cache->GetLocation()->ToModifiedUtf8();
    }
    LOG(INFO) << "Loaded class " << descriptor << source;
  }
  {
    WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
    const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
    // Re-check under the writer lock: another thread may have inserted the
    // same descriptor between our caller's lookup and this point.
    ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
    if (existing != nullptr) {
      return existing;
    }
    VerifyObject(klass);
    class_table->InsertWithHash(klass, hash);
    // The class table now holds a reference to `klass`; dirty the class
    // loader's card so remembered sets / mod union tables get updated.
    WriteBarrierOnClassLoaderLocked(class_loader, klass);
  }
  if (kIsDebugBuild) {
    // Test that copied methods correctly can find their holder.
    for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
      CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
    }
  }
  return nullptr;
}
5021 
WriteBarrierForBootOatFileBssRoots(const OatFile * oat_file)5022 void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
5023   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
5024   DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
5025   if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
5026     new_bss_roots_boot_oat_files_.push_back(oat_file);
5027   }
5028 }
5029 
5030 // TODO This should really be in mirror::Class.
UpdateClassMethods(ObjPtr<mirror::Class> klass,LengthPrefixedArray<ArtMethod> * new_methods)5031 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
5032                                      LengthPrefixedArray<ArtMethod>* new_methods) {
5033   klass->SetMethodsPtrUnchecked(new_methods,
5034                                 klass->NumDirectMethods(),
5035                                 klass->NumDeclaredVirtualMethods());
5036   // Need to mark the card so that the remembered sets and mod union tables get updated.
5037   WriteBarrier::ForEveryFieldWrite(klass);
5038 }
5039 
LookupClass(Thread * self,std::string_view descriptor,ObjPtr<mirror::ClassLoader> class_loader)5040 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
5041                                                std::string_view descriptor,
5042                                                ObjPtr<mirror::ClassLoader> class_loader) {
5043   return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
5044 }
5045 
LookupClass(Thread * self,std::string_view descriptor,size_t hash,ObjPtr<mirror::ClassLoader> class_loader)5046 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
5047                                                std::string_view descriptor,
5048                                                size_t hash,
5049                                                ObjPtr<mirror::ClassLoader> class_loader) {
5050   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
5051   ClassTable* const class_table = ClassTableForClassLoader(class_loader);
5052   if (class_table != nullptr) {
5053     ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
5054     if (result != nullptr) {
5055       return result;
5056     }
5057   }
5058   return nullptr;
5059 }
5060 
5061 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
5062  public:
MoveClassTableToPreZygoteVisitor()5063   MoveClassTableToPreZygoteVisitor() {}
5064 
Visit(ObjPtr<mirror::ClassLoader> class_loader)5065   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
5066       REQUIRES(Locks::classlinker_classes_lock_)
5067       REQUIRES_SHARED(Locks::mutator_lock_) override {
5068     ClassTable* const class_table = class_loader->GetClassTable();
5069     if (class_table != nullptr) {
5070       class_table->FreezeSnapshot();
5071     }
5072   }
5073 };
5074 
MoveClassTableToPreZygote()5075 void ClassLinker::MoveClassTableToPreZygote() {
5076   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
5077   boot_class_table_->FreezeSnapshot();
5078   MoveClassTableToPreZygoteVisitor visitor;
5079   VisitClassLoaders(&visitor);
5080 }
5081 
bool ClassLinker::AttemptSupertypeVerification(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               Handle<mirror::Class> supertype) {
  // Verify `supertype` (a supertype of `klass`) if it hasn't been verified
  // yet. Returns true when the supertype verified or soft-failed (AOT only).
  // On hard failure: throws a VerifyError on `klass` (chaining any pending
  // exception as its cause), marks `klass` kErrorResolved, and returns false.
  DCHECK(self != nullptr);
  DCHECK(klass != nullptr);
  DCHECK(supertype != nullptr);

  if (!supertype->IsVerified() && !supertype->IsErroneous()) {
    VerifyClass(self, verifier_deps, supertype);
  }

  if (supertype->IsVerified()
      || supertype->ShouldVerifyAtRuntime()
      || supertype->IsVerifiedNeedsAccessChecks()) {
    // The supertype is either verified, or we soft failed at AOT time.
    DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
    return true;
  }
  // If we got this far then we have a hard failure.
  std::string error_msg =
      StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
                   klass->PrettyDescriptor().c_str(),
                   supertype->PrettyDescriptor().c_str());
  LOG(WARNING) << error_msg  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
  StackHandleScope<1> hs(self);
  // Save any exception raised during VerifyClass above so it can become the
  // cause of the VerifyError we throw below.
  Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
  if (cause != nullptr) {
    // Set during VerifyClass call (if at all).
    self->ClearException();
  }
  // Change into a verify error.
  ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
  if (cause != nullptr) {
    self->GetException()->SetCause(cause.Get());
  }
  ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
  if (Runtime::Current()->IsAotCompiler()) {
    // Let the compiler know this class failed so it can be skipped/recorded.
    Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
  }
  // Need to grab the lock to change status.
  ObjectLock<mirror::Class> super_lock(self, klass);
  mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
  return false;
}
5127 
verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               verifier::HardFailLogMode log_level) {
  // Drive verification of `klass`: wait out concurrent verification, fast-path
  // already-decided statuses, verify supertypes and default-method interfaces,
  // then verify the class itself (using AOT/vdex results when trustworthy) and
  // publish the resulting status. Returns the kind of verification failure,
  // kNoFailure on success.
  {
    // TODO: assert that the monitor on the Class is held
    ObjectLock<mirror::Class> lock(self, klass);

    // Is somebody verifying this now?
    ClassStatus old_status = klass->GetStatus();
    while (old_status == ClassStatus::kVerifying) {
      lock.WaitIgnoringInterrupts();
      // WaitIgnoringInterrupts can still receive an interrupt and return early, in this
      // case we may see the same status again. b/62912904. This is why the check is
      // greater or equal.
      CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
          << "Class '" << klass->PrettyClass()
          << "' performed an illegal verification state transition from " << old_status
          << " to " << klass->GetStatus();
      old_status = klass->GetStatus();
    }

    // The class might already be erroneous, for example at compile time if we attempted to verify
    // this class as a parent to another.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get());
      return verifier::FailureKind::kHardFailure;
    }

    // Don't attempt to re-verify if already verified.
    if (klass->IsVerified()) {
      if (verifier_deps != nullptr &&
          verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
          !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
          !Runtime::Current()->IsAotCompiler()) {
        // If the klass is verified, but `verifier_deps` did not record it, this
        // means we are running background verification of a secondary dex file.
        // Re-run the verifier to populate `verifier_deps`.
        // No need to run the verification when running on the AOT Compiler, as
        // the driver handles those multithreaded cases already.
        std::string error_msg;
        verifier::FailureKind failure =
            PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
        // We could have soft failures, so just check that we don't have a hard
        // failure.
        DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
      }
      return verifier::FailureKind::kNoFailure;
    }

    if (klass->IsVerifiedNeedsAccessChecks()) {
      if (!Runtime::Current()->IsAotCompiler()) {
        // Mark the class as having a verification attempt to avoid re-running
        // the verifier.
        mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      }
      return verifier::FailureKind::kAccessChecksFailure;
    }

    // For AOT, don't attempt to re-verify if we have already found we should
    // verify at runtime.
    if (klass->ShouldVerifyAtRuntime()) {
      CHECK(Runtime::Current()->IsAotCompiler());
      return verifier::FailureKind::kSoftFailure;
    }

    // Transition to kVerifying; concurrent callers will wait in the loop above.
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
    mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);

    // Skip verification if disabled.
    if (!Runtime::Current()->IsVerificationEnabled()) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
      return verifier::FailureKind::kNoFailure;
    }
  }

  VLOG(class_linker) << "Beginning verification for class: "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();

  // Verify super class.
  StackHandleScope<2> hs(self);
  MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
  // If we have a superclass and we get a hard verification failure we can return immediately.
  if (supertype != nullptr &&
      !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
    CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
    return verifier::FailureKind::kHardFailure;
  }

  // Verify all default super-interfaces.
  //
  // (1) Don't bother if the superclass has already had a soft verification failure.
  //
  // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
  //     recursive initialization by themselves. This is because when an interface is initialized
  //     directly it must not initialize its superinterfaces. We are allowed to verify regardless
  //     but choose not to for an optimization. If the interfaces is being verified due to a class
  //     initialization (which would need all the default interfaces to be verified) the class code
  //     will trigger the recursive verification anyway.
  if ((supertype == nullptr || supertype->IsVerified())  // See (1)
      && !klass->IsInterface()) {                              // See (2)
    int32_t iftable_count = klass->GetIfTableCount();
    MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
    // Loop through all interfaces this class has defined. It doesn't matter the order.
    for (int32_t i = 0; i < iftable_count; i++) {
      iface.Assign(klass->GetIfTable()->GetInterface(i));
      DCHECK(iface != nullptr);
      // We only care if we have default interfaces and can skip if we are already verified...
      if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
        continue;
      } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
        // We had a hard failure while verifying this interface. Just return immediately.
        CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
        return verifier::FailureKind::kHardFailure;
      } else if (UNLIKELY(!iface->IsVerified())) {
        // We softly failed to verify the iface. Stop checking and clean up.
        // Put the iface into the supertype handle so we know what caused us to fail.
        supertype.Assign(iface.Get());
        break;
      }
    }
  }

  // At this point if verification failed, then supertype is the "first" supertype that failed
  // verification (without a specific order). If verification succeeded, then supertype is either
  // null or the original superclass of klass and is verified.
  DCHECK(supertype == nullptr ||
         supertype.Get() == klass->GetSuperClass() ||
         !supertype->IsVerified());

  // Try to use verification information from the oat file, otherwise do runtime verification.
  const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
  ClassStatus oat_file_class_status(ClassStatus::kNotReady);
  bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);

  VLOG(class_linker) << "Class preverified status for class "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                     << ": "
                     << preverified
                     << " (" << oat_file_class_status << ")";

  // If the oat file says the class had an error, re-run the verifier. That way we will either:
  // 1) Be successful at runtime, or
  // 2) Get a precise error message.
  DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);

  std::string error_msg;
  verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
  if (!preverified) {
    verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
  } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
    verifier_failure = verifier::FailureKind::kAccessChecksFailure;
  }

  // Verification is done, grab the lock again.
  ObjectLock<mirror::Class> lock(self, klass);
  self->AssertNoPendingException();

  if (verifier_failure == verifier::FailureKind::kHardFailure) {
    VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
                  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                  << " because: " << error_msg;
    ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
    mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
    return verifier_failure;
  }

  // Make sure all classes referenced by catch blocks are resolved.
  ResolveClassExceptionHandlerTypes(klass);

  if (Runtime::Current()->IsAotCompiler()) {
    if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
      // Regardless of our own verification result, we need to verify the class
      // at runtime if the super class is not verified. This is required in case
      // we generate an app/boot image.
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
    } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
               verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else {
      mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
    }
    // Notify the compiler about the verification status, in case the class
    // was verified implicitly (eg super class of a compiled class). When the
    // compiler unloads dex file after compilation, we still want to keep
    // verification states.
    Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
        ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
  } else {
    if (verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
      klass->SetHasTypeChecksFailure();
    }
    mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
  }

  UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
  return verifier_failure;
}
5331 
PerformClassVerification(Thread * self,verifier::VerifierDeps * verifier_deps,Handle<mirror::Class> klass,verifier::HardFailLogMode log_level,std::string * error_msg)5332 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
5333                                                             verifier::VerifierDeps* verifier_deps,
5334                                                             Handle<mirror::Class> klass,
5335                                                             verifier::HardFailLogMode log_level,
5336                                                             std::string* error_msg) {
5337   Runtime* const runtime = Runtime::Current();
5338   StackHandleScope<2> hs(self);
5339   Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5340   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5341   return verifier::ClassVerifier::VerifyClass(self,
5342                                               verifier_deps,
5343                                               dex_cache->GetDexFile(),
5344                                               klass,
5345                                               dex_cache,
5346                                               class_loader,
5347                                               *klass->GetClassDef(),
5348                                               runtime->GetCompilerCallbacks(),
5349                                               log_level,
5350                                               Runtime::Current()->GetTargetSdkVersion(),
5351                                               error_msg);
5352 }
5353 
VerifyClassUsingOatFile(Thread * self,const DexFile & dex_file,Handle<mirror::Class> klass,ClassStatus & oat_file_class_status)5354 bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
5355                                           const DexFile& dex_file,
5356                                           Handle<mirror::Class> klass,
5357                                           ClassStatus& oat_file_class_status) {
5358   // If we're compiling, we can only verify the class using the oat file if
5359   // we are not compiling the image or if the class we're verifying is not part of
5360   // the compilation unit (app - dependencies). We will let the compiler callback
5361   // tell us about the latter.
5362   if (Runtime::Current()->IsAotCompiler()) {
5363     CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
5364     // We are compiling an app (not the image).
5365     if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
5366       return false;
5367     }
5368   }
5369 
5370   const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
5371   // In case we run without an image there won't be a backing oat file.
5372   if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
5373     return false;
5374   }
5375 
5376   uint16_t class_def_index = klass->GetDexClassDefIndex();
5377   oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
5378   if (oat_file_class_status >= ClassStatus::kVerified) {
5379     return true;
5380   }
5381   if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
5382     // We return that the class has already been verified, and the caller should
5383     // check the class status to ensure we run with access checks.
5384     return true;
5385   }
5386 
5387   // Check the class status with the vdex file.
5388   const OatFile* oat_file = oat_dex_file->GetOatFile();
5389   if (oat_file != nullptr) {
5390     ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
5391     if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
5392       VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
5393       oat_file_class_status = vdex_status;
5394       return true;
5395     }
5396   }
5397 
5398   // If we only verified a subset of the classes at compile time, we can end up with classes that
5399   // were resolved by the verifier.
5400   if (oat_file_class_status == ClassStatus::kResolved) {
5401     return false;
5402   }
5403   // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
5404   CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
5405       << klass->PrettyClass() << " " << dex_file.GetLocation();
5406 
5407   if (mirror::Class::IsErroneous(oat_file_class_status)) {
5408     // Compile time verification failed with a hard error. We'll re-run
5409     // verification, which might be successful at runtime.
5410     return false;
5411   }
5412   if (oat_file_class_status == ClassStatus::kNotReady) {
5413     // Status is uninitialized if we couldn't determine the status at compile time, for example,
5414     // not loading the class.
5415     // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
5416     // isn't a problem and this case shouldn't occur
5417     return false;
5418   }
5419   std::string temp;
5420   LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
5421              << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
5422              << klass->GetDescriptor(&temp);
5423   UNREACHABLE();
5424 }
5425 
ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass)5426 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
5427   for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
5428     ResolveMethodExceptionHandlerTypes(&method);
5429   }
5430 }
5431 
void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
  // Eagerly resolve the exception types referenced by `method`'s catch
  // handlers so that exception delivery never needs to resolve them.
  // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
  CodeItemDataAccessor accessor(method->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return;  // native or abstract method
  }
  if (accessor.TriesSize() == 0) {
    return;  // nothing to process
  }
  const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
  CHECK(method->GetDexFile()->IsInDataSection(handlers_ptr))
      << method->PrettyMethod()
      << "@" << method->GetDexFile()->GetLocation()
      << "@" << reinterpret_cast<const void*>(handlers_ptr);

  // The handler data begins with a ULEB128 count of catch-handler lists;
  // DecodeUnsignedLeb128 advances handlers_ptr past that count.
  uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
  for (uint32_t idx = 0; idx < handlers_size; idx++) {
    CatchHandlerIterator iterator(handlers_ptr);
    for (; iterator.HasNext(); iterator.Next()) {
      // Ensure exception types are resolved so that they don't need resolution to be delivered,
      // unresolved exception types will be ignored by exception delivery
      if (iterator.GetHandlerTypeIndex().IsValid()) {
        ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
        if (exception_type == nullptr) {
          // Resolution failure is tolerated: clear the pending exception and
          // leave the type unresolved (delivery will skip it, per above).
          DCHECK(Thread::Current()->IsExceptionPending());
          Thread::Current()->ClearException();
        }
      }
    }
    // Step to the start of the next catch-handler list.
    handlers_ptr = iterator.EndDataPointer();
  }
}
5464 
// Builds a java.lang.reflect.Proxy subclass at runtime: allocates the class
// object, synthesizes its two static fields ('interfaces' and 'throws'),
// clones the Proxy constructor as the single direct method plus one virtual
// method per proxied (non-private, non-static) interface method, links the
// class, and conservatively marks it initialized.  Returns null with a
// pending exception on failure (pre-allocated NoClassDefFoundError when the
// thread may not load classes, OOME on allocation failure, or whatever
// LinkClass() threw).
// NOTE(review): between InsertClass() and setting the field/method arrays
// there must be no suspend point - field roots (ArtField::declaring_class_)
// are only visited through the class table (see comment at the insert below).
CreateProxyClass(ScopedObjectAccessAlreadyRunnable & soa,jstring name,jobjectArray interfaces,jobject loader,jobjectArray methods,jobjectArray throws)5465 ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
5466                                                     jstring name,
5467                                                     jobjectArray interfaces,
5468                                                     jobject loader,
5469                                                     jobjectArray methods,
5470                                                     jobjectArray throws) {
5471   Thread* self = soa.Self();
5472 
5473   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
5474   // code to be executed. We put it up here so we can avoid all the allocations associated with
5475   // creating the class. This can happen with (eg) jit-threads.
5476   if (!self->CanLoadClasses()) {
5477     // Make sure we don't try to load anything, potentially causing an infinite loop.
5478     ObjPtr<mirror::Throwable> pre_allocated =
5479         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
5480     self->SetException(pre_allocated);
5481     return nullptr;
5482   }
5483 
5484   StackHandleScope<12> hs(self);
5485   MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
5486       AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
5487   if (temp_klass == nullptr) {
5488     CHECK(self->IsExceptionPending());  // OOME.
5489     return nullptr;
5490   }
5491   DCHECK(temp_klass->GetClass() != nullptr);
5492   temp_klass->SetObjectSize(sizeof(mirror::Proxy));
5493   // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
5494   // the methods.
5495   temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
5496   temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
5497   DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
5498   temp_klass->SetName(soa.Decode<mirror::String>(name));
5499   temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
5500   // Object has an empty iftable, copy it for that reason.
5501   temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
5502   mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
5503   std::string descriptor;
5504   const char* raw_descriptor = temp_klass->GetDescriptor(&descriptor);
5505   DCHECK(raw_descriptor == descriptor.c_str());
5506   const size_t hash = ComputeModifiedUtf8Hash(descriptor);
5507 
5508   // Needs to be before we insert the class so that the allocator field is set.
5509   LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
5510 
5511   // Insert the class before loading the fields as the field roots
5512   // (ArtField::declaring_class_) are only visited from the class
5513   // table. There can't be any suspend points between inserting the
5514   // class and setting the field arrays below.
5515   ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
5516   CHECK(existing == nullptr);
5517 
5518   // Instance fields are inherited, but we add a couple of static fields...
5519   const size_t num_fields = 2;
5520   LengthPrefixedArray<ArtField>* fields = AllocArtFieldArray(self, allocator, num_fields);
5521   temp_klass->SetFieldsPtr(fields);
5522 
5523   // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
5524   // our proxy, so Class.getInterfaces doesn't return the flattened set.
5525   ArtField& interfaces_sfield = fields->At(0);
5526   interfaces_sfield.SetDexFieldIndex(0);
5527   interfaces_sfield.SetDeclaringClass(temp_klass.Get());
5528   interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
5529 
5530   // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
5531   ArtField& throws_sfield = fields->At(1);
5532   throws_sfield.SetDexFieldIndex(1);
5533   throws_sfield.SetDeclaringClass(temp_klass.Get());
5534   throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
5535 
5536   // Proxies have 1 direct method, the constructor
5537   const size_t num_direct_methods = 1;
5538 
5539   // The array we get passed contains all methods, including private and static
5540   // ones that aren't proxied. We need to filter those out since only interface
5541   // methods (non-private & virtual) are actually proxied.
5542   Handle<mirror::ObjectArray<mirror::Method>> h_methods =
5543       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
5544   DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
5545       << mirror::Class::PrettyClass(h_methods->GetClass());
5546   // List of the actual virtual methods this class will have.
5547   std::vector<ArtMethod*> proxied_methods;
5548   std::vector<size_t> proxied_throws_idx;
5549   proxied_methods.reserve(h_methods->GetLength());
5550   proxied_throws_idx.reserve(h_methods->GetLength());
5551   // Filter out to only the non-private virtual methods.
5552   for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
5553     ArtMethod* m = mirror->GetArtMethod();
5554     if (!m->IsPrivate() && !m->IsStatic()) {
5555       proxied_methods.push_back(m);
5556       proxied_throws_idx.push_back(idx);
5557     }
5558   }
5559   const size_t num_virtual_methods = proxied_methods.size();
5560   // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
5561   // contains an array of all the classes each function is declared to throw.
5562   // This is used to wrap unexpected exceptions in a
5563   // UndeclaredThrowableException exception. This array is in the same order as
5564   // the methods array and like the methods array must be filtered to remove any
5565   // non-proxied methods.
5566   const bool has_filtered_methods =
5567       static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
5568   MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
5569       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
5570   MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
5571       hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
5572           (has_filtered_methods)
5573               ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
5574                     self, original_proxied_throws->GetClass(), num_virtual_methods)
5575               : original_proxied_throws.Get()));
5576   if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
5577     self->AssertPendingOOMException();
5578     return nullptr;
5579   }
5580   if (has_filtered_methods) {
5581     for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
5582       DCHECK_LE(new_idx, orig_idx);
5583       proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
5584     }
5585   }
5586 
5587   // Create the methods array.
5588   LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
5589         self, allocator, num_direct_methods + num_virtual_methods);
5590   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
5591   // want to throw OOM in the future.
5592   if (UNLIKELY(proxy_class_methods == nullptr)) {
5593     self->AssertPendingOOMException();
5594     return nullptr;
5595   }
5596   temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
5597 
5598   // Create the single direct method.
5599   CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
5600 
5601   // Create virtual method using specified prototypes.
5602   // TODO These should really use the iterators.
5603   for (size_t i = 0; i < num_virtual_methods; ++i) {
5604     auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5605     auto* prototype = proxied_methods[i];
5606     CreateProxyMethod(temp_klass, prototype, virtual_method);
5607     DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5608     DCHECK(prototype->GetDeclaringClass() != nullptr);
5609   }
5610 
5611   // The super class is java.lang.reflect.Proxy
5612   temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
5613   // Now effectively in the loaded state.
5614   mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
5615   self->AssertNoPendingException();
5616 
5617   // At this point the class is loaded. Publish a ClassLoad event.
5618   // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5619   Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5620 
5621   MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
5622   {
5623     // Must hold lock on object when resolved.
5624     ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
5625     // Link the fields and virtual methods, creating vtable and iftables.
5626     // The new class will replace the old one in the class table.
5627     Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
5628         hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
5629     if (!LinkClass(self, descriptor.c_str(), temp_klass, h_interfaces, &klass)) {
5630       if (!temp_klass->IsErroneous()) {
5631         mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
5632       }
5633       return nullptr;
5634     }
5635   }
5636   CHECK(temp_klass->IsRetired());
5637   CHECK_NE(temp_klass.Get(), klass.Get());
5638 
5639   CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
5640   interfaces_sfield.SetObject<false>(
5641       klass.Get(),
5642       soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5643   CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5644   throws_sfield.SetObject<false>(
5645       klass.Get(),
5646       proxied_throws.Get());
5647 
5648   Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5649 
5650   // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5651   // See also ClassLinker::EnsureInitialized().
5652   if (kBitstringSubtypeCheckEnabled) {
5653     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5654     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5655     // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5656   }
5657 
5658   VisiblyInitializedCallback* callback = nullptr;
5659   {
5660     // Lock on klass is released. Lock new class object.
5661     ObjectLock<mirror::Class> initialization_lock(self, klass);
5662     // Conservatively go through the ClassStatus::kInitialized state.
5663     callback = MarkClassInitialized(self, klass);
5664   }
5665   if (callback != nullptr) {
5666     callback->MakeVisible(self);
5667   }
5668 
5669   // Consistency checks.
5670   if (kIsDebugBuild) {
5671     CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5672 
5673     for (size_t i = 0; i < num_virtual_methods; ++i) {
5674       auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5675       CheckProxyMethod(virtual_method, proxied_methods[i]);
5676     }
5677 
5678     StackHandleScope<1> hs2(self);
5679     Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
5680     std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
5681                                                    decoded_name->ToModifiedUtf8().c_str()));
5682     CHECK_EQ(ArtField::PrettyField(klass->GetField(0)), interfaces_field_name);
5683 
5684     std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
5685                                                decoded_name->ToModifiedUtf8().c_str()));
5686     CHECK_EQ(ArtField::PrettyField(klass->GetField(1)), throws_field_name);
5687 
5688     CHECK_EQ(klass.Get()->GetProxyInterfaces(),
5689              soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5690     CHECK_EQ(klass.Get()->GetProxyThrows(),
5691              proxied_throws.Get());
5692   }
5693   return klass.Get();
5694 }
5695 
CreateProxyConstructor(Handle<mirror::Class> klass,ArtMethod * out)5696 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5697   // Create constructor for Proxy that must initialize the method.
5698   ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5699   CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5700 
5701   // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5702   // on which front-end compiler was used to build the libcore DEX files.
5703   ArtMethod* proxy_constructor = WellKnownClasses::java_lang_reflect_Proxy_init;
5704   DCHECK(proxy_constructor != nullptr)
5705       << "Could not find <init> method in java.lang.reflect.Proxy";
5706 
5707   // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5708   // code_ too)
5709   DCHECK(out != nullptr);
5710   out->CopyFrom(proxy_constructor, image_pointer_size_);
5711   // Make this constructor public and fix the class to be our Proxy version.
5712   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5713   // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5714   out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5715                       kAccPublic |
5716                       kAccCompileDontBother);
5717   out->SetDeclaringClass(klass.Get());
5718 
5719   // Set the original constructor method.
5720   out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5721 }
5722 
CheckProxyConstructor(ArtMethod * constructor) const5723 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5724   CHECK(constructor->IsConstructor());
5725   auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5726   CHECK_STREQ(np->GetName(), "<init>");
5727   CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5728   DCHECK(constructor->IsPublic());
5729 }
5730 
CreateProxyMethod(Handle<mirror::Class> klass,ArtMethod * prototype,ArtMethod * out)5731 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5732                                     ArtMethod* out) {
5733   // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
5734   // as necessary
5735   DCHECK(out != nullptr);
5736   out->CopyFrom(prototype, image_pointer_size_);
5737 
5738   // Set class to be the concrete proxy class.
5739   out->SetDeclaringClass(klass.Get());
5740   // Clear the abstract and default flags to ensure that defaults aren't picked in
5741   // preference to the invocation handler.
5742   const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
5743   static_assert((kAccDefault & kAccIntrinsicBits) != 0);
5744   DCHECK(!out->IsIntrinsic()) << "Removing kAccDefault from an intrinsic would be a mistake as it "
5745                               << "overlaps with kAccIntrinsicBits.";
5746   // Make the method final.
5747   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5748   const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5749   out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5750 
5751   // Set the original interface method.
5752   out->SetDataPtrSize(prototype, image_pointer_size_);
5753 
5754   // At runtime the method looks like a reference and argument saving method, clone the code
5755   // related parameters from this method.
5756   out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
5757 }
5758 
CheckProxyMethod(ArtMethod * method,ArtMethod * prototype) const5759 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5760   // Basic consistency checks.
5761   CHECK(!prototype->IsFinal());
5762   CHECK(method->IsFinal());
5763   CHECK(method->IsInvokable());
5764 
5765   // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5766   // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
5767   CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5768   CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5769 }
5770 
CanWeInitializeClass(ObjPtr<mirror::Class> klass,bool can_init_statics,bool can_init_parents)5771 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass,
5772                                        bool can_init_statics,
5773                                        bool can_init_parents) {
5774   if (can_init_statics && can_init_parents) {
5775     return true;
5776   }
5777   DCHECK(Runtime::Current()->IsAotCompiler());
5778 
5779   // We currently don't support initializing at AOT time classes that need access
5780   // checks.
5781   if (klass->IsVerifiedNeedsAccessChecks()) {
5782     return false;
5783   }
5784   if (!can_init_statics) {
5785     // Check if there's a class initializer.
5786     ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5787     if (clinit != nullptr) {
5788       return false;
5789     }
5790     // Check if there are encoded static values needing initialization.
5791     if (klass->HasStaticFields()) {
5792       const dex::ClassDef* dex_class_def = klass->GetClassDef();
5793       DCHECK(dex_class_def != nullptr);
5794       if (dex_class_def->static_values_off_ != 0) {
5795         return false;
5796       }
5797     }
5798   }
5799   // If we are a class we need to initialize all interfaces with default methods when we are
5800   // initialized. Check all of them.
5801   if (!klass->IsInterface()) {
5802     size_t num_interfaces = klass->GetIfTableCount();
5803     for (size_t i = 0; i < num_interfaces; i++) {
5804       ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5805       if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5806         if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5807           return false;
5808         }
5809       }
5810     }
5811   }
5812   if (klass->IsInterface() || !klass->HasSuperClass()) {
5813     return true;
5814   }
5815   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5816   if (super_class->IsInitialized()) {
5817     return true;
5818   }
5819   return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5820 }
5821 
// Runs class initialization for `klass` following the JLS 12.4.2 "Detailed
// Initialization Procedure" locking protocol: verify if needed, mark
// kInitializing under the class lock, initialize the superclass and any
// default-method interfaces, apply encoded static field values, invoke
// <clinit>, then publish success (kInitialized, via MarkClassInitialized)
// or failure (kErrorResolved).  Returns true on success; on failure an
// exception is pending, except when the CanWeInitializeClass() fast-fail
// path rejects the class (AOT-compiler-only restriction, no exception set).
InitializeClass(Thread * self,Handle<mirror::Class> klass,bool can_init_statics,bool can_init_parents)5822 bool ClassLinker::InitializeClass(Thread* self,
5823                                   Handle<mirror::Class> klass,
5824                                   bool can_init_statics,
5825                                   bool can_init_parents) {
5826   // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
5827 
5828   // Are we already initialized and therefore done?
5829   // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
5830   // an initialized class will never change its state.
5831   if (klass->IsInitialized()) {
5832     return true;
5833   }
5834 
5835   // Fast fail if initialization requires a full runtime. Not part of the JLS.
5836   if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
5837     return false;
5838   }
5839 
5840   self->AllowThreadSuspension();
5841   Runtime* const runtime = Runtime::Current();
5842   const bool stats_enabled = runtime->HasStatsEnabled();
5843   uint64_t t0;
5844   {
5845     ObjectLock<mirror::Class> lock(self, klass);
5846 
5847     // Re-check under the lock in case another thread initialized ahead of us.
5848     if (klass->IsInitialized()) {
5849       return true;
5850     }
5851 
5852     // Was the class already found to be erroneous? Done under the lock to match the JLS.
5853     if (klass->IsErroneous()) {
5854       ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
5855       VlogClassInitializationFailure(klass);
5856       return false;
5857     }
5858 
5859     CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5860         << klass->PrettyClass() << ": state=" << klass->GetStatus();
5861 
5862     if (!klass->IsVerified()) {
5863       VerifyClass(self, /*verifier_deps= */ nullptr, klass);
5864       if (!klass->IsVerified()) {
5865         // We failed to verify, expect either the klass to be erroneous or verification failed at
5866         // compile time.
5867         if (klass->IsErroneous()) {
5868           // The class is erroneous. This may be a verifier error, or another thread attempted
5869           // verification and/or initialization and failed. We can distinguish those cases by
5870           // whether an exception is already pending.
5871           if (self->IsExceptionPending()) {
5872             // Check that it's a VerifyError.
5873             DCHECK(IsVerifyError(self->GetException()));
5874           } else {
5875             // Check that another thread attempted initialization.
5876             DCHECK_NE(0, klass->GetClinitThreadId());
5877             DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5878             // Need to rethrow the previous failure now.
5879             ThrowEarlierClassFailure(klass.Get(), true);
5880           }
5881           VlogClassInitializationFailure(klass);
5882         } else {
5883           CHECK(Runtime::Current()->IsAotCompiler());
5884           CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
5885           self->AssertNoPendingException();
5886           self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
5887         }
5888         self->AssertPendingException();
5889         return false;
5890       } else {
5891         self->AssertNoPendingException();
5892       }
5893 
5894       // A separate thread could have moved us all the way to initialized. A "simple" example
5895       // involves a subclass of the current class being initialized at the same time (which
5896       // will implicitly initialize the superclass, if scheduled that way). b/28254258
5897       DCHECK(!klass->IsErroneous()) << klass->GetStatus();
5898       if (klass->IsInitialized()) {
5899         return true;
5900       }
5901     }
5902 
5903     // If the class is ClassStatus::kInitializing, either this thread is
5904     // initializing higher up the stack or another thread has beat us
5905     // to initializing and we need to wait. Either way, this
5906     // invocation of InitializeClass will not be responsible for
5907     // running <clinit> and will return.
5908     if (klass->GetStatus() == ClassStatus::kInitializing) {
5909       // Could have got an exception during verification.
5910       if (self->IsExceptionPending()) {
5911         VlogClassInitializationFailure(klass);
5912         return false;
5913       }
5914       // We caught somebody else in the act; was it us?
5915       if (klass->GetClinitThreadId() == self->GetTid()) {
5916         // Yes. That's fine. Return so we can continue initializing.
5917         return true;
5918       }
5919       // No. That's fine. Wait for another thread to finish initializing.
5920       return WaitForInitializeClass(klass, self, lock);
5921     }
5922 
5923     // Try to get the oat class's status for this class if the oat file is present. The compiler
5924     // tries to validate superclass descriptors, and writes the result into the oat file.
5925     // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5926     // is different at runtime than it was at compile time, the oat file is rejected. So if the
5927     // oat file is present, the classpaths must match, and the runtime time check can be skipped.
5928     bool has_oat_class = false;
5929     const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5930         ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5931         : OatFile::OatClass::Invalid();
5932     if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
5933         !ValidateSuperClassDescriptors(klass)) {
5934       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5935       return false;
5936     }
5937     self->AllowThreadSuspension();
5938 
5939     CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
5940         << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
5941 
5942     // From here out other threads may observe that we're initializing and so changes of state
5943     // require the a notification.
5944     klass->SetClinitThreadId(self->GetTid());
5945     mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
5946 
5947     t0 = stats_enabled ? NanoTime() : 0u;
5948   }
5949 
5950   uint64_t t_sub = 0;
5951 
5952   // Initialize super classes, must be done while initializing for the JLS.
5953   if (!klass->IsInterface() && klass->HasSuperClass()) {
5954     ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5955     if (!super_class->IsInitialized()) {
5956       CHECK(!super_class->IsInterface());
5957       CHECK(can_init_parents);
5958       StackHandleScope<1> hs(self);
5959       Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
5960       uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
5961       bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
5962       uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
5963       if (!super_initialized) {
5964         // The super class was verified ahead of entering initializing, we should only be here if
5965         // the super class became erroneous due to initialization.
5966         // For the case of aot compiler, the super class might also be initializing but we don't
5967         // want to process circular dependencies in pre-compile.
5968         CHECK(self->IsExceptionPending())
5969             << "Super class initialization failed for "
5970             << handle_scope_super->PrettyDescriptor()
5971             << " that has unexpected status " << handle_scope_super->GetStatus()
5972             << "\nPending exception:\n"
5973             << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
5974         ObjectLock<mirror::Class> lock(self, klass);
5975         // Initialization failed because the super-class is erroneous.
5976         mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5977         return false;
5978       }
5979       t_sub = super_t1 - super_t0;
5980     }
5981   }
5982 
5983   if (!klass->IsInterface()) {
5984     // Initialize interfaces with default methods for the JLS.
5985     size_t num_direct_interfaces = klass->NumDirectInterfaces();
5986     // Only setup the (expensive) handle scope if we actually need to.
5987     if (UNLIKELY(num_direct_interfaces > 0)) {
5988       StackHandleScope<1> hs_iface(self);
5989       MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5990       for (size_t i = 0; i < num_direct_interfaces; i++) {
5991         handle_scope_iface.Assign(klass->GetDirectInterface(i));
5992         CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
5993         CHECK(handle_scope_iface->IsInterface());
5994         if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5995           // We have already done this for this interface. Skip it.
5996           continue;
5997         }
5998         // We cannot just call initialize class directly because we need to ensure that ALL
5999         // interfaces with default methods are initialized. Non-default interface initialization
6000         // will not affect other non-default super-interfaces.
6001         // This is not very precise, misses all walking.
6002         uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
6003         bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
6004                                                                      handle_scope_iface,
6005                                                                      can_init_statics,
6006                                                                      can_init_parents);
6007         uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
6008         if (!iface_initialized) {
6009           ObjectLock<mirror::Class> lock(self, klass);
6010           // Initialization failed because one of our interfaces with default methods is erroneous.
6011           mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
6012           return false;
6013         }
6014         t_sub += inf_t1 - inf_t0;
6015       }
6016     }
6017   }
6018 
6019   if (klass->HasStaticFields()) {
6020     const dex::ClassDef* dex_class_def = klass->GetClassDef();
6021     CHECK(dex_class_def != nullptr);
6022     StackHandleScope<2> hs(self);
6023     Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
6024     Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
6025 
6026     // Eagerly fill in static fields so that the we don't have to do as many expensive
6027     // Class::FindStaticField in ResolveField.
6028     for (size_t i = 0; i < klass->NumFields(); ++i) {
6029       ArtField* field = klass->GetField(i);
6030       if (!field->IsStatic()) {
6031         continue;
6032       }
6033       const uint32_t field_idx = field->GetDexFieldIndex();
6034       ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
6035       if (resolved_field == nullptr) {
6036         // Populating cache of a dex file which defines `klass` should always be allowed.
6037         DCHECK(!hiddenapi::ShouldDenyAccessToMember(
6038             field,
6039             hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
6040             hiddenapi::AccessMethod::kCheckWithPolicy));
6041         dex_cache->SetResolvedField(field_idx, field);
6042       } else {
6043         DCHECK_EQ(field, resolved_field);
6044       }
6045     }
6046 
6047     annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
6048                                                                  class_loader,
6049                                                                  this,
6050                                                                  *dex_class_def);
6051     const DexFile& dex_file = *dex_cache->GetDexFile();
6052 
6053     if (value_it.HasNext()) {
6054       ClassAccessor accessor(dex_file, *dex_class_def);
6055       CHECK(can_init_statics);
6056       for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
6057         if (!value_it.HasNext()) {
6058           break;
6059         }
6060         ArtField* art_field = ResolveField(field.GetIndex(),
6061                                            dex_cache,
6062                                            class_loader,
6063                                            /* is_static= */ true);
6064         if (Runtime::Current()->IsActiveTransaction()) {
6065           value_it.ReadValueToField<true>(art_field);
6066         } else {
6067           value_it.ReadValueToField<false>(art_field);
6068         }
6069         if (self->IsExceptionPending()) {
6070           break;
6071         }
6072         value_it.Next();
6073       }
6074       DCHECK(self->IsExceptionPending() || !value_it.HasNext());
6075     }
6076   }
6077 
6078 
6079   if (!self->IsExceptionPending()) {
6080     ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
6081     if (clinit != nullptr) {
6082       CHECK(can_init_statics);
6083       JValue result;
6084       clinit->Invoke(self, nullptr, 0, &result, "V");
6085     }
6086   }
6087   self->AllowThreadSuspension();
6088   uint64_t t1 = stats_enabled ? NanoTime() : 0u;
6089 
6090   VisiblyInitializedCallback* callback = nullptr;
6091   bool success = true;
6092   {
6093     ObjectLock<mirror::Class> lock(self, klass);
6094 
6095     if (self->IsExceptionPending()) {
6096       WrapExceptionInInitializer(klass);
6097       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
6098       success = false;
6099     } else if (Runtime::Current()->IsActiveTransaction() && IsTransactionAborted()) {
6100       // The exception thrown when the transaction aborted has been caught and cleared
6101       // so we need to throw it again now.
6102       VLOG(compiler) << "Return from class initializer of "
6103                      << mirror::Class::PrettyDescriptor(klass.Get())
6104                      << " without exception while transaction was aborted: re-throw it now.";
6105       ThrowTransactionAbortError(self);
6106       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
6107       success = false;
6108     } else {
6109       if (stats_enabled) {
6110         RuntimeStats* global_stats = runtime->GetStats();
6111         RuntimeStats* thread_stats = self->GetStats();
6112         ++global_stats->class_init_count;
6113         ++thread_stats->class_init_count;
6114         global_stats->class_init_time_ns += (t1 - t0 - t_sub);
6115         thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
6116       }
6117       // Set the class as initialized except if failed to initialize static fields.
6118       callback = MarkClassInitialized(self, klass);
6119       if (VLOG_IS_ON(class_linker)) {
6120         std::string temp;
6121         LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
6122             klass->GetLocation();
6123       }
6124     }
6125   }
6126   if (callback != nullptr) {
6127     callback->MakeVisible(self);
6128   }
6129   return success;
6130 }
6131 
// We recursively run down the tree of interfaces. We need to do this in the order they are declared
// and perform the initialization only on those interfaces that contain default methods.
// Interfaces without default methods must NOT be initialized here (JLS 12.4.1: implementing a
// class does not trigger initialization of super-interfaces without defaults). Returns false with
// an exception pending if any required interface initialization fails.
bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
                                                      Handle<mirror::Class> iface,
                                                      bool can_init_statics,
                                                      bool can_init_parents) {
  CHECK(iface->IsInterface());
  size_t num_direct_ifaces = iface->NumDirectInterfaces();
  // Only create the (expensive) handle scope if we need it.
  if (UNLIKELY(num_direct_ifaces > 0)) {
    StackHandleScope<1> hs(self);
    // A single reusable handle: the recursive call may suspend, so the super-interface must be
    // held in a Handle rather than a bare ObjPtr across it.
    MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
    // First we initialize all of iface's super-interfaces recursively.
    for (size_t i = 0; i < num_direct_ifaces; i++) {
      ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
      CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
      if (!super_iface->HasBeenRecursivelyInitialized()) {
        // Recursive step
        handle_super_iface.Assign(super_iface);
        if (!InitializeDefaultInterfaceRecursive(self,
                                                 handle_super_iface,
                                                 can_init_statics,
                                                 can_init_parents)) {
          return false;
        }
      }
    }
  }

  bool result = true;
  // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
  // initialize if we don't have default methods.
  if (iface->HasDefaultMethods()) {
    result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
  }

  // Mark that this interface has undergone recursive default interface initialization so we know we
  // can skip it on any later class initializations. We do this even if we are not a default
  // interface since we can still avoid the traversal. This is purely a performance optimization.
  if (result) {
    // TODO This should be done in a better way
    // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
    //       interface. It is bad (Java) style, but not impossible. Marking the recursive
    //       initialization is a performance optimization (to avoid another idempotent visit
    //       for other implementing classes/interfaces), and can be revisited later.
    ObjectTryLock<mirror::Class> lock(self, iface);
    if (lock.Acquired()) {
      iface->SetRecursivelyInitialized();
    }
  }
  return result;
}
6184 
// Blocks on `lock` (the monitor of the class object, already held by the caller) until another
// thread finishes initializing `klass`, then reports the outcome:
//   - true: the other thread successfully initialized the class.
//   - false with an exception pending: initialization failed elsewhere (a NoClassDefFoundError is
//     synthesized here since the original exception was thrown in the other thread), or — during
//     AOT compilation only — compile-time initialization was abandoned (status back to kVerified).
bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
                                         Thread* self,
                                         ObjectLock<mirror::Class>& lock)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  while (true) {
    self->AssertNoPendingException();
    CHECK(!klass->IsInitialized());
    lock.WaitIgnoringInterrupts();

    // When we wake up, repeat the test for init-in-progress.  If
    // there's an exception pending (only possible if
    // we were not using WaitIgnoringInterrupts), bail out.
    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    // Spurious wakeup? Go back to waiting.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      continue;
    }
    if (klass->GetStatus() == ClassStatus::kVerified &&
        Runtime::Current()->IsAotCompiler()) {
      // Compile time initialization failed.
      return false;
    }
    if (klass->IsErroneous()) {
      // The caller wants an exception, but it was thrown in a
      // different thread.  Synthesize one here.
      ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
                                klass->PrettyDescriptor().c_str());
      VlogClassInitializationFailure(klass);
      return false;
    }
    if (klass->IsInitialized()) {
      return true;
    }
    LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
        << klass->GetStatus();
  }
  UNREACHABLE();
}
6227 
ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m)6228 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
6229                                                           Handle<mirror::Class> super_klass,
6230                                                           ArtMethod* method,
6231                                                           ArtMethod* m)
6232     REQUIRES_SHARED(Locks::mutator_lock_) {
6233   DCHECK(Thread::Current()->IsExceptionPending());
6234   DCHECK(!m->IsProxyMethod());
6235   const DexFile* dex_file = m->GetDexFile();
6236   const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
6237   const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
6238   dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
6239   std::string return_type = dex_file->PrettyType(return_type_idx);
6240   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
6241   ThrowWrappedLinkageError(klass.Get(),
6242                            "While checking class %s method %s signature against %s %s: "
6243                            "Failed to resolve return type %s with %s",
6244                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6245                            ArtMethod::PrettyMethod(method).c_str(),
6246                            super_klass->IsInterface() ? "interface" : "superclass",
6247                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6248                            return_type.c_str(), class_loader.c_str());
6249 }
6250 
ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m,uint32_t index,dex::TypeIndex arg_type_idx)6251 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
6252                                                    Handle<mirror::Class> super_klass,
6253                                                    ArtMethod* method,
6254                                                    ArtMethod* m,
6255                                                    uint32_t index,
6256                                                    dex::TypeIndex arg_type_idx)
6257     REQUIRES_SHARED(Locks::mutator_lock_) {
6258   DCHECK(Thread::Current()->IsExceptionPending());
6259   DCHECK(!m->IsProxyMethod());
6260   const DexFile* dex_file = m->GetDexFile();
6261   std::string arg_type = dex_file->PrettyType(arg_type_idx);
6262   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
6263   ThrowWrappedLinkageError(klass.Get(),
6264                            "While checking class %s method %s signature against %s %s: "
6265                            "Failed to resolve arg %u type %s with %s",
6266                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6267                            ArtMethod::PrettyMethod(method).c_str(),
6268                            super_klass->IsInterface() ? "interface" : "superclass",
6269                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6270                            index, arg_type.c_str(), class_loader.c_str());
6271 }
6272 
ThrowSignatureMismatch(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,const std::string & error_msg)6273 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
6274                                    Handle<mirror::Class> super_klass,
6275                                    ArtMethod* method,
6276                                    const std::string& error_msg)
6277     REQUIRES_SHARED(Locks::mutator_lock_) {
6278   ThrowLinkageError(klass.Get(),
6279                     "Class %s method %s resolves differently in %s %s: %s",
6280                     mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6281                     ArtMethod::PrettyMethod(method).c_str(),
6282                     super_klass->IsInterface() ? "interface" : "superclass",
6283                     mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6284                     error_msg.c_str());
6285 }
6286 
HasSameSignatureWithDifferentClassLoaders(Thread * self,Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method1,ArtMethod * method2)6287 static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
6288                                                       Handle<mirror::Class> klass,
6289                                                       Handle<mirror::Class> super_klass,
6290                                                       ArtMethod* method1,
6291                                                       ArtMethod* method2)
6292     REQUIRES_SHARED(Locks::mutator_lock_) {
6293   {
6294     StackHandleScope<1> hs(self);
6295     Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
6296     if (UNLIKELY(return_type == nullptr)) {
6297       ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
6298       return false;
6299     }
6300     ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
6301     if (UNLIKELY(other_return_type == nullptr)) {
6302       ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
6303       return false;
6304     }
6305     if (UNLIKELY(other_return_type != return_type.Get())) {
6306       ThrowSignatureMismatch(klass, super_klass, method1,
6307                              StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
6308                                           return_type->PrettyClassAndClassLoader().c_str(),
6309                                           return_type.Get(),
6310                                           other_return_type->PrettyClassAndClassLoader().c_str(),
6311                                           other_return_type.Ptr()));
6312       return false;
6313     }
6314   }
6315   const dex::TypeList* types1 = method1->GetParameterTypeList();
6316   const dex::TypeList* types2 = method2->GetParameterTypeList();
6317   if (types1 == nullptr) {
6318     if (types2 != nullptr && types2->Size() != 0) {
6319       ThrowSignatureMismatch(klass, super_klass, method1,
6320                              StringPrintf("Type list mismatch with %s",
6321                                           method2->PrettyMethod(true).c_str()));
6322       return false;
6323     }
6324     return true;
6325   } else if (UNLIKELY(types2 == nullptr)) {
6326     if (types1->Size() != 0) {
6327       ThrowSignatureMismatch(klass, super_klass, method1,
6328                              StringPrintf("Type list mismatch with %s",
6329                                           method2->PrettyMethod(true).c_str()));
6330       return false;
6331     }
6332     return true;
6333   }
6334   uint32_t num_types = types1->Size();
6335   if (UNLIKELY(num_types != types2->Size())) {
6336     ThrowSignatureMismatch(klass, super_klass, method1,
6337                            StringPrintf("Type list mismatch with %s",
6338                                         method2->PrettyMethod(true).c_str()));
6339     return false;
6340   }
6341   for (uint32_t i = 0; i < num_types; ++i) {
6342     StackHandleScope<1> hs(self);
6343     dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
6344     Handle<mirror::Class> param_type(hs.NewHandle(
6345         method1->ResolveClassFromTypeIndex(param_type_idx)));
6346     if (UNLIKELY(param_type == nullptr)) {
6347       ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
6348                                              method1, i, param_type_idx);
6349       return false;
6350     }
6351     dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
6352     ObjPtr<mirror::Class> other_param_type =
6353         method2->ResolveClassFromTypeIndex(other_param_type_idx);
6354     if (UNLIKELY(other_param_type == nullptr)) {
6355       ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
6356                                              method2, i, other_param_type_idx);
6357       return false;
6358     }
6359     if (UNLIKELY(param_type.Get() != other_param_type)) {
6360       ThrowSignatureMismatch(klass, super_klass, method1,
6361                              StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
6362                                           i,
6363                                           param_type->PrettyClassAndClassLoader().c_str(),
6364                                           param_type.Get(),
6365                                           other_param_type->PrettyClassAndClassLoader().c_str(),
6366                                           other_param_type.Ptr()));
6367       return false;
6368     }
6369   }
6370   return true;
6371 }
6372 
6373 
// Validates that every vtable method `klass` overrides and every iftable method it implements
// resolves its signature to the same classes as the corresponding superclass/interface method,
// when the two classes come from different class loaders (with a shared loader, resolution is
// necessarily identical, so those pairs are skipped). Returns false with a LinkageError pending
// on the first mismatch. Interfaces are exempt (nothing to validate against).
bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
  if (klass->IsInterface()) {
    return true;
  }
  // Begin with the methods local to the superclass.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
  if (klass->HasSuperClass() &&
      klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
    super_klass.Assign(klass->GetSuperClass());
    for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
      auto* m = klass->GetVTableEntry(i, image_pointer_size_);
      auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
      // Only vtable slots that klass actually overrides need re-checking.
      if (m != super_m) {
        if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                klass,
                                                                super_klass,
                                                                m,
                                                                super_m))) {
          self->AssertPendingException();
          return false;
        }
      }
    }
  }
  // Then the methods from each implemented interface in the iftable.
  for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
    super_klass.Assign(klass->GetIfTable()->GetInterface(i));
    if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
      uint32_t num_methods = super_klass->NumVirtualMethods();
      for (uint32_t j = 0; j < num_methods; ++j) {
        auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
            j, image_pointer_size_);
        auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
        if (m != super_m) {
          if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                  klass,
                                                                  super_klass,
                                                                  m,
                                                                  super_m))) {
            self->AssertPendingException();
            return false;
          }
        }
      }
    }
  }
  return true;
}
6423 
// Ensures that `c` is initialized, running its class initializer if necessary. Returns true on
// success. On failure, the exception is left pending only when both `can_init_fields` and
// `can_init_parents` are true; otherwise any pending exception is cleared and false is returned,
// letting the caller retry initialization later.
bool ClassLinker::EnsureInitialized(Thread* self,
                                    Handle<mirror::Class> c,
                                    bool can_init_fields,
                                    bool can_init_parents) {
  DCHECK(c != nullptr);

  if (c->IsInitialized()) {
    // If we've seen an initialized but not visibly initialized class
    // many times, request visible initialization.
    if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
      // Thanks to the x86 memory model classes skip the initialized status.
      DCHECK(c->IsVisiblyInitialized());
    } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
      if (self->IncrementMakeVisiblyInitializedCounter()) {
        MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
      }
    }
    return true;
  }
  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  //
  // Ensure the bitstring is initialized before any of the class initialization
  // logic occurs. Once a class initializer starts running, objects can
  // escape into the heap and use the subtype checking code.
  //
  // Note: A class whose SubtypeCheckInfo is at least Initialized means it
  // can be used as a source for the IsSubClass check, and that all ancestors
  // of the class are Assigned (can be used as a target for IsSubClass check)
  // or Overflowed (can be used as a source for IsSubClass check).
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
  }
  const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
  if (!success) {
    if (can_init_fields && can_init_parents) {
      CHECK(self->IsExceptionPending()) << c->PrettyClass();
    } else {
      // There may or may not be an exception pending. If there is, clear it.
      // We propagate the exception only if we can initialize fields and parents.
      self->ClearException();
    }
  } else {
    self->AssertNoPendingException();
  }
  return success;
}
6472 
FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,ObjPtr<mirror::Class> new_class)6473 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
6474                                                ObjPtr<mirror::Class> new_class) {
6475   DCHECK_EQ(temp_class->NumFields(), 0u);
6476   for (ArtField& field : new_class->GetFields()) {
6477     if (field.GetDeclaringClass() == temp_class) {
6478       field.SetDeclaringClass(new_class);
6479     }
6480   }
6481 
6482   DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
6483   DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
6484   for (auto& method : new_class->GetMethods(image_pointer_size_)) {
6485     if (method.GetDeclaringClass() == temp_class) {
6486       method.SetDeclaringClass(new_class);
6487     }
6488   }
6489 
6490   // Make sure the remembered set and mod-union tables know that we updated some of the native
6491   // roots.
6492   WriteBarrier::ForEveryFieldWrite(new_class);
6493 }
6494 
RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6495 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6496   CHECK(class_loader->GetAllocator() == nullptr);
6497   CHECK(class_loader->GetClassTable() == nullptr);
6498   Thread* const self = Thread::Current();
6499   ClassLoaderData data;
6500   data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
6501   // Create and set the class table.
6502   data.class_table = new ClassTable;
6503   class_loader->SetClassTable(data.class_table);
6504   // Create and set the linear allocator.
6505   data.allocator = Runtime::Current()->CreateLinearAlloc();
6506   class_loader->SetAllocator(data.allocator);
6507   // Add to the list so that we know to free the data later.
6508   class_loaders_.push_back(data);
6509 }
6510 
InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6511 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6512   if (class_loader == nullptr) {
6513     return boot_class_table_.get();
6514   }
6515   ClassTable* class_table = class_loader->GetClassTable();
6516   if (class_table == nullptr) {
6517     RegisterClassLoader(class_loader);
6518     class_table = class_loader->GetClassTable();
6519     DCHECK(class_table != nullptr);
6520   }
6521   return class_table;
6522 }
6523 
ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6524 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6525   return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
6526 }
6527 
// Links a loaded class: resolves the superclass, lays out methods (vtable + IMT), instance and
// static fields, and moves the class to ClassStatus::kResolved. If the class was loaded into a
// temporary, wrongly sized Class object, a correctly sized copy is created, the temporary is
// retired (ClassStatus::kRetired), and the class table entry is swapped. In either case
// `*h_new_class_out` receives the class object that callers must use from now on. Returns false
// with an exception pending on failure.
bool ClassLinker::LinkClass(Thread* self,
                            const char* descriptor,
                            Handle<mirror::Class> klass,
                            Handle<mirror::ObjectArray<mirror::Class>> interfaces,
                            MutableHandle<mirror::Class>* h_new_class_out) {
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  if (!LinkSuperClass(klass)) {
    return false;
  }
  ArtMethod* imt_data[ImTable::kSize];
  // If there are any new conflicts compared to super class.
  bool new_conflict = false;
  // Start every IMT slot at the unimplemented sentinel; LinkMethods fills in real entries.
  std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
  if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
    return false;
  }
  if (!LinkInstanceFields(self, klass)) {
    return false;
  }
  size_t class_size;
  if (!LinkStaticFields(self, klass, &class_size)) {
    return false;
  }
  class_size =
      mirror::Class::AdjustClassSizeForReferenceOffsetBitmapDuringLinking(klass.Get(), class_size);
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  ImTable* imt = nullptr;
  if (klass->ShouldHaveImt()) {
    // If there are any new conflicts compared to the super class we can not make a copy. There
    // can be cases where both will have a conflict method at the same slot without having the same
    // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
    // will possibly create a table that is incorrect for either of the classes.
    // Same IMT with new_conflict does not happen very often.
    if (!new_conflict) {
      ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
      if (super_imt != nullptr) {
        bool imt_equals = true;
        for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
          imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
        }
        if (imt_equals) {
          // Identical to the super class IMT: share it instead of allocating a new one.
          imt = super_imt;
        }
      }
    }
    if (imt == nullptr) {
      LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
      imt = reinterpret_cast<ImTable*>(
          allocator->Alloc(self,
                           ImTable::SizeInBytes(image_pointer_size_),
                           LinearAllocKind::kNoGCRoots));
      if (imt == nullptr) {
        return false;
      }
      imt->Populate(imt_data, image_pointer_size_);
    }
  }

  if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
    // We don't need to retire this class as it has no embedded tables or it was created the
    // correct size during class linker initialization.
    CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();

    if (klass->ShouldHaveEmbeddedVTable()) {
      klass->PopulateEmbeddedVTable(image_pointer_size_);
      klass->PopulateReferenceOffsetBitmap();
    }
    if (klass->ShouldHaveImt()) {
      klass->SetImt(imt, image_pointer_size_);
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(klass);
    }

    // This will notify waiters on klass that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
    h_new_class_out->Assign(klass.Get());
  } else {
    CHECK(!klass->IsResolved());
    // Retire the temporary class and create the correctly sized resolved class.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_new_class =
        hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
    // Set arrays to null since we don't want to have multiple classes with the same ArtField or
    // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
    // may not see any references to the target space and clean the card for a class if another
    // class had the same array pointer.
    klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
    klass->SetFieldsPtrUnchecked(nullptr);
    if (UNLIKELY(h_new_class == nullptr)) {
      self->AssertPendingOOMException();
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
      return false;
    }

    CHECK_EQ(h_new_class->GetClassSize(), class_size);
    ObjectLock<mirror::Class> lock(self, h_new_class);
    FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());

    if (LIKELY(descriptor != nullptr)) {
      WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
      const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
      ClassTable* const table = InsertClassTableForClassLoader(class_loader);
      // Swap the temporary class for the final one in the class table.
      const ObjPtr<mirror::Class> existing =
          table->UpdateClass(h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
      CHECK_EQ(existing, klass.Get());
      WriteBarrierOnClassLoaderLocked(class_loader, h_new_class.Get());
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(h_new_class);
    }

    // This will notify waiters on temp class that saw the not yet resolved class in the
    // class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);

    CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
    // This will notify waiters on new_class that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
    // Return the new class.
    h_new_class_out->Assign(h_new_class.Get());
  }
  return true;
}
6664 
// Resolves the superclass and the directly implemented interfaces named in `klass`'s dex
// class_def, checking accessibility and rejecting the trivial circularity of a class extending or
// implementing itself. On success transitions the class from kIdx to kLoaded and returns true;
// otherwise returns false with an exception pending.
bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
  CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
  const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
  dex::TypeIndex super_class_idx = class_def.superclass_idx_;
  if (super_class_idx.IsValid()) {
    // Check that a class does not inherit from itself directly.
    //
    // TODO: This is a cheap check to detect the straightforward case
    // of a class extending itself (b/28685551), but we should do a
    // proper cycle detection on loaded classes, to detect all cases
    // of class circularity errors (b/28830038).
    if (super_class_idx == class_def.class_idx_) {
      ThrowClassCircularityError(klass.Get(),
                                 "Class %s extends itself",
                                 klass->PrettyDescriptor().c_str());
      return false;
    }

    ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
    if (super_class == nullptr) {
      DCHECK(Thread::Current()->IsExceptionPending());
      return false;
    }
    // Verify
    if (!klass->CanAccess(super_class)) {
      ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
                              super_class->PrettyDescriptor().c_str(),
                              klass->PrettyDescriptor().c_str());
      return false;
    }
    CHECK(super_class->IsResolved());
    klass->SetSuperClass(super_class);
  }
  const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
  if (interfaces != nullptr) {
    for (size_t i = 0; i < interfaces->Size(); i++) {
      dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
      if (idx.IsValid()) {
        // Check that a class does not implement itself directly.
        //
        // TODO: This is a cheap check to detect the straightforward case of a class implementing
        // itself, but we should do a proper cycle detection on loaded classes, to detect all cases
        // of class circularity errors. See b/28685551, b/28830038, and b/301108855
        if (idx == class_def.class_idx_) {
          ThrowClassCircularityError(
              klass.Get(), "Class %s implements itself", klass->PrettyDescriptor().c_str());
          return false;
        }
      }

      ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
      if (interface == nullptr) {
        DCHECK(Thread::Current()->IsExceptionPending());
        return false;
      }
      // Verify
      if (!klass->CanAccess(interface)) {
        // TODO: the RI seemed to ignore this in my testing.
        ThrowIllegalAccessError(klass.Get(),
                                "Interface %s implemented by class %s is inaccessible",
                                interface->PrettyDescriptor().c_str(),
                                klass->PrettyDescriptor().c_str());
        return false;
      }
    }
  }
  // Mark the class as loaded.
  mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
  return true;
}
6735 
// Links `klass` to its already-loaded superclass: verifies that the superclass
// relationship is legal and propagates inherited attributes.
//
// Checks performed:
//  - java.lang.Object must have no superclass; every other class must have one.
//  - Interfaces must have java.lang.Object as their superclass.
//  - The superclass must not be final, must not be an interface, and must be
//    accessible to `klass`.
//  - Record class constraints (delegated to VerifyRecordClass).
//  - After class linker initialization (`init_done_`), direct subclasses of
//    java.lang.ref.Reference are disallowed.
//
// On failure the matching error (ClassFormatError, LinkageError, VerifyError,
// IncompatibleClassChangeError or IllegalAccessError) is thrown and false is
// returned.
bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
  CHECK(!klass->IsPrimitive());
  ObjPtr<mirror::Class> super = klass->GetSuperClass();
  ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
  if (klass.Get() == object_class) {
    // java.lang.Object is the root of the hierarchy and has no superclass.
    if (super != nullptr) {
      ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
      return false;
    }
    return true;
  }
  if (super == nullptr) {
    ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
                      klass->PrettyDescriptor().c_str());
    return false;
  }
  // Verify
  if (klass->IsInterface() && super != object_class) {
    ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
    return false;
  }
  if (super->IsFinal()) {
    ThrowVerifyError(klass.Get(),
                     "Superclass %s of %s is declared final",
                     super->PrettyDescriptor().c_str(),
                     klass->PrettyDescriptor().c_str());
    return false;
  }
  if (super->IsInterface()) {
    ThrowIncompatibleClassChangeError(klass.Get(),
                                      "Superclass %s of %s is an interface",
                                      super->PrettyDescriptor().c_str(),
                                      klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!klass->CanAccess(super)) {
    ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
                            super->PrettyDescriptor().c_str(),
                            klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!VerifyRecordClass(klass, super)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return false;
  }

  // Inherit kAccClassIsFinalizable from the superclass in case this
  // class doesn't override finalize.
  if (super->IsFinalizable()) {
    klass->SetFinalizable();
  }

  // Inherit class loader flag from super class.
  if (super->IsClassLoaderClass()) {
    klass->SetClassLoaderClass();
  }

  // Inherit reference flags (if any) from the superclass.
  uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
  if (reference_flags != 0) {
    CHECK_EQ(klass->GetClassFlags(), 0u);
    klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
  }
  // Disallow custom direct subclasses of java.lang.ref.Reference.
  if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
    ThrowLinkageError(klass.Get(),
                      "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
                      klass->PrettyDescriptor().c_str());
    return false;
  }

  if (kIsDebugBuild) {
    // Ensure super classes are fully resolved prior to resolving fields.
    while (super != nullptr) {
      CHECK(super->IsResolved());
      super = super->GetSuperClass();
    }
  }
  return true;
}
6816 
// Comparator for name and signature of a method, used in finding overriding methods. Implementation
// avoids the use of handles, if it didn't then rather than compare dex files we could compare dex
// caches in the implementation below.
class MethodNameAndSignatureComparator final : public ValueObject {
 public:
  // Captures the dex file and MethodId of `method`; the name string itself is
  // loaded lazily (see GetNameView).
  explicit MethodNameAndSignatureComparator(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) :
      dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
      name_view_() {
    // Proxy methods must be unwrapped by the caller first (e.g. via
    // GetInterfaceMethodIfProxy) so that a dex-backed MethodId exists.
    DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  }

  // Returns the method name, fetched from the dex file on first use and
  // cached for subsequent comparisons.
  ALWAYS_INLINE std::string_view GetNameView() {
    if (name_view_.empty()) {
      name_view_ = dex_file_->GetStringView(mid_->name_idx_);
    }
    return name_view_;
  }

  // Returns true if `other` has the same name and signature as the method
  // this comparator was constructed with. When both methods come from the
  // same dex file, a cheap comparison of string/proto indexes suffices;
  // otherwise the name and signature contents are compared.
  bool HasSameNameAndSignature(ArtMethod* other)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
    const DexFile* other_dex_file = other->GetDexFile();
    const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
    if (dex_file_ == other_dex_file) {
      return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
    }
    return GetNameView() == other_dex_file->GetStringView(other_mid.name_idx_) &&
           dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
  }

 private:
  // Dex file for the method to compare against.
  const DexFile* const dex_file_;
  // MethodId for the method to compare against.
  const dex::MethodId* const mid_;
  // Lazily computed name from the dex file's strings.
  std::string_view name_view_;
};
6856 
GetImtOwner(ObjPtr<mirror::Class> klass)6857 static ObjPtr<mirror::Class> GetImtOwner(ObjPtr<mirror::Class> klass)
6858     REQUIRES_SHARED(Locks::mutator_lock_) {
6859   ImTable* imt = klass->GetImt(kRuntimePointerSize);
6860   DCHECK(imt != nullptr);
6861   while (klass->HasSuperClass()) {
6862     ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
6863     // Abstract classes cannot have IMTs, so we skip them.
6864     while (super_class->IsAbstract()) {
6865       DCHECK(super_class->HasSuperClass());
6866       super_class = super_class->GetSuperClass();
6867     }
6868     DCHECK(super_class->ShouldHaveImt());
6869     if (imt != super_class->GetImt(kRuntimePointerSize)) {
6870       // IMT not shared with the super class, return the current class.
6871       DCHECK_EQ(klass->GetImt(kRuntimePointerSize), imt) << klass->PrettyClass();
6872       return klass;
6873     }
6874     klass = super_class;
6875   }
6876   return nullptr;
6877 }
6878 
// Extends the conflict table held by `conflict_method` with one more
// (interface_method, method) pair, allocating the new, larger table from the
// IMT owner's LinearAlloc. If `conflict_method` is the globally shared
// conflict method, a fresh conflict ArtMethod is created to hold the table.
// Returns the conflict method now holding the extended table, or the original
// `conflict_method` unchanged if the allocation failed.
ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
                                                 ArtMethod* conflict_method,
                                                 ArtMethod* interface_method,
                                                 ArtMethod* method) {
  ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
  Runtime* const runtime = Runtime::Current();

  // The IMT may be shared with a super class, in which case we need to use that
  // super class's `LinearAlloc`. The conflict itself should be limited to
  // methods at or higher up the chain of the IMT owner, otherwise class
  // linker would have created a different IMT.
  ObjPtr<mirror::Class> imt_owner = GetImtOwner(klass);
  DCHECK(imt_owner != nullptr);

  LinearAlloc* linear_alloc = GetAllocatorForClassLoader(imt_owner->GetClassLoader());
  // If the imt owner is in an image, the imt is also there and not in the
  // linear alloc.
  DCHECK_IMPLIES(runtime->GetHeap()->FindSpaceFromObject(imt_owner, /*fail_ok=*/true) == nullptr,
                 linear_alloc->Contains(klass->GetImt(kRuntimePointerSize)));

  // Create a new entry if the existing one is the shared conflict method.
  ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
      ? runtime->CreateImtConflictMethod(linear_alloc)
      : conflict_method;

  // Allocate a new table. Note that we will leak this table at the next conflict,
  // but that's a tradeoff compared to making the table fixed size.
  void* data = linear_alloc->Alloc(
      Thread::Current(),
      ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table, image_pointer_size_),
      LinearAllocKind::kNoGCRoots);
  if (data == nullptr) {
    LOG(ERROR) << "Failed to allocate conflict table";
    return conflict_method;
  }
  // Placement-new constructs the extended table as a copy of `current_table`
  // plus the new pair.
  ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
                                                            interface_method,
                                                            method,
                                                            image_pointer_size_);

  // Do a fence to ensure threads see the data in the table before it is assigned
  // to the conflict method.
  // Note that there is a race in the presence of multiple threads and we may leak
  // memory from the LinearAlloc, but that's a tradeoff compared to using
  // atomic operations.
  std::atomic_thread_fence(std::memory_order_release);
  new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
  return new_conflict_method;
}
6928 
// Updates one IMT slot (`*imt_ref`) to account for `current_method`:
//  - a slot holding the unimplemented sentinel simply receives the method;
//  - a slot holding a real (non-runtime) method with the same name and
//    signature is overwritten by `current_method` (it was a superclass
//    vtable entry that this method overrides);
//  - otherwise the slot becomes a conflict: it is set to
//    `imt_conflict_method` and `*new_conflict` is set to true.
void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
                            ArtMethod* imt_conflict_method,
                            ArtMethod* current_method,
                            /*out*/bool* new_conflict,
                            /*out*/ArtMethod** imt_ref) {
  // Place method in imt if entry is empty, place conflict otherwise.
  if (*imt_ref == unimplemented_method) {
    *imt_ref = current_method;
  } else if (!(*imt_ref)->IsRuntimeMethod()) {
    // If we are not a conflict and we have the same signature and name as the imt
    // entry, it must be that we overwrote a superclass vtable entry.
    // Note that we have checked IsRuntimeMethod, as there may be multiple different
    // conflict methods.
    MethodNameAndSignatureComparator imt_comparator(
        (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
    if (imt_comparator.HasSameNameAndSignature(
          current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
      *imt_ref = current_method;
    } else {
      *imt_ref = imt_conflict_method;
      *new_conflict = true;
    }
  } else {
    // Place the default conflict method. Note that there may be an existing conflict
    // method in the IMT, but it could be one tailored to the super class, with a
    // specific ImtConflictTable.
    *imt_ref = imt_conflict_method;
    *new_conflict = true;
  }
}
6959 
// Builds the IMT for `klass` from its interface table, creating conflict
// tables where multiple interface methods map to the same slot. If the
// resulting IMT is equivalent to a superclass IMT (slot by slot, with
// conflict tables compared by content), the superclass IMT is shared
// instead of populating this class's own table.
void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
  DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
  DCHECK(!klass->IsTemp()) << klass->PrettyClass();
  ArtMethod* imt_data[ImTable::kSize];
  Runtime* const runtime = Runtime::Current();
  ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
  ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
  // Start with every slot marked unimplemented.
  std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
  if (klass->GetIfTable() != nullptr) {
    bool new_conflict = false;
    FillIMTFromIfTable(klass->GetIfTable(),
                       unimplemented_method,
                       conflict_method,
                       klass,
                       /*create_conflict_tables=*/true,
                       /*ignore_copied_methods=*/false,
                       &new_conflict,
                       &imt_data[0]);
  }
  // Compare the IMT with the super class including the conflict methods. If they are equivalent,
  // we can just use the same pointer.
  ImTable* imt = nullptr;
  ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
  if (super_imt != nullptr) {
    bool same = true;
    for (size_t i = 0; same && i < ImTable::kSize; ++i) {
      ArtMethod* method = imt_data[i];
      ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
      if (method != super_method) {
        // A slot carries a conflict table iff it holds a runtime method other
        // than the unimplemented and shared-conflict sentinels.
        bool is_conflict_table = method->IsRuntimeMethod() &&
                                 method != unimplemented_method &&
                                 method != conflict_method;
        // Verify conflict contents.
        bool super_conflict_table = super_method->IsRuntimeMethod() &&
                                    super_method != unimplemented_method &&
                                    super_method != conflict_method;
        if (!is_conflict_table || !super_conflict_table) {
          same = false;
        } else {
          ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
          ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
          same = same && table1->Equals(table2, image_pointer_size_);
        }
      }
    }
    if (same) {
      imt = super_imt;
    }
  }
  if (imt == nullptr) {
    // No equivalent superclass IMT: populate this class's own table.
    imt = klass->GetImt(image_pointer_size_);
    DCHECK(imt != nullptr);
    DCHECK_NE(imt, super_imt);
    imt->Populate(imt_data, image_pointer_size_);
  } else {
    // Share the superclass IMT.
    klass->SetImt(imt, image_pointer_size_);
  }
}
7018 
CreateImtConflictTable(size_t count,LinearAlloc * linear_alloc,PointerSize image_pointer_size)7019 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
7020                                                       LinearAlloc* linear_alloc,
7021                                                       PointerSize image_pointer_size) {
7022   void* data = linear_alloc->Alloc(Thread::Current(),
7023                                    ImtConflictTable::ComputeSize(count, image_pointer_size),
7024                                    LinearAllocKind::kNoGCRoots);
7025   return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
7026 }
7027 
// Convenience overload using this class linker's image pointer size.
ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
  return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
}
7031 
// Fills the `imt` array (ImTable::kSize slots) from the interface table.
// The first pass installs implementation methods (or conflict sentinels)
// into slots via SetIMTRef while counting the interface methods mapping to
// each slot. If `create_conflict_tables` is true, a second pass allocates
// one conflict table per conflicting slot (sized by the first-pass counts)
// and fills it with the (interface method, implementation method) pairs.
void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
                                     ArtMethod* unimplemented_method,
                                     ArtMethod* imt_conflict_method,
                                     ObjPtr<mirror::Class> klass,
                                     bool create_conflict_tables,
                                     bool ignore_copied_methods,
                                     /*out*/bool* new_conflict,
                                     /*out*/ArtMethod** imt) {
  uint32_t conflict_counts[ImTable::kSize] = {};
  for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
    ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
    const size_t num_virtuals = interface->NumVirtualMethods();
    const size_t method_array_count = if_table->GetMethodArrayCount(i);
    // Virtual methods can be larger than the if table methods if there are default methods.
    DCHECK_GE(num_virtuals, method_array_count);
    if (kIsDebugBuild) {
      if (klass->IsInterface()) {
        DCHECK_EQ(method_array_count, 0u);
      } else {
        DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
      }
    }
    if (method_array_count == 0) {
      continue;
    }
    ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
    for (size_t j = 0; j < method_array_count; ++j) {
      ArtMethod* implementation_method =
          method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
      if (ignore_copied_methods && implementation_method->IsCopied()) {
        continue;
      }
      DCHECK(implementation_method != nullptr);
      // Miranda methods cannot be used to implement an interface method, but they are safe to put
      // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
      // or interface methods in the IMT here they will not create extra conflicts since we compare
      // names and signatures in SetIMTRef.
      ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
      const uint32_t imt_index = interface_method->GetImtIndex();

      // A slot only truly conflicts when the interface methods mapping to it
      // do not all share a single implementation method. Count the methods
      // per slot so the conflict table (if one is needed) can be sized.
      ++conflict_counts[imt_index];

      SetIMTRef(unimplemented_method,
                imt_conflict_method,
                implementation_method,
                /*out*/new_conflict,
                /*out*/&imt[imt_index]);
    }
  }

  if (create_conflict_tables) {
    // Create the conflict tables.
    LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
    for (size_t i = 0; i < ImTable::kSize; ++i) {
      size_t conflicts = conflict_counts[i];
      if (imt[i] == imt_conflict_method) {
        ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
        if (new_table != nullptr) {
          ArtMethod* new_conflict_method =
              Runtime::Current()->CreateImtConflictMethod(linear_alloc);
          new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
          imt[i] = new_conflict_method;
        } else {
          LOG(ERROR) << "Failed to allocate conflict table";
          imt[i] = imt_conflict_method;
        }
      } else {
        DCHECK_NE(imt[i], imt_conflict_method);
      }
    }

    // Second pass: append each conflicting pair into its slot's table.
    for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
      ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
      const size_t method_array_count = if_table->GetMethodArrayCount(i);
      // Virtual methods can be larger than the if table methods if there are default methods.
      if (method_array_count == 0) {
        continue;
      }
      ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
      for (size_t j = 0; j < method_array_count; ++j) {
        ArtMethod* implementation_method =
            method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
        if (ignore_copied_methods && implementation_method->IsCopied()) {
          continue;
        }
        DCHECK(implementation_method != nullptr);
        ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
        const uint32_t imt_index = interface_method->GetImtIndex();
        // Skip slots that did not end up as per-class conflict methods.
        if (!imt[imt_index]->IsRuntimeMethod() ||
            imt[imt_index] == unimplemented_method ||
            imt[imt_index] == imt_conflict_method) {
          continue;
        }
        ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
        const size_t num_entries = table->NumEntries(image_pointer_size_);
        table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
        table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
      }
    }
  }
}
7138 
7139 namespace {
7140 
7141 // Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
7142 // set.
NotSubinterfaceOfAny(const ScopedArenaHashSet<mirror::Class * > & classes,ObjPtr<mirror::Class> val)7143 static bool NotSubinterfaceOfAny(
7144     const ScopedArenaHashSet<mirror::Class*>& classes,
7145     ObjPtr<mirror::Class> val)
7146     REQUIRES(Roles::uninterruptible_)
7147     REQUIRES_SHARED(Locks::mutator_lock_) {
7148   DCHECK(val != nullptr);
7149   for (ObjPtr<mirror::Class> c : classes) {
7150     if (val->IsAssignableFrom(c)) {
7151       return false;
7152     }
7153   }
7154   return true;
7155 }
7156 
// We record new interfaces by the index of the direct interface and the index in the
// direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
struct NewInterfaceReference {
  // Index of the direct interface in the class's interface list.
  uint32_t direct_interface_index;
  // Index within that direct interface's IfTable, or dex::kDexNoIndex when
  // the reference denotes the direct interface itself.
  uint32_t direct_interface_iftable_index;
};
7163 
// Accessor for the direct interfaces of a proxy class, backed by an explicit
// ObjectArray of interface classes.
class ProxyInterfacesAccessor {
 public:
  explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : interfaces_(interfaces) {}

  // Number of direct interfaces.
  size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
    return interfaces_->GetLength();
  }

  // Returns the interface at `index` (bounds are DCHECK-ed only).
  ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(index, GetLength());
    return interfaces_->GetWithoutChecks(index);
  }

 private:
  Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
};
7182 
7183 class NonProxyInterfacesAccessor {
7184  public:
NonProxyInterfacesAccessor(ClassLinker * class_linker,Handle<mirror::Class> klass)7185   NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
7186       REQUIRES_SHARED(Locks::mutator_lock_)
7187       : interfaces_(klass->GetInterfaceTypeList()),
7188         class_linker_(class_linker),
7189         klass_(klass) {
7190     DCHECK(!klass->IsProxyClass());
7191   }
7192 
GetLength()7193   size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
7194     return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
7195   }
7196 
GetInterface(size_t index)7197   ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
7198     DCHECK_LT(index, GetLength());
7199     dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
7200     return class_linker_->LookupResolvedType(type_index, klass_.Get());
7201   }
7202 
7203  private:
7204   const dex::TypeList* interfaces_;
7205   ClassLinker* class_linker_;
7206   Handle<mirror::Class> klass_;
7207 };
7208 
// Finds new interfaces to add to the interface table in addition to superclass interfaces.
//
// Interfaces in the interface table must satisfy the following constraint:
//     all I, J: Interface | I <: J implies J precedes I
// (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
// to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
//
// This function returns a list of references for all interfaces in the transitive
// closure of the direct interfaces that are not in the superclass interfaces.
// The entries in the list are ordered to satisfy the interface table ordering
// constraint and therefore the interface table formed by appending them to the
// superclass interface table shall also satisfy that constraint.
template <typename InterfaceAccessor>
ALWAYS_INLINE
static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
    ObjPtr<mirror::IfTable> super_iftable,
    size_t super_ifcount,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces,
    ArrayRef<NewInterfaceReference> initial_storage,
    /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // No suspension allowed: raw mirror::Class* pointers are stored in the set below.
  ScopedAssertNoThreadSuspension nts(__FUNCTION__);

  // This is the set of all classes already in the iftable. Used to make checking
  // if a class has already been added quicker.
  constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
  mirror::Class* buffer[kBufferSize];
  ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
  // The first super_ifcount elements are from the superclass. We note that they are already added.
  for (size_t i = 0; i < super_ifcount; i++) {
    ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
    DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
    classes_in_iftable.Put(iface.Ptr());
  }

  // Results accumulate in `initial_storage` (typically stack-allocated by the
  // caller) and spill into `supplemental_storage` when it fills up.
  ArrayRef<NewInterfaceReference> current_storage = initial_storage;
  DCHECK_NE(current_storage.size(), 0u);
  size_t num_new_interfaces = 0u;
  auto insert_reference = [&](uint32_t direct_interface_index,
                              uint32_t direct_interface_iface_index) {
    if (UNLIKELY(num_new_interfaces == current_storage.size())) {
      // Grow into the supplemental vector, copying existing entries only on
      // the first spill (afterwards current_storage aliases the vector).
      bool copy = current_storage.data() != supplemental_storage->data();
      supplemental_storage->resize(2u * num_new_interfaces);
      if (copy) {
        std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
      }
      current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
    }
    current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
    ++num_new_interfaces;
  };

  for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);

    // Let us call the first filled_ifcount elements of iftable the current-iface-list.
    // At this point in the loop current-iface-list has the invariant that:
    //    for every pair of interfaces I,J within it:
    //      if index_of(I) < index_of(J) then I is not a subtype of J

    // If we have already seen this element then all of its super-interfaces must already be in the
    // current-iface-list so we can skip adding it.
    if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
      // We haven't seen this interface so add all of its super-interfaces onto the
      // current-iface-list, skipping those already on it.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
          DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
          classes_in_iftable.Put(super_interface.Ptr());
          insert_reference(i, j);
        }
      }
      // Add this interface reference after all of its super-interfaces.
      DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
      classes_in_iftable.Put(interface.Ptr());
      insert_reference(i, dex::kDexNoIndex);
    } else if (kIsDebugBuild) {
      // Check all super-interfaces are already in the list.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
            << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
            << ", a superinterface of " << interface->PrettyClass();
      }
    }
  }
  return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
}
7301 
// Builds the interface table (iftable) for `klass` from the superclass's iftable plus
// the directly declared `interfaces`. Returns the superclass table unchanged when no new
// interfaces are added; returns nullptr with a pending exception on error (a declared
// "interface" that is not actually an interface, or OOM while allocating the new table).
template <typename InterfaceAccessor>
static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
    Thread* self,
    Handle<mirror::Class> klass,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass->HasSuperClass());
  ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
  DCHECK(super_iftable != nullptr);
  const size_t num_interfaces = interfaces.GetLength();

  // If there are no new interfaces, return the interface table from superclass.
  // If any implementation methods are overridden, we shall copy the table and
  // the method arrays that contain any differences (copy-on-write).
  if (num_interfaces == 0) {
    return super_iftable;
  }

  // Check that every class being implemented is an interface.
  if (UNLIKELY(!interface->IsInterface())) below throws and leaves the caller to
  // observe the pending IncompatibleClassChangeError via the nullptr return.
  for (size_t i = 0; i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
    DCHECK(interface != nullptr);
    if (UNLIKELY(!interface->IsInterface())) {
      ThrowIncompatibleClassChangeError(klass.Get(),
                                        "Class %s implements non-interface class %s",
                                        klass->PrettyDescriptor().c_str(),
                                        interface->PrettyDescriptor().c_str());
      return nullptr;
    }
  }

  // Stack storage for references to newly added interfaces; the arena-backed vector is
  // passed as supplemental storage, presumably used when more than
  // `kMaxStackReferences` new interfaces are found — confirm in FindNewIfTableInterfaces.
  static constexpr size_t kMaxStackReferences = 16;
  NewInterfaceReference initial_storage[kMaxStackReferences];
  ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
  const size_t super_ifcount = super_iftable->Count();
  ArrayRef<const NewInterfaceReference> new_interface_references =
      FindNewIfTableInterfaces(
          super_iftable,
          super_ifcount,
          allocator,
          interfaces,
          ArrayRef<NewInterfaceReference>(initial_storage),
          &supplemental_storage);

  // If all declared interfaces were already present in superclass interface table,
  // return the interface table from superclass. See above.
  if (UNLIKELY(new_interface_references.empty())) {
    return super_iftable;
  }

  // Create the interface table.
  size_t ifcount = super_ifcount + new_interface_references.size();
  ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
  if (UNLIKELY(iftable == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Fill in table with superclass's iftable.
  if (super_ifcount != 0) {
    // Reload `super_iftable` as it may have been clobbered by the allocation.
    // (The allocation above may trigger GC which can move the old table.)
    super_iftable = klass->GetSuperClass()->GetIfTable();
    for (size_t i = 0; i != super_ifcount; i++) {
      ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
      DCHECK(super_interface != nullptr);
      iftable->SetInterface(i, super_interface);
      // Method arrays are shared with the superclass table here; copied on write later
      // if this class overrides any implementation (see comment near the top).
      ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
      if (method_array != nullptr) {
        iftable->SetMethodArray(i, method_array);
      }
    }
  }
  // Fill in the table with additional interfaces.
  size_t current_index = super_ifcount;
  for (NewInterfaceReference ref : new_interface_references) {
    ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
    // `kDexNoIndex` marks a directly declared interface; otherwise the reference points
    // into the direct interface's own iftable (a transitively inherited interface).
    ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
        ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
        : direct_interface;
    iftable->SetInterface(current_index, new_interface);
    ++current_index;
  }
  DCHECK_EQ(current_index, ifcount);

  if (kIsDebugBuild) {
    // Check that the iftable is ordered correctly: no interface may precede one of
    // its own superinterfaces.
    for (size_t i = 0; i < ifcount; i++) {
      ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
      for (size_t j = i + 1; j < ifcount; j++) {
        ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
        // !(if_a <: if_b)
        CHECK(!if_b->IsAssignableFrom(if_a))
            << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
            << ") extends "
            << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
            << "interface list.";
      }
    }
  }

  return iftable;
}
7404 
7405 // Check that all vtable entries are present in this class's virtuals or are the same as a
7406 // superclasses vtable entry.
CheckClassOwnsVTableEntries(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)7407 void CheckClassOwnsVTableEntries(Thread* self,
7408                                  Handle<mirror::Class> klass,
7409                                  PointerSize pointer_size)
7410     REQUIRES_SHARED(Locks::mutator_lock_) {
7411   StackHandleScope<2> hs(self);
7412   Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7413   ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
7414   Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
7415   int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
7416   for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7417     ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7418     CHECK(m != nullptr);
7419 
7420     if (m->GetMethodIndexDuringLinking() != i) {
7421       LOG(WARNING) << m->PrettyMethod()
7422                    << " has an unexpected method index for its spot in the vtable for class"
7423                    << klass->PrettyClass();
7424     }
7425     ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7426     auto is_same_method = [m] (const ArtMethod& meth) {
7427       return &meth == m;
7428     };
7429     if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7430           std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7431       LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7432                    << klass->PrettyClass() << " or any of its superclasses!";
7433     }
7434   }
7435 }
7436 
7437 // Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7438 // method is overridden in a subclass.
7439 template <PointerSize kPointerSize>
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass)7440 void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
7441     REQUIRES_SHARED(Locks::mutator_lock_) {
7442   StackHandleScope<1> hs(self);
7443   Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7444   int32_t num_entries = vtable->GetLength();
7445 
7446   // Observations:
7447   //   * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7448   //   * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7449   //     for many classes outside of libcore a cross-dexfile check has to be run anyways.
7450   //   * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
7451   //     to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
7452   //   * The single-pass algorithm will trade memory for speed, but that is OK.
7453 
7454   CHECK_GT(num_entries, 0);
7455 
7456   auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7457     ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7458     ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7459     LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7460                  << klass->PrettyClass() << " in method " << m1->PrettyMethod()
7461                 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m2) << ") and "
7462                 << m2->PrettyMethod() << "  (0x" << std::hex
7463                 << reinterpret_cast<uintptr_t>(m2) << ")";
7464   };
7465   struct BaseHashType {
7466     static size_t HashCombine(size_t seed, size_t val) {
7467       return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7468     }
7469   };
7470 
7471   // Check assuming all entries come from the same dex file.
7472   {
7473     // Find the first interesting method and its dex file.
7474     int32_t start = 0;
7475     for (; start < num_entries; ++start) {
7476       ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7477       // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7478       // maybe).
7479       if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7480                                   vtable_entry->GetAccessFlags())) {
7481         continue;
7482       }
7483       break;
7484     }
7485     if (start == num_entries) {
7486       return;
7487     }
7488     const DexFile* dex_file =
7489         vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7490             GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7491 
7492     // Helper function to avoid logging if we have to run the cross-file checks.
7493     auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7494       // Use a map to store seen entries, as the storage space is too large for a bitvector.
7495       using PairType = std::pair<uint32_t, uint16_t>;
7496       struct PairHash : BaseHashType {
7497         size_t operator()(const PairType& key) const {
7498           return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7499         }
7500       };
7501       HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
7502       seen.reserve(2 * num_entries);
7503       bool need_slow_path = false;
7504       bool found_dup = false;
7505       for (int i = start; i < num_entries; ++i) {
7506         // Can use Unchecked here as the start loop already ensured that the arrays are correct
7507         // wrt/ kPointerSize.
7508         ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7509         if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7510                                     vtable_entry->GetAccessFlags())) {
7511           continue;
7512         }
7513         ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7514         if (dex_file != m->GetDexFile()) {
7515           need_slow_path = true;
7516           break;
7517         }
7518         const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7519         PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7520         auto it = seen.find(pair);
7521         if (it != seen.end()) {
7522           found_dup = true;
7523           if (log_warn) {
7524             log_fn(it->second, i);
7525           }
7526         } else {
7527           seen.insert(std::make_pair(pair, i));
7528         }
7529       }
7530       return std::make_pair(need_slow_path, found_dup);
7531     };
7532     std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7533     if (!result.first) {
7534       if (result.second) {
7535         check_fn(/* log_warn= */ true);
7536       }
7537       return;
7538     }
7539   }
7540 
7541   // Need to check across dex files.
7542   struct Entry {
7543     size_t cached_hash = 0;
7544     uint32_t name_len = 0;
7545     const char* name = nullptr;
7546     Signature signature = Signature::NoSignature();
7547 
7548     Entry() = default;
7549     Entry(const Entry& other) = default;
7550     Entry& operator=(const Entry& other) = default;
7551 
7552     Entry(const DexFile* dex_file, const dex::MethodId& mid)
7553         : name_len(0),  // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
7554           // This call writes `name_len` and it is therefore necessary that the
7555           // initializer for `name_len` comes before it, otherwise the value
7556           // from the call would be overwritten by that initializer.
7557           name(dex_file->GetStringDataAndUtf16Length(mid.name_idx_, &name_len)),
7558           signature(dex_file->GetMethodSignature(mid)) {
7559       // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
7560       if (name[name_len] != 0) {
7561         name_len += strlen(name + name_len);
7562       }
7563     }
7564 
7565     bool operator==(const Entry& other) const {
7566       return name_len == other.name_len &&
7567              memcmp(name, other.name, name_len) == 0 &&
7568              signature == other.signature;
7569     }
7570   };
7571   struct EntryHash {
7572     size_t operator()(const Entry& key) const {
7573       return key.cached_hash;
7574     }
7575   };
7576   HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
7577   for (int32_t i = 0; i < num_entries; ++i) {
7578     // Can use Unchecked here as the first loop already ensured that the arrays are correct
7579     // wrt/ kPointerSize.
7580     ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7581     // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7582     // maybe).
7583     if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7584                                 vtable_entry->GetAccessFlags())) {
7585       continue;
7586     }
7587     ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7588     const DexFile* dex_file = m->GetDexFile();
7589     const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7590 
7591     Entry e(dex_file, mid);
7592 
7593     size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7594     size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7595     e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7596                                               sig_hash);
7597 
7598     auto it = map.find(e);
7599     if (it != map.end()) {
7600       log_fn(it->second, i);
7601     } else {
7602       map.insert(std::make_pair(e, i));
7603     }
7604   }
7605 }
7606 
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)7607 void CheckVTableHasNoDuplicates(Thread* self,
7608                                 Handle<mirror::Class> klass,
7609                                 PointerSize pointer_size)
7610     REQUIRES_SHARED(Locks::mutator_lock_) {
7611   switch (pointer_size) {
7612     case PointerSize::k64:
7613       CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7614       break;
7615     case PointerSize::k32:
7616       CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7617       break;
7618   }
7619 }
7620 
// Runs both vtable consistency checks on `klass`: every entry is owned by the class or
// inherited from a superclass slot, and no two entries share a name and signature.
static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CheckClassOwnsVTableEntries(self, klass, pointer_size);
  CheckVTableHasNoDuplicates(self, klass, pointer_size);
}
7626 
7627 }  // namespace
7628 
7629 template <PointerSize kPointerSize>
7630 class ClassLinker::LinkMethodsHelper {
7631  public:
  // Creates a helper for linking methods of `klass`. Sets up an arena stack backed by
  // the runtime's arena pool for scratch allocations and seeds the copied-method
  // records container with an inline buffer of `kCopiedMethodRecordInitialBufferSize`
  // entries (presumably falling back to the arena when that is exhausted — see the
  // container implementation).
  LinkMethodsHelper(ClassLinker* class_linker,
                    Handle<mirror::Class> klass,
                    Thread* self,
                    Runtime* runtime)
      : class_linker_(class_linker),
        klass_(klass),
        self_(self),
        runtime_(runtime),
        stack_(runtime->GetArenaPool()),
        allocator_(&stack_),
        copied_method_records_(copied_method_records_initial_buffer_,
                               kCopiedMethodRecordInitialBufferSize,
                               allocator_.Adapter()),
        num_new_copied_methods_(0u) {
  }
7647 
7648   // Links the virtual and interface methods for the given class.
7649   //
7650   // Arguments:
7651   // * self - The current thread.
7652   // * klass - class, whose vtable will be filled in.
7653   // * interfaces - implemented interfaces for a proxy class, otherwise null.
7654   // * out_new_conflict - whether there is a new conflict compared to the superclass.
7655   // * out_imt - interface method table to fill.
7656   bool LinkMethods(
7657       Thread* self,
7658       Handle<mirror::Class> klass,
7659       Handle<mirror::ObjectArray<mirror::Class>> interfaces,
7660       bool* out_new_conflict,
7661       ArtMethod** out_imt)
7662       REQUIRES_SHARED(Locks::mutator_lock_);
7663 
7664  private:
7665   // Allocate a pointer array.
7666   static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
7667       REQUIRES_SHARED(Locks::mutator_lock_);
7668 
7669   // Allocate method arrays for interfaces.
7670   bool AllocateIfTableMethodArrays(Thread* self,
7671                                    Handle<mirror::Class> klass,
7672                                    Handle<mirror::IfTable> iftable)
7673       REQUIRES_SHARED(Locks::mutator_lock_);
7674 
7675   // Assign vtable indexes to declared virtual methods for a non-interface class other
7676   // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
7677   // This function also assigns vtable indexes for interface methods in new interfaces
7678   // and records data for copied methods which shall be referenced by the vtable.
7679   size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
7680                              ObjPtr<mirror::Class> super_class,
7681                              bool is_super_abstract,
7682                              size_t num_virtual_methods,
7683                              ObjPtr<mirror::IfTable> iftable)
7684       REQUIRES_SHARED(Locks::mutator_lock_);
7685 
7686   bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
7687                                      size_t num_virtual_methods,
7688                                      ObjPtr<mirror::IfTable> iftable)
7689       REQUIRES_SHARED(Locks::mutator_lock_);
7690 
7691   bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
7692       REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
7693 
7694   void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
7695   bool FinalizeIfTable(Handle<mirror::Class> klass,
7696                        MutableHandle<mirror::IfTable> iftable,
7697                        Handle<mirror::PointerArray> vtable,
7698                        bool is_klass_abstract,
7699                        bool is_super_abstract,
7700                        bool* out_new_conflict,
7701                        ArtMethod** out_imt)
7702       REQUIRES_SHARED(Locks::mutator_lock_);
7703 
  // Debug-build helper: poisons a replaced method array with 0xFE bytes so that any
  // stale pointer into it fails loudly. No-op in release builds, when `old_methods`
  // is null, or when the methods array was reused in place.
  void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
                         LengthPrefixedArray<ArtMethod>* methods) {
    if (kIsDebugBuild && old_methods != nullptr) {
      CHECK(methods != nullptr);
      // Put some random garbage in old methods to help find stale pointers.
      if (methods != old_methods) {
        // Need to make sure the GC is not running since it could be scanning the methods we are
        // about to overwrite.
        ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
        gc::ScopedGCCriticalSection gcs(self_,
                                        gc::kGcCauseClassLinker,
                                        gc::kCollectorTypeClassLinker);
        const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
                                                                            kMethodSize,
                                                                            kMethodAlignment);
        memset(old_methods, 0xFEu, old_size);
        // Set size to 0 to avoid visiting declaring classes.
        if (gUseUserfaultfd) {
          old_methods->SetSize(0);
        }
      }
    }
  }
7727 
7728   NO_INLINE
LogNewVirtuals(LengthPrefixedArray<ArtMethod> * methods) const7729   void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
7730       REQUIRES_SHARED(Locks::mutator_lock_) {
7731     ObjPtr<mirror::Class> klass = klass_.Get();
7732     size_t num_new_copied_methods = num_new_copied_methods_;
7733     size_t old_method_count = methods->size() - num_new_copied_methods;
7734     size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7735     size_t num_miranda_methods = 0u;
7736     size_t num_overriding_default_methods = 0u;
7737     size_t num_default_methods = 0u;
7738     size_t num_overriding_default_conflict_methods = 0u;
7739     size_t num_default_conflict_methods = 0u;
7740     for (size_t i = 0; i != num_new_copied_methods; ++i) {
7741       ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7742       if (m.IsDefault()) {
7743         if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7744           ++num_overriding_default_methods;
7745         } else {
7746           ++num_default_methods;
7747         }
7748       } else if (m.IsDefaultConflicting()) {
7749         if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7750           ++num_overriding_default_conflict_methods;
7751         } else {
7752           ++num_default_conflict_methods;
7753         }
7754       } else {
7755         DCHECK(m.IsMiranda());
7756         ++num_miranda_methods;
7757       }
7758     }
7759     VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7760                        << " default_methods=" << num_default_methods
7761                        << " overriding_default_methods=" << num_overriding_default_methods
7762                        << " default_conflict_methods=" << num_default_conflict_methods
7763                        << " overriding_default_conflict_methods="
7764                        << num_overriding_default_conflict_methods;
7765   }
7766 
7767   class MethodIndexEmptyFn {
7768    public:
MakeEmpty(uint32_t & item) const7769     void MakeEmpty(uint32_t& item) const {
7770       item = dex::kDexNoIndex;
7771     }
IsEmpty(const uint32_t & item) const7772     bool IsEmpty(const uint32_t& item) const {
7773       return item == dex::kDexNoIndex;
7774     }
7775   };
7776 
7777   class VTableIndexCheckerDebug {
7778    protected:
VTableIndexCheckerDebug(size_t vtable_length)7779     explicit VTableIndexCheckerDebug(size_t vtable_length)
7780         : vtable_length_(vtable_length) {}
7781 
CheckIndex(uint32_t index) const7782     void CheckIndex(uint32_t index) const {
7783       CHECK_LT(index, vtable_length_);
7784     }
7785 
7786    private:
7787     uint32_t vtable_length_;
7788   };
7789 
  // Release-build policy class: index checking compiles away to nothing.
  class VTableIndexCheckerRelease {
   protected:
    explicit VTableIndexCheckerRelease([[maybe_unused]] size_t vtable_length) {}
    void CheckIndex([[maybe_unused]] uint32_t index) const {}
  };
7795 
7796   using VTableIndexChecker =
7797       std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7798 
7799   class VTableAccessor : private VTableIndexChecker {
7800    public:
VTableAccessor(uint8_t * raw_vtable,size_t vtable_length)7801     VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
7802         REQUIRES_SHARED(Locks::mutator_lock_)
7803         : VTableIndexChecker(vtable_length),
7804           raw_vtable_(raw_vtable) {}
7805 
GetVTableEntry(uint32_t index) const7806     ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
7807       this->CheckIndex(index);
7808       uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
7809       if (kPointerSize == PointerSize::k64) {
7810         return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
7811       } else {
7812         return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
7813       }
7814     }
7815 
7816    private:
7817     uint8_t* raw_vtable_;
7818   };
7819 
7820   class VTableSignatureHash {
7821    public:
7822     explicit VTableSignatureHash(VTableAccessor accessor)
REQUIRES_SHARED(Locks::mutator_lock_)7823         REQUIRES_SHARED(Locks::mutator_lock_)
7824         : accessor_(accessor) {}
7825 
7826     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(ArtMethod * method) const7827     size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7828       return ComputeMethodHash(method);
7829     }
7830 
7831     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(uint32_t index) const7832     size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7833       return ComputeMethodHash(accessor_.GetVTableEntry(index));
7834     }
7835 
7836    private:
7837     VTableAccessor accessor_;
7838   };
7839 
7840   class VTableSignatureEqual {
7841    public:
7842     explicit VTableSignatureEqual(VTableAccessor accessor)
REQUIRES_SHARED(Locks::mutator_lock_)7843         REQUIRES_SHARED(Locks::mutator_lock_)
7844         : accessor_(accessor) {}
7845 
7846     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(uint32_t lhs_index,ArtMethod * rhs) const7847     bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7848       return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
7849     }
7850 
7851     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(uint32_t lhs_index,uint32_t rhs_index) const7852     bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7853       return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
7854     }
7855 
7856    private:
7857     VTableAccessor accessor_;
7858   };
7859 
7860   using VTableSignatureSet =
7861       ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
7862 
7863   class DeclaredVirtualSignatureHash {
7864    public:
7865     explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_)7866         REQUIRES_SHARED(Locks::mutator_lock_)
7867         : klass_(klass) {}
7868 
7869     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(ArtMethod * method) const7870     size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7871       return ComputeMethodHash(method);
7872     }
7873 
7874     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(uint32_t index) const7875     size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7876       DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
7877       ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
7878       return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
7879     }
7880 
7881    private:
7882     ObjPtr<mirror::Class> klass_;
7883   };
7884 
7885   class DeclaredVirtualSignatureEqual {
7886    public:
7887     explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_)7888         REQUIRES_SHARED(Locks::mutator_lock_)
7889         : klass_(klass) {}
7890 
7891     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(uint32_t lhs_index,ArtMethod * rhs) const7892     bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7893       DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7894       ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
7895       return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
7896     }
7897 
7898     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(uint32_t lhs_index,uint32_t rhs_index) const7899     bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7900       DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7901       DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
7902       return lhs_index == rhs_index;
7903     }
7904 
7905    private:
7906     ObjPtr<mirror::Class> klass_;
7907   };
7908 
7909   using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
7910                                                          MethodIndexEmptyFn,
7911                                                          DeclaredVirtualSignatureHash,
7912                                                          DeclaredVirtualSignatureEqual>;
7913 
7914   // Helper class to keep records for determining the correct copied method to create.
7915   class CopiedMethodRecord {
7916    public:
7917     enum class State : uint32_t {
7918       // Note: The `*Single` values are used when we know that there is only one interface
7919       // method with the given signature that's not masked; that method is the main method.
7920       // We use this knowledge for faster masking check, otherwise we need to search for
7921       // a masking method through methods of all interfaces that could potentially mask it.
7922       kAbstractSingle,
7923       kDefaultSingle,
7924       kAbstract,
7925       kDefault,
7926       kDefaultConflict,
7927       kUseSuperMethod,
7928     };
7929 
    // Creates an empty record; the main method must be set before use.
    CopiedMethodRecord()
        : main_method_(nullptr),
          method_index_(0u),
          state_(State::kAbstractSingle) {}

    // Creates a record for `main_method` assigned to vtable slot `vtable_index`.
    CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
        : main_method_(main_method),
          method_index_(vtable_index),
          state_(State::kAbstractSingle) {}

    // Set main method. The new main method must be more specific implementation.
    void SetMainMethod(ArtMethod* main_method) {
      // Note: this checks that a main method was already recorded (the new one
      // replaces it), not the incoming argument.
      DCHECK(main_method_ != nullptr);
      main_method_ = main_method;
    }

    // The main method is the first encountered default method if any,
    // otherwise the first encountered abstract method.
    ArtMethod* GetMainMethod() const {
      return main_method_;
    }

    // `dex::kDexNoIndex` is an invalid value for the method index.
    void SetMethodIndex(size_t method_index) {
      DCHECK_NE(method_index, dex::kDexNoIndex);
      method_index_ = method_index;
    }

    size_t GetMethodIndex() const {
      DCHECK_NE(method_index_, dex::kDexNoIndex);
      return method_index_;
    }

    void SetState(State state) {
      state_ = state;
    }

    State GetState() const {
      return state_;
    }
7969 
    // Called when another default method `interface_method` (declared by `iface`, which
    // sits at iftable position `index`) matches this record's signature. If the record
    // is already a default-conflict, nothing changes; otherwise, unless some later
    // interface masks `interface_method`, the record is promoted to `kDefaultConflict`.
    ALWAYS_INLINE
    void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
                                 ArtMethod* interface_method,
                                 ObjPtr<mirror::IfTable> iftable,
                                 size_t ifcount,
                                 size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      DCHECK(iface == interface_method->GetDeclaringClass());
      DCHECK(iface == iftable->GetInterface(index));
      DCHECK(interface_method->IsDefault());
      if (GetState() != State::kDefaultConflict) {
        DCHECK(GetState() == State::kDefault);
        // We do not record all overriding methods, so we need to walk over all
        // interfaces that could mask the `interface_method`.
        if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
          return;  // Found an overriding method that masks `interface_method`.
        }
        // We have a new default method that's not masked by any other method.
        SetState(State::kDefaultConflict);
      }
    }
7993 
    // Update this record's state for `interface_method` from `iface` found at
    // `iftable` position `index`. The masking check is deferred to a lazily
    // evaluated predicate that scans the interfaces after `index` using the
    // vtable index stored in the new interfaces' method arrays; the shared
    // state-machine logic lives in `UpdateStateImpl()`.
    ALWAYS_INLINE
    void UpdateState(ObjPtr<mirror::Class> iface,
                     ArtMethod* interface_method,
                     size_t vtable_index,
                     ObjPtr<mirror::IfTable> iftable,
                     size_t ifcount,
                     size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      if (kIsDebugBuild) {
        if (interface_method->IsCopied()) {
          // Called from `FinalizeState()` for a default method from superclass.
          // The `index` points to the last interface inherited from the superclass
          // as we need to search only the new interfaces for masking methods.
          DCHECK(interface_method->IsDefault());
        } else {
          DCHECK(iface == interface_method->GetDeclaringClass());
          DCHECK(iface == iftable->GetInterface(index));
        }
      }
      DCHECK_EQ(vtable_index, method_index_);
      // Lazily evaluated: only runs if `UpdateStateImpl()` actually needs to
      // check whether the new default method is masked by a subinterface.
      auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
        return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
      };
      UpdateStateImpl(iface, interface_method, slow_is_masked);
    }
8021 
    // Finish this record's state by reconciling it with the copied method
    // `super_method` inherited from the superclass at the same `vtable_index`.
    // Outcome: either the superclass copy can be reused (`kUseSuperMethod`)
    // or the record keeps a state that forces creation of a new copied method.
    ALWAYS_INLINE
    void FinalizeState(ArtMethod* super_method,
                       size_t vtable_index,
                       ObjPtr<mirror::IfTable> iftable,
                       size_t ifcount,
                       ObjPtr<mirror::IfTable> super_iftable,
                       size_t super_ifcount)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(super_method->IsCopied());
      DCHECK_EQ(vtable_index, method_index_);
      DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
      DCHECK_NE(super_ifcount, 0u);
      if (super_method->IsDefault()) {
        if (UNLIKELY(super_method->IsDefaultConflicting())) {
          // Some of the default methods that contributed to the conflict in the superclass
          // may be masked by new interfaces. Walk over all the interfaces and update state
          // as long as the current state is not `kDefaultConflict`.
          size_t i = super_ifcount;
          while (GetState() != State::kDefaultConflict && i != 0u) {
            --i;
            ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
            DCHECK(iface == super_iftable->GetInterface(i));
            auto [found, index] =
                MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
            if (found) {
              ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
              auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
                // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
                // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
                // use the `super_iftable` filled with implementation methods for that range.
                return ContainsImplementingMethod(
                           super_iftable, i + 1u, super_ifcount, iface, super_method) ||
                       ContainsImplementingMethod(
                           iftable, super_ifcount, ifcount, iface, vtable_index);
              };
              UpdateStateImpl(iface, interface_method, slow_is_masked);
            }
          }
          if (GetState() == State::kDefaultConflict) {
            // Conflict persists in the subclass; the superclass conflict method is reusable.
            SetState(State::kUseSuperMethod);
          }
        } else {
          // There was exactly one default method in superclass interfaces that was
          // not masked by subinterfaces. Use `UpdateState()` to process it and pass
          // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
          ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
          UpdateState(
              iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
          if (GetMainMethod() == super_method) {
            DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
            SetState(State::kUseSuperMethod);
          }
        }
      } else {
        DCHECK(super_method->IsMiranda());
        // Any default methods with this signature in superclass interfaces have been
        // masked by subinterfaces. Check if we can reuse the miranda method.
        if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
          SetState(State::kUseSuperMethod);
        }
      }
    }
8084 
8085    private:
    // Core state transition shared by `UpdateState()` and `FinalizeState()`:
    // advance the record's state given a same-signature `interface_method` from
    // `iface`. `slow_is_masked` is an O(interfaces) predicate, only invoked when
    // a default method could change the state, that reports whether
    // `interface_method` is overridden (masked) by a more specific interface.
    template <typename Predicate>
    ALWAYS_INLINE
    void UpdateStateImpl(ObjPtr<mirror::Class> iface,
                         ArtMethod* interface_method,
                         Predicate&& slow_is_masked)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      bool have_default = false;
      switch (GetState()) {
        case State::kDefaultSingle:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstractSingle:
          // For "single" states the main method's declaring interface is known,
          // so a cheap `Implements()` check replaces the slow masking scan.
          if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
            return;  // The main method masks the `interface_method`.
          }
          if (!interface_method->IsDefault()) {
            SetState(have_default ? State::kDefault : State::kAbstract);
            return;
          }
          break;
        case State::kDefault:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstract:
          if (!interface_method->IsDefault()) {
            return;  // Keep the same state. We do not need to check for masking.
          }
          // We do not record all overriding methods, so we need to walk over all
          // interfaces that could mask the `interface_method`. The provided
          // predicate `slow_is_masked()` does that.
          if (slow_is_masked()) {
            return;  // Found an overriding method that masks `interface_method`.
          }
          break;
        case State::kDefaultConflict:
          return;  // The state cannot change anymore.
        default:
          LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
          UNREACHABLE();
      }
      // We have a new default method that's not masked by any other method.
      DCHECK(interface_method->IsDefault());
      if (have_default) {
        // Two unmasked defaults with the same signature: record a conflict.
        SetState(State::kDefaultConflict);
      } else {
        // First unmasked default: it becomes the record's main method.
        SetMainMethod(interface_method);
        SetState(State::kDefault);
      }
    }
8135 
8136     // Determine if the given `iftable` contains in the given range a subinterface of `iface`
8137     // that declares a method with the same name and signature as 'interface_method'.
8138     //
8139     // Arguments
8140     // - iftable: The iftable we are searching for an overriding method.
8141     // - begin:   The start of the range to search.
8142     // - end:     The end of the range to search.
8143     // - iface:   The interface we are checking to see if anything overrides.
8144     // - interface_method:
8145     //            The interface method providing a name and signature we're searching for.
8146     //
8147     // Returns whether an overriding method was found in any subinterface of `iface`.
ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,size_t begin,size_t end,ObjPtr<mirror::Class> iface,ArtMethod * interface_method)8148     static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
8149                                            size_t begin,
8150                                            size_t end,
8151                                            ObjPtr<mirror::Class> iface,
8152                                            ArtMethod* interface_method)
8153         REQUIRES_SHARED(Locks::mutator_lock_) {
8154       for (size_t i = begin; i != end; ++i) {
8155         ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
8156         for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
8157           if (MethodSignatureEquals(&current_method, interface_method)) {
8158             // Check if the i'th interface is a subtype of this one.
8159             if (current_iface->Implements(iface)) {
8160               return true;
8161             }
8162             break;
8163           }
8164         }
8165       }
8166       return false;
8167     }
8168 
8169     // Determine if the given `iftable` contains in the given range a subinterface of `iface`
8170     // that declares a method implemented by 'target'. This is an optimized version of
8171     // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
8172     // of comparing signatures for declared interface methods.
8173     //
8174     // Arguments
8175     // - iftable: The iftable we are searching for an overriding method.
8176     // - begin:   The start of the range to search.
8177     // - end:     The end of the range to search.
8178     // - iface:   The interface we are checking to see if anything overrides.
8179     // - target:  The implementation method we're searching for.
8180     //            Note that the new `iftable` is filled with vtable indexes for new interfaces,
8181     //            so this needs to be the vtable index if we're searching that range.
8182     //
8183     // Returns whether the `target` was found in a method array for any subinterface of `iface`.
8184     template <typename TargetType>
ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,size_t begin,size_t end,ObjPtr<mirror::Class> iface,TargetType target)8185     static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
8186                                            size_t begin,
8187                                            size_t end,
8188                                            ObjPtr<mirror::Class> iface,
8189                                            TargetType target)
8190         REQUIRES_SHARED(Locks::mutator_lock_) {
8191       for (size_t i = begin; i != end; ++i) {
8192         if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
8193             iftable->GetInterface(i)->Implements(iface)) {
8194           return true;
8195         }
8196       }
8197       return false;
8198     }
8199 
8200     template <typename TargetType>
MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,TargetType target)8201     static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
8202                                                        TargetType target)
8203         REQUIRES_SHARED(Locks::mutator_lock_) {
8204       size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
8205       for (size_t j = 0; j != num_methods; ++j) {
8206         if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
8207           return {true, j};
8208         }
8209       }
8210       return {false, 0};
8211     }
8212 
    // Representative method for this signature; `nullptr` marks an empty hash-set slot.
    ArtMethod* main_method_;
    // Assigned vtable index; `dex::kDexNoIndex` until set (checked in accessors).
    uint32_t method_index_;
    // Current classification of the copied method (see `State`).
    State state_;
8216   };
8217 
  // HashSet "empty function" policy: defines how an empty slot is created and
  // recognized for `CopiedMethodRecord` entries. A default-constructed record
  // (null main method) represents an empty slot.
  class CopiedMethodRecordEmptyFn {
   public:
    void MakeEmpty(CopiedMethodRecord& item) const {
      item = CopiedMethodRecord();
    }
    bool IsEmpty(const CopiedMethodRecord& item) const {
      return item.GetMainMethod() == nullptr;
    }
  };
8227 
  // Hash functor for `CopiedMethodRecord` entries; hashes by method signature
  // via `ComputeMethodHash()`. Heterogeneous overload allows lookup by raw
  // `ArtMethod*` without constructing a record.
  class CopiedMethodRecordHash {
   public:
    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK(method != nullptr);
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(record.GetMainMethod());
    }
  };
8241 
  // Equality functor for `CopiedMethodRecord` entries; records are equal when
  // their main methods have equal signatures. Heterogeneous overload allows
  // lookup by raw `ArtMethod*` without constructing a record.
  class CopiedMethodRecordEqual {
   public:
    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(const CopiedMethodRecord& lhs_record,
                    ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
      ArtMethod* lhs = lhs_record.GetMainMethod();
      DCHECK(lhs != nullptr);
      DCHECK(rhs != nullptr);
      return MethodSignatureEquals(lhs, rhs);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(const CopiedMethodRecord& lhs_record,
                    const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(lhs_record, rhs_record.GetMainMethod());
    }
  };
8259 
  // Hash set of copied-method records keyed by method signature,
  // arena-allocated to avoid heap churn during linking.
  using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
                                                   CopiedMethodRecordEmptyFn,
                                                   CopiedMethodRecordHash,
                                                   CopiedMethodRecordEqual>;

  // Layout constants for `ArtMethod` storage at the instantiated pointer size.
  static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
  static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);

  ClassLinker* class_linker_;
  Handle<mirror::Class> klass_;
  Thread* const self_;
  Runtime* const runtime_;

  // These are allocated on the heap to begin, we then transfer to linear alloc when we re-create
  // the virtual methods array.
  // Need to use low 4GB arenas for compiler or else the pointers wont fit in 32 bit method array
  // during cross compilation.
  // Use the linear alloc pool since this one is in the low 4gb for the compiler.
  ArenaStack stack_;
  ScopedArenaAllocator allocator_;

  // If there are multiple methods with the same signature in the superclass vtable
  // (which can happen with a new virtual method having the same signature as an
  // inaccessible package-private method from another package in the superclass),
  // we keep singly-linked lists in this single array that maps vtable index to the
  // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
  ArrayRef<uint32_t> same_signature_vtable_lists_;

  // Avoid large allocation for a few copied method records.
  // Keep the initial buffer on the stack to avoid arena allocations
  // if there are no special cases (the first arena allocation is costly).
  static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
  CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
  CopiedMethodRecordSet copied_method_records_;
  // Number of records that require creating a new copied method (i.e. those
  // not in the `kUseSuperMethod` state).
  size_t num_new_copied_methods_;
8295 };
8296 
// Reallocates the class's method array to append the new copied methods
// (mirandas, defaults and default-conflict methods) collected in
// `copied_method_records_`, initializes their access flags per record state,
// and installs the new array via `UpdateClassMethods()`.
template <PointerSize kPointerSize>
NO_INLINE
void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
  // There should be no thread suspension in this function,
  // native allocations do not cause thread suspension.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  size_t num_new_copied_methods = num_new_copied_methods_;
  DCHECK_NE(num_new_copied_methods, 0u);
  const size_t old_method_count = klass->NumMethods();
  const size_t new_method_count = old_method_count + num_new_copied_methods;

  // Attempt to realloc to save RAM if possible.
  LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
  // The Realloced virtual methods aren't visible from the class roots, so there is no issue
  // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
  // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
  // CopyFrom has internal read barriers.
  //
  // TODO We should maybe move some of this into mirror::Class or at least into another method.
  const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
  LinearAlloc* allocator = class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader());
  auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(allocator->Realloc(
      self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
  CHECK(methods != nullptr);  // Native allocation failure aborts.

  if (methods != old_methods) {
    if (gUseReadBarrier) {
      StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
      // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
      // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
      for (auto& m : klass->GetMethods(kPointerSize)) {
        out->CopyFrom(&m, kPointerSize);
        ++out;
      }
    } else if (gUseUserfaultfd) {
      // In order to make compaction code skip updating the declaring_class_ in
      // old_methods, convert it into a 'no GC-root' array.
      allocator->ConvertToNoGcRoots(old_methods, LinearAllocKind::kArtMethodArray);
    }
  }

  // Collect and sort copied method records by the vtable index. This places overriding
  // copied methods first, sorted by the vtable index already assigned in the superclass,
  // followed by copied methods with new signatures in the order in which we encountered
  // them when going over virtual methods of new interfaces.
  // This order is deterministic but implementation-defined.
  //
  // Avoid arena allocation for a few records (the first arena allocation is costly).
  constexpr size_t kSortedRecordsBufferSize = 16;
  CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
  CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
      ? sorted_records_buffer
      : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
  size_t filled_sorted_records = 0u;
  for (CopiedMethodRecord& record : copied_method_records_) {
    // `kUseSuperMethod` records reuse the superclass copy; no new method needed.
    if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
      DCHECK_LT(filled_sorted_records, num_new_copied_methods);
      sorted_records[filled_sorted_records] = &record;
      ++filled_sorted_records;
    }
  }
  DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
  std::sort(sorted_records,
            sorted_records + num_new_copied_methods,
            [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
              return lhs->GetMethodIndex() < rhs->GetMethodIndex();
            });

  if (klass->IsInterface()) {
    // Some records may have been pruned. Update method indexes in collected records.
    size_t interface_method_index = klass->NumDeclaredVirtualMethods();
    for (size_t i = 0; i != num_new_copied_methods; ++i) {
      CopiedMethodRecord* record = sorted_records[i];
      DCHECK_LE(interface_method_index, record->GetMethodIndex());
      record->SetMethodIndex(interface_method_index);
      ++interface_method_index;
    }
  }

  // Add copied methods.
  methods->SetSize(new_method_count);
  for (size_t i = 0; i != num_new_copied_methods; ++i) {
    const CopiedMethodRecord* record = sorted_records[i];
    ArtMethod* interface_method = record->GetMainMethod();
    DCHECK(!interface_method->IsCopied());
    ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
    new_method.CopyFrom(interface_method, kPointerSize);
    new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
    switch (record->GetState()) {
      case CopiedMethodRecord::State::kAbstractSingle:
      case CopiedMethodRecord::State::kAbstract: {
        DCHECK(!klass->IsInterface());  // We do not create miranda methods for interfaces.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
            << "Miranda method should be abstract but not intrinsic or default!";
        new_method.SetAccessFlags(access_flags | kAccCopied);
        break;
      }
      case CopiedMethodRecord::State::kDefaultSingle:
      case CopiedMethodRecord::State::kDefault: {
        DCHECK(!klass->IsInterface());  // We do not copy default methods for interfaces.
        // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks.
        // TODO This is rather arbitrary. We should maybe support classes where only some of its
        // methods are skip_access_checks.
        DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
        static_assert((kAccDefault & kAccIntrinsicBits) != 0);
        DCHECK(!new_method.IsIntrinsic()) << "Adding kAccDefault to an intrinsic would be a "
                                          << "mistake as it overlaps with kAccIntrinsicBits.";
        constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
        constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
        new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
        break;
      }
      case CopiedMethodRecord::State::kDefaultConflict: {
        // This is a type of default method (there are default method impls, just a conflict)
        // so mark this as a default. We use the `kAccAbstract` flag to distinguish it from
        // invokable copied default method without using a separate access flag but the default
        // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
        // return false. Also clear the kAccSkipAccessChecks bit since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks. Also clear
        // potential kAccSingleImplementation to avoid CHA trying to inline the default method.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
        static_assert((kAccDefault & kAccIntrinsicBits) != 0);
        DCHECK(!new_method.IsIntrinsic()) << "Adding kAccDefault to an intrinsic would be a "
                                          << "mistake as it overlaps with kAccIntrinsicBits.";
        constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
        new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
        new_method.SetDataPtrSize(nullptr, kPointerSize);
        DCHECK(new_method.IsDefaultConflicting());
        DCHECK(!new_method.IsAbstract());
        // The actual method might or might not be marked abstract since we just copied it from
        // a (possibly default) interface method. We need to set its entry point to be the bridge
        // so that the compiler will not invoke the implementation of whatever method we copied
        // from.
        EnsureThrowsInvocationError(class_linker_, &new_method);
        break;
      }
      default:
        LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
        UNREACHABLE();
    }
  }

  if (VLOG_IS_ON(class_linker)) {
    LogNewVirtuals(methods);
  }

  class_linker_->UpdateClassMethods(klass, methods);
}
8456 
// Finalizes the iftable for `klass`: replaces vtable indexes stored in new
// interface method arrays with actual implementation methods, copies-on-write
// superclass method arrays whose implementations were overridden, and fills
// the IMT (`out_imt`) for non-abstract classes. Returns false on allocation
// failure (a `CopyOf()` returned null — presumably OOM with a pending
// exception; TODO confirm against `mirror::Array::CopyOf` contract).
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
    Handle<mirror::Class> klass,
    MutableHandle<mirror::IfTable> iftable,
    Handle<mirror::PointerArray> vtable,
    bool is_klass_abstract,
    bool is_super_abstract,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  size_t ifcount = iftable->Count();
  // We do not need a read barrier here as the length is constant, both from-space and
  // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
  size_t super_ifcount =
      klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();

  ClassLinker* class_linker = nullptr;
  ArtMethod* unimplemented_method = nullptr;
  ArtMethod* imt_conflict_method = nullptr;
  uintptr_t imt_methods_begin = 0u;
  size_t imt_methods_size = 0u;
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
  if (!is_klass_abstract) {
    class_linker = class_linker_;
    unimplemented_method = runtime_->GetImtUnimplementedMethod();
    imt_conflict_method = runtime_->GetImtConflictMethod();
    if (is_super_abstract) {
      // There was no IMT in superclass to copy to `out_imt[]`, so we need
      // to fill it with all implementation methods from superclass.
      DCHECK_EQ(imt_methods_begin, 0u);
      imt_methods_size = std::numeric_limits<size_t>::max();  // No method at the last byte.
    } else {
      // If the superclass has IMT, we have already copied it to `out_imt[]` and
      // we do not need to call `SetIMTRef()` for interfaces from superclass when
      // the implementation method is already in the superclass, only for new methods.
      // For simplicity, use the entire method array including direct methods.
      LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
      if (new_methods != nullptr) {
        DCHECK_NE(new_methods->size(), 0u);
        imt_methods_begin =
            reinterpret_cast<uintptr_t>(&new_methods->At(0, kMethodSize, kMethodAlignment));
        imt_methods_size = new_methods->size() * kMethodSize;
      }
    }
  }

  // Helper: record `implementation` for interface method `j` of `iface` in the IMT,
  // or the conflict method if the IMT slot is already taken.
  auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // Place method in imt if entry is empty, place conflict otherwise.
    ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
    class_linker->SetIMTRef(unimplemented_method,
                            imt_conflict_method,
                            implementation,
                            /*out*/out_new_conflict,
                            /*out*/imt_ptr);
  };

  // For interfaces inherited from superclass, the new method arrays are empty,
  // so use vtable indexes from implementation methods from the superclass method array.
  for (size_t i = 0; i != super_ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    size_t j = 0;
    // First loop has method array shared with the super class.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      // Check if we need to update IMT with this method, see above.
      // (Unsigned subtraction makes this a single range check against the new methods.)
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
      if (implementation != super_implementation) {
        // Copy-on-write and move to the next loop.
        Thread* self = self_;
        StackHandleScope<2u> hs(self);
        Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
        HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
        // If the iftable itself is still shared with the superclass, copy it first.
        if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
          ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
              mirror::ObjectArray<mirror::Object>::CopyOf(
                  iftable, self, ifcount * mirror::IfTable::kMax));
          if (new_iftable == nullptr) {
            return false;
          }
          iftable.Assign(new_iftable);
        }
        method_array = ObjPtr<mirror::PointerArray>::DownCast(
            mirror::Array::CopyOf(old_method_array, self, num_methods));
        if (method_array == nullptr) {
          return false;
        }
        iftable->SetMethodArray(i, method_array);
        method_array->SetElementPtrSize(j, implementation, kPointerSize);
        ++j;
        break;
      }
    }
    // Second loop (if non-empty) has method array different from the superclass.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      // Check if we need to update IMT with this method, see above.
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
    }
  }

  // New interface method arrays contain vtable indexes. Translate them to methods.
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  for (size_t i = super_ifcount; i != ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    for (size_t j = 0; j != num_methods; ++j) {
      size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      if (!is_klass_abstract) {
        update_imt(iface, j, implementation);
      }
    }
  }

  return true;
}
8599 
8600 template <PointerSize kPointerSize>
AllocPointerArray(Thread * self,size_t length)8601 ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
8602     Thread* self, size_t length) {
8603   using PointerArrayType = std::conditional_t<
8604       kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
8605   ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
8606   return ObjPtr<mirror::PointerArray>::DownCast(array);
8607 }
8608 
8609 template <PointerSize kPointerSize>
AllocateIfTableMethodArrays(Thread * self,Handle<mirror::Class> klass,Handle<mirror::IfTable> iftable)8610 bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
8611     Thread* self,
8612     Handle<mirror::Class> klass,
8613     Handle<mirror::IfTable> iftable) {
8614   DCHECK(!klass->IsInterface());
8615   DCHECK(klass_->HasSuperClass());
8616   const size_t ifcount = iftable->Count();
8617   // We do not need a read barrier here as the length is constant, both from-space and
8618   // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8619   size_t super_ifcount =
8620       klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8621   if (ifcount == super_ifcount) {
8622     DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
8623     return true;
8624   }
8625 
8626   if (kIsDebugBuild) {
8627     // The method array references for superclass interfaces have been copied.
8628     // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
8629     ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
8630     for (size_t i = 0; i != super_ifcount; ++i) {
8631       CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
8632       CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
8633     }
8634   }
8635 
8636   for (size_t i = super_ifcount; i < ifcount; ++i) {
8637     size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
8638     if (num_methods > 0) {
8639       ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
8640       if (UNLIKELY(method_array == nullptr)) {
8641         self->AssertPendingOOMException();
8642         return false;
8643       }
8644       iftable->SetMethodArray(i, method_array);
8645     }
8646   }
8647   return true;
8648 }
8649 
// Assign a vtable index to every declared virtual method of `klass` (overriding
// methods reuse the superclass slot, new methods are appended) and store vtable
// indexes into the method arrays of newly added interfaces in `iftable`; those
// indexes are translated to actual method pointers later, after the vtable is
// allocated. Also collects `copied_method_records_` for interface methods that
// need copied implementations (miranda/default/conflict) and sets
// `num_new_copied_methods_`.
// Returns the required vtable length, or 0u with a pending exception on error.
template <PointerSize kPointerSize>
size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
    ObjPtr<mirror::Class> klass,
    ObjPtr<mirror::Class> super_class,
    bool is_super_abstract,
    size_t num_virtual_methods,
    ObjPtr<mirror::IfTable> iftable) {
  DCHECK(!klass->IsInterface());
  DCHECK(klass->HasSuperClass());
  DCHECK(klass->GetSuperClass() == super_class);

  // There should be no thread suspension unless we want to throw an exception.
  // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
  std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);

  // Prepare a hash table with virtual methods from the superclass.
  // For the unlikely cases that there are multiple methods with the same signature
  // but different vtable indexes, keep an array with indexes of the previous
  // methods with the same signature (walked as singly-linked lists).
  uint8_t* raw_super_vtable;
  size_t super_vtable_length;
  if (is_super_abstract) {
    // Abstract superclass: no embedded vtable, read entries from the
    // `PointerArray` vtable allocated during its linking.
    DCHECK(!super_class->ShouldHaveEmbeddedVTable());
    ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
    DCHECK(super_vtable != nullptr);
    raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
                       mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
    super_vtable_length = super_vtable->GetLength();
  } else {
    // Concrete superclass: read entries from the vtable embedded in the class object.
    DCHECK(super_class->ShouldHaveEmbeddedVTable());
    raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
                       mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
    super_vtable_length = super_class->GetEmbeddedVTableLength();
  }
  // NOTE(review): `raw_super_vtable` is a raw interior pointer; it stays valid
  // only because thread suspension (and thus moving GC) is excluded above.
  VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
  static constexpr double kMinLoadFactor = 0.3;
  static constexpr double kMaxLoadFactor = 0.5;
  static constexpr size_t kMaxStackBuferSize = 256;
  // Each hash set gets a buffer of 3x its maximum element count, keeping the
  // load factor within the bounds above without rehashing.
  const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
  const size_t super_vtable_buffer_size = super_vtable_length * 3;
  const size_t bit_vector_size = BitVector::BitsToWords(num_virtual_methods);
  const size_t total_size =
      declared_virtuals_buffer_size + super_vtable_buffer_size + bit_vector_size;

  // One contiguous buffer for all three scratch structures, laid out as
  // [declared-virtuals set | bit vector | super-vtable set]; small enough
  // buffers live on the stack, larger ones come from the arena allocator.
  uint32_t* declared_virtuals_buffer_ptr = (total_size <= kMaxStackBuferSize)
      ? reinterpret_cast<uint32_t*>(alloca(total_size * sizeof(uint32_t)))
      : allocator_.AllocArray<uint32_t>(total_size);
  uint32_t* bit_vector_buffer_ptr = declared_virtuals_buffer_ptr + declared_virtuals_buffer_size;

  // Set keyed by method signature; stores indexes into `klass`'s declared virtuals.
  DeclaredVirtualSignatureSet declared_virtual_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      DeclaredVirtualSignatureHash(klass),
      DeclaredVirtualSignatureEqual(klass),
      declared_virtuals_buffer_ptr,
      declared_virtuals_buffer_size,
      allocator_.Adapter());

  // Allocated lazily, only if a declared method overrides more than one super slot.
  ArrayRef<uint32_t> same_signature_vtable_lists;
  const bool is_proxy_class = klass->IsProxyClass();
  size_t vtable_length = super_vtable_length;

  // Record which declared methods are overriding a super method.
  BitVector initialized_methods(/* expandable= */ false,
                                Allocator::GetNoopAllocator(),
                                bit_vector_size,
                                bit_vector_buffer_ptr);

  // Note: our sets hash on the method name, and therefore we pay a high
  // performance price when a class has many overloads.
  //
  // We populate a set of declared signatures instead of signatures from the
  // super vtable (which is only lazy populated in case of interface overriding,
  // see below). This makes sure that we pay the performance price only on that
  // class, and not on its subclasses (except in the case of interface overriding, see below).
  for (size_t i = 0; i != num_virtual_methods; ++i) {
    ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
    DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
    // Proxy methods carry the signature of the interface method they proxy.
    ArtMethod* signature_method = UNLIKELY(is_proxy_class)
        ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
        : virtual_method;
    size_t hash = ComputeMethodHash(signature_method);
    declared_virtual_signatures.PutWithHash(i, hash);
  }

  // Loop through each super vtable method and see if they are overridden by a method we added to
  // the hash table.
  for (size_t j = 0; j < super_vtable_length; ++j) {
    // Search the hash table to see if we are overridden by any method.
    ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(j);
    if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
                                super_method->GetAccessFlags())) {
      // Continue on to the next method since this one is package private and cannot be overridden.
      // Before Android 4.1, the package-private method super_method might have been incorrectly
      // overridden.
      continue;
    }
    // Hashes for `j.l.Object` methods are precomputed and cached in the class linker.
    size_t hash = (j < mirror::Object::kVTableLength)
        ? class_linker_->object_virtual_method_hashes_[j]
        : ComputeMethodHash(super_method);
    auto it = declared_virtual_signatures.FindWithHash(super_method, hash);
    if (it == declared_virtual_signatures.end()) {
      continue;
    }
    ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it, kPointerSize);
    if (super_method->IsFinal()) {
      sants.reset();  // Allow suspension for throwing.
      ThrowLinkageError(klass, "Method %s overrides final method in class %s",
                        virtual_method->PrettyMethod().c_str(),
                        super_method->GetDeclaringClassDescriptor());
      return 0u;
    }
    if (initialized_methods.IsBitSet(*it)) {
      // The method is overriding more than one method.
      // We record that information in a linked list to later set the method in the vtable
      // locations that are not the method index.
      if (same_signature_vtable_lists.empty()) {
        same_signature_vtable_lists = ArrayRef<uint32_t>(
            allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
        std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
        same_signature_vtable_lists_ = same_signature_vtable_lists;
      }
      // Link this slot to the previously recorded index for the same declared method.
      same_signature_vtable_lists[j] = virtual_method->GetMethodIndexDuringLinking();
    } else {
      initialized_methods.SetBit(*it);
    }

    // We arbitrarily set to the largest index. This is also expected when
    // iterating over the `same_signature_vtable_lists_`.
    virtual_method->SetMethodIndex(j);
  }

  // Add the non-overridden methods at the end.
  for (size_t i = 0; i < num_virtual_methods; ++i) {
    if (!initialized_methods.IsBitSet(i)) {
      ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
      local_method->SetMethodIndex(vtable_length);
      vtable_length++;
    }
  }

  // A lazily constructed super vtable set, which we only populate in the less
  // common situation of a superclass implementing a method declared in an
  // interface this class inherits.
  // We still try to allocate the set on the stack as using the arena will have
  // a larger cost.
  uint32_t* super_vtable_buffer_ptr = bit_vector_buffer_ptr + bit_vector_size;
  VTableSignatureSet super_vtable_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      VTableSignatureHash(super_vtable_accessor),
      VTableSignatureEqual(super_vtable_accessor),
      super_vtable_buffer_ptr,
      super_vtable_buffer_size,
      allocator_.Adapter());

  // Assign vtable indexes for interface methods in new interfaces and store them
  // in implementation method arrays. These shall be replaced by actual method
  // pointers later. We do not need to do this for superclass interfaces as we can
  // get these vtable indexes from implementation methods in superclass iftable.
  // Record data for copied methods which shall be referenced by the vtable.
  const size_t ifcount = iftable->Count();
  ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
  const size_t super_ifcount = super_iftable->Count();
  // Iterate backwards over the new interfaces only: [super_ifcount, ifcount).
  for (size_t i = ifcount; i != super_ifcount; ) {
    --i;
    DCHECK_LT(i, ifcount);
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
    for (size_t j = 0; j != num_methods; ++j) {
      ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
      size_t hash = ComputeMethodHash(interface_method);
      ArtMethod* vtable_method = nullptr;
      // First, look for an implementation among `klass`'s own declared methods.
      auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
      if (it1 != declared_virtual_signatures.end()) {
        ArtMethod* found_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
        // For interface overriding, we only look at public methods.
        if (found_method->IsPublic()) {
          vtable_method = found_method;
        }
      } else {
        // This situation should be rare (a superclass implements a method
        // declared in an interface this class is inheriting). Only in this case
        // do we lazily populate the super_vtable_signatures.
        if (super_vtable_signatures.empty()) {
          for (size_t k = 0; k < super_vtable_length; ++k) {
            ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(k);
            if (!super_method->IsPublic()) {
              // For interface overriding, we only look at public methods.
              continue;
            }
            size_t super_hash = (k < mirror::Object::kVTableLength)
                ? class_linker_->object_virtual_method_hashes_[k]
                : ComputeMethodHash(super_method);
            auto [it, inserted] = super_vtable_signatures.InsertWithHash(k, super_hash);
            DCHECK(inserted || super_vtable_accessor.GetVTableEntry(*it) == super_method);
          }
        }
        auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
        if (it2 != super_vtable_signatures.end()) {
          vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
        }
      }

      // Default to appending a new slot; reuse the implementing method's slot if found.
      uint32_t vtable_index = vtable_length;
      if (vtable_method != nullptr) {
        vtable_index = vtable_method->GetMethodIndexDuringLinking();
        if (!vtable_method->IsOverridableByDefaultMethod()) {
          // Plain implementation; record the vtable index and move on. No
          // copied-method record is needed.
          method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
          continue;
        }
      }

      // Track a copied-method record (miranda/default/conflict candidate) for
      // this signature; records are deduplicated by signature hash.
      auto [it, inserted] = copied_method_records_.InsertWithHash(
          CopiedMethodRecord(interface_method, vtable_index), hash);
      if (vtable_method != nullptr) {
        DCHECK_EQ(vtable_index, it->GetMethodIndex());
      } else if (inserted) {
        // New signature without an implementation: it claims a new vtable slot.
        DCHECK_EQ(vtable_index, it->GetMethodIndex());
        DCHECK_EQ(vtable_index, vtable_length);
        ++vtable_length;
      } else {
        // Seen before; reuse the slot assigned to the earlier record.
        vtable_index = it->GetMethodIndex();
      }
      method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
      if (inserted) {
        it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
                                                    : CopiedMethodRecord::State::kDefaultSingle);
      } else {
        it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
      }
    }
  }
  // Finalize copied method records and check if we can reuse some methods from superclass vtable.
  size_t num_new_copied_methods = copied_method_records_.size();
  for (CopiedMethodRecord& record : copied_method_records_) {
    uint32_t vtable_index = record.GetMethodIndex();
    if (vtable_index < super_vtable_length) {
      ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
      DCHECK(super_method->IsOverridableByDefaultMethod());
      record.FinalizeState(
          super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
      if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
        // The superclass method can be reused; no new copied method for this record.
        --num_new_copied_methods;
      }
    }
  }
  num_new_copied_methods_ = num_new_copied_methods;

  // Method indexes are stored in a 16-bit field; reject classes that exceed it.
  if (UNLIKELY(!IsUint<16>(vtable_length))) {
    sants.reset();  // Allow suspension for throwing.
    ThrowClassFormatError(klass, "Too many methods defined on class: %zd", vtable_length);
    return 0u;
  }

  return vtable_length;
}
8908 
// For an interface `klass`, find which default methods inherited from
// superinterfaces give rise to copied methods. Only default-conflict methods
// are copied into interfaces; ordinary inherited default methods are resolved
// at runtime via `Class::FindVirtualMethodForInterfaceSuper()` instead.
// Populates `copied_method_records_` and sets `num_new_copied_methods_`.
// Returns true (no failure paths in this step).
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
    ObjPtr<mirror::Class> klass,
    size_t num_virtual_methods,
    ObjPtr<mirror::IfTable> iftable) {
  DCHECK(klass->IsInterface());
  DCHECK(klass->HasSuperClass());
  DCHECK(klass->GetSuperClass()->IsObjectClass());
  DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);

  // There should be no thread suspension unless we want to throw an exception.
  // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
  std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);

  // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
  // from superinterfaces, so we can filter out matching superinterface methods.
  static constexpr double kMinLoadFactor = 0.3;
  static constexpr double kMaxLoadFactor = 0.5;
  static constexpr size_t kMaxStackBuferSize = 256;
  // Buffer of 3x the element count keeps the load factor within the bounds above.
  // Small buffers live on the stack; larger ones come from the arena allocator.
  const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
  uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBuferSize)
      ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
      : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
  DeclaredVirtualSignatureSet declared_virtual_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      DeclaredVirtualSignatureHash(klass),
      DeclaredVirtualSignatureEqual(klass),
      declared_virtuals_buffer_ptr,
      declared_virtuals_buffer_size,
      allocator_.Adapter());
  for (size_t i = 0; i != num_virtual_methods; ++i) {
    ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
    DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
    size_t hash = ComputeMethodHash(virtual_method);
    declared_virtual_signatures.PutWithHash(i, hash);
  }

  // We do not create miranda methods for interface classes, so we do not need to track
  // non-default (abstract) interface methods. The downside is that we cannot use the
  // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
  // we do not fill method arrays for interfaces, the method search actually has to
  // compare signatures instead of searching for the implementing method.
  const size_t ifcount = iftable->Count();
  size_t new_method_index = num_virtual_methods;
  // Iterate backwards over all inherited interfaces.
  for (size_t i = ifcount; i != 0u; ) {
    --i;
    DCHECK_LT(i, ifcount);
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    if (!iface->HasDefaultMethods()) {
      continue;  // No default methods to process.
    }
    size_t num_methods = iface->NumDeclaredVirtualMethods();
    for (size_t j = 0; j != num_methods; ++j) {
      ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
      if (!interface_method->IsDefault()) {
        continue;  // Do not process this non-default method.
      }
      size_t hash = ComputeMethodHash(interface_method);
      auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
      if (it1 != declared_virtual_signatures.end()) {
        // Virtual methods in interfaces are always public.
        // This is checked by the `DexFileVerifier`.
        DCHECK(klass->GetVirtualMethodDuringLinking(*it1, kPointerSize)->IsPublic());
        continue;  // This default method is masked by a method declared in this interface.
      }

      CopiedMethodRecord new_record(interface_method, new_method_index);
      auto it = copied_method_records_.FindWithHash(new_record, hash);
      if (it == copied_method_records_.end()) {
        // Pretend that there is another default method and try to update the state.
        // If the `interface_method` is not masked, the state shall change to
        // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
        new_record.SetState(CopiedMethodRecord::State::kDefault);
        new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
        if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
          // Insert the new record with the state `kDefault`.
          new_record.SetState(CopiedMethodRecord::State::kDefault);
          copied_method_records_.PutWithHash(new_record, hash);
          DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
          ++new_method_index;
        }
      } else {
        // An existing record for this signature; fold this occurrence into its state.
        it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
      }
    }
  }

  // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
  // We do not copy normal default methods to subinterfaces, instead we find the
  // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
  size_t num_new_copied_methods = copied_method_records_.size();
  for (CopiedMethodRecord& record : copied_method_records_) {
    if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
      DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
      record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
      --num_new_copied_methods;
    }
  }
  num_new_copied_methods_ = num_new_copied_methods;

  return true;
}
9012 
9013 
9014 template <PointerSize kPointerSize>
9015 FLATTEN
LinkMethods(Thread * self,Handle<mirror::Class> klass,Handle<mirror::ObjectArray<mirror::Class>> interfaces,bool * out_new_conflict,ArtMethod ** out_imt)9016 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
9017     Thread* self,
9018     Handle<mirror::Class> klass,
9019     Handle<mirror::ObjectArray<mirror::Class>> interfaces,
9020     bool* out_new_conflict,
9021     ArtMethod** out_imt) {
9022   const size_t num_virtual_methods = klass->NumVirtualMethods();
9023   if (klass->IsInterface()) {
9024     // No vtable.
9025     if (!IsUint<16>(num_virtual_methods)) {
9026       ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
9027       return false;
9028     }
9029     // Assign each method an interface table index and set the default flag.
9030     bool has_defaults = false;
9031     for (size_t i = 0; i < num_virtual_methods; ++i) {
9032       ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
9033       m->SetMethodIndex(i);
9034       uint32_t access_flags = m->GetAccessFlags();
9035       DCHECK(!ArtMethod::IsDefault(access_flags));
9036       DCHECK_EQ(!ArtMethod::IsAbstract(access_flags), ArtMethod::IsInvokable(access_flags));
9037       if (ArtMethod::IsInvokable(access_flags)) {
9038         // If the dex file does not support default methods, throw ClassFormatError.
9039         // This check is necessary to protect from odd cases, such as native default
9040         // methods, that the dex file verifier permits for old dex file versions. b/157170505
9041         // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
9042         // currently running CTS tests for default methods with dex file version 035 which
9043         // does not support default methods. So, we limit this to native methods. b/157718952
9044         if (ArtMethod::IsNative(access_flags)) {
9045           DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
9046           ThrowClassFormatError(klass.Get(),
9047                                 "Dex file does not support default method '%s'",
9048                                 m->PrettyMethod().c_str());
9049           return false;
9050         }
9051         if (!ArtMethod::IsPublic(access_flags)) {
9052           // The verifier should have caught the non-public method for dex version 37.
9053           // Just warn and skip it since this is from before default-methods so we don't
9054           // really need to care that it has code.
9055           LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
9056                        << "This will be a fatal error in subsequent versions of android. "
9057                        << "Continuing anyway.";
9058         }
9059         static_assert((kAccDefault & kAccIntrinsicBits) != 0);
9060         DCHECK(!m->IsIntrinsic()) << "Adding kAccDefault to an intrinsic would be a mistake as it "
9061                                   << "overlaps with kAccIntrinsicBits.";
9062         m->SetAccessFlags(access_flags | kAccDefault);
9063         has_defaults = true;
9064       }
9065     }
9066     // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
9067     // during initialization. This is a performance optimization. We could simply traverse the
9068     // virtual_methods_ array again during initialization.
9069     if (has_defaults) {
9070       klass->SetHasDefaultMethods();
9071     }
9072     ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
9073         self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
9074     if (UNLIKELY(iftable == nullptr)) {
9075       self->AssertPendingException();
9076       return false;
9077     }
9078     size_t ifcount = iftable->Count();
9079     bool have_super_with_defaults = false;
9080     for (size_t i = 0; i != ifcount; ++i) {
9081       if (iftable->GetInterface(i)->HasDefaultMethods()) {
9082         have_super_with_defaults = true;
9083         break;
9084       }
9085     }
9086     LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
9087     if (have_super_with_defaults) {
9088       if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
9089         self->AssertPendingException();
9090         return false;
9091       }
9092       if (num_new_copied_methods_ != 0u) {
9093         // Re-check the number of methods.
9094         size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
9095         if (!IsUint<16>(final_num_virtual_methods)) {
9096           ThrowClassFormatError(
9097               klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
9098           return false;
9099         }
9100         ReallocMethods(klass.Get());
9101       }
9102     }
9103     klass->SetIfTable(iftable);
9104     if (kIsDebugBuild) {
9105       // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
9106       ClobberOldMethods(old_methods, klass->GetMethodsPtr());
9107     }
9108     return true;
9109   } else if (LIKELY(klass->HasSuperClass())) {
9110     // We set up the interface lookup table now because we need it to determine if we need
9111     // to update any vtable entries with new default method implementations.
9112     StackHandleScope<3> hs(self);
9113     MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
9114         ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
9115         : SetupInterfaceLookupTable(
9116               self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
9117     if (UNLIKELY(iftable == nullptr)) {
9118       self->AssertPendingException();
9119       return false;
9120     }
9121 
9122     // Copy the IMT from superclass if present and needed. Update with new methods later.
9123     Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
9124     bool is_klass_abstract = klass->IsAbstract();
9125     bool is_super_abstract = super_class->IsAbstract();
9126     DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
9127     DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
9128     if (!is_klass_abstract && !is_super_abstract) {
9129       ImTable* super_imt = super_class->GetImt(kPointerSize);
9130       for (size_t i = 0; i < ImTable::kSize; ++i) {
9131         out_imt[i] = super_imt->Get(i, kPointerSize);
9132       }
9133     }
9134 
9135     // If there are no new virtual methods and no new interfaces, we can simply reuse
9136     // the vtable from superclass. We may need to make a copy if it's embedded.
9137     const size_t super_vtable_length = super_class->GetVTableLength();
9138     if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
9139       DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
9140       if (is_super_abstract) {
9141         DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
9142         ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
9143         CHECK(super_vtable != nullptr) << super_class->PrettyClass();
9144         klass->SetVTable(super_vtable);
9145         // No IMT in the super class, we need to reconstruct it from the iftable.
9146         if (!is_klass_abstract && iftable->Count() != 0) {
9147           class_linker_->FillIMTFromIfTable(iftable.Get(),
9148                                             runtime_->GetImtUnimplementedMethod(),
9149                                             runtime_->GetImtConflictMethod(),
9150                                             klass.Get(),
9151                                             /*create_conflict_tables=*/false,
9152                                             /*ignore_copied_methods=*/false,
9153                                             out_new_conflict,
9154                                             out_imt);
9155         }
9156       } else {
9157         ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
9158         if (UNLIKELY(vtable == nullptr)) {
9159           self->AssertPendingOOMException();
9160           return false;
9161         }
9162         for (size_t i = 0; i < super_vtable_length; i++) {
9163           vtable->SetElementPtrSize(
9164               i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
9165         }
9166         klass->SetVTable(vtable);
9167         // The IMT was already copied from superclass if `klass` is not abstract.
9168       }
9169       klass->SetIfTable(iftable.Get());
9170       return true;
9171     }
9172 
9173     // Allocate method arrays, so that we can link interface methods without thread suspension,
9174     // otherwise GC could miss visiting newly allocated copied methods.
9175     // TODO: Do not allocate copied methods during linking, store only records about what
9176     // we need to allocate and allocate it at the end. Start with superclass iftable and
9177     // perform copy-on-write when needed to facilitate maximum memory sharing.
9178     if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
9179       self->AssertPendingOOMException();
9180       return false;
9181     }
9182 
9183     size_t final_vtable_size = AssignVTableIndexes(
9184         klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
9185     if (final_vtable_size == 0u) {
9186       self->AssertPendingException();
9187       return false;
9188     }
9189     DCHECK(IsUint<16>(final_vtable_size));
9190 
9191     // Allocate the new vtable.
9192     Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
9193     if (UNLIKELY(vtable == nullptr)) {
9194       self->AssertPendingOOMException();
9195       return false;
9196     }
9197 
9198     LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
9199     if (num_new_copied_methods_ != 0u) {
9200       ReallocMethods(klass.Get());
9201     }
9202 
9203     // Store new virtual methods in the new vtable.
9204     ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
9205     for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
9206       uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
9207       vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
9208       if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
9209         // We may override more than one method according to JLS, see b/211854716.
9210         while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
9211           DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
9212           vtable_index = same_signature_vtable_lists[vtable_index];
9213           vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
9214           if (kIsDebugBuild) {
9215             ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
9216             DCHECK(klass->CanAccessMember(current_method->GetDeclaringClass(),
9217                                           current_method->GetAccessFlags()));
9218             DCHECK(!current_method->IsFinal());
9219           }
9220         }
9221       }
9222     }
9223 
9224     // For non-overridden vtable slots, copy a method from `super_class`.
9225     for (size_t j = 0; j != super_vtable_length; ++j) {
9226       if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
9227         ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
9228         vtable->SetElementPtrSize(j, super_method, kPointerSize);
9229       }
9230     }
9231 
9232     // Update the `iftable` (and IMT) with finalized virtual methods.
9233     if (!FinalizeIfTable(klass,
9234                          iftable,
9235                          vtable,
9236                          is_klass_abstract,
9237                          is_super_abstract,
9238                          out_new_conflict,
9239                          out_imt)) {
9240       self->AssertPendingOOMException();
9241       return false;
9242     }
9243 
9244     klass->SetVTable(vtable.Get());
9245     klass->SetIfTable(iftable.Get());
9246     if (kIsDebugBuild) {
9247       CheckVTable(self, klass, kPointerSize);
9248       ClobberOldMethods(old_methods, klass->GetMethodsPtr());
9249     }
9250     return true;
9251   } else {
9252     return LinkJavaLangObjectMethods(self, klass);
9253   }
9254 }
9255 
9256 template <PointerSize kPointerSize>
LinkJavaLangObjectMethods(Thread * self,Handle<mirror::Class> klass)9257 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
9258     Thread* self,
9259     Handle<mirror::Class> klass) {
9260   DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
9261   DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
9262   static_assert(IsUint<16>(mirror::Object::kVTableLength));
9263   ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
9264   if (UNLIKELY(vtable == nullptr)) {
9265     self->AssertPendingOOMException();
9266     return false;
9267   }
9268   for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
9269     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
9270     vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
9271     virtual_method->SetMethodIndex(i);
9272   }
9273   klass->SetVTable(vtable);
9274   InitializeObjectVirtualMethodHashes(
9275       klass.Get(),
9276       kPointerSize,
9277       ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
9278   // The interface table is already allocated but there are no interface methods to link.
9279   DCHECK(klass->GetIfTable() != nullptr);
9280   DCHECK_EQ(klass->GetIfTableCount(), 0);
9281   return true;
9282 }
9283 
9284 // Populate the class vtable and itable. Compute return type indices.
LinkMethods(Thread * self,Handle<mirror::Class> klass,Handle<mirror::ObjectArray<mirror::Class>> interfaces,bool * out_new_conflict,ArtMethod ** out_imt)9285 bool ClassLinker::LinkMethods(Thread* self,
9286                               Handle<mirror::Class> klass,
9287                               Handle<mirror::ObjectArray<mirror::Class>> interfaces,
9288                               bool* out_new_conflict,
9289                               ArtMethod** out_imt) {
9290   self->AllowThreadSuspension();
9291   // Link virtual methods then interface methods.
9292   Runtime* const runtime = Runtime::Current();
9293   if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
9294     LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
9295     return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
9296   } else {
9297     constexpr PointerSize kOtherPointerSize =
9298         (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
9299     LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
9300     return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
9301   }
9302 }
9303 
// Helper for field layout: assigns offsets to the static or instance fields
// of a class being linked and computes the resulting sizes.
class ClassLinker::LinkFieldsHelper {
 public:
  // Assign offsets to the static (`is_static == true`) or instance fields of
  // `klass` and update the class' field bookkeeping. For static fields the
  // computed class size is written to `*class_size`; for instance fields the
  // object size is stored in the class itself and callers pass nullptr.
  static bool LinkFields(ClassLinker* class_linker,
                         Thread* self,
                         Handle<mirror::Class> klass,
                         bool is_static,
                         size_t* class_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Layout bucket of a field type; see the enum definition below for order.
  enum class FieldTypeOrder : uint16_t;
  // Tracker of alignment gaps usable by smaller fields; defined below.
  class FieldGaps;

  // A field's layout bucket paired with its index in the class' field array.
  struct FieldTypeOrderAndIndex {
    FieldTypeOrder field_type_order;
    uint16_t field_index;
  };

  // Maps the first character of a field type descriptor to its layout bucket.
  static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);

  // Records `field_offset` in `field` and returns the offset just past it.
  template <size_t kSize>
  static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
9328 
// We use the following order of field types for assigning offsets.
// Some fields can be shuffled forward to fill gaps, see
// `ClassLinker::LinkFieldsHelper::LinkFields()`.
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
  // References come first so that reference visiting can use a bitmap of
  // references at the start of the object (see `LinkFields()`).
  kReference = 0u,
  // 64-bit primitives.
  kLong,
  kDouble,
  // 32-bit primitives.
  kInt,
  kFloat,
  // 16-bit primitives.
  kChar,
  kShort,
  // 8-bit primitives.
  kBoolean,
  kByte,

  // Aliases marking the last member of each size group; the layout loops in
  // `LinkFields()` use these to detect the end of a group.
  kLast64BitType = kDouble,
  kLast32BitType = kFloat,
  kLast16BitType = kShort,
};
9347 
9348 ALWAYS_INLINE
9349 ClassLinker::LinkFieldsHelper::FieldTypeOrder
FieldTypeOrderFromFirstDescriptorCharacter(char first_char)9350 ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
9351   switch (first_char) {
9352     case 'J':
9353       return FieldTypeOrder::kLong;
9354     case 'D':
9355       return FieldTypeOrder::kDouble;
9356     case 'I':
9357       return FieldTypeOrder::kInt;
9358     case 'F':
9359       return FieldTypeOrder::kFloat;
9360     case 'C':
9361       return FieldTypeOrder::kChar;
9362     case 'S':
9363       return FieldTypeOrder::kShort;
9364     case 'Z':
9365       return FieldTypeOrder::kBoolean;
9366     case 'B':
9367       return FieldTypeOrder::kByte;
9368     default:
9369       DCHECK(first_char == 'L' || first_char == '[') << first_char;
9370       return FieldTypeOrder::kReference;
9371   }
9372 }
9373 
// Gaps where we can insert fields in object layout.
//
// Fields are assigned offsets in decreasing size order (8, 4, 2, 1 bytes).
// Whenever alignment padding is inserted before a larger field, the skipped
// bytes are recorded here so that later, smaller fields can be shuffled into
// them. At most one gap of each size (4, 2, 1 bytes) is outstanding at any
// time: recording or releasing a gap immediately splits any remainder into
// naturally aligned sub-gaps.
class ClassLinker::LinkFieldsHelper::FieldGaps {
 public:
  // Round `field_offset` up to `kSize` alignment, recording any skipped
  // bytes as gaps for later use by smaller fields.
  template <uint32_t kSize>
  ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
    static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
    if (!IsAligned<kSize>(field_offset.Uint32Value())) {
      uint32_t gap_start = field_offset.Uint32Value();
      field_offset = MemberOffset(RoundUp(gap_start, kSize));
      // The skipped range is shorter than `kSize`, so only gaps of sizes
      // below `kSize` can result from it.
      AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
    }
    return field_offset;
  }

  // Whether a gap usable by a `kSize`-byte field is available. A field can
  // also be placed at the start of a larger gap.
  template <uint32_t kSize>
  bool HasGap() const {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    return (kSize == 1u && gap1_offset_ != kNoOffset) ||
           (kSize <= 2u && gap2_offset_ != kNoOffset) ||
           gap4_offset_ != kNoOffset;
  }

  // Take the smallest suitable gap and return its offset. Any unused tail of
  // a larger gap is re-recorded as smaller gaps. Requires HasGap<kSize>().
  template <uint32_t kSize>
  MemberOffset ReleaseGap() {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    uint32_t result;
    if (kSize == 1u && gap1_offset_ != kNoOffset) {
      // Gaps at lower offsets are consumed first; recorded gaps of different
      // sizes never overlap, smaller gaps precede larger ones.
      DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
      result = gap1_offset_;
      gap1_offset_ = kNoOffset;
    } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
      result = gap2_offset_;
      gap2_offset_ = kNoOffset;
      if (kSize < 2u) {
        // A 1-byte field leaves the second byte of the 2-byte gap unused.
        AddGaps<1u>(result + kSize, result + 2u);
      }
    } else {
      DCHECK_NE(gap4_offset_, kNoOffset);
      result = gap4_offset_;
      gap4_offset_ = kNoOffset;
      if (kSize < 4u) {
        // Re-record the remainder of the 4-byte gap (2 or 3 bytes).
        AddGaps<kSize | 2u>(result + kSize, result + 4u);
      }
    }
    return MemberOffset(result);
  }

 private:
  // Record the byte range [gap_start, gap_end) as gaps. `kGapsToCheck` is a
  // bit mask of gap sizes (1, 2, 4) that may need to be recorded; the range
  // is consumed from the low end, smallest misalignment first.
  template <uint32_t kGapsToCheck>
  void AddGaps(uint32_t gap_start, uint32_t gap_end) {
    if ((kGapsToCheck & 1u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_end, 2u);
      if ((gap_start & 1u) != 0u) {
        DCHECK_EQ(gap1_offset_, kNoOffset);
        gap1_offset_ = gap_start;
        gap_start += 1u;
        if (kGapsToCheck == 1u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 2u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 2u);
      DCHECK_ALIGNED(gap_end, 4u);
      if ((gap_start & 2u) != 0u) {
        DCHECK_EQ(gap2_offset_, kNoOffset);
        gap2_offset_ = gap_start;
        gap_start += 2u;
        if (kGapsToCheck <= 3u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 4u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 4u);
      DCHECK_ALIGNED(gap_end, 8u);
      DCHECK_EQ(gap_start + 4u, gap_end);
      DCHECK_EQ(gap4_offset_, kNoOffset);
      gap4_offset_ = gap_start;
      return;
    }

    // Unreachable if `kGapsToCheck` covered all sizes the range can contain.
    DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
        << " after checking " << kGapsToCheck;
  }

  // Sentinel meaning "no gap of this size is currently recorded".
  static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);

  uint32_t gap4_offset_ = kNoOffset;
  uint32_t gap2_offset_ = kNoOffset;
  uint32_t gap1_offset_ = kNoOffset;
};
9475 
9476 template <size_t kSize>
9477 ALWAYS_INLINE
AssignFieldOffset(ArtField * field,MemberOffset field_offset)9478 MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
9479                                                               MemberOffset field_offset) {
9480   DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
9481   DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
9482   field->SetOffset(field_offset);
9483   return MemberOffset(field_offset.Uint32Value() + kSize);
9484 }
9485 
// Assign offsets to the static or instance fields (selected by `is_static`)
// of `klass` and update the class' bookkeeping: the reference field count
// and, for statics, `*class_size`, or for instance fields the object size
// stored in the class itself. Returns true on success (the debug-only
// verification pass at the end CHECK-fails rather than returning false).
bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
                                               Thread* self,
                                               Handle<mirror::Class> klass,
                                               bool is_static,
                                               size_t* class_size) {
  self->AllowThreadSuspension();
  LengthPrefixedArray<ArtField>* const fields = klass->GetFieldsPtr();

  // Initialize field_offset: statics start after the class' embedded data,
  // instance fields start at the end of the superclass' object layout.
  MemberOffset field_offset(0);
  if (is_static) {
    field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
        class_linker->GetImagePointerSize());
  } else {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (super_class != nullptr) {
      CHECK(super_class->IsResolved())
          << klass->PrettyClass() << " " << super_class->PrettyClass();
      field_offset = MemberOffset(super_class->GetObjectSize());
    }
  }

  size_t num_fields =
      is_static ? klass->ComputeNumStaticFields() : klass->ComputeNumInstanceFields();

  // We want a relatively stable order so that adding new fields
  // minimizes disruption of C++ version such as Class and Method.
  //
  // The overall sort order is:
  // 1) All object reference fields, sorted alphabetically.
  // 2) All java long (64-bit) integer fields, sorted alphabetically.
  // 3) All java double (64-bit) floating point fields, sorted alphabetically.
  // 4) All java int (32-bit) integer fields, sorted alphabetically.
  // 5) All java float (32-bit) floating point fields, sorted alphabetically.
  // 6) All java char (16-bit) integer fields, sorted alphabetically.
  // 7) All java short (16-bit) integer fields, sorted alphabetically.
  // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
  // 9) All java byte (8-bit) integer fields, sorted alphabetically.
  //
  // (References are first to increase the chance of reference visiting
  // being able to take a fast path using a bitmap of references at the
  // start of the object, see `Class::reference_instance_offsets_`.)
  //
  // Once the fields are sorted in this order we will attempt to fill any gaps
  // that might be present in the memory layout of the structure.
  // Note that we shall not fill gaps between the superclass fields.

  // Collect fields and their "type order index" (see numbered points above).
  // Raw ArtField pointers are used below, so thread suspension (which could
  // move the underlying storage) must be forbidden until we are done.
  const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
      "Using plain ArtField references");
  constexpr size_t kStackBufferEntries = 64;  // Avoid allocations for small number of fields.
  FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
  std::vector<FieldTypeOrderAndIndex> heap_buffer;
  ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
  if (num_fields <= kStackBufferEntries) {
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
  } else {
    heap_buffer.resize(num_fields);
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
  }
  size_t num_reference_fields = 0;
  size_t primitive_fields_start = num_fields;
  // `field_index` below is a uint16_t, so the field count must fit.
  DCHECK_LE(num_fields, 1u << 16);
  // Partition in one pass: references fill the buffer from the front (keeping
  // their original order), primitives fill it from the back (reversed).
  for (size_t i = 0; i != klass->NumFields(); ++i) {
    ArtField* field = &fields->At(i);
    if (field->IsStatic() != is_static) {
      continue;
    }
    const char* descriptor = field->GetTypeDescriptor();
    FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
    uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
    // Insert references to the start, other fields to the end.
    DCHECK_LT(num_reference_fields, primitive_fields_start);
    if (field_type_order == FieldTypeOrder::kReference) {
      sorted_fields[num_reference_fields] = { field_type_order, field_index };
      ++num_reference_fields;
    } else {
      --primitive_fields_start;
      sorted_fields[primitive_fields_start] = { field_type_order, field_index };
    }
  }
  DCHECK_EQ(num_reference_fields, primitive_fields_start);

  // Reference fields are already sorted by field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin(),
      sorted_fields.begin() + num_reference_fields,
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
                 lhs.field_index < rhs.field_index);
        return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
      }));
  // Primitive fields were stored in reverse order of their field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
                 lhs.field_index > rhs.field_index);
        return lhs.field_index > rhs.field_index;
      }));
  // Sort the primitive fields by the field type order, then field index.
  std::sort(sorted_fields.begin() + primitive_fields_start,
            sorted_fields.end(),
            [](const auto& lhs, const auto& rhs) {
              if (lhs.field_type_order != rhs.field_type_order) {
                return lhs.field_type_order < rhs.field_type_order;
              } else {
                return lhs.field_index < rhs.field_index;
              }
            });
  // Primitive fields are now sorted by field size (descending), then type, then field index.
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(lhs_type, Primitive::kPrimNot);
        Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(rhs_type, Primitive::kPrimNot);
        if (lhs_type != rhs_type) {
          size_t lhs_size = Primitive::ComponentSize(lhs_type);
          size_t rhs_size = Primitive::ComponentSize(rhs_type);
          return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
        } else {
          return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
        }
      }));

  // Process reference fields.
  FieldGaps field_gaps;
  size_t index = 0u;
  if (num_reference_fields != 0u) {
    constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
    field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
    for (; index != num_reference_fields; ++index) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
    }
  }
  // Process 64-bit fields.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
    field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<8u>(field, field_offset);
      ++index;
    }
  }
  // Process 32-bit fields. The first one may be shuffled into a gap left by
  // the 8-byte alignment above.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
    field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
    if (field_gaps.HasGap<4u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>());  // Ignore return value.
      ++index;
      DCHECK(!field_gaps.HasGap<4u>());  // There can be only one gap for a 32-bit field.
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<4u>(field, field_offset);
      ++index;
    }
  }
  // Process 16-bit fields, filling any recorded 2- or 4-byte gaps first.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
    field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
           field_gaps.HasGap<2u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>());  // Ignore return value.
      ++index;
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<2u>(field, field_offset);
      ++index;
    }
  }
  // Process 8-bit fields, filling remaining gaps first.
  for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>());  // Ignore return value.
  }
  for (; index != num_fields; ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    field_offset = AssignFieldOffset<1u>(field, field_offset);
  }

  // Raw ArtField pointers are no longer held; suspension is safe again.
  self->EndAssertNoThreadSuspension(old_no_suspend_cause);

  // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
  DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
  if (!is_static &&
      UNLIKELY(!class_linker->init_done_) &&
      klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
    // We know there are no non-reference fields in the Reference classes, and we know
    // that 'referent' is alphabetically the last instance field, so this is easy...
    // Note that we cannot use WellKnownClasses fields yet, as this is not
    // initialized.
    CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
    CHECK_STREQ(fields->At(klass->NumFields() - 2).GetName(), "referent");
    CHECK(!fields->At(klass->NumFields() - 2).IsStatic());
    CHECK_STREQ(fields->At(klass->NumFields() - 1).GetName(), "slowPathEnabled");
    CHECK(fields->At(klass->NumFields() - 1).IsStatic());
    --num_reference_fields;
  }

  size_t size = field_offset.Uint32Value();
  // Update klass with the computed counts and sizes.
  if (is_static) {
    klass->SetNumReferenceStaticFields(num_reference_fields);
    *class_size = size;
  } else {
    klass->SetNumReferenceInstanceFields(num_reference_fields);
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (num_reference_fields == 0 || super_class == nullptr) {
      // object has one reference field, klass, but we ignore it since we always visit the class.
      // super_class is null iff the class is java.lang.Object.
      if (super_class == nullptr ||
          (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
        klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
      }
    }
    if (kIsDebugBuild) {
      DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
      size_t total_reference_instance_fields = 0;
      ObjPtr<mirror::Class> cur_super = klass.Get();
      while (cur_super != nullptr) {
        total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
        cur_super = cur_super->GetSuperClass();
      }
      if (super_class == nullptr) {
        CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
      } else {
        // Check that there is at least num_reference_fields other than Object.class.
        CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
            << klass->PrettyClass();
      }
    }
    if (!klass->IsVariableSize()) {
      std::string temp;
      DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
      size_t previous_size = klass->GetObjectSize();
      if (previous_size != 0) {
        // Make sure that we didn't originally have an incorrect size.
        CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
      }
      klass->SetObjectSize(size);
    }
  }

  if (kIsDebugBuild) {
    // Make sure that the fields array is ordered by name but all reference
    // offsets are at the beginning as far as alignment allows.
    MemberOffset start_ref_offset = is_static
        ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
        : klass->GetFirstReferenceInstanceFieldOffset();
    MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
                                num_reference_fields *
                                    sizeof(mirror::HeapReference<mirror::Object>));
    MemberOffset current_ref_offset = start_ref_offset;
    for (size_t i = 0; i < klass->NumFields(); i++) {
      ArtField* field = &fields->At(i);
      if (field->IsStatic() != is_static) {
        continue;
      }
      VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
          << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
          << " offset=" << field->GetOffsetDuringLinking();
      if (i != 0) {
        ArtField* const prev_field = &fields->At(i - 1);
        // NOTE: The field names can be the same. This is not possible in the Java language
        // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
        DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
      }
      Primitive::Type type = field->GetTypeAsPrimitiveType();
      bool is_primitive = type != Primitive::kPrimNot;
      if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
          strcmp("referent", field->GetName()) == 0) {
        is_primitive = true;  // We lied above, so we have to expect a lie here.
      }
      MemberOffset offset = field->GetOffsetDuringLinking();
      if (is_primitive) {
        if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
          // Shuffled before references.
          size_t type_size = Primitive::ComponentSize(type);
          CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
          CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
          CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
          CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
        }
      } else {
        CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
        current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
                                          sizeof(mirror::HeapReference<mirror::Object>));
      }
    }
    CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
  }
  return true;
}
9805 
LinkInstanceFields(Thread * self,Handle<mirror::Class> klass)9806 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9807   CHECK(klass != nullptr);
9808   return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9809 }
9810 
LinkStaticFields(Thread * self,Handle<mirror::Class> klass,size_t * class_size)9811 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9812   CHECK(klass != nullptr);
9813   return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9814 }
9815 
// Elements of the @dalvik.annotation.Record system annotation. The numeric
// values are used to index into kRecordElementNames below, so the two must
// stay in the same order.
enum class RecordElementType : uint8_t {
  kNames = 0,
  kTypes = 1,
  kSignatures = 2,
  kAnnotationVisibilities = 3,
  kAnnotations = 4
};
9823 
// Element names of the @dalvik.annotation.Record system annotation, indexed
// by RecordElementType; the order must match that enum exactly. `constexpr`
// guarantees compile-time constant initialization of the table.
static constexpr const char* kRecordElementNames[] = {"componentNames",
                                                      "componentTypes",
                                                      "componentSignatures",
                                                      "componentAnnotationVisibilities",
                                                      "componentAnnotations"};
9829 
9830 class RecordAnnotationVisitor final : public annotations::AnnotationVisitor {
9831  public:
RecordAnnotationVisitor()9832   RecordAnnotationVisitor() {}
9833 
ValidateCounts()9834   bool ValidateCounts() {
9835     if (has_error_) {
9836       return false;
9837     }
9838 
9839     // Verify the counts.
9840     bool annotation_element_exists =
9841         (signatures_count_ != UINT32_MAX) || (annotations_count_ != UINT32_MAX);
9842     if (count_ >= 2) {
9843       SetErrorMsg("Record class can't have more than one @Record Annotation");
9844     } else if (names_count_ == UINT32_MAX) {
9845       SetErrorMsg("componentNames element is required");
9846     } else if (types_count_ == UINT32_MAX) {
9847       SetErrorMsg("componentTypes element is required");
9848     } else if (names_count_ != types_count_) {  // Every component must have a name and a type.
9849       SetErrorMsg(StringPrintf(
9850           "componentTypes is expected to have %i, but has %i types", names_count_, types_count_));
9851       // The other 3 elements are optional, but is expected to have the same count if it exists.
9852     } else if (signatures_count_ != UINT32_MAX && signatures_count_ != names_count_) {
9853       SetErrorMsg(StringPrintf("componentSignatures size is %i, but is expected to be %i",
9854                                signatures_count_,
9855                                names_count_));
9856     } else if (annotation_element_exists && visibilities_count_ != names_count_) {
9857       SetErrorMsg(
9858           StringPrintf("componentAnnotationVisibilities size is %i, but is expected to be %i",
9859                        visibilities_count_,
9860                        names_count_));
9861     } else if (annotation_element_exists && annotations_count_ != names_count_) {
9862       SetErrorMsg(StringPrintf("componentAnnotations size is %i, but is expected to be %i",
9863                                annotations_count_,
9864                                names_count_));
9865     }
9866 
9867     return !has_error_;
9868   }
9869 
IsRecordAnnotationFound()9870   bool IsRecordAnnotationFound() { return count_ != 0; }
9871 
VisitAnnotation(const char * descriptor,uint8_t visibility)9872   annotations::VisitorStatus VisitAnnotation(const char* descriptor, uint8_t visibility) override {
9873     if (has_error_) {
9874       return annotations::VisitorStatus::kVisitBreak;
9875     }
9876 
9877     if (visibility != DexFile::kDexVisibilitySystem) {
9878       return annotations::VisitorStatus::kVisitNext;
9879     }
9880 
9881     if (strcmp(descriptor, "Ldalvik/annotation/Record;") != 0) {
9882       return annotations::VisitorStatus::kVisitNext;
9883     }
9884 
9885     count_ += 1;
9886     if (count_ >= 2) {
9887       return annotations::VisitorStatus::kVisitBreak;
9888     }
9889     return annotations::VisitorStatus::kVisitInner;
9890   }
9891 
VisitAnnotationElement(const char * element_name,uint8_t type,const JValue & value)9892   annotations::VisitorStatus VisitAnnotationElement(const char* element_name,
9893                                                     uint8_t type,
9894                                                     [[maybe_unused]] const JValue& value) override {
9895     if (has_error_) {
9896       return annotations::VisitorStatus::kVisitBreak;
9897     }
9898 
9899     RecordElementType visiting_type;
9900     uint32_t* element_count;
9901     if (strcmp(element_name, "componentNames") == 0) {
9902       visiting_type = RecordElementType::kNames;
9903       element_count = &names_count_;
9904     } else if (strcmp(element_name, "componentTypes") == 0) {
9905       visiting_type = RecordElementType::kTypes;
9906       element_count = &types_count_;
9907     } else if (strcmp(element_name, "componentSignatures") == 0) {
9908       visiting_type = RecordElementType::kSignatures;
9909       element_count = &signatures_count_;
9910     } else if (strcmp(element_name, "componentAnnotationVisibilities") == 0) {
9911       visiting_type = RecordElementType::kAnnotationVisibilities;
9912       element_count = &visibilities_count_;
9913     } else if (strcmp(element_name, "componentAnnotations") == 0) {
9914       visiting_type = RecordElementType::kAnnotations;
9915       element_count = &annotations_count_;
9916     } else {
9917       // ignore this element that could be introduced in the future ART.
9918       return annotations::VisitorStatus::kVisitNext;
9919     }
9920 
9921     if ((*element_count) != UINT32_MAX) {
9922       SetErrorMsg(StringPrintf("Two %s annotation elements are found but only one is expected",
9923                                kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9924       return annotations::VisitorStatus::kVisitBreak;
9925     }
9926 
9927     if (type != DexFile::kDexAnnotationArray) {
9928       SetErrorMsg(StringPrintf("%s must be array type", element_name));
9929       return annotations::VisitorStatus::kVisitBreak;
9930     }
9931 
9932     *element_count = 0;
9933     visiting_type_ = visiting_type;
9934     return annotations::VisitorStatus::kVisitInner;
9935   }
9936 
VisitArrayElement(uint8_t depth,uint32_t index,uint8_t type,const JValue & value)9937   annotations::VisitorStatus VisitArrayElement(uint8_t depth,
9938                                                uint32_t index,
9939                                                uint8_t type,
9940                                                [[maybe_unused]] const JValue& value) override {
9941     if (has_error_) {
9942       return annotations::VisitorStatus::kVisitBreak;
9943     }
9944     switch (visiting_type_) {
9945       case RecordElementType::kNames: {
9946         if (depth == 0) {
9947           if (!ExpectedTypeOrError(
9948                   type, DexFile::kDexAnnotationString, visiting_type_, index, depth)) {
9949             return annotations::VisitorStatus::kVisitBreak;
9950           }
9951           names_count_++;
9952           return annotations::VisitorStatus::kVisitNext;
9953         }
9954         break;
9955       }
9956       case RecordElementType::kTypes: {
9957         if (depth == 0) {
9958           if (!ExpectedTypeOrError(
9959                   type, DexFile::kDexAnnotationType, visiting_type_, index, depth)) {
9960             return annotations::VisitorStatus::kVisitBreak;
9961           }
9962           types_count_++;
9963           return annotations::VisitorStatus::kVisitNext;
9964         }
9965         break;
9966       }
9967       case RecordElementType::kSignatures: {
9968         if (depth == 0) {
9969           // kDexAnnotationNull implies no generic signature for the component.
9970           if (type != DexFile::kDexAnnotationNull &&
9971               !ExpectedTypeOrError(
9972                   type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9973             return annotations::VisitorStatus::kVisitBreak;
9974           }
9975           signatures_count_++;
9976           return annotations::VisitorStatus::kVisitNext;
9977         }
9978         break;
9979       }
9980       case RecordElementType::kAnnotationVisibilities: {
9981         if (depth == 0) {
9982           if (!ExpectedTypeOrError(
9983                   type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9984             return annotations::VisitorStatus::kVisitBreak;
9985           }
9986           visibilities_count_++;
9987           return annotations::VisitorStatus::kVisitInner;
9988         } else if (depth == 1) {
9989           if (!ExpectedTypeOrError(
9990                   type, DexFile::kDexAnnotationByte, visiting_type_, index, depth)) {
9991             return annotations::VisitorStatus::kVisitBreak;
9992           }
9993           return annotations::VisitorStatus::kVisitNext;
9994         }
9995         break;
9996       }
9997       case RecordElementType::kAnnotations: {
9998         if (depth == 0) {
9999           if (!ExpectedTypeOrError(
10000                   type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
10001             return annotations::VisitorStatus::kVisitBreak;
10002           }
10003           annotations_count_++;
10004           return annotations::VisitorStatus::kVisitInner;
10005         } else if (depth == 1) {
10006           if (!ExpectedTypeOrError(
10007                   type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
10008             return annotations::VisitorStatus::kVisitBreak;
10009           }
10010           return annotations::VisitorStatus::kVisitNext;
10011         }
10012         break;
10013       }
10014     }
10015 
10016     // Should never happen if every next depth level is handled above whenever kVisitInner is
10017     // returned.
10018     DCHECK(false) << StringPrintf("Unexpected depth %i for element %s",
10019                                   depth,
10020                                   kRecordElementNames[static_cast<uint8_t>(visiting_type_)]);
10021     return annotations::VisitorStatus::kVisitBreak;
10022   }
10023 
10024  private:
10025   uint32_t count_ = 0;
10026   uint32_t names_count_ = UINT32_MAX;
10027   uint32_t types_count_ = UINT32_MAX;
10028   uint32_t signatures_count_ = UINT32_MAX;
10029   uint32_t visibilities_count_ = UINT32_MAX;
10030   uint32_t annotations_count_ = UINT32_MAX;
10031   RecordElementType visiting_type_;
10032 
ExpectedTypeOrError(uint8_t type,uint8_t expected,RecordElementType visiting_type,uint8_t depth,uint32_t index)10033   inline bool ExpectedTypeOrError(uint8_t type,
10034                                   uint8_t expected,
10035                                   RecordElementType visiting_type,
10036                                   uint8_t depth,
10037                                   uint32_t index) {
10038     if (type == expected) {
10039       return true;
10040     }
10041 
10042     SetErrorMsg(StringPrintf(
10043         "Expect 0x%02x type but got 0x%02x at the index %i and depth %i for the element %s",
10044         expected,
10045         type,
10046         index,
10047         depth,
10048         kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
10049     return false;
10050   }
10051 
10052   DISALLOW_COPY_AND_ASSIGN(RecordAnnotationVisitor);
10053 };
10054 
10055 /**
10056  * Set kClassFlagRecord and verify if klass is a record class.
10057  * If the verification fails, a pending java exception is thrown.
10058  *
10059  * @return false if verification fails. If klass isn't a record class,
10060  * it should always return true.
10061  */
VerifyRecordClass(Handle<mirror::Class> klass,ObjPtr<mirror::Class> super)10062 bool ClassLinker::VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::Class> super) {
10063   CHECK(klass != nullptr);
10064   // First, we check the conditions specified in java.lang.Class#isRecord().
10065   // If any of the conditions isn't fulfilled, it's not a record class and
10066   // ART should treat it as a normal class even if it's inherited from java.lang.Record.
10067   if (!klass->IsFinal()) {
10068     return true;
10069   }
10070 
10071   if (super == nullptr) {
10072     return true;
10073   }
10074 
10075   // Compare the string directly when this ClassLinker is initializing before
10076   // WellKnownClasses initializes
10077   if (WellKnownClasses::java_lang_Record == nullptr) {
10078     if (!super->DescriptorEquals("Ljava/lang/Record;")) {
10079       return true;
10080     }
10081   } else {
10082     ObjPtr<mirror::Class> java_lang_Record =
10083         WellKnownClasses::ToClass(WellKnownClasses::java_lang_Record);
10084     if (super.Ptr() != java_lang_Record.Ptr()) {
10085       return true;
10086     }
10087   }
10088 
10089   // Verify @dalvik.annotation.Record
10090   // The annotation has a mandatory element componentNames[] and componentTypes[] of the same size.
10091   // componentSignatures[], componentAnnotationVisibilities[][], componentAnnotations[][] are
10092   // optional, but should have the same size if it exists.
10093   RecordAnnotationVisitor visitor;
10094   annotations::VisitClassAnnotations(klass, &visitor);
10095   if (UNLIKELY(visitor.HasError())) {
10096     ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
10097     return false;
10098   }
10099 
10100   if (!visitor.IsRecordAnnotationFound()) {
10101     return true;
10102   }
10103 
10104   if (!visitor.ValidateCounts()) {
10105     ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
10106     return false;
10107   }
10108 
10109   // Set kClassFlagRecord.
10110   klass->SetRecordClass();
10111   return true;
10112 }
10113 
DoResolveString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)10114 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
10115                                                     ObjPtr<mirror::DexCache> dex_cache) {
10116   StackHandleScope<1> hs(Thread::Current());
10117   Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
10118   return DoResolveString(string_idx, h_dex_cache);
10119 }
10120 
DoResolveString(dex::StringIndex string_idx,Handle<mirror::DexCache> dex_cache)10121 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
10122                                                     Handle<mirror::DexCache> dex_cache) {
10123   const DexFile& dex_file = *dex_cache->GetDexFile();
10124   uint32_t utf16_length;
10125   const char* utf8_data = dex_file.GetStringDataAndUtf16Length(string_idx, &utf16_length);
10126   ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
10127   if (string != nullptr) {
10128     dex_cache->SetResolvedString(string_idx, string);
10129   }
10130   return string;
10131 }
10132 
DoLookupString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)10133 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
10134                                                    ObjPtr<mirror::DexCache> dex_cache) {
10135   DCHECK(dex_cache != nullptr);
10136   const DexFile& dex_file = *dex_cache->GetDexFile();
10137   uint32_t utf16_length;
10138   const char* utf8_data = dex_file.GetStringDataAndUtf16Length(string_idx, &utf16_length);
10139   ObjPtr<mirror::String> string =
10140       intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
10141   if (string != nullptr) {
10142     dex_cache->SetResolvedString(string_idx, string);
10143   }
10144   return string;
10145 }
10146 
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::Class> referrer)10147 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
10148                                                         ObjPtr<mirror::Class> referrer) {
10149   return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
10150 }
10151 
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)10152 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
10153                                                         ObjPtr<mirror::DexCache> dex_cache,
10154                                                         ObjPtr<mirror::ClassLoader> class_loader) {
10155   DCHECK(dex_cache->GetClassLoader() == class_loader);
10156   const DexFile& dex_file = *dex_cache->GetDexFile();
10157   std::string_view descriptor = dex_file.GetTypeDescriptorView(type_idx);
10158   ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
10159   if (type != nullptr) {
10160     DCHECK(type->IsResolved());
10161     dex_cache->SetResolvedType(type_idx, type);
10162   }
10163   return type;
10164 }
10165 
LookupResolvedType(std::string_view descriptor,ObjPtr<mirror::ClassLoader> class_loader)10166 ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(std::string_view descriptor,
10167                                                       ObjPtr<mirror::ClassLoader> class_loader) {
10168   DCHECK(!descriptor.empty()) << "descriptor is empty string";
10169   ObjPtr<mirror::Class> type = nullptr;
10170   if (descriptor.length() == 1u) {
10171     // only the descriptors of primitive types should be 1 character long, also avoid class lookup
10172     // for primitive classes that aren't backed by dex files.
10173     type = LookupPrimitiveClass(descriptor[0]);
10174   } else {
10175     Thread* const self = Thread::Current();
10176     DCHECK(self != nullptr);
10177     const size_t hash = ComputeModifiedUtf8Hash(descriptor);
10178     // Find the class in the loaded classes table.
10179     type = LookupClass(self, descriptor, hash, class_loader);
10180   }
10181   return (type != nullptr && type->IsResolved()) ? type : nullptr;
10182 }
10183 
10184 template <typename RefType>
DoResolveType(dex::TypeIndex type_idx,RefType referrer)10185 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
10186   StackHandleScope<2> hs(Thread::Current());
10187   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
10188   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
10189   return DoResolveType(type_idx, dex_cache, class_loader);
10190 }
10191 
// Explicit instantiations of DoResolveType() for ArtField*, ArtMethod* and
// ObjPtr<mirror::Class> referrers.
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtField* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtMethod* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ObjPtr<mirror::Class> referrer);
10199 
// Resolves the type at `type_idx` of the dex cache's dex file via FindClass().
// On success the class is cached in `dex_cache`. On failure, a pending
// ClassNotFoundException is converted into a NoClassDefFoundError with the
// original exception attached as its cause; any other pending exception is
// left untouched. Returns null on failure with an exception pending.
ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                 Handle<mirror::DexCache> dex_cache,
                                                 Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  Thread* self = Thread::Current();
  const DexFile* dex_file = dex_cache->GetDexFile();
  ObjPtr<mirror::Class> resolved = FindClass(self, *dex_file, type_idx, class_loader);
  if (resolved != nullptr) {
    // TODO: we used to throw here if resolved's class loader was not the
    //       boot class loader. This was to permit different classes with the
    //       same name to be loaded simultaneously by different loaders
    dex_cache->SetResolvedType(type_idx, resolved);
  } else {
    CHECK(self->IsExceptionPending())
        << "Expected pending exception for failed resolution of: "
        << dex_file->GetTypeDescriptor(type_idx);
    // Convert a ClassNotFoundException to a NoClassDefFoundError.
    StackHandleScope<1> hs(self);
    // The handle keeps the original exception alive while a new one is thrown.
    Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
    if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
      DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
      self->ClearException();
      ThrowNoClassDefFoundError("Failed resolution of: %s", dex_file->GetTypeDescriptor(type_idx));
      self->GetException()->SetCause(cause.Get());
    }
  }
  DCHECK((resolved == nullptr) || resolved->IsResolved())
      << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
  return resolved;
}
10230 
// Searches `klass` for the method referenced by `method_idx`, enforcing the
// hiddenapi policy in two phases (a silent check first, then a warning check
// only if no accessible interface method exists). On success the method is
// cached in `dex_cache`; returns null if no method is found or access is
// denied.
ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           uint32_t method_idx) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  // Search for the method using dex_cache and method_idx. The Class::Find*Method()
  // functions can optimize the search if the dex_cache is the same as the DexCache
  // of the class, with fall-back to name and signature search otherwise.
  ArtMethod* resolved = nullptr;
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
  }
  DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
  if (resolved != nullptr &&
      // We pass AccessMethod::kCheck instead of kLinking to not warn yet on the
      // access, as we'll be looking if the method can be accessed through an
      // interface.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kCheck)) {
    // The resolved method that we have found cannot be accessed due to
    // hiddenapi (typically it is declared up the hierarchy and is not an SDK
    // method). Try to find an interface method from the implemented interfaces which is
    // part of the SDK.
    ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
    if (itf_method == nullptr) {
      // No interface method. Call ShouldDenyAccessToMember again but this time
      // with AccessMethod::kLinking to ensure that an appropriate warning is
      // logged and the enforcement policy is applied.
      if (hiddenapi::ShouldDenyAccessToMember(resolved,
                                              hiddenapi::AccessContext(class_loader, dex_cache),
                                              hiddenapi::AccessMethod::kLinking)) {
        resolved = nullptr;
      }
    } else {
      // We found an interface method that is accessible, continue with the resolved method.
    }
  }
  if (resolved != nullptr) {
    // In case of jmvti, the dex file gets verified before being registered, so first
    // check if it's registered before checking class tables.
    const DexFile& dex_file = *dex_cache->GetDexFile();
    DCHECK_IMPLIES(
        IsDexFileRegistered(Thread::Current(), dex_file),
        FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
        << "DexFile referrer: " << dex_file.GetLocation()
        << " ClassLoader: " << DescribeLoaders(class_loader, "");
    // Be a good citizen and update the dex cache to speed subsequent calls.
    dex_cache->SetResolvedMethod(method_idx, resolved);
    // Disable the following invariant check as the verifier breaks it. b/73760543
    // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
    // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
    //    << "Method: " << resolved->PrettyMethod() << ", "
    //    << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
    //    << "DexFile referrer: " << dex_file.GetLocation();
  }
  return resolved;
}
10291 
10292 // Returns true if `method` is either null or hidden.
10293 // Does not print any warnings if it is hidden.
CheckNoSuchMethod(ArtMethod * method,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)10294 static bool CheckNoSuchMethod(ArtMethod* method,
10295                               ObjPtr<mirror::DexCache> dex_cache,
10296                               ObjPtr<mirror::ClassLoader> class_loader)
10297       REQUIRES_SHARED(Locks::mutator_lock_) {
10298   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10299   return method == nullptr || hiddenapi::ShouldDenyAccessToMember(
10300                                   method,
10301                                   hiddenapi::AccessContext(class_loader, dex_cache),
10302                                   hiddenapi::AccessMethod::kCheckWithPolicy);  // no warnings
10303 }
10304 
FindIncompatibleMethod(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t method_idx)10305 ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
10306                                                ObjPtr<mirror::DexCache> dex_cache,
10307                                                ObjPtr<mirror::ClassLoader> class_loader,
10308                                                uint32_t method_idx) {
10309   DCHECK(dex_cache->GetClassLoader() == class_loader);
10310   if (klass->IsInterface()) {
10311     ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
10312     return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
10313   } else {
10314     // If there was an interface method with the same signature, we would have
10315     // found it in the "copied" methods. Only DCHECK that the interface method
10316     // really does not exist.
10317     if (kIsDebugBuild) {
10318       ArtMethod* method =
10319           klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
10320       CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
10321             (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
10322     }
10323     return nullptr;
10324   }
10325 }
10326 
ResolveMethodId(uint32_t method_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader)10327 ArtMethod* ClassLinker::ResolveMethodId(uint32_t method_idx,
10328                                         Handle<mirror::DexCache> dex_cache,
10329                                         Handle<mirror::ClassLoader> class_loader) {
10330   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10331   ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
10332   Thread::PoisonObjectPointersIfDebug();
10333   if (resolved != nullptr) {
10334     DCHECK(!resolved->IsRuntimeMethod());
10335     DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
10336     return resolved;
10337   }
10338   // Fail, get the declaring class.
10339   const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
10340   ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
10341   if (klass == nullptr) {
10342     Thread::Current()->AssertPendingException();
10343     return nullptr;
10344   }
10345   return FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
10346 }
10347 
LookupResolvedField(uint32_t field_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,bool is_static)10348 ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
10349                                            ObjPtr<mirror::DexCache> dex_cache,
10350                                            ObjPtr<mirror::ClassLoader> class_loader,
10351                                            bool is_static) {
10352   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10353   const DexFile& dex_file = *dex_cache->GetDexFile();
10354   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
10355   ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
10356   if (klass == nullptr) {
10357     klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
10358   }
10359   if (klass == nullptr) {
10360     // The class has not been resolved yet, so the field is also unresolved.
10361     return nullptr;
10362   }
10363   DCHECK(klass->IsResolved());
10364 
10365   return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
10366 }
10367 
ResolveFieldJLS(uint32_t field_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader)10368 ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
10369                                        Handle<mirror::DexCache> dex_cache,
10370                                        Handle<mirror::ClassLoader> class_loader) {
10371   DCHECK(dex_cache != nullptr);
10372   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10373   ArtField* resolved = dex_cache->GetResolvedField(field_idx);
10374   Thread::PoisonObjectPointersIfDebug();
10375   if (resolved != nullptr) {
10376     return resolved;
10377   }
10378   const DexFile& dex_file = *dex_cache->GetDexFile();
10379   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
10380   ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
10381   if (klass == nullptr) {
10382     DCHECK(Thread::Current()->IsExceptionPending());
10383     return nullptr;
10384   }
10385 
10386   resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
10387   if (resolved == nullptr) {
10388     const char* name = dex_file.GetFieldName(field_id);
10389     const char* type = dex_file.GetFieldTypeDescriptor(field_id);
10390     ThrowNoSuchFieldError("", klass, type, name);
10391   }
10392   return resolved;
10393 }
10394 
FindResolvedField(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t field_idx,bool is_static)10395 ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
10396                                          ObjPtr<mirror::DexCache> dex_cache,
10397                                          ObjPtr<mirror::ClassLoader> class_loader,
10398                                          uint32_t field_idx,
10399                                          bool is_static) {
10400   DCHECK(dex_cache->GetClassLoader() == class_loader);
10401   ArtField* resolved = klass->FindField(dex_cache, field_idx);
10402   if (resolved == nullptr ||
10403       is_static != resolved->IsStatic() ||
10404       hiddenapi::ShouldDenyAccessToMember(resolved,
10405                                           hiddenapi::AccessContext(class_loader, dex_cache),
10406                                           hiddenapi::AccessMethod::kLinking)) {
10407     return nullptr;
10408   }
10409 
10410   dex_cache->SetResolvedField(field_idx, resolved);
10411   return resolved;
10412 }
10413 
FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t field_idx)10414 ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
10415                                             ObjPtr<mirror::DexCache> dex_cache,
10416                                             ObjPtr<mirror::ClassLoader> class_loader,
10417                                             uint32_t field_idx) {
10418   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10419   ArtField* resolved = klass->FindField(dex_cache, field_idx);
10420 
10421   if (resolved != nullptr &&
10422       hiddenapi::ShouldDenyAccessToMember(resolved,
10423                                           hiddenapi::AccessContext(class_loader, dex_cache),
10424                                           hiddenapi::AccessMethod::kLinking)) {
10425     resolved = nullptr;
10426   }
10427 
10428   if (resolved != nullptr) {
10429     dex_cache->SetResolvedField(field_idx, resolved);
10430   }
10431 
10432   return resolved;
10433 }
10434 
ResolveMethodType(Thread * self,dex::ProtoIndex proto_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader)10435 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
10436     Thread* self,
10437     dex::ProtoIndex proto_idx,
10438     Handle<mirror::DexCache> dex_cache,
10439     Handle<mirror::ClassLoader> class_loader) {
10440   DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
10441   DCHECK(dex_cache != nullptr);
10442   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10443 
10444   ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
10445   if (resolved != nullptr) {
10446     return resolved;
10447   }
10448 
10449   VariableSizedHandleScope raw_method_type_hs(self);
10450   mirror::RawMethodType raw_method_type(&raw_method_type_hs);
10451   if (!ResolveMethodType(self, proto_idx, dex_cache, class_loader, raw_method_type)) {
10452     DCHECK(self->IsExceptionPending());
10453     return nullptr;
10454   }
10455 
10456   // The handle scope was filled with return type and paratemer types.
10457   DCHECK_EQ(raw_method_type_hs.Size(),
10458             dex_cache->GetDexFile()->GetShortyView(proto_idx).length());
10459   ObjPtr<mirror::MethodType> method_type = mirror::MethodType::Create(self, raw_method_type);
10460   if (method_type != nullptr) {
10461     // Ensure all stores for the newly created MethodType are visible, before we attempt to place
10462     // it in the DexCache (b/224733324).
10463     std::atomic_thread_fence(std::memory_order_release);
10464     dex_cache->SetResolvedMethodType(proto_idx, method_type.Ptr());
10465   }
10466   return method_type;
10467 }
10468 
ResolveMethodType(Thread * self,dex::ProtoIndex proto_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader,mirror::RawMethodType method_type)10469 bool ClassLinker::ResolveMethodType(Thread* self,
10470                                     dex::ProtoIndex proto_idx,
10471                                     Handle<mirror::DexCache> dex_cache,
10472                                     Handle<mirror::ClassLoader> class_loader,
10473                                     /*out*/ mirror::RawMethodType method_type) {
10474   DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
10475   DCHECK(dex_cache != nullptr);
10476   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10477 
10478   // First resolve the return type.
10479   const DexFile& dex_file = *dex_cache->GetDexFile();
10480   const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
10481   ObjPtr<mirror::Class> return_type =
10482       ResolveType(proto_id.return_type_idx_, dex_cache, class_loader);
10483   if (return_type == nullptr) {
10484     DCHECK(self->IsExceptionPending());
10485     return false;
10486   }
10487   method_type.SetRType(return_type);
10488 
10489   // Then resolve the argument types.
10490   DexFileParameterIterator it(dex_file, proto_id);
10491   for (; it.HasNext(); it.Next()) {
10492     const dex::TypeIndex type_idx = it.GetTypeIdx();
10493     ObjPtr<mirror::Class> param_type = ResolveType(type_idx, dex_cache, class_loader);
10494     if (param_type == nullptr) {
10495       DCHECK(self->IsExceptionPending());
10496       return false;
10497     }
10498     method_type.AddPType(param_type);
10499   }
10500 
10501   return true;
10502 }
10503 
ResolveMethodType(Thread * self,dex::ProtoIndex proto_idx,ArtMethod * referrer)10504 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
10505                                                           dex::ProtoIndex proto_idx,
10506                                                           ArtMethod* referrer) {
10507   StackHandleScope<2> hs(self);
10508   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
10509   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
10510   return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
10511 }
10512 
ResolveMethodHandleForField(Thread * self,const dex::MethodHandleItem & method_handle,ArtMethod * referrer)10513 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
10514     Thread* self,
10515     const dex::MethodHandleItem& method_handle,
10516     ArtMethod* referrer) {
10517   DexFile::MethodHandleType handle_type =
10518       static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
10519   mirror::MethodHandle::Kind kind;
10520   bool is_put;
10521   bool is_static;
10522   int32_t num_params;
10523   switch (handle_type) {
10524     case DexFile::MethodHandleType::kStaticPut: {
10525       kind = mirror::MethodHandle::Kind::kStaticPut;
10526       is_put = true;
10527       is_static = true;
10528       num_params = 1;
10529       break;
10530     }
10531     case DexFile::MethodHandleType::kStaticGet: {
10532       kind = mirror::MethodHandle::Kind::kStaticGet;
10533       is_put = false;
10534       is_static = true;
10535       num_params = 0;
10536       break;
10537     }
10538     case DexFile::MethodHandleType::kInstancePut: {
10539       kind = mirror::MethodHandle::Kind::kInstancePut;
10540       is_put = true;
10541       is_static = false;
10542       num_params = 2;
10543       break;
10544     }
10545     case DexFile::MethodHandleType::kInstanceGet: {
10546       kind = mirror::MethodHandle::Kind::kInstanceGet;
10547       is_put = false;
10548       is_static = false;
10549       num_params = 1;
10550       break;
10551     }
10552     case DexFile::MethodHandleType::kInvokeStatic:
10553     case DexFile::MethodHandleType::kInvokeInstance:
10554     case DexFile::MethodHandleType::kInvokeConstructor:
10555     case DexFile::MethodHandleType::kInvokeDirect:
10556     case DexFile::MethodHandleType::kInvokeInterface:
10557       LOG(FATAL) << "Unreachable";
10558       UNREACHABLE();
10559   }
10560 
10561   ArtField* target_field =
10562       ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
10563   if (LIKELY(target_field != nullptr)) {
10564     DCHECK_EQ(is_static, target_field->IsStatic()) << target_field->PrettyField();
10565     ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
10566     ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
10567     if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
10568       ThrowIllegalAccessErrorField(referring_class, target_field);
10569       return nullptr;
10570     }
10571     if (UNLIKELY(is_put && target_field->IsFinal())) {
10572       ThrowIllegalAccessErrorField(referring_class, target_field);
10573       return nullptr;
10574     }
10575   } else {
10576     DCHECK(Thread::Current()->IsExceptionPending());
10577     return nullptr;
10578   }
10579 
10580   StackHandleScope<5> hs(self);
10581   ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
10582   Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
10583       mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
10584   if (UNLIKELY(method_params == nullptr)) {
10585     DCHECK(self->IsExceptionPending());
10586     return nullptr;
10587   }
10588 
10589   Handle<mirror::Class> constructor_class;
10590   Handle<mirror::Class> return_type;
10591   switch (handle_type) {
10592     case DexFile::MethodHandleType::kStaticPut: {
10593       method_params->Set(0, target_field->ResolveType());
10594       return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
10595       break;
10596     }
10597     case DexFile::MethodHandleType::kStaticGet: {
10598       return_type = hs.NewHandle(target_field->ResolveType());
10599       break;
10600     }
10601     case DexFile::MethodHandleType::kInstancePut: {
10602       method_params->Set(0, target_field->GetDeclaringClass());
10603       method_params->Set(1, target_field->ResolveType());
10604       return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
10605       break;
10606     }
10607     case DexFile::MethodHandleType::kInstanceGet: {
10608       method_params->Set(0, target_field->GetDeclaringClass());
10609       return_type = hs.NewHandle(target_field->ResolveType());
10610       break;
10611     }
10612     case DexFile::MethodHandleType::kInvokeStatic:
10613     case DexFile::MethodHandleType::kInvokeInstance:
10614     case DexFile::MethodHandleType::kInvokeConstructor:
10615     case DexFile::MethodHandleType::kInvokeDirect:
10616     case DexFile::MethodHandleType::kInvokeInterface:
10617       LOG(FATAL) << "Unreachable";
10618       UNREACHABLE();
10619   }
10620 
10621   for (int32_t i = 0; i < num_params; ++i) {
10622     if (UNLIKELY(method_params->Get(i) == nullptr)) {
10623       DCHECK(self->IsExceptionPending());
10624       return nullptr;
10625     }
10626   }
10627 
10628   if (UNLIKELY(return_type.IsNull())) {
10629     DCHECK(self->IsExceptionPending());
10630     return nullptr;
10631   }
10632 
10633   Handle<mirror::MethodType>
10634       method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
10635   if (UNLIKELY(method_type.IsNull())) {
10636     DCHECK(self->IsExceptionPending());
10637     return nullptr;
10638   }
10639 
10640   Handle<mirror::Field> target(hs.NewHandle(
10641       mirror::Field::CreateFromArtField(self, target_field, /*force_resolve=*/ true)));
10642   return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
10643 }
10644 
// Builds a MethodHandle for a method-invoking method_handle_item
// (invoke-static/instance/constructor/direct/interface). Resolves the target
// method with the appropriate invoke-type checks, validates access, builds
// the handle's MethodType from the method_id's proto (plus a leading receiver
// for instance kinds), and creates the MethodHandle object. Returns nullptr
// with a pending exception on failure.
ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  // 1 if the handle's MethodType takes a leading receiver argument.
  uint32_t receiver_count = 0;
  ArtMethod* target_method = nullptr;
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      // Field accessors are dispatched to ResolveMethodHandleForField.
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case DexFile::MethodHandleType::kInvokeStatic: {
      kind = mirror::MethodHandle::Kind::kInvokeStatic;
      receiver_count = 0;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kStatic);
      break;
    }
    case DexFile::MethodHandleType::kInvokeInstance: {
      kind = mirror::MethodHandle::Kind::kInvokeVirtual;
      receiver_count = 1;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kVirtual);
      break;
    }
    case DexFile::MethodHandleType::kInvokeConstructor: {
      // Constructors are currently implemented as a transform. They
      // are special cased later in this method.
      kind = mirror::MethodHandle::Kind::kInvokeTransform;
      receiver_count = 0;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kDirect);
      break;
    }
    case DexFile::MethodHandleType::kInvokeDirect: {
      kind = mirror::MethodHandle::Kind::kInvokeDirect;
      receiver_count = 1;
      StackHandleScope<2> hs(self);
      // A constant method handle with type kInvokeDirect can refer to
      // a method that is private or to a method in a super class. To
      // disambiguate the two options, we resolve the method ignoring
      // the invocation type to determine if the method is private. We
      // then resolve again specifying the intended invocation type to
      // force the appropriate checks.
      target_method = ResolveMethodId(method_handle.field_or_method_idx_,
                                      hs.NewHandle(referrer->GetDexCache()),
                                      hs.NewHandle(referrer->GetClassLoader()));
      if (UNLIKELY(target_method == nullptr)) {
        break;
      }

      if (target_method->IsPrivate()) {
        kind = mirror::MethodHandle::Kind::kInvokeDirect;
        target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                                referrer,
                                                InvokeType::kDirect);
      } else {
        kind = mirror::MethodHandle::Kind::kInvokeSuper;
        target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                                referrer,
                                                InvokeType::kSuper);
        if (UNLIKELY(target_method == nullptr)) {
          break;
        }
        // Find the method specified in the parent in referring class
        // so invoke-super invokes the method in the parent of the
        // referrer.
        target_method =
            referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
                                                                       kRuntimePointerSize);
      }
      break;
    }
    case DexFile::MethodHandleType::kInvokeInterface: {
      kind = mirror::MethodHandle::Kind::kInvokeInterface;
      receiver_count = 1;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kInterface);
      break;
    }
  }

  if (UNLIKELY(target_method == nullptr)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  // According to JVMS 4.4.8 none of invoke* MethodHandle-s can target <clinit> methods.
  if (UNLIKELY(target_method->IsClassInitializer())) {
    ThrowClassFormatError(referrer->GetDeclaringClass(),
        "Method handles can't target class initializer method");
    return nullptr;
  }

  // The referrer must be able to access the target method.
  ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  uint32_t access_flags = target_method->GetAccessFlags();
  if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
    ThrowIllegalAccessErrorMethod(referring_class, target_method);
    return nullptr;
  }

  // Calculate the number of parameters from the method shorty. We add the
  // receiver count (0 or 1) and deduct one for the return value.
  uint32_t shorty_length;
  target_method->GetShorty(&shorty_length);
  int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);

  StackHandleScope<5> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (method_params.Get() == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  const DexFile* dex_file = referrer->GetDexFile();
  const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
  int32_t index = 0;
  if (receiver_count != 0) {
    // Insert receiver. Use the class identified in the method handle rather than the declaring
    // class of the resolved method which may be super class or default interface method
    // (b/115964401).
    ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
    // receiver_class should have been resolved when resolving the target method.
    DCHECK(receiver_class != nullptr);
    method_params->Set(index++, receiver_class);
  }

  // Append each declared parameter type from the method's proto.
  const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
  DexFileParameterIterator it(*dex_file, proto_id);
  while (it.HasNext()) {
    DCHECK_LT(index, num_params);
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
    if (nullptr == klass) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    method_params->Set(index++, klass);
    it.Next();
  }

  Handle<mirror::Class> return_type =
      hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
    // Constructor handles are created via the default MethodHandles.Lookup
    // rather than directly, see the kInvokeTransform comment above.
    Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
    Handle<mirror::MethodHandlesLookup> lookup =
        hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
    return lookup->FindConstructor(self, constructor_class, method_type);
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}
10822 
ResolveMethodHandle(Thread * self,uint32_t method_handle_idx,ArtMethod * referrer)10823 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
10824                                                               uint32_t method_handle_idx,
10825                                                               ArtMethod* referrer)
10826     REQUIRES_SHARED(Locks::mutator_lock_) {
10827   const DexFile* const dex_file = referrer->GetDexFile();
10828   const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
10829   switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
10830     case DexFile::MethodHandleType::kStaticPut:
10831     case DexFile::MethodHandleType::kStaticGet:
10832     case DexFile::MethodHandleType::kInstancePut:
10833     case DexFile::MethodHandleType::kInstanceGet:
10834       return ResolveMethodHandleForField(self, method_handle, referrer);
10835     case DexFile::MethodHandleType::kInvokeStatic:
10836     case DexFile::MethodHandleType::kInvokeInstance:
10837     case DexFile::MethodHandleType::kInvokeConstructor:
10838     case DexFile::MethodHandleType::kInvokeDirect:
10839     case DexFile::MethodHandleType::kInvokeInterface:
10840       return ResolveMethodHandleForMethod(self, method_handle, referrer);
10841   }
10842 }
10843 
IsQuickResolutionStub(const void * entry_point) const10844 bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
10845   return (entry_point == GetQuickResolutionStub()) ||
10846       (quick_resolution_trampoline_ == entry_point);
10847 }
10848 
IsQuickToInterpreterBridge(const void * entry_point) const10849 bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
10850   return (entry_point == GetQuickToInterpreterBridge()) ||
10851       (quick_to_interpreter_bridge_trampoline_ == entry_point);
10852 }
10853 
IsQuickGenericJniStub(const void * entry_point) const10854 bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
10855   return (entry_point == GetQuickGenericJniStub()) ||
10856       (quick_generic_jni_trampoline_ == entry_point);
10857 }
10858 
IsJniDlsymLookupStub(const void * entry_point) const10859 bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
10860   return entry_point == GetJniDlsymLookupStub() ||
10861       (jni_dlsym_lookup_trampoline_ == entry_point);
10862 }
10863 
IsJniDlsymLookupCriticalStub(const void * entry_point) const10864 bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
10865   return entry_point == GetJniDlsymLookupCriticalStub() ||
10866       (jni_dlsym_lookup_critical_trampoline_ == entry_point);
10867 }
10868 
// Forwards to GetQuickGenericJniStub(); kept as a separate accessor so the
// runtime-facing name is distinct from the stub-identity checks above.
const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
  return GetQuickGenericJniStub();
}
10872 
SetEntryPointsForObsoleteMethod(ArtMethod * method) const10873 void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
10874   DCHECK(method->IsObsolete());
10875   // We cannot mess with the entrypoints of native methods because they are used to determine how
10876   // large the method's quick stack frame is. Without this information we cannot walk the stacks.
10877   if (!method->IsNative()) {
10878     method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
10879   }
10880 }
10881 
// Writes a human-readable summary of class-linker state to `os` for SIGQUIT
// (ANR) dumps: zygote/post-zygote class counts, every registered class loader
// with its dex file locations and parent, and class-initialization stats.
void ClassLinker::DumpForSigQuit(std::ostream& os) {
  ScopedObjectAccess soa(Thread::Current());
  ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
  os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
     << NumNonZygoteClasses() << "\n";
  // dex_lock_ guards dex_caches_, iterated below.
  ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
  os << "Dumping registered class loaders\n";
  size_t class_loader_index = 0;
  for (const ClassLoaderData& class_loader : class_loaders_) {
    // Decode the weak root; a null result means the loader has been collected.
    ObjPtr<mirror::ClassLoader> loader =
        ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
    if (loader != nullptr) {
      os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
      bool saw_one_dex_file = false;
      for (const auto& entry : dex_caches_) {
        const DexCacheData& dex_cache = entry.second;
        // A dex cache is attributed to this loader when they share a class table.
        if (dex_cache.class_table == class_loader.class_table) {
          if (saw_one_dex_file) {
            os << ":";
          }
          saw_one_dex_file = true;
          os << entry.first->GetLocation();
        }
      }
      os << "]";
      bool found_parent = false;
      if (loader->GetParent() != nullptr) {
        // Cross-reference the parent by its index among registered loaders.
        size_t parent_index = 0;
        for (const ClassLoaderData& class_loader2 : class_loaders_) {
          ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
              soa.Self()->DecodeJObject(class_loader2.weak_root));
          if (loader2 == loader->GetParent()) {
            os << ", parent #" << parent_index;
            found_parent = true;
            break;
          }
          parent_index++;
        }
        if (!found_parent) {
          os << ", unregistered parent of type "
             << loader->GetParent()->GetClass()->PrettyDescriptor();
        }
      } else {
        os << ", no parent";
      }
      os << "\n";
    }
  }
  os << "Done dumping class loaders\n";
  Runtime* runtime = Runtime::Current();
  os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
     << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
}
10935 
10936 class CountClassesVisitor : public ClassLoaderVisitor {
10937  public:
CountClassesVisitor()10938   CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
10939 
Visit(ObjPtr<mirror::ClassLoader> class_loader)10940   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
10941       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
10942     ClassTable* const class_table = class_loader->GetClassTable();
10943     if (class_table != nullptr) {
10944       num_zygote_classes += class_table->NumZygoteClasses(class_loader);
10945       num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
10946     }
10947   }
10948 
10949   size_t num_zygote_classes;
10950   size_t num_non_zygote_classes;
10951 };
10952 
NumZygoteClasses() const10953 size_t ClassLinker::NumZygoteClasses() const {
10954   CountClassesVisitor visitor;
10955   VisitClassLoaders(&visitor);
10956   return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
10957 }
10958 
NumNonZygoteClasses() const10959 size_t ClassLinker::NumNonZygoteClasses() const {
10960   CountClassesVisitor visitor;
10961   VisitClassLoaders(&visitor);
10962   return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
10963 }
10964 
NumLoadedClasses()10965 size_t ClassLinker::NumLoadedClasses() {
10966   ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
10967   // Only return non zygote classes since these are the ones which apps which care about.
10968   return NumNonZygoteClasses();
10969 }
10970 
// Returns the tid of the thread currently holding classlinker_classes_lock_
// exclusively (diagnostic accessor).
pid_t ClassLinker::GetClassesLockOwner() {
  return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
}
10974 
// Returns the tid of the thread currently holding dex_lock_ exclusively
// (diagnostic accessor).
pid_t ClassLinker::GetDexLockOwner() {
  return Locks::dex_lock_->GetExclusiveOwnerTid();
}
10978 
SetClassRoot(ClassRoot class_root,ObjPtr<mirror::Class> klass)10979 void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
10980   DCHECK(!init_done_);
10981 
10982   DCHECK(klass != nullptr);
10983   DCHECK(klass->GetClassLoader() == nullptr);
10984 
10985   mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
10986   DCHECK(class_roots != nullptr);
10987   DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
10988   int32_t index = static_cast<int32_t>(class_root);
10989   DCHECK(class_roots->Get(index) == nullptr);
10990   class_roots->Set<false>(index, klass);
10991 }
10992 
CreateWellKnownClassLoader(Thread * self,const std::vector<const DexFile * > & dex_files,Handle<mirror::Class> loader_class,Handle<mirror::ClassLoader> parent_loader,Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after)10993 ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
10994     Thread* self,
10995     const std::vector<const DexFile*>& dex_files,
10996     Handle<mirror::Class> loader_class,
10997     Handle<mirror::ClassLoader> parent_loader,
10998     Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
10999     Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
11000   CHECK(loader_class.Get() == WellKnownClasses::dalvik_system_PathClassLoader ||
11001         loader_class.Get() == WellKnownClasses::dalvik_system_DelegateLastClassLoader ||
11002         loader_class.Get() == WellKnownClasses::dalvik_system_InMemoryDexClassLoader);
11003 
11004   StackHandleScope<5> hs(self);
11005 
11006   ArtField* dex_elements_field = WellKnownClasses::dalvik_system_DexPathList_dexElements;
11007 
11008   Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
11009   DCHECK(dex_elements_class != nullptr);
11010   DCHECK(dex_elements_class->IsArrayClass());
11011   Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
11012       mirror::ObjectArray<mirror::Object>::Alloc(self,
11013                                                  dex_elements_class.Get(),
11014                                                  dex_files.size())));
11015   Handle<mirror::Class> h_dex_element_class =
11016       hs.NewHandle(dex_elements_class->GetComponentType());
11017 
11018   ArtField* element_file_field = WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
11019   DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
11020 
11021   ArtField* cookie_field = WellKnownClasses::dalvik_system_DexFile_cookie;
11022   DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
11023 
11024   ArtField* file_name_field = WellKnownClasses::dalvik_system_DexFile_fileName;
11025   DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
11026 
11027   // Fill the elements array.
11028   int32_t index = 0;
11029   for (const DexFile* dex_file : dex_files) {
11030     StackHandleScope<4> hs2(self);
11031 
11032     // CreateWellKnownClassLoader is only used by gtests and compiler.
11033     // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
11034     Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
11035         self,
11036         kDexFileIndexStart + 1));
11037     DCHECK(h_long_array != nullptr);
11038     h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
11039 
11040     // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
11041     // FinalizerReference which will never get cleaned up without a started runtime.
11042     Handle<mirror::Object> h_dex_file = hs2.NewHandle(
11043         cookie_field->GetDeclaringClass()->AllocObject(self));
11044     DCHECK(h_dex_file != nullptr);
11045     cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
11046 
11047     Handle<mirror::String> h_file_name = hs2.NewHandle(
11048         mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
11049     DCHECK(h_file_name != nullptr);
11050     file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
11051 
11052     Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
11053     DCHECK(h_element != nullptr);
11054     element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
11055 
11056     h_dex_elements->Set(index, h_element.Get());
11057     index++;
11058   }
11059   DCHECK_EQ(index, h_dex_elements->GetLength());
11060 
11061   // Create DexPathList.
11062   Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
11063       dex_elements_field->GetDeclaringClass()->AllocObject(self));
11064   DCHECK(h_dex_path_list != nullptr);
11065   // Set elements.
11066   dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
11067   // Create an empty List for the "nativeLibraryDirectories," required for native tests.
11068   // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
11069   //       elements.
11070   {
11071     ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
11072         FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
11073     DCHECK(native_lib_dirs != nullptr);
11074     ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
11075     DCHECK(list_class != nullptr);
11076     {
11077       StackHandleScope<1> h_list_scope(self);
11078       Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
11079       bool list_init = EnsureInitialized(self, h_list_class, true, true);
11080       DCHECK(list_init);
11081       list_class = h_list_class.Get();
11082     }
11083     ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
11084     // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
11085     //       is fine for testing. While it violates a Java-code invariant (the elementData field is
11086     //       normally never null), as long as one does not try to add elements, this will still
11087     //       work.
11088     native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
11089   }
11090 
11091   // Create the class loader..
11092   Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
11093       ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
11094   DCHECK(h_class_loader != nullptr);
11095   // Set DexPathList.
11096   ArtField* path_list_field = WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList;
11097   DCHECK(path_list_field != nullptr);
11098   path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
11099 
11100   // Make a pretend boot-classpath.
11101   // TODO: Should we scan the image?
11102   ArtField* const parent_field = WellKnownClasses::java_lang_ClassLoader_parent;
11103   DCHECK(parent_field != nullptr);
11104   if (parent_loader.Get() == nullptr) {
11105     ObjPtr<mirror::Object> boot_loader(
11106         WellKnownClasses::java_lang_BootClassLoader->AllocObject(self));
11107     parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
11108   } else {
11109     parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
11110   }
11111 
11112   ArtField* shared_libraries_field =
11113       WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
11114   DCHECK(shared_libraries_field != nullptr);
11115   shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
11116 
11117   ArtField* shared_libraries_after_field =
11118         WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
11119   DCHECK(shared_libraries_after_field != nullptr);
11120   shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
11121                                                  shared_libraries_after.Get());
11122   return h_class_loader.Get();
11123 }
11124 
CreatePathClassLoader(Thread * self,const std::vector<const DexFile * > & dex_files)11125 jobject ClassLinker::CreatePathClassLoader(Thread* self,
11126                                            const std::vector<const DexFile*>& dex_files) {
11127   StackHandleScope<3u> hs(self);
11128   Handle<mirror::Class> d_s_pcl =
11129       hs.NewHandle(WellKnownClasses::dalvik_system_PathClassLoader.Get());
11130   auto null_parent = hs.NewHandle<mirror::ClassLoader>(nullptr);
11131   auto null_libs = hs.NewHandle<mirror::ObjectArray<mirror::ClassLoader>>(nullptr);
11132   ObjPtr<mirror::ClassLoader> class_loader =
11133       CreateWellKnownClassLoader(self, dex_files, d_s_pcl, null_parent, null_libs, null_libs);
11134   return Runtime::Current()->GetJavaVM()->AddGlobalRef(self, class_loader);
11135 }
11136 
DropFindArrayClassCache()11137 void ClassLinker::DropFindArrayClassCache() {
11138   for (size_t i = 0; i < kFindArrayCacheSize; i++) {
11139     find_array_class_cache_[i].store(GcRoot<mirror::Class>(nullptr), std::memory_order_relaxed);
11140   }
11141   find_array_class_cache_next_victim_ = 0;
11142 }
11143 
VisitClassLoaders(ClassLoaderVisitor * visitor) const11144 void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
11145   Thread* const self = Thread::Current();
11146   for (const ClassLoaderData& data : class_loaders_) {
11147     // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
11148     ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
11149         self->DecodeJObject(data.weak_root));
11150     if (class_loader != nullptr) {
11151       visitor->Visit(class_loader);
11152     }
11153   }
11154 }
11155 
VisitDexCaches(DexCacheVisitor * visitor) const11156 void ClassLinker::VisitDexCaches(DexCacheVisitor* visitor) const {
11157   Thread* const self = Thread::Current();
11158   for (const auto& it : dex_caches_) {
11159     // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
11160     ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
11161         self->DecodeJObject(it.second.weak_root));
11162     if (dex_cache != nullptr) {
11163       visitor->Visit(dex_cache);
11164     }
11165   }
11166 }
11167 
VisitAllocators(AllocatorVisitor * visitor) const11168 void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
11169   for (const ClassLoaderData& data : class_loaders_) {
11170     LinearAlloc* alloc = data.allocator;
11171     if (alloc != nullptr && !visitor->Visit(alloc)) {
11172         break;
11173     }
11174   }
11175 }
11176 
InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,ObjPtr<mirror::ClassLoader> class_loader)11177 void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
11178                                                ObjPtr<mirror::ClassLoader> class_loader) {
11179   DCHECK(dex_file != nullptr);
11180   Thread* const self = Thread::Current();
11181   WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
11182   ClassTable* const table = ClassTableForClassLoader(class_loader);
11183   DCHECK(table != nullptr);
11184   if (table->InsertStrongRoot(dex_file)) {
11185     WriteBarrierOnClassLoaderLocked(class_loader, dex_file);
11186   } else {
11187     // Write-barrier not required if strong-root isn't inserted.
11188   }
11189 }
11190 
// Frees the bookkeeping (class tables, linear allocs, dex-cache entries, JIT
// code ranges) for class loaders whose JNI weak global has been cleared, i.e.
// loaders that were unloaded by the GC. Each phase takes and releases its own
// lock; the actual deletion happens with no locks held.
void ClassLinker::CleanupClassLoaders() {
  Thread* const self = Thread::Current();
  std::list<ClassLoaderData> to_delete;
  // Do the delete outside the lock to avoid lock violation in jit code cache.
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
      // Advance before possibly splicing `this_it` out of the list.
      auto this_it = it;
      ++it;
      const ClassLoaderData& data = *this_it;
      // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
      ObjPtr<mirror::ClassLoader> class_loader =
          ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
      if (class_loader == nullptr) {
        VLOG(class_linker) << "Freeing class loader";
        // Move the node (no copy) onto the to-delete list.
        to_delete.splice(to_delete.end(), class_loaders_, this_it);
      }
    }
  }
  if (to_delete.empty()) {
    return;
  }
  std::set<const OatFile*> unregistered_oat_files;
  JavaVMExt* vm = self->GetJniEnv()->GetVm();
  {
    WriterMutexLock mu(self, *Locks::dex_lock_);
    // Drop dex-cache entries whose weak root was cleared; they must belong to
    // one of the class loaders collected above.
    for (auto it = dex_caches_.begin(), end = dex_caches_.end(); it != end; ) {
      const DexFile* dex_file = it->first;
      const DexCacheData& data = it->second;
      if (self->DecodeJObject(data.weak_root) == nullptr) {
        DCHECK(to_delete.end() != std::find_if(
            to_delete.begin(),
            to_delete.end(),
            [&](const ClassLoaderData& cld) { return cld.class_table == data.class_table; }));
        // Remember executable oat files so their code ranges can be
        // unregistered from the fault handler below.
        if (dex_file->GetOatDexFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile()->IsExecutable()) {
          unregistered_oat_files.insert(dex_file->GetOatDexFile()->GetOatFile());
        }
        vm->DeleteWeakGlobalRef(self, data.weak_root);
        it = dex_caches_.erase(it);
      } else {
        ++it;
      }
    }
  }
  {
    ScopedDebugDisallowReadBarriers sddrb(self);
    for (ClassLoaderData& data : to_delete) {
      // CHA unloading analysis and SingleImplementation cleanups are required.
      PrepareToDeleteClassLoader(self, data, /*cleanup_cha=*/true);
    }
  }
  // Deletion proper happens with no locks held (see comment at the top).
  for (const ClassLoaderData& data : to_delete) {
    delete data.allocator;
    delete data.class_table;
  }
  Runtime* runtime = Runtime::Current();
  if (!unregistered_oat_files.empty()) {
    for (const OatFile* oat_file : unregistered_oat_files) {
      // Notify the fault handler about removal of the executable code range if needed.
      DCHECK(oat_file->IsExecutable());
      size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
      DCHECK_LE(exec_offset, oat_file->Size());
      size_t exec_size = oat_file->Size() - exec_offset;
      if (exec_size != 0u) {
        runtime->RemoveGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
      }
    }
  }

  if (runtime->GetStartupLinearAlloc() != nullptr) {
    // Because the startup linear alloc can contain dex cache arrays associated
    // to class loaders that got unloaded, we need to delete these
    // arrays.
    StartupCompletedTask::DeleteStartupDexCaches(self, /* called_by_gc= */ true);
    DCHECK_EQ(runtime->GetStartupLinearAlloc(), nullptr);
  }
}
11270 
// Class visitor that linearly scans loaded classes for the one whose virtual
// method slice contains `method_`. Used to recover the holding class of a
// copied method, which has no direct back-pointer to its class.
class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
 public:
  FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
      : method_(method),
        pointer_size_(pointer_size) {}

  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
    if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
      holder_ = klass;
    }
    // Return false to stop searching if holder_ is not null.
    return holder_ == nullptr;
  }

  // Result slot: the class holding `method_`, or null if not (yet) found.
  ObjPtr<mirror::Class> holder_ = nullptr;
  const ArtMethod* const method_;
  const PointerSize pointer_size_;
};
11289 
GetHoldingClassOfCopiedMethod(ArtMethod * method)11290 ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
11291   ScopedTrace trace(__FUNCTION__);  // Since this function is slow, have a trace to notify people.
11292   CHECK(method->IsCopied());
11293   FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
11294   VisitClasses(&visitor);
11295   DCHECK(visitor.holder_ != nullptr);
11296   return visitor.holder_;
11297 }
11298 
// Returns the class loader of the class holding the copied `method`, located
// by where the method's memory lives: boot image / boot linear alloc (null
// loader), an app image's methods section, or a per-loader LinearAlloc.
ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thread* self,
                                                                             ArtMethod* method) {
  // Note: `GetHoldingClassOfCopiedMethod(method)` is a lot more expensive than finding
  // the class loader, so we're using it only to verify the result in debug mode.
  CHECK(method->IsCopied());
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Check if the copied method is in the boot class path.
  if (heap->IsBootImageAddress(method) || GetAllocatorForClassLoader(nullptr)->Contains(method)) {
    DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == nullptr);
    return nullptr;
  }
  // Check if the copied method is in an app image.
  // Note: Continuous spaces contain boot image spaces and app image spaces.
  // However, they are sorted by address, so boot images are not trivial to skip.
  ArrayRef<gc::space::ContinuousSpace* const> spaces(heap->GetContinuousSpaces());
  DCHECK_GE(spaces.size(), heap->GetBootImageSpaces().size());
  for (gc::space::ContinuousSpace* space : spaces) {
    if (space->IsImageSpace()) {
      gc::space::ImageSpace* image_space = space->AsImageSpace();
      size_t offset = reinterpret_cast<const uint8_t*>(method) - image_space->Begin();
      const ImageSection& methods_section = image_space->GetImageHeader().GetMethodsSection();
      // Single unsigned comparison checks both `offset >= Offset()` and
      // `offset < Offset() + Size()` (relies on unsigned wrap-around).
      if (offset - methods_section.Offset() < methods_section.Size()) {
        // Grab the class loader from the first non-BCP class in the app image class table.
        // Note: If we allow classes from arbitrary parent or library class loaders in app
        // images, this shall need to be updated to actually search for the exact class.
        const ImageSection& class_table_section =
            image_space->GetImageHeader().GetClassTableSection();
        CHECK_NE(class_table_section.Size(), 0u);
        const uint8_t* ptr = image_space->Begin() + class_table_section.Offset();
        size_t read_count = 0;
        ClassTable::ClassSet class_set(ptr, /*make_copy_of_data=*/ false, &read_count);
        CHECK(!class_set.empty());
        auto it = class_set.begin();
        // No read barrier needed for references to non-movable image classes.
        while ((*it).Read<kWithoutReadBarrier>()->IsBootStrapClassLoaded()) {
          ++it;
          // The section was matched, so a non-BCP class must exist in the table.
          CHECK(it != class_set.end());
        }
        ObjPtr<mirror::ClassLoader> class_loader =
            (*it).Read<kWithoutReadBarrier>()->GetClassLoader();
        DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == class_loader);
        return class_loader;
      }
    }
  }
  // Otherwise, the method must be in one of the `LinearAlloc` memory areas.
  jweak result = nullptr;
  {
    ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (const ClassLoaderData& data : class_loaders_) {
      if (data.allocator->Contains(method)) {
        result = data.weak_root;
        break;
      }
    }
  }
  CHECK(result != nullptr) << "Did not find allocator holding the copied method: " << method
      << " " << method->PrettyMethod();
  // The `method` is alive, so the class loader must also be alive.
  return ObjPtr<mirror::ClassLoader>::DownCast(
      Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
}
11361 
// Public-SDK access checks (method variant) exist only in AotClassLinker.
bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11368 
// Public-SDK access checks (field variant) exist only in AotClassLinker.
bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11375 
// Public-SDK access checks (type-descriptor variant) exist only in AotClassLinker.
bool ClassLinker::DenyAccessBasedOnPublicSdk(
    [[maybe_unused]] std::string_view type_descriptor) const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11382 
// Toggling public-SDK checks is only meaningful for AotClassLinker.
void ClassLinker::SetEnablePublicSdkChecks([[maybe_unused]] bool enabled) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11388 
// Transaction constraints exist only during AOT compilation (AotClassLinker).
bool ClassLinker::TransactionWriteConstraint(
    [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Object> obj) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11395 
// Transaction constraints exist only during AOT compilation (AotClassLinker).
bool ClassLinker::TransactionWriteValueConstraint(
    [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Object> value) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11402 
// Transaction constraints exist only during AOT compilation (AotClassLinker).
bool ClassLinker::TransactionAllocationConstraint(
    [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Class> klass) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11409 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteFieldBoolean([[maybe_unused]] mirror::Object* obj,
                                          [[maybe_unused]] MemberOffset field_offset,
                                          [[maybe_unused]] uint8_t value,
                                          [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11418 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteFieldByte([[maybe_unused]] mirror::Object* obj,
                                       [[maybe_unused]] MemberOffset field_offset,
                                       [[maybe_unused]] int8_t value,
                                       [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11427 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteFieldChar([[maybe_unused]] mirror::Object* obj,
                                       [[maybe_unused]] MemberOffset field_offset,
                                       [[maybe_unused]] uint16_t value,
                                       [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11436 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteFieldShort([[maybe_unused]] mirror::Object* obj,
                                        [[maybe_unused]] MemberOffset field_offset,
                                        [[maybe_unused]] int16_t value,
                                        [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11445 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteField32([[maybe_unused]] mirror::Object* obj,
                                     [[maybe_unused]] MemberOffset field_offset,
                                     [[maybe_unused]] uint32_t value,
                                     [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11454 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteField64([[maybe_unused]] mirror::Object* obj,
                                     [[maybe_unused]] MemberOffset field_offset,
                                     [[maybe_unused]] uint64_t value,
                                     [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11463 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteFieldReference([[maybe_unused]] mirror::Object* obj,
                                            [[maybe_unused]] MemberOffset field_offset,
                                            [[maybe_unused]] ObjPtr<mirror::Object> value,
                                            [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11472 
// Transaction write-recording exists only during AOT compilation (AotClassLinker).
void ClassLinker::RecordWriteArray([[maybe_unused]] mirror::Array* array,
                                   [[maybe_unused]] size_t index,
                                   [[maybe_unused]] uint64_t value) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11480 
// Transaction intern-table recording exists only during AOT compilation.
void ClassLinker::RecordStrongStringInsertion([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11486 
// Transaction intern-table recording exists only during AOT compilation.
void ClassLinker::RecordWeakStringInsertion([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11492 
// Transaction intern-table recording exists only during AOT compilation.
void ClassLinker::RecordStrongStringRemoval([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11498 
// Transaction intern-table recording exists only during AOT compilation.
void ClassLinker::RecordWeakStringRemoval([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11504 
// Transaction resolution-recording exists only during AOT compilation.
void ClassLinker::RecordResolveString([[maybe_unused]] ObjPtr<mirror::DexCache> dex_cache,
                                      [[maybe_unused]] dex::StringIndex string_idx) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11511 
// Transaction resolution-recording exists only during AOT compilation.
void ClassLinker::RecordResolveMethodType([[maybe_unused]] ObjPtr<mirror::DexCache> dex_cache,
                                          [[maybe_unused]] dex::ProtoIndex proto_idx) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11518 
// Transaction aborts exist only during AOT compilation (AotClassLinker).
void ClassLinker::ThrowTransactionAbortError([[maybe_unused]] Thread* self) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11524 
// Transaction aborts exist only during AOT compilation (AotClassLinker).
void ClassLinker::AbortTransactionF(
    [[maybe_unused]] Thread* self, [[maybe_unused]] const char* fmt, ...) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11531 
// Transaction aborts exist only during AOT compilation (AotClassLinker).
void ClassLinker::AbortTransactionV([[maybe_unused]] Thread* self,
                                    [[maybe_unused]] const char* fmt,
                                    [[maybe_unused]] va_list args) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11539 
// Transaction state exists only during AOT compilation (AotClassLinker).
bool ClassLinker::IsTransactionAborted() const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11545 
// Intentionally a no-op (not LOG(FATAL)): GC root visiting calls this on
// every ClassLinker, transactional or not.
void ClassLinker::VisitTransactionRoots([[maybe_unused]] RootVisitor* visitor) {
  // Nothing to do for normal `ClassLinker`, only `AotClassLinker` handles transactions.
}
11549 
// The transactional interpreter exists only during AOT compilation.
const void* ClassLinker::GetTransactionalInterpreter() {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11555 
RemoveDexFromCaches(const DexFile & dex_file)11556 void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
11557   ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
11558 
11559   auto it = dex_caches_.find(&dex_file);
11560   if (it != dex_caches_.end()) {
11561       dex_caches_.erase(it);
11562   }
11563 }
11564 
// GetClassLoadersVisitor collects visited class loaders into a caller-provided
// handle scope, keeping them live across suspension points.
class GetClassLoadersVisitor : public ClassLoaderVisitor {
 public:
  explicit GetClassLoadersVisitor(VariableSizedHandleScope* class_loaders)
      : class_loaders_(class_loaders) {}

  void Visit(ObjPtr<mirror::ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
    DCHECK(class_loader != nullptr);
    class_loaders_->NewHandle(class_loader);
  }

 private:
  // Output handle scope; owned by the caller.
  VariableSizedHandleScope* const class_loaders_;
};
11580 
// Snapshots all live class loaders into `handles`, holding the class-linker
// classes lock for the duration of the walk.
void ClassLinker::GetClassLoaders(Thread* self, VariableSizedHandleScope* handles) {
  GetClassLoadersVisitor class_loader_visitor(handles);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
  VisitClassLoaders(&class_loader_visitor);
}
11586 
// Explicitly instantiate ClassLinker::AllocClass for both kMovable variants,
// so the template definition can remain in this translation unit.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
11596 
11597 }  // namespace art
11598