• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "class_linker.h"
18 
19 #include <unistd.h>
20 
21 #include <algorithm>
22 #include <deque>
23 #include <forward_list>
24 #include <iostream>
25 #include <map>
26 #include <memory>
27 #include <queue>
28 #include <string>
29 #include <string_view>
30 #include <tuple>
31 #include <utility>
32 #include <vector>
33 
34 #include "android-base/stringprintf.h"
35 
36 #include "art_field-inl.h"
37 #include "art_method-inl.h"
38 #include "barrier.h"
39 #include "base/arena_allocator.h"
40 #include "base/casts.h"
41 #include "base/file_utils.h"
42 #include "base/hash_map.h"
43 #include "base/hash_set.h"
44 #include "base/leb128.h"
45 #include "base/logging.h"
46 #include "base/metrics/metrics.h"
47 #include "base/mutex-inl.h"
48 #include "base/os.h"
49 #include "base/quasi_atomic.h"
50 #include "base/scoped_arena_containers.h"
51 #include "base/scoped_flock.h"
52 #include "base/stl_util.h"
53 #include "base/string_view_cpp20.h"
54 #include "base/systrace.h"
55 #include "base/time_utils.h"
56 #include "base/unix_file/fd_file.h"
57 #include "base/utils.h"
58 #include "base/value_object.h"
59 #include "cha.h"
60 #include "class_linker-inl.h"
61 #include "class_loader_utils.h"
62 #include "class_root-inl.h"
63 #include "class_table-inl.h"
64 #include "compiler_callbacks.h"
65 #include "debug_print.h"
66 #include "debugger.h"
67 #include "dex/class_accessor-inl.h"
68 #include "dex/descriptors_names.h"
69 #include "dex/dex_file-inl.h"
70 #include "dex/dex_file_exception_helpers.h"
71 #include "dex/dex_file_loader.h"
72 #include "dex/signature-inl.h"
73 #include "dex/utf.h"
74 #include "entrypoints/entrypoint_utils-inl.h"
75 #include "entrypoints/runtime_asm_entrypoints.h"
76 #include "experimental_flags.h"
77 #include "gc/accounting/card_table-inl.h"
78 #include "gc/accounting/heap_bitmap-inl.h"
79 #include "gc/accounting/space_bitmap-inl.h"
80 #include "gc/heap-visit-objects-inl.h"
81 #include "gc/heap.h"
82 #include "gc/scoped_gc_critical_section.h"
83 #include "gc/space/image_space.h"
84 #include "gc/space/space-inl.h"
85 #include "gc_root-inl.h"
86 #include "handle_scope-inl.h"
87 #include "hidden_api.h"
88 #include "image-inl.h"
89 #include "imt_conflict_table.h"
90 #include "imtable-inl.h"
91 #include "intern_table-inl.h"
92 #include "interpreter/interpreter.h"
93 #include "interpreter/mterp/nterp.h"
94 #include "jit/debugger_interface.h"
95 #include "jit/jit.h"
96 #include "jit/jit_code_cache.h"
97 #include "jni/java_vm_ext.h"
98 #include "jni/jni_internal.h"
99 #include "linear_alloc.h"
100 #include "mirror/array-alloc-inl.h"
101 #include "mirror/array-inl.h"
102 #include "mirror/call_site.h"
103 #include "mirror/class-alloc-inl.h"
104 #include "mirror/class-inl.h"
105 #include "mirror/class.h"
106 #include "mirror/class_ext.h"
107 #include "mirror/class_loader.h"
108 #include "mirror/dex_cache-inl.h"
109 #include "mirror/dex_cache.h"
110 #include "mirror/emulated_stack_frame.h"
111 #include "mirror/field.h"
112 #include "mirror/iftable-inl.h"
113 #include "mirror/method.h"
114 #include "mirror/method_handle_impl.h"
115 #include "mirror/method_handles_lookup.h"
116 #include "mirror/method_type.h"
117 #include "mirror/object-inl.h"
118 #include "mirror/object-refvisitor-inl.h"
119 #include "mirror/object.h"
120 #include "mirror/object_array-alloc-inl.h"
121 #include "mirror/object_array-inl.h"
122 #include "mirror/object_array.h"
123 #include "mirror/object_reference.h"
124 #include "mirror/object_reference-inl.h"
125 #include "mirror/proxy.h"
126 #include "mirror/reference-inl.h"
127 #include "mirror/stack_trace_element.h"
128 #include "mirror/string-inl.h"
129 #include "mirror/throwable.h"
130 #include "mirror/var_handle.h"
131 #include "native/dalvik_system_DexFile.h"
132 #include "nativehelper/scoped_local_ref.h"
133 #include "nterp_helpers.h"
134 #include "oat.h"
135 #include "oat_file-inl.h"
136 #include "oat_file.h"
137 #include "oat_file_assistant.h"
138 #include "oat_file_manager.h"
139 #include "object_lock.h"
140 #include "profile/profile_compilation_info.h"
141 #include "runtime.h"
142 #include "runtime_callbacks.h"
143 #include "scoped_thread_state_change-inl.h"
144 #include "thread-inl.h"
145 #include "thread.h"
146 #include "thread_list.h"
147 #include "trace.h"
148 #include "transaction.h"
149 #include "vdex_file.h"
150 #include "verifier/class_verifier.h"
151 #include "verifier/verifier_deps.h"
152 #include "well_known_classes.h"
153 
154 namespace art {
155 
156 using android::base::StringPrintf;
157 
158 static constexpr bool kCheckImageObjects = kIsDebugBuild;
159 static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
160 
// Throws a java.lang.NoClassDefFoundError on the current thread, with a
// printf-style formatted detail message.
static void ThrowNoClassDefFoundError(const char* fmt, ...)
    __attribute__((__format__(__printf__, 1, 2)))
    REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  Thread* self = Thread::Current();
  self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
  va_end(args);
}
171 
GetErroneousStateError(ObjPtr<mirror::Class> c)172 static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
173     REQUIRES_SHARED(Locks::mutator_lock_) {
174   ObjPtr<mirror::ClassExt> ext(c->GetExtData());
175   if (ext == nullptr) {
176     return nullptr;
177   } else {
178     return ext->GetErroneousStateError();
179   }
180 }
181 
IsVerifyError(ObjPtr<mirror::Object> obj)182 static bool IsVerifyError(ObjPtr<mirror::Object> obj)
183     REQUIRES_SHARED(Locks::mutator_lock_) {
184   // This is slow, but we only use it for rethrowing an error, and for DCHECK.
185   return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
186 }
187 
// Helper for ThrowEarlierClassFailure. Throws the stored error.
static void HandleEarlierErroneousStateError(Thread* self,
                                             ClassLinker* class_linker,
                                             ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
  DCHECK(obj != nullptr);
  self->AssertNoPendingException();
  // The stored error must be a throwable instance, not an erroneous class.
  DCHECK(!obj->IsClass());
  ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
  ObjPtr<mirror::Class> error_class = obj->GetClass();
  CHECK(throwable_class->IsAssignableFrom(error_class));
  self->SetException(obj->AsThrowable());
  self->AssertPendingException();
}
203 
// Post-verification bookkeeping: allows methods to skip access checks when
// verification fully succeeded, and upgrades interpreted methods to nterp
// entrypoints where the runtime supports it.
static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
                                         PointerSize pointer_size,
                                         verifier::FailureKind failure_kind)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  // Only skip access checks when the class verified with no (soft) failures.
  if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
    klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
  }

  // Now that the class has passed verification, try to set nterp entrypoints
  // to methods that currently use the switch interpreter.
  if (interpreter::CanRuntimeUseNterp()) {
    for (ArtMethod& m : klass->GetMethods(pointer_size)) {
      if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
        runtime->GetInstrumentation()->InitializeMethodsCode(&m, /*aot_code=*/nullptr);
      }
    }
  }
}
224 
// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  // Returns true if no classes have been queued for this callback yet.
  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  // Returns true if the batch has reached its capacity of kMaxClasses.
  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  // Queues an initialized (but not yet visibly initialized) class.
  // Held via a weak global ref so the class can still be unloaded/collected.
  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  // Registers a barrier to `Pass()` once this callback completes.
  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  // Runs this callback as a checkpoint on all threads; the last thread to
  // finish marks the queued classes visibly initialized.
  void MakeVisible(Thread* self) {
    DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
    size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
    AdjustThreadVisibilityCounter(self, count);
  }

  // Closure entry point, executed once per thread at the checkpoint.
  void Run(Thread* self) override {
    self->ClearMakeVisiblyInitializedCounter();
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      {
        ScopedObjectAccess soa(self);
        StackHandleScope<1u> hs(self);
        MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
        JavaVMExt* vm = self->GetJniEnv()->GetVm();
        for (size_t i = 0, num = num_classes_; i != num; ++i) {
          klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
          vm->DeleteWeakGlobalRef(self, classes_[i]);
          // The weak ref may have been cleared if the class was unloaded.
          if (klass != nullptr) {
            mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
            class_linker_->FixupStaticTrampolines(self, klass.Get());
          }
        }
        num_classes_ = 0u;
      }
      class_linker_->VisiblyInitializedCallbackDone(self, this);
    }
  }

  static constexpr size_t kMaxClasses = 16;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
317 
// Flushes the pending batch of initialized classes by running the visibly-
// initialized checkpoint on all threads. When `wait` is true, also blocks
// until every currently-running callback (including ones started by other
// threads) has completed.
void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    // Detach the current (non-empty) pending callback so it can run.
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      // Register our barrier with every in-flight callback so each one wakes
      // us via `Pass()` when it finishes.
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  // Run the checkpoint outside the lock; it suspends/visits all threads.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}
351 
// Called by the last thread to finish a callback's checkpoint: releases any
// waiting threads, unlinks the callback from the running list, and either
// recycles the callback object for reuse or destroys it.
void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  // `forward_list` has no O(1) erase, so track the predecessor while scanning.
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}
376 
// Synchronously marks `klass` initialized and waits until it (and any other
// pending classes) are visibly initialized on all threads.
void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
  ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
  if (cb != nullptr) {
    cb->MakeVisible(self);
  }
  // Suspend before waiting so the checkpoint can run on this thread too.
  ScopedThreadSuspension sts(self, ThreadState::kSuspended);
  MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}
385 
// Marks `klass` as initialized. On x86/x86-64 or inside a transaction the
// class is immediately made visibly initialized and null is returned;
// otherwise the class is queued on the pending callback, and the callback is
// returned (for the caller to run) if the batch just became full.
ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    // Batch is full: hand the callback to the caller to run and start a
    // fresh one lazily on the next MarkClassInitialized().
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
418 
// Registers `native_method` as the JNI implementation of `method` and returns
// the (possibly instrumented) code actually installed. For @CriticalNative
// methods whose class is not yet visibly initialized, installation is
// deferred via `critical_native_code_with_clinit_check_`.
const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  // Callbacks (e.g. instrumentation) may substitute a different entrypoint.
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
447 
UnregisterNative(Thread * self,ArtMethod * method)448 void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
449   CHECK(method->IsNative()) << method->PrettyMethod();
450   // Restore stub to lookup native pointer via dlsym.
451   if (method->IsCriticalNative()) {
452     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
453     auto it = critical_native_code_with_clinit_check_.find(method);
454     if (it != critical_native_code_with_clinit_check_.end()) {
455       critical_native_code_with_clinit_check_.erase(it);
456     }
457     method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
458   } else {
459     method->SetEntryPointFromJni(GetJniDlsymLookupStub());
460   }
461 }
462 
GetRegisteredNative(Thread * self,ArtMethod * method)463 const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
464   if (method->IsCriticalNative()) {
465     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
466     auto it = critical_native_code_with_clinit_check_.find(method);
467     if (it != critical_native_code_with_clinit_check_.end()) {
468       return it->second;
469     }
470     const void* native_code = method->GetEntryPointFromJni();
471     return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
472   } else {
473     const void* native_code = method->GetEntryPointFromJni();
474     return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
475   }
476 }
477 
// Throws the appropriate exception for a class that failed on an earlier
// load/init attempt: the stored error is rethrown, wrapped in a
// NoClassDefFoundError when required by the spec or requested by the caller.
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5).  The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}
524 
VlogClassInitializationFailure(Handle<mirror::Class> klass)525 static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
526     REQUIRES_SHARED(Locks::mutator_lock_) {
527   if (VLOG_IS_ON(class_linker)) {
528     std::string temp;
529     LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
530               << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
531   }
532 }
533 
// Converts the pending exception into the form required for a failed class
// initializer: non-Error exceptions are wrapped in ExceptionInInitializerError,
// while Errors are rethrown as-is.
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  JNIEnv* env = self->GetJniEnv();

  // Grab a local ref to the cause before clearing the exception state.
  ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
  CHECK(cause.get() != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
    // make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // The IsInstanceOf check must run with no exception pending, so clear and
  // then re-throw the cause afterwards.
  env->ExceptionClear();
  bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
  env->Throw(cause.get());

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!is_error) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}
565 
// Constructs a ClassLinker with all trampolines unset and no image loaded;
// callers complete setup via InitWithoutImage() or image-based init.
ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA disabled during Aot, see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}
595 
// Sanity check used during InitWithoutImage: verifies that the class object
// `c1` built by hand matches the class found by descriptor lookup; aborts
// with a detailed dump on mismatch (usually indicates a libcore/art skew).
void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
  ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
  if (c2 == nullptr) {
    LOG(FATAL) << "Could not find class " << descriptor;
    UNREACHABLE();
  }
  if (c1.Get() != c2) {
    std::ostringstream os1, os2;
    c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
    c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
    LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
               << ". This is most likely the result of a broken build. Make sure that "
               << "libcore and art projects match.\n\n"
               << os1.str() << "\n\n" << os2.str();
    UNREACHABLE();
  }
}
613 
// Allocates an interface table with `ifcount` entries (each entry occupies
// IfTable::kMax slots). `iftable_class` must be Object[], the class used to
// back the underlying object array.
ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
                                     size_t ifcount,
                                     ObjPtr<mirror::Class> iftable_class)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(iftable_class->IsArrayClass());
  DCHECK(iftable_class->GetComponentType()->IsObjectClass());
  return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
      mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
}
623 
InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,std::string * error_msg)624 bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
625                                    std::string* error_msg) {
626   VLOG(startup) << "ClassLinker::Init";
627 
628   Thread* const self = Thread::Current();
629   Runtime* const runtime = Runtime::Current();
630   gc::Heap* const heap = runtime->GetHeap();
631 
632   CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
633   CHECK(!init_done_);
634 
635   // Use the pointer size from the runtime since we are probably creating the image.
636   image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
637 
638   // java_lang_Class comes first, it's needed for AllocClass
639   // The GC can't handle an object with a null class since we can't get the size of this object.
640   heap->IncrementDisableMovingGC(self);
641   StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
642   auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
643   // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
644   // the incorrect result when comparing to-space vs from-space.
645   Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
646       heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
647   CHECK(java_lang_Class != nullptr);
648   java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
649   java_lang_Class->SetClass(java_lang_Class.Get());
650   if (kUseBakerReadBarrier) {
651     java_lang_Class->AssertReadBarrierState();
652   }
653   java_lang_Class->SetClassSize(class_class_size);
654   java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
655   heap->DecrementDisableMovingGC(self);
656   // AllocClass(ObjPtr<mirror::Class>) can now be used
657 
658   // Class[] is used for reflection support.
659   auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
660   Handle<mirror::Class> class_array_class(hs.NewHandle(
661       AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
662   class_array_class->SetComponentType(java_lang_Class.Get());
663 
664   // java_lang_Object comes next so that object_array_class can be created.
665   Handle<mirror::Class> java_lang_Object(hs.NewHandle(
666       AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
667   CHECK(java_lang_Object != nullptr);
668   // backfill Object as the super class of Class.
669   java_lang_Class->SetSuperClass(java_lang_Object.Get());
670   mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);
671 
672   java_lang_Object->SetObjectSize(sizeof(mirror::Object));
673   // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
674   // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
675   runtime->SetSentinel(heap->AllocNonMovableObject(self,
676                                                    java_lang_Object.Get(),
677                                                    java_lang_Object->GetObjectSize(),
678                                                    VoidFunctor()));
679 
680   // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
681   if (kBitstringSubtypeCheckEnabled) {
682     // It might seem the lock here is unnecessary, however all the SubtypeCheck
683     // functions are annotated to require locks all the way down.
684     //
685     // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
686     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
687     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
688     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
689   }
690 
691   // Object[] next to hold class roots.
692   Handle<mirror::Class> object_array_class(hs.NewHandle(
693       AllocClass(self, java_lang_Class.Get(),
694                  mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
695   object_array_class->SetComponentType(java_lang_Object.Get());
696 
697   // Setup java.lang.String.
698   //
699   // We make this class non-movable for the unlikely case where it were to be
700   // moved by a sticky-bit (minor) collection when using the Generational
701   // Concurrent Copying (CC) collector, potentially creating a stale reference
702   // in the `klass_` field of one of its instances allocated in the Large-Object
703   // Space (LOS) -- see the comment about the dirty card scanning logic in
704   // art::gc::collector::ConcurrentCopying::MarkingPhase.
705   Handle<mirror::Class> java_lang_String(hs.NewHandle(
706       AllocClass</* kMovable= */ false>(
707           self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
708   java_lang_String->SetStringClass();
709   mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
710 
711   // Setup java.lang.ref.Reference.
712   Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
713       AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
714   java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
715   mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
716 
717   // Create storage for root classes, save away our work so far (requires descriptors).
718   class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
719       mirror::ObjectArray<mirror::Class>::Alloc(self,
720                                                 object_array_class.Get(),
721                                                 static_cast<int32_t>(ClassRoot::kMax)));
722   CHECK(!class_roots_.IsNull());
723   SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
724   SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
725   SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
726   SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
727   SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
728   SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
729 
730   // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
731   java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));
732 
733   // Create array interface entries to populate once we can load system classes.
734   object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
735   DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
736 
737   // Setup the primitive type classes.
738   CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
739   CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
740   CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
741   CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
742   CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
743   CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
744   CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
745   CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
746   CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);
747 
748   // Allocate the primitive array classes. We need only the native pointer
749   // array at this point (int[] or long[], depending on architecture) but
750   // we shall perform the same setup steps for all primitive array classes.
751   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
752   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
753   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
754   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
755   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
756   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
757   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
758   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);
759 
760   // now that these are registered, we can use AllocClass() and AllocObjectArray
761 
762   // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
763   Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
764       AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
765   SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
766   java_lang_DexCache->SetDexCacheClass();
767   java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
768   mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);
769 
770 
771   // Setup dalvik.system.ClassExt
772   Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
773       AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
774   SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
775   mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);
776 
777   // Set up array classes for string, field, method
778   Handle<mirror::Class> object_array_string(hs.NewHandle(
779       AllocClass(self, java_lang_Class.Get(),
780                  mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
781   object_array_string->SetComponentType(java_lang_String.Get());
782   SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());
783 
784   LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
785   // Create runtime resolution and imt conflict methods.
786   runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
787   runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
788   runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));
789 
790   // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
791   // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
792   // these roots.
793   if (boot_class_path.empty()) {
794     *error_msg = "Boot classpath is empty.";
795     return false;
796   }
797   for (auto& dex_file : boot_class_path) {
798     if (dex_file == nullptr) {
799       *error_msg = "Null dex file.";
800       return false;
801     }
802     AppendToBootClassPath(self, dex_file.get());
803     boot_dex_files_.push_back(std::move(dex_file));
804   }
805 
806   // now we can use FindSystemClass
807 
808   // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
809   // we do not need friend classes or a publicly exposed setter.
810   quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
811   if (!runtime->IsAotCompiler()) {
812     // We need to set up the generic trampolines since we don't have an image.
813     jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
814     jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
815     quick_resolution_trampoline_ = GetQuickResolutionStub();
816     quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
817     quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
818     quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
819     nterp_trampoline_ = interpreter::GetNterpEntryPoint();
820   }
821 
822   // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
823   mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
824   CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
825   CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
826   mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
827   CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
828   mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
829   CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
830   CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
831   mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
832   CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
833   CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
834 
835   // Run Class through FindSystemClass. This initializes the dex_cache_ fields and register it
836   // in class_table_.
837   CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
838 
839   // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
840   // arrays - can't be done until Object has a vtable and component classes are loaded.
841   FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
842   FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
843   FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
844   FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
845   FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
846   FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
847   FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
848   FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
849   FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
850   FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
851   FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
852 
853   // Setup the single, global copy of "iftable".
854   auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
855   CHECK(java_lang_Cloneable != nullptr);
856   auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
857   CHECK(java_io_Serializable != nullptr);
858   // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
859   // crawl up and explicitly list all of the supers as well.
860   object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
861   object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
862 
863   // Check Class[] and Object[]'s interfaces.
864   CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
865   CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
866   CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
867   CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));
868 
869   CHECK_EQ(object_array_string.Get(),
870            FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
871 
872   // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.
873 
874   // Create java.lang.reflect.Proxy root.
875   SetClassRoot(ClassRoot::kJavaLangReflectProxy,
876                FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
877 
878   // Create java.lang.reflect.Field.class root.
879   ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
880   CHECK(class_root != nullptr);
881   SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
882 
883   // Create java.lang.reflect.Field array root.
884   class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
885   CHECK(class_root != nullptr);
886   SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
887 
888   // Create java.lang.reflect.Constructor.class root and array root.
889   class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
890   CHECK(class_root != nullptr);
891   SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
892   class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
893   CHECK(class_root != nullptr);
894   SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
895 
896   // Create java.lang.reflect.Method.class root and array root.
897   class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
898   CHECK(class_root != nullptr);
899   SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
900   class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
901   CHECK(class_root != nullptr);
902   SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
903 
904   // Create java.lang.invoke.CallSite.class root
905   class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
906   CHECK(class_root != nullptr);
907   SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
908 
909   // Create java.lang.invoke.MethodType.class root
910   class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
911   CHECK(class_root != nullptr);
912   SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
913 
914   // Create java.lang.invoke.MethodHandleImpl.class root
915   class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
916   CHECK(class_root != nullptr);
917   SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
918   SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
919 
920   // Create java.lang.invoke.MethodHandles.Lookup.class root
921   class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
922   CHECK(class_root != nullptr);
923   SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
924 
925   // Create java.lang.invoke.VarHandle.class root
926   class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
927   CHECK(class_root != nullptr);
928   SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
929 
930   // Create java.lang.invoke.FieldVarHandle.class root
931   class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
932   CHECK(class_root != nullptr);
933   SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
934 
935   // Create java.lang.invoke.StaticFieldVarHandle.class root
936   class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
937   CHECK(class_root != nullptr);
938   SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);
939 
940   // Create java.lang.invoke.ArrayElementVarHandle.class root
941   class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
942   CHECK(class_root != nullptr);
943   SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
944 
945   // Create java.lang.invoke.ByteArrayViewVarHandle.class root
946   class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
947   CHECK(class_root != nullptr);
948   SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
949 
950   // Create java.lang.invoke.ByteBufferViewVarHandle.class root
951   class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
952   CHECK(class_root != nullptr);
953   SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
954 
955   class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
956   CHECK(class_root != nullptr);
957   SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
958 
959   // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
960   // finish initializing Reference class
961   mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
962   CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
963   CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
964   CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
965            mirror::Reference::ClassSize(image_pointer_size_));
966   class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
967   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
968   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
969   class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
970   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
971   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
972   class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
973   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
974   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
975   class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
976   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
977   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
978 
979   // Setup the ClassLoader, verifying the object_size_.
980   class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
981   class_root->SetClassLoaderClass();
982   CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
983   SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
984 
985   // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
986   // java.lang.StackTraceElement as a convenience.
987   SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
988   SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
989                FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
990   SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
991                FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
992   SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
993                FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
994   SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
995                FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
996 
997   // Create conflict tables that depend on the class linker.
998   runtime->FixupConflictTables();
999 
1000   FinishInit(self);
1001 
1002   VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
1003 
1004   return true;
1005 }
1006 
CreateStringInitBindings(Thread * self,ClassLinker * class_linker)1007 static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1008     REQUIRES_SHARED(Locks::mutator_lock_) {
1009   // Find String.<init> -> StringFactory bindings.
1010   ObjPtr<mirror::Class> string_factory_class =
1011       class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1012   CHECK(string_factory_class != nullptr);
1013   ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
1014   WellKnownClasses::InitStringInit(string_class, string_factory_class);
1015   // Update the primordial thread.
1016   self->InitStringEntryPoints();
1017 }
1018 
FinishInit(Thread * self)1019 void ClassLinker::FinishInit(Thread* self) {
1020   VLOG(startup) << "ClassLinker::FinishInit entering";
1021 
1022   CreateStringInitBindings(self, this);
1023 
1024   // Let the heap know some key offsets into java.lang.ref instances
1025   // Note: we hard code the field indexes here rather than using FindInstanceField
1026   // as the types of the field can't be resolved prior to the runtime being
1027   // fully initialized
1028   StackHandleScope<3> hs(self);
1029   Handle<mirror::Class> java_lang_ref_Reference =
1030       hs.NewHandle(GetClassRoot<mirror::Reference>(this));
1031   Handle<mirror::Class> java_lang_ref_FinalizerReference =
1032       hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
1033 
1034   ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
1035   CHECK_STREQ(pendingNext->GetName(), "pendingNext");
1036   CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
1037 
1038   ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
1039   CHECK_STREQ(queue->GetName(), "queue");
1040   CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
1041 
1042   ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
1043   CHECK_STREQ(queueNext->GetName(), "queueNext");
1044   CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
1045 
1046   ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
1047   CHECK_STREQ(referent->GetName(), "referent");
1048   CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
1049 
1050   ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
1051   CHECK_STREQ(zombie->GetName(), "zombie");
1052   CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
1053 
1054   // ensure all class_roots_ are initialized
1055   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1056     ClassRoot class_root = static_cast<ClassRoot>(i);
1057     ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
1058     CHECK(klass != nullptr);
1059     DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
1060     // note SetClassRoot does additional validation.
1061     // if possible add new checks there to catch errors early
1062   }
1063 
1064   CHECK(GetArrayIfTable() != nullptr);
1065 
1066   // disable the slow paths in FindClass and CreatePrimitiveClass now
1067   // that Object, Class, and Object[] are setup
1068   init_done_ = true;
1069 
1070   // Under sanitization, the small carve-out to handle stack overflow might not be enough to
1071   // initialize the StackOverflowError class (as it might require running the verifier). Instead,
1072   // ensure that the class will be initialized.
1073   if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
1074     verifier::ClassVerifier::Init(this);  // Need to prepare the verifier.
1075 
1076     ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
1077     if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
1078       // Strange, but don't crash.
1079       LOG(WARNING) << "Could not prepare StackOverflowError.";
1080       self->ClearException();
1081     }
1082   }
1083 
1084   VLOG(startup) << "ClassLinker::FinishInit exiting";
1085 }
1086 
RunRootClinits(Thread * self)1087 void ClassLinker::RunRootClinits(Thread* self) {
1088   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1089     ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
1090     if (!c->IsArrayClass() && !c->IsPrimitive()) {
1091       StackHandleScope<1> hs(self);
1092       Handle<mirror::Class> h_class(hs.NewHandle(c));
1093       if (!EnsureInitialized(self, h_class, true, true)) {
1094         LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1095             << ": " << self->GetException()->Dump();
1096       }
1097     } else {
1098       DCHECK(c->IsInitialized());
1099     }
1100   }
1101 }
1102 
1103 ALWAYS_INLINE
ComputeMethodHash(ArtMethod * method)1104 static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1105   DCHECK(!method->IsRuntimeMethod());
1106   DCHECK(!method->IsProxyMethod());
1107   DCHECK(!method->IsObsolete());
1108   // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1109   // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1110   const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1111   const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1112   std::string_view name = dex_file.GetMethodNameView(method_id);
1113   return ComputeModifiedUtf8Hash(name);
1114 }
1115 
1116 ALWAYS_INLINE
MethodSignatureEquals(ArtMethod * lhs,ArtMethod * rhs)1117 static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1118     REQUIRES_SHARED(Locks::mutator_lock_) {
1119   DCHECK(!lhs->IsRuntimeMethod());
1120   DCHECK(!lhs->IsProxyMethod());
1121   DCHECK(!lhs->IsObsolete());
1122   DCHECK(!rhs->IsRuntimeMethod());
1123   DCHECK(!rhs->IsProxyMethod());
1124   DCHECK(!rhs->IsObsolete());
1125   // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1126   // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1127   const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1128   const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1129   const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1130   const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
1131   if (&lhs_dex_file == &rhs_dex_file) {
1132     return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1133            lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1134   } else {
1135     return
1136         lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1137         lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1138   }
1139 }
1140 
InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,PointerSize pointer_size,ArrayRef<uint32_t> virtual_method_hashes)1141 static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1142                                                 PointerSize pointer_size,
1143                                                 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1144     REQUIRES_SHARED(Locks::mutator_lock_) {
1145   ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1146   DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1147   for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
1148     virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
1149   }
1150 }
1151 
// Plumbing for the debug-build check in `ClassLinker::InitFromBootImage()`
// that cross-checks trampoline entrypoints across multiple boot image oat
// files (see the heap visitor there).
struct TrampolineCheckData {
  // Trampoline entrypoints of the oat file being cross-checked.
  const void* quick_resolution_trampoline;
  const void* quick_imt_conflict_trampoline;
  const void* quick_generic_jni_trampoline;
  const void* quick_to_interpreter_bridge_trampoline;
  const void* nterp_trampoline;
  // Pointer size used when reading method entrypoints.
  PointerSize pointer_size;
  // NOTE(review): presumably the offending method found by the visitor, and
  // `error` its found-a-mismatch flag -- the visitor body is outside this
  // view; confirm against `InitFromBootImage()`.
  ArtMethod* m;
  bool error;
};
1162 
InitFromBootImage(std::string * error_msg)1163 bool ClassLinker::InitFromBootImage(std::string* error_msg) {
1164   VLOG(startup) << __FUNCTION__ << " entering";
1165   CHECK(!init_done_);
1166 
1167   Runtime* const runtime = Runtime::Current();
1168   Thread* const self = Thread::Current();
1169   gc::Heap* const heap = runtime->GetHeap();
1170   std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
1171   CHECK(!spaces.empty());
1172   const ImageHeader& image_header = spaces[0]->GetImageHeader();
1173   uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
1174   if (!ValidPointerSize(pointer_size_unchecked)) {
1175     *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
1176     return false;
1177   }
1178   image_pointer_size_ = image_header.GetPointerSize();
1179   if (!runtime->IsAotCompiler()) {
1180     // Only the Aot compiler supports having an image with a different pointer size than the
1181     // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
1182     // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
1183     if (image_pointer_size_ != kRuntimePointerSize) {
1184       *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
1185                                 static_cast<size_t>(image_pointer_size_),
1186                                 sizeof(void*));
1187       return false;
1188     }
1189   }
1190   DCHECK(!runtime->HasResolutionMethod());
1191   runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
1192   runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
1193   runtime->SetImtUnimplementedMethod(
1194       image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
1195   runtime->SetCalleeSaveMethod(
1196       image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
1197       CalleeSaveType::kSaveAllCalleeSaves);
1198   runtime->SetCalleeSaveMethod(
1199       image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
1200       CalleeSaveType::kSaveRefsOnly);
1201   runtime->SetCalleeSaveMethod(
1202       image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
1203       CalleeSaveType::kSaveRefsAndArgs);
1204   runtime->SetCalleeSaveMethod(
1205       image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
1206       CalleeSaveType::kSaveEverything);
1207   runtime->SetCalleeSaveMethod(
1208       image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
1209       CalleeSaveType::kSaveEverythingForClinit);
1210   runtime->SetCalleeSaveMethod(
1211       image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
1212       CalleeSaveType::kSaveEverythingForSuspendCheck);
1213 
1214   std::vector<const OatFile*> oat_files =
1215       runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
1216   DCHECK(!oat_files.empty());
1217   const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
1218   jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
1219   jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
1220   quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1221   quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1222   quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1223   quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
1224   nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
1225   if (kIsDebugBuild) {
1226     // Check that the other images use the same trampoline.
1227     for (size_t i = 1; i < oat_files.size(); ++i) {
1228       const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
1229       const void* ith_jni_dlsym_lookup_trampoline_ =
1230           ith_oat_header.GetJniDlsymLookupTrampoline();
1231       const void* ith_jni_dlsym_lookup_critical_trampoline_ =
1232           ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
1233       const void* ith_quick_resolution_trampoline =
1234           ith_oat_header.GetQuickResolutionTrampoline();
1235       const void* ith_quick_imt_conflict_trampoline =
1236           ith_oat_header.GetQuickImtConflictTrampoline();
1237       const void* ith_quick_generic_jni_trampoline =
1238           ith_oat_header.GetQuickGenericJniTrampoline();
1239       const void* ith_quick_to_interpreter_bridge_trampoline =
1240           ith_oat_header.GetQuickToInterpreterBridge();
1241       const void* ith_nterp_trampoline =
1242           ith_oat_header.GetNterpTrampoline();
1243       if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
1244           ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
1245           ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
1246           ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1247           ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
1248           ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
1249           ith_nterp_trampoline != nterp_trampoline_) {
1250         // Make sure that all methods in this image do not contain those trampolines as
1251         // entrypoints. Otherwise the class-linker won't be able to work with a single set.
1252         TrampolineCheckData data;
1253         data.error = false;
1254         data.pointer_size = GetImagePointerSize();
1255         data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1256         data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1257         data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1258         data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
1259         data.nterp_trampoline = ith_nterp_trampoline;
1260         ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
1261         auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1262           if (obj->IsClass()) {
1263             ObjPtr<mirror::Class> klass = obj->AsClass();
1264             for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1265               const void* entrypoint =
1266                   m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1267               if (entrypoint == data.quick_resolution_trampoline ||
1268                   entrypoint == data.quick_imt_conflict_trampoline ||
1269                   entrypoint == data.quick_generic_jni_trampoline ||
1270                   entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1271                 data.m = &m;
1272                 data.error = true;
1273                 return;
1274               }
1275             }
1276           }
1277         };
1278         spaces[i]->GetLiveBitmap()->Walk(visitor);
1279         if (data.error) {
1280           ArtMethod* m = data.m;
1281           LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
1282           *error_msg = "Found an ArtMethod with a bad entrypoint";
1283           return false;
1284         }
1285       }
1286     }
1287   }
1288 
1289   class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
1290       ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
1291           image_header.GetImageRoot(ImageHeader::kClassRoots)));
1292   DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
1293 
1294   DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
1295   ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
1296       ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
1297           image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
1298   runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
1299   DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
1300 
1301   for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
1302     // Boot class loader, use a null handle.
1303     std::vector<std::unique_ptr<const DexFile>> dex_files;
1304     if (!AddImageSpace(spaces[i],
1305                        ScopedNullHandle<mirror::ClassLoader>(),
1306                        /*out*/&dex_files,
1307                        error_msg)) {
1308       return false;
1309     }
1310     // Append opened dex files at the end.
1311     boot_dex_files_.insert(boot_dex_files_.end(),
1312                            std::make_move_iterator(dex_files.begin()),
1313                            std::make_move_iterator(dex_files.end()));
1314   }
1315   for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
1316     OatDexFile::MadviseDexFileAtLoad(*dex_file);
1317   }
1318   InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
1319                                       image_pointer_size_,
1320                                       ArrayRef<uint32_t>(object_virtual_method_hashes_));
1321   FinishInit(self);
1322 
1323   VLOG(startup) << __FUNCTION__ << " exiting";
1324   return true;
1325 }
1326 
AddExtraBootDexFiles(Thread * self,std::vector<std::unique_ptr<const DexFile>> && additional_dex_files)1327 void ClassLinker::AddExtraBootDexFiles(
1328     Thread* self,
1329     std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1330   for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1331     AppendToBootClassPath(self, dex_file.get());
1332     if (kIsDebugBuild) {
1333       for (const auto& boot_dex_file : boot_dex_files_) {
1334         DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1335       }
1336     }
1337     boot_dex_files_.push_back(std::move(dex_file));
1338   }
1339 }
1340 
IsBootClassLoader(ScopedObjectAccessAlreadyRunnable & soa,ObjPtr<mirror::ClassLoader> class_loader)1341 bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
1342                                     ObjPtr<mirror::ClassLoader> class_loader) {
1343   return class_loader == nullptr ||
1344        soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
1345            class_loader->GetClass();
1346 }
1347 
// Visitor run over the classes of a class loader that is being unloaded.
// Notifies Class Hierarchy Analysis (CHA) so that single-implementation
// information derived from the dying classes is reset.
class CHAOnDeleteUpdateClassVisitor {
 public:
  explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
      : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
        pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
        self_(Thread::Current()) {}

  // Called once per class; returns true so that visitation continues.
  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    // This class is going to be unloaded. Tell CHA about it.
    cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
    return true;
  }
 private:
  const LinearAlloc* allocator_;
  const ClassHierarchyAnalysis* cha_;
  const PointerSize pointer_size_;
  // NOTE(review): `self_` is stored but never read in this class — confirm
  // whether it is still needed.
  const Thread* self_;
};
1366 
/*
 * A visitor used to verify that all references to strings interned in an AppImage
 * have been properly recorded in the interned references list. It is only ever
 * used in debug mode.
 */
class CountInternedStringReferencesVisitor {
 public:
  CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
                                       const InternTable::UnorderedSet& image_interns)
      : space_(space),
        image_interns_(image_interns),
        count_(0u) {}

  // Core check shared by all callbacks: if `referred_obj` is a string located
  // inside `space_` and present in `image_interns_`, bump the counter.
  void TestObject(ObjPtr<mirror::Object> referred_obj) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (referred_obj != nullptr &&
        space_.HasAddress(referred_obj.Ptr()) &&
        referred_obj->IsString()) {
      ObjPtr<mirror::String> referred_str = referred_obj->AsString();
      uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
      // All image strings have the hash code calculated, even if they are not interned.
      DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
      auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
      // Count the reference only if the set holds this exact string object,
      // not merely an equal one.
      if (it != image_interns_.end() && it->Read() == referred_str) {
        ++count_;
      }
    }
  }

  // Native-root callback pair (used when visiting dex cache native roots).
  void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    TestObject(root->AsMirrorPtr());
  }

  // Visit Class Fields
  void operator()(ObjPtr<mirror::Object> obj,
                  MemberOffset offset,
                  bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // References within image or across images don't need a read barrier.
    ObjPtr<mirror::Object> referred_obj =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    TestObject(referred_obj);
  }

  // Reference-object callback: forwards to the field callback for the
  // referent field.
  void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                  ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
  }

  // Number of interned-string references found so far.
  size_t GetCount() const {
    return count_;
  }

 private:
  const gc::space::ImageSpace& space_;
  const InternTable::UnorderedSet& image_interns_;
  mutable size_t count_;  // Modified from the `const` callbacks.
};
1434 
1435 /*
1436  * This function counts references to strings interned in the AppImage.
1437  * This is used in debug build to check against the number of the recorded references.
1438  */
size_t CountInternedStringReferences(gc::space::ImageSpace& space,
                                     const InternTable::UnorderedSet& image_interns)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
  const ImageHeader& image_header = space.GetImageHeader();
  const uint8_t* target_base = space.GetMemMap()->Begin();
  const ImageSection& objects_section = image_header.GetObjectsSection();

  // Bounds of the image's object section, relative to the mapped base.
  auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
  auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());

  // Walk every marked object in the object section and count references to
  // strings in `image_interns` via the visitor's callbacks.
  CountInternedStringReferencesVisitor visitor(space, image_interns);
  bitmap->VisitMarkedRange(objects_begin,
                           objects_end,
                           [&space, &visitor](mirror::Object* obj)
    REQUIRES_SHARED(Locks::mutator_lock_) {
    if (space.HasAddress(obj)) {
      if (obj->IsDexCache()) {
        obj->VisitReferences</* kVisitNativeRoots= */ true,
                             kVerifyNone,
                             kWithoutReadBarrier>(visitor, visitor);
      } else {
        // Don't visit native roots for non-dex-cache as they can't contain
        // native references to strings.  This is verified during compilation
        // by ImageWriter::VerifyNativeGCRootInvariants.
        obj->VisitReferences</* kVisitNativeRoots= */ false,
                             kVerifyNone,
                             kWithoutReadBarrier>(visitor, visitor);
      }
    }
  });
  return visitor.GetCount();
}
1472 
// Invokes `visitor` on every string reference recorded in the image's string
// reference offsets (SRO) section. Each record is a pair of offsets relative
// to the image base: the referring object and the member holding the string.
// If the visitor returns a different string than the one currently stored,
// the field is updated to the returned string.
template <typename Visitor>
static void VisitInternedStringReferences(
    gc::space::ImageSpace* space,
    const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
  const uint8_t* target_base = space->Begin();
  const ImageSection& sro_section =
      space->GetImageHeader().GetImageStringReferenceOffsetsSection();
  const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);

  VLOG(image)
      << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
      << num_string_offsets;

  const auto* sro_base =
      reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());

  for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
    uint32_t base_offset = sro_base[offset_index].first;

    uint32_t raw_member_offset = sro_base[offset_index].second;
    DCHECK_ALIGNED(base_offset, 2);
    DCHECK_ALIGNED(raw_member_offset, 2);

    // Recover the referring object and read the referenced string without a
    // read barrier (the reference stays within the image).
    ObjPtr<mirror::Object> obj_ptr =
        reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
    MemberOffset member_offset(raw_member_offset);
    ObjPtr<mirror::String> referred_string =
        obj_ptr->GetFieldObject<mirror::String,
                                kVerifyNone,
                                kWithoutReadBarrier,
                                /* kIsVolatile= */ false>(member_offset);
    DCHECK(referred_string != nullptr);

    // Let the visitor replace the string; only write back on change.
    ObjPtr<mirror::String> visited = visitor(referred_string);
    if (visited != referred_string) {
      obj_ptr->SetFieldObject</* kTransactionActive= */ false,
                              /* kCheckTransaction= */ false,
                              kVerifyNone,
                              /* kIsVolatile= */ false>(member_offset, visited);
    }
  }
}
1515 
// Debug-only check: every string reference recorded in the image's SRO
// section must resolve to that exact string in the image's interned-strings
// section, and the number of recorded references must match an independent
// count obtained by walking the image's objects.
static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  InternTable::UnorderedSet image_interns;
  const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
  if (section.Size() > 0) {
    size_t read_count;
    const uint8_t* data = space->Begin() + section.Offset();
    // Deserialize the interned-strings set in place, without copying the data.
    InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
    image_set.swap(image_interns);
  }
  size_t num_recorded_refs = 0u;
  VisitInternedStringReferences(
      space,
      [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        // Each recorded reference must point at a string that is in the set.
        auto it = image_interns.find(GcRoot<mirror::String>(str));
        CHECK(it != image_interns.end());
        CHECK(it->Read() == str);
        ++num_recorded_refs;
        return str;
      });
  size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
  CHECK_EQ(num_recorded_refs, num_found_refs);
}
1540 
// Helper for fixing up runtime state after an app image space has been added:
// registers the image's dex caches and interns/remaps the image's strings.
// Note: using a class here to avoid having to make ClassLinker internals public.
class AppImageLoadingHelper {
 public:
  // Registers each dex cache in `dex_caches` with `class_loader`, handles
  // app image strings, and in debug builds verifies interned string
  // references and ArtMethod declaring classes.
  static void Update(
      ClassLinker* class_linker,
      gc::space::ImageSpace* space,
      Handle<mirror::ClassLoader> class_loader,
      Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
      REQUIRES(!Locks::dex_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Interns the strings referenced from `space`, remapping references that
  // conflict with strings already in the runtime intern table.
  static void HandleAppImageStrings(gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
1557 
void AppImageLoadingHelper::Update(
    ClassLinker* class_linker,
    gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
    REQUIRES(!Locks::dex_lock_)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedTrace app_image_timing("AppImage:Updating");

  if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
    // In debug build, verify the string references before applying
    // the Runtime::LoadAppImageStartupCache() option.
    VerifyInternedStringReferences(space);
  }

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  const ImageHeader& header = space->GetImageHeader();
  {
    // Register dex caches with the class loader.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
      const DexFile* const dex_file = dex_cache->GetDexFile();
      {
        WriterMutexLock mu2(self, *Locks::dex_lock_);
        // The dex file must not already have a registered dex cache.
        CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
        class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
      }
    }
  }

  if (ClassLinker::kAppImageMayContainStrings) {
    HandleAppImageStrings(space);
  }

  if (kVerifyArtMethodDeclaringClasses) {
    ScopedTrace timing("AppImage:VerifyDeclaringClasses");
    ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
    gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
    // Every image method's declaring class, if set, must be a live object.
    header.VisitPackedArtMethods([&](ArtMethod& method)
        REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
      ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
      if (klass != nullptr) {
        CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
      }
    }, space->Begin(), kRuntimePointerSize);
  }
}
1607 
void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
  // Iterate over the string reference offsets stored in the image and intern
  // the strings they point to.
  ScopedTrace timing("AppImage:InternString");

  Runtime* const runtime = Runtime::Current();
  InternTable* const intern_table = runtime->GetInternTable();

  // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
  // for faster lookup.
  // TODO: Optimize with a bitmap or bloom filter
  SafeMap<mirror::String*, mirror::String*> intern_remap;
  auto func = [&](InternTable::UnorderedSet& interns)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::intern_table_lock_) {
    const size_t non_boot_image_strings = intern_table->CountInterns(
        /*visit_boot_images=*/false,
        /*visit_non_boot_images=*/true);
    VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
    VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
    // Visit the smaller of the two sets to compute the intersection.
    if (interns.size() < non_boot_image_strings) {
      // Probe the runtime intern table for each image string; on a hit,
      // record the remapping and drop the image entry so it is not added.
      for (auto it = interns.begin(); it != interns.end(); ) {
        ObjPtr<mirror::String> string = it->Read();
        ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
        if (existing == nullptr) {
          existing = intern_table->LookupStrongLocked(string);
        }
        if (existing != nullptr) {
          intern_remap.Put(string.Ptr(), existing.Ptr());
          it = interns.erase(it);
        } else {
          ++it;
        }
      }
    } else {
      // The runtime intern table is the smaller side: walk it and probe the
      // image set instead.
      intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
          REQUIRES_SHARED(Locks::mutator_lock_)
          REQUIRES(Locks::intern_table_lock_) {
        auto it = interns.find(root);
        if (it != interns.end()) {
          ObjPtr<mirror::String> existing = root.Read();
          intern_remap.Put(it->Read(), existing.Ptr());
          it = interns.erase(it);
        }
      }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
    }
    // Consistency check to ensure correctness.
    if (kIsDebugBuild) {
      // After removing conflicts above, no remaining image string may still
      // collide with an already-interned string.
      for (GcRoot<mirror::String>& root : interns) {
        ObjPtr<mirror::String> string = root.Read();
        CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
        CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
      }
    }
  };
  intern_table->AddImageStringsToTable(space, func);
  if (!intern_remap.empty()) {
    VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
    // Redirect every recorded string reference from the image's copy to the
    // canonical interned string.
    VisitInternedStringReferences(
        space,
        [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
          auto it = intern_remap.find(str.Ptr());
          if (it != intern_remap.end()) {
            return ObjPtr<mirror::String>(it->second);
          }
          return str;
        });
  }
}
1678 
OpenOatDexFile(const OatFile * oat_file,const char * location,std::string * error_msg)1679 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1680                                                      const char* location,
1681                                                      std::string* error_msg)
1682     REQUIRES_SHARED(Locks::mutator_lock_) {
1683   DCHECK(error_msg != nullptr);
1684   std::unique_ptr<const DexFile> dex_file;
1685   const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
1686   if (oat_dex_file == nullptr) {
1687     return std::unique_ptr<const DexFile>();
1688   }
1689   std::string inner_error_msg;
1690   dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1691   if (dex_file == nullptr) {
1692     *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1693                               location,
1694                               oat_file->GetLocation().c_str(),
1695                               inner_error_msg.c_str());
1696     return std::unique_ptr<const DexFile>();
1697   }
1698 
1699   if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1700     *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1701                               location,
1702                               dex_file->GetLocationChecksum(),
1703                               oat_dex_file->GetDexFileLocationChecksum());
1704     return std::unique_ptr<const DexFile>();
1705   }
1706   return dex_file;
1707 }
1708 
OpenImageDexFiles(gc::space::ImageSpace * space,std::vector<std::unique_ptr<const DexFile>> * out_dex_files,std::string * error_msg)1709 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1710                                     std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1711                                     std::string* error_msg) {
1712   ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1713   const ImageHeader& header = space->GetImageHeader();
1714   ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1715   DCHECK(dex_caches_object != nullptr);
1716   ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1717       dex_caches_object->AsObjectArray<mirror::DexCache>();
1718   const OatFile* oat_file = space->GetOatFile();
1719   for (auto dex_cache : dex_caches->Iterate()) {
1720     std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1721     std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1722                                                              dex_file_location.c_str(),
1723                                                              error_msg);
1724     if (dex_file == nullptr) {
1725       return false;
1726     }
1727     dex_cache->SetDexFile(dex_file.get());
1728     out_dex_files->push_back(std::move(dex_file));
1729   }
1730   return true;
1731 }
1732 
1733 // Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1734 // together and caches some intermediate results.
class ImageChecker final {
 public:
  // Walks all heap objects; for each class, verifies field declaring-class
  // consistency and checks every reachable ArtMethod (declared methods,
  // vtable, IMT, embedded vtable, iftable method arrays).
  static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ImageChecker ic(heap, class_linker);
    auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(obj != nullptr);
      CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
      CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
      if (obj->IsClass()) {
        auto klass = obj->AsClass();
        // Instance and static fields must agree on their declaring class.
        for (ArtField& field : klass->GetIFields()) {
          CHECK_EQ(field.GetDeclaringClass(), klass);
        }
        for (ArtField& field : klass->GetSFields()) {
          CHECK_EQ(field.GetDeclaringClass(), klass);
        }
        const PointerSize pointer_size = ic.pointer_size_;
        for (ArtMethod& m : klass->GetMethods(pointer_size)) {
          ic.CheckArtMethod(&m, klass);
        }
        // Methods reached through tables may belong to other classes, so no
        // expected declaring class is passed (nullptr).
        ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
        if (vtable != nullptr) {
          ic.CheckArtMethodPointerArray(vtable, nullptr);
        }
        if (klass->ShouldHaveImt()) {
          ImTable* imt = klass->GetImt(pointer_size);
          for (size_t i = 0; i < ImTable::kSize; ++i) {
            ic.CheckArtMethod(imt->Get(i, pointer_size), nullptr);
          }
        }
        if (klass->ShouldHaveEmbeddedVTable()) {
          for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
            ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
          }
        }
        ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            ic.CheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
          }
        }
      }
    };
    heap->VisitObjects(visitor);
  }

 private:
  // Caches the boot image spaces' base addresses and (runtime) method
  // sections so the per-method containment check does not re-fetch them.
  ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
     :  spaces_(heap->GetBootImageSpaces()),
        pointer_size_(class_linker->GetImagePointerSize()) {
    space_begin_.reserve(spaces_.size());
    method_sections_.reserve(spaces_.size());
    runtime_method_sections_.reserve(spaces_.size());
    for (gc::space::ImageSpace* space : spaces_) {
      space_begin_.push_back(space->Begin());
      auto& header = space->GetImageHeader();
      method_sections_.push_back(&header.GetMethodsSection());
      runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
    }
  }

  // Checks one method: declaring-class expectations (null for runtime
  // methods, non-null for copied methods, equal to `expected_class` when one
  // is given) and, if boot image spaces exist, that the method lies in some
  // space's methods or runtime-methods section.
  void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (m->IsRuntimeMethod()) {
      ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
      CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
    } else if (m->IsCopied()) {
      CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
    } else if (expected_class != nullptr) {
      CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
    }
    if (!spaces_.empty()) {
      bool contains = false;
      for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
        const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
        contains = method_sections_[i]->Contains(offset) ||
            runtime_method_sections_[i]->Contains(offset);
      }
      CHECK(contains) << m << " not found";
    }
  }

  // Checks every method pointer in `arr` (a vtable or iftable method array).
  void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
                                  ObjPtr<mirror::Class> expected_class)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(arr != nullptr);
    for (int32_t j = 0; j < arr->GetLength(); ++j) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
      // expected_class == null means we are a dex cache.
      if (expected_class != nullptr) {
        CHECK(method != nullptr);
      }
      if (method != nullptr) {
        CheckArtMethod(method, expected_class);
      }
    }
  }

  const std::vector<gc::space::ImageSpace*>& spaces_;
  const PointerSize pointer_size_;

  // Cached sections from the spaces.
  std::vector<const uint8_t*> space_begin_;
  std::vector<const ImageSection*> method_sections_;
  std::vector<const ImageSection*> runtime_method_sections_;
};
1842 
// Debug verification of a freshly added app image: every image method's
// declaring class that is not in a boot image must be found in the class
// table, and all direct interfaces of the loader's classes must be resolved.
static void VerifyAppImage(const ImageHeader& header,
                           const Handle<mirror::ClassLoader>& class_loader,
                           ClassTable* class_table,
                           gc::space::ImageSpace* space)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
    if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
      CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
          << mirror::Class::PrettyClass(klass);
    }
  }, space->Begin(), kRuntimePointerSize);
  {
    // Verify that all direct interfaces of classes in the class table are also resolved.
    // Classes are first collected, then checked outside the table visit.
    // NOTE(review): presumably to avoid touching the table while visiting it
    // — confirm.
    std::vector<ObjPtr<mirror::Class>> classes;
    auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
        classes.push_back(klass);
      }
      return true;
    };
    class_table->Visit(verify_direct_interfaces_in_table);
    for (ObjPtr<mirror::Class> klass : classes) {
      for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
        CHECK(klass->GetDirectInterface(i) != nullptr)
            << klass->PrettyDescriptor() << " iface #" << i;
      }
    }
  }
}
1874 
// Registers an image `space` with this class linker. For the boot image
// (null `class_loader`) the image's dex files are appended to the boot class
// path; for an app image the image class table is merged into the given
// loader's table and the image's classes/methods are fixed up for the current
// runtime mode. On success the opened dex files are appended to
// `*out_dex_files` and true is returned; on failure `*error_msg` is set and
// false is returned.
bool ClassLinker::AddImageSpace(
    gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
    std::string* error_msg) {
  DCHECK(out_dex_files != nullptr);
  DCHECK(error_msg != nullptr);
  const uint64_t start_time = NanoTime();
  // A null class loader denotes the boot image.
  const bool app_image = class_loader != nullptr;
  const ImageHeader& header = space->GetImageHeader();
  ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
  DCHECK(dex_caches_object != nullptr);
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  Thread* const self = Thread::Current();
  // Check that the image is what we are expecting.
  if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
    *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
                              static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
                              image_pointer_size_);
    return false;
  }
  size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
  if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
    *error_msg = StringPrintf("Expected %zu image roots but got %d",
                              expected_image_roots,
                              header.GetImageRoots()->GetLength());
    return false;
  }
  StackHandleScope<3> hs(self);
  Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
      hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
  Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
      header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
  MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
      app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
                : nullptr));
  DCHECK(class_roots != nullptr);
  if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
    *error_msg = StringPrintf("Expected %d class roots but got %d",
                              class_roots->GetLength(),
                              static_cast<int32_t>(ClassRoot::kMax));
    return false;
  }
  // Check against existing class roots to make sure they match the ones in the boot image.
  ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
      *error_msg = "App image class roots must have pointer equality with runtime ones.";
      return false;
    }
  }
  const OatFile* oat_file = space->GetOatFile();
  if (oat_file->GetOatHeader().GetDexFileCount() !=
      static_cast<uint32_t>(dex_caches->GetLength())) {
    *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
                 "image";
    return false;
  }

  // Open each dex file referenced by the image and initialize its dex cache.
  for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
    std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
    std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
                                                             dex_file_location.c_str(),
                                                             error_msg);
    if (dex_file == nullptr) {
      return false;
    }

    {
      // Native fields are all null.  Initialize them.
      WriterMutexLock mu(self, *Locks::dex_lock_);
      dex_cache->Initialize(dex_file.get(), class_loader.Get());
    }
    if (!app_image) {
      // Register dex files, keep track of existing ones that are conflicts.
      AppendToBootClassPath(dex_file.get(), dex_cache);
    }
    out_dex_files->push_back(std::move(dex_file));
  }

  if (app_image) {
    ScopedObjectAccessUnchecked soa(Thread::Current());
    ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
    // An app image must not have been compiled against the boot class loader.
    if (IsBootClassLoader(soa, image_class_loader.Get())) {
      *error_msg = "Unexpected BootClassLoader in app image";
      return false;
    }
  }

  if (kCheckImageObjects) {
    if (!app_image) {
      ImageChecker::CheckObjects(heap, this);
    }
  }

  // Set entry point to interpreter if in InterpretOnly mode.
  if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
    // Set image methods' entry point to interpreter.
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!method.IsRuntimeMethod()) {
        DCHECK(method.GetDeclaringClass() != nullptr);
        if (!method.IsNative() && !method.IsResolutionMethod()) {
          method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
                                                            image_pointer_size_);
        }
      }
    }, space->Begin(), image_pointer_size_);
  }

  if (!runtime->IsAotCompiler()) {
    // If we are profiling the boot classpath, disable the shared memory for
    // boot image method optimization. We need to disable it before doing
    // ResetCounter below, as counters of shared memory method always hold the
    // "hot" value.
    if (runtime->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
      header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
        method.ClearMemorySharedMethod();
      }, space->Begin(), image_pointer_size_);
    }

    ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
    bool can_use_nterp = interpreter::CanRuntimeUseNterp();
    uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      // In the image, the `data` pointer field of the ArtMethod contains the code
      // item offset. Change this to the actual pointer to the code item.
      if (method.HasCodeItem()) {
        const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
            reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
        method.SetCodeItem(code_item, method.GetDexFile()->IsCompactDexFile());
        // The hotness counter may have changed since we compiled the image, so
        // reset it with the runtime value.
        method.ResetCounter(hotness_threshold);
      }
      if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
        if (can_use_nterp) {
          // Set image methods' entry point that point to the nterp trampoline to the
          // nterp entry point. This allows taking the fast path when doing a
          // nterp->nterp call.
          DCHECK_IMPLIES(NeedsClinitCheckBeforeCall(&method),
                         method.GetDeclaringClass()->IsVisiblyInitialized());
          method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
        } else {
          method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
        }
      }
    }, space->Begin(), image_pointer_size_);
  }

  if (runtime->IsVerificationSoftFail()) {
    // Under soft-fail verification, re-enable access checks for all invokable
    // non-native image methods.
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!method.IsNative() && method.IsInvokable()) {
        method.ClearSkipAccessChecks();
      }
    }, space->Begin(), image_pointer_size_);
  }

  ClassTable* class_table = nullptr;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    class_table = InsertClassTableForClassLoader(class_loader.Get());
  }
  // If we have a class table section, read it and use it for verification in
  // UpdateAppImageClassLoadersAndDexCaches.
  ClassTable::ClassSet temp_set;
  const ImageSection& class_table_section = header.GetClassTableSection();
  const bool added_class_table = class_table_section.Size() > 0u;
  if (added_class_table) {
    const uint64_t start_time2 = NanoTime();
    size_t read_count = 0;
    // Deserialize the class set directly from the mapped image memory.
    temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
                                    /*make copy*/false,
                                    &read_count);
    VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
  }
  if (app_image) {
    AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);

    {
      ScopedTrace trace("AppImage:UpdateClassLoaders");
      // Update class loader and resolved strings. If added_class_table is false, the resolved
      // strings were forwarded by UpdateAppImageClassLoadersAndDexCaches.
      ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
      for (const ClassTable::TableSlot& root : temp_set) {
        // Note: We probably don't need the read barrier unless we copy the app image objects into
        // the region space.
        ObjPtr<mirror::Class> klass(root.Read());
        // Do not update class loader for boot image classes where the app image
        // class loader is only the initiating loader but not the defining loader.
        // Avoid read barrier since we are comparing against null.
        if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
          klass->SetClassLoader(loader);
        }
      }
    }

    if (kBitstringSubtypeCheckEnabled) {
      // Every class in the app image has initially SubtypeCheckInfo in the
      // Uninitialized state.
      //
      // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
      // after class initialization is complete. The app image ClassStatus as-is
      // are almost all ClassStatus::Initialized, and being in the
      // SubtypeCheckInfo::kUninitialized state is violating that invariant.
      //
      // Force every app image class's SubtypeCheck to be at least kInitialized.
      //
      // See also ImageWriter::FixupClass.
      ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
      MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
      for (const ClassTable::TableSlot& root : temp_set) {
        SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
      }
    }
  }
  if (!oat_file->GetBssGcRoots().empty()) {
    // Insert oat file to class table for visiting .bss GC roots.
    class_table->InsertOatFile(oat_file);
  }

  if (added_class_table) {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    class_table->AddClassSet(std::move(temp_set));
  }

  if (kIsDebugBuild && app_image) {
    // This verification needs to happen after the classes have been added to the class loader.
    // Since it ensures classes are in the class table.
    ScopedTrace trace("AppImage:Verify");
    VerifyAppImage(header, class_loader, class_table, space);
  }

  VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
  return true;
}
2111 
// Visits the GC roots held by the class linker's class tables and, depending
// on `flags`, the logs of roots added since logging was enabled. With a read
// barrier (CC) collector, new-root logging is not used at all.
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
  // enabling tracing requires the mutator lock, there are no race conditions here.
  const bool tracing_enabled = Trace::IsTracingEnabled();
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  if (kUseReadBarrier) {
    // We do not track new roots for CC.
    DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
                          kVisitRootFlagClearRootLog |
                          kVisitRootFlagStartLoggingNewRoots |
                          kVisitRootFlagStopLoggingNewRoots));
  }
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    // Argument for how root visiting deals with ArtField and ArtMethod roots.
    // There is 3 GC cases to handle:
    // Non moving concurrent:
    // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
    // live by the class and class roots.
    //
    // Moving non-concurrent:
    // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move.
    // To prevent missing roots, this case needs to ensure that there is no
    // suspend points between the point which we allocate ArtMethod arrays and place them in a
    // class which is in the class table.
    //
    // Moving concurrent:
    // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
    // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
    //
    // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
    // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
    // these objects.
    UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
    boot_class_table_->VisitRoots(root_visitor);
    // If tracing is enabled, then mark all the class loaders to prevent unloading.
    if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
      for (const ClassLoaderData& data : class_loaders_) {
        // Decode the weak global reference to the loader and visit it as a strong root.
        GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
        root.VisitRoot(visitor, RootInfo(kRootVMInternal));
      }
    }
  } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
    // Only visit roots recorded since logging started (incremental marking).
    for (auto& root : new_class_roots_) {
      ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootStickyClass));
      ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
      // Concurrent moving GC marked new roots through the to-space invariant.
      CHECK_EQ(new_ref, old_ref);
    }
    for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
      for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
        ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
        if (old_ref != nullptr) {
          DCHECK(old_ref->IsClass());
          root.VisitRoot(visitor, RootInfo(kRootStickyClass));
          ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
          // Concurrent moving GC marked new roots through the to-space invariant.
          CHECK_EQ(new_ref, old_ref);
        }
      }
    }
  }
  if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
    new_class_roots_.clear();
    new_bss_roots_boot_oat_files_.clear();
  }
  if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // We deliberately ignore the class roots in the image since we
  // handle image roots by using the MS/CMS rescanning of dirty cards.
}
2187 
// Keep in sync with InitCallback. Anything we visit, we need to
// reinit references to when reinitializing a ClassLinker from a
// mapped image.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Visit the class-roots array itself, then all class table and root-log roots.
  class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  VisitClassRoots(visitor, flags);
  // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
  // unloading if we are marking roots.
  DropFindArrayClassCache();
}
2198 
2199 class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2200  public:
VisitClassLoaderClassesVisitor(ClassVisitor * visitor)2201   explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2202       : visitor_(visitor),
2203         done_(false) {}
2204 
Visit(ObjPtr<mirror::ClassLoader> class_loader)2205   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
2206       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
2207     ClassTable* const class_table = class_loader->GetClassTable();
2208     if (!done_ && class_table != nullptr) {
2209       DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2210       if (!class_table->Visit(visitor)) {
2211         // If the visitor ClassTable returns false it means that we don't need to continue.
2212         done_ = true;
2213       }
2214     }
2215   }
2216 
2217  private:
2218   // Class visitor that limits the class visits from a ClassTable to the classes with
2219   // the provided defining class loader. This filter is used to avoid multiple visits
2220   // of the same class which can be recorded for multiple initiating class loaders.
2221   class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2222    public:
DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,ClassVisitor * visitor)2223     DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2224                                      ClassVisitor* visitor)
2225         : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2226 
operator ()(ObjPtr<mirror::Class> klass)2227     bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2228       if (klass->GetClassLoader() != defining_class_loader_) {
2229         return true;
2230       }
2231       return (*visitor_)(klass);
2232     }
2233 
2234     const ObjPtr<mirror::ClassLoader> defining_class_loader_;
2235     ClassVisitor* const visitor_;
2236   };
2237 
2238   ClassVisitor* const visitor_;
2239   // If done is true then we don't need to do any more visiting.
2240   bool done_;
2241 };
2242 
// Visits the boot class table first; only if that visit completed (the visitor
// returned true for every class) are the classes of all other loaders visited.
void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
  if (boot_class_table_->Visit(*visitor)) {
    VisitClassLoaderClassesVisitor loader_visitor(visitor);
    VisitClassLoaders(&loader_visitor);
  }
}
2249 
// Visits all loaded classes while holding the class linker classes lock.
// The visitor must not trigger thread suspension; this is asserted when a
// current thread is available.
void ClassLinker::VisitClasses(ClassVisitor* visitor) {
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
  // Not safe to have thread suspension when we are holding a lock.
  if (self != nullptr) {
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    VisitClassesInternal(visitor);
  } else {
    // NOTE(review): `self` can apparently be null here (the suspension
    // assertion cannot be armed) — confirm which callers run detached.
    VisitClassesInternal(visitor);
  }
}
2261 
// ClassVisitor that accumulates every visited class into a vector.
// Used by VisitClassesWithoutClassesLock when classes cannot move.
class GetClassesInToVector : public ClassVisitor {
 public:
  bool operator()(ObjPtr<mirror::Class> klass) override {
    classes_.push_back(klass);
    return true;  // Never stop early.
  }
  // All classes seen so far, in visit order.
  std::vector<ObjPtr<mirror::Class>> classes_;
};
2270 
2271 class GetClassInToObjectArray : public ClassVisitor {
2272  public:
GetClassInToObjectArray(mirror::ObjectArray<mirror::Class> * arr)2273   explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2274       : arr_(arr), index_(0) {}
2275 
operator ()(ObjPtr<mirror::Class> klass)2276   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2277     ++index_;
2278     if (index_ <= arr_->GetLength()) {
2279       arr_->Set(index_ - 1, klass);
2280       return true;
2281     }
2282     return false;
2283   }
2284 
Succeeded() const2285   bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
2286     return index_ <= arr_->GetLength();
2287   }
2288 
2289  private:
2290   mirror::ObjectArray<mirror::Class>* const arr_;
2291   int32_t index_;
2292 };
2293 
// Visits all classes without holding the classes lock during the visitor
// callbacks. For non-moving classes a simple snapshot vector suffices; for
// moving classes the snapshot goes into an ObjectArray (a GC root via handle)
// sized with slack, retrying if the class table outgrew the array meanwhile.
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
  // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
  // is avoiding duplicates.
  if (!kMovingClasses) {
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    GetClassesInToVector accumulator;
    VisitClasses(&accumulator);
    for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
      if (!visitor->operator()(klass)) {
        return;  // The visitor requested early termination.
      }
    }
  } else {
    Thread* const self = Thread::Current();
    StackHandleScope<1> hs(self);
    auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
    // We size the array assuming classes won't be added to the class table during the visit.
    // If this assumption fails we iterate again.
    while (true) {
      size_t class_table_size;
      {
        ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
        // Add 100 in case new classes get loaded when we are filling in the object array.
        class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
      }
      ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
      classes.Assign(
          mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
      CHECK(classes != nullptr);  // OOME.
      GetClassInToObjectArray accumulator(classes.Get());
      VisitClasses(&accumulator);
      if (accumulator.Succeeded()) {
        break;  // Snapshot complete; otherwise the array was too small, retry.
      }
    }
    for (int32_t i = 0; i < classes->GetLength(); ++i) {
      // If the class table shrank during creation of the classes array we expect null elements. If
      // the class table grew then the loop repeats. If classes are created after the loop has
      // finished then we don't visit.
      ObjPtr<mirror::Class> klass = classes->Get(i);
      if (klass != nullptr && !visitor->operator()(klass)) {
        return;
      }
    }
  }
}
2340 
// Tears down all class loader data and frees any callbacks still queued on
// the visibly-initialized list.
ClassLinker::~ClassLinker() {
  Thread* const self = Thread::Current();
  for (const ClassLoaderData& data : class_loaders_) {
    // CHA unloading analysis is not needed. No negative consequences are expected because
    // all the classloaders are deleted at the same time.
    DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
  }
  class_loaders_.clear();
  while (!running_visibly_initialized_callbacks_.empty()) {
    // The list is intrusive: take ownership of the front node so the
    // unique_ptr deletes it after it is unlinked by pop_front().
    std::unique_ptr<VisiblyInitializedCallback> callback(
        std::addressof(running_visibly_initialized_callbacks_.front()));
    running_visibly_initialized_callbacks_.pop_front();
  }
}
2355 
// Releases all native state owned by one class loader: its weak global root,
// JIT code and CHA dependencies tied to its linear alloc, any cached critical
// native entries, and finally its allocator and class table.
void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
  Runtime* const runtime = Runtime::Current();
  JavaVMExt* const vm = runtime->GetJavaVM();
  vm->DeleteWeakGlobalRef(self, data.weak_root);
  // Notify the JIT that we need to remove the methods and/or profiling info.
  if (runtime->GetJit() != nullptr) {
    jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
    if (code_cache != nullptr) {
      // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
      code_cache->RemoveMethodsIn(self, *data.allocator);
    }
  } else if (cha_ != nullptr) {
    // If we don't have a JIT, we need to manually remove the CHA dependencies.
    cha_->RemoveDependenciesForLinearAlloc(data.allocator);
  }
  // Cleanup references to single implementation ArtMethods that will be deleted.
  if (cleanup_cha) {
    CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
    data.class_table->Visit<kWithoutReadBarrier>(visitor);
  }
  {
    // Drop cached critical-native entries whose methods live in this loader's
    // linear alloc, since those ArtMethods are about to be freed.
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto end = critical_native_code_with_clinit_check_.end();
    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
      if (data.allocator->ContainsUnsafe(it->first)) {
        it = critical_native_code_with_clinit_check_.erase(it);
      } else {
        ++it;
      }
    }
  }

  delete data.allocator;
  delete data.class_table;
}
2391 
AllocPointerArray(Thread * self,size_t length)2392 ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2393   return ObjPtr<mirror::PointerArray>::DownCast(
2394       image_pointer_size_ == PointerSize::k64
2395           ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2396           : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
2397 }
2398 
// Allocates a new, uninitialized DexCache object for `dex_file` and sets its
// location string. Returns null with a pending OOME on allocation failure.
ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
  StackHandleScope<1> hs(self);
  auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
      GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
  if (dex_cache == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Use InternWeak() so that the location String can be collected when the ClassLoader
  // with this DexCache is collected.
  ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
  if (location == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  dex_cache->SetLocation(location);
  return dex_cache.Get();
}
2417 
AllocAndInitializeDexCache(Thread * self,const DexFile & dex_file,ObjPtr<mirror::ClassLoader> class_loader)2418 ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
2419     Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
2420   StackHandleScope<1> hs(self);
2421   Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
2422   ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
2423   if (dex_cache != nullptr) {
2424     WriterMutexLock mu(self, *Locks::dex_lock_);
2425     dex_cache->Initialize(&dex_file, h_class_loader.Get());
2426   }
2427   return dex_cache;
2428 }
2429 
2430 template <bool kMovable, typename PreFenceVisitor>
AllocClass(Thread * self,ObjPtr<mirror::Class> java_lang_Class,uint32_t class_size,const PreFenceVisitor & pre_fence_visitor)2431 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2432                                               ObjPtr<mirror::Class> java_lang_Class,
2433                                               uint32_t class_size,
2434                                               const PreFenceVisitor& pre_fence_visitor) {
2435   DCHECK_GE(class_size, sizeof(mirror::Class));
2436   gc::Heap* heap = Runtime::Current()->GetHeap();
2437   ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
2438       heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2439       heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
2440   if (UNLIKELY(k == nullptr)) {
2441     self->AssertPendingOOMException();
2442     return nullptr;
2443   }
2444   return k->AsClass();
2445 }
2446 
// Overload that uses the default InitializeClassVisitor as the pre-fence
// visitor for the allocation.
template <bool kMovable>
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
                                              ObjPtr<mirror::Class> java_lang_Class,
                                              uint32_t class_size) {
  mirror::Class::InitializeClassVisitor visitor(class_size);
  return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
}
2454 
// Convenience overload using the java.lang.Class root as the class of the
// new object (the default kMovable template argument applies).
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
  return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
}
2458 
// Allocates the array class for the primitive type rooted at `primitive_root`
// and records it under `array_root`. Only the component type is set here;
// the rest of the setup happens in FinishArrayClassSetup.
void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
                                           ClassRoot primitive_root,
                                           ClassRoot array_root) {
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
      self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
  ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
  DCHECK(component_type->IsPrimitive());
  array_class->SetComponentType(component_type);
  SetClassRoot(array_root, array_class);
}
2475 
// Completes linking of an array class whose component type is already set:
// wires up the Object superclass/vtable/imt, the shared array iftable,
// access flags derived from the component type, and marks the class
// visibly initialized.
void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
  ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
  array_class->SetSuperClass(java_lang_Object);
  array_class->SetVTable(java_lang_Object->GetVTable());
  array_class->SetPrimitiveType(Primitive::kPrimNot);
  ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
  // Arrays of primitives contain no references; object arrays get the
  // dedicated object-array flag.
  array_class->SetClassFlags(component_type->IsPrimitive()
                                 ? mirror::kClassFlagNoReferenceFields
                                 : mirror::kClassFlagObjectArray);
  array_class->SetClassLoader(component_type->GetClassLoader());
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
  array_class->PopulateEmbeddedVTable(image_pointer_size_);
  ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
  array_class->SetImt(object_imt, image_pointer_size_);
  DCHECK_EQ(array_class->NumMethods(), 0u);

  // don't need to set new_class->SetObjectSize(..)
  // because Object::SizeOf delegates to Array::SizeOf

  // All arrays have java/lang/Cloneable and java/io/Serializable as
  // interfaces.  We need to set that up here, so that stuff like
  // "instanceof" works right.

  // Use the single, global copies of "interfaces" and "iftable"
  // (remember not to free them for arrays).
  {
    ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
    CHECK(array_iftable != nullptr);
    array_class->SetIfTable(array_iftable);
  }

  // Inherit access flags from the component type.
  int access_flags = component_type->GetAccessFlags();
  // Lose any implementation detail flags; in particular, arrays aren't finalizable.
  access_flags &= kAccJavaFlagsMask;
  // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
  // and remove "interface".
  access_flags |= kAccAbstract | kAccFinal;
  access_flags &= ~kAccInterface;

  array_class->SetAccessFlagsDuringLinking(access_flags);

  // Array classes are fully initialized either during single threaded startup,
  // or from a pre-fence visitor, so visibly initialized.
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
}
2522 
FinishCoreArrayClassSetup(ClassRoot array_root)2523 void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2524   // Do not hold lock on the array class object, the initialization of
2525   // core array classes is done while the process is still single threaded.
2526   ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2527   FinishArrayClassSetup(array_class);
2528 
2529   std::string temp;
2530   const char* descriptor = array_class->GetDescriptor(&temp);
2531   size_t hash = ComputeModifiedUtf8Hash(descriptor);
2532   ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2533   CHECK(existing == nullptr);
2534 }
2535 
AllocStackTraceElementArray(Thread * self,size_t length)2536 ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
2537     Thread* self,
2538     size_t length) {
2539   return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
2540       self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
2541 }
2542 
// Given a class already present in a class table (possibly inserted by another
// thread), waits until it becomes resolved and returns it. For temp classes,
// waits for retirement and re-looks-up the final class. Returns null with a
// pending exception if the class is/becomes erroneous or a class circularity
// is detected on this thread.
ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
                                                  const char* descriptor,
                                                  ObjPtr<mirror::Class> klass) {
  DCHECK(klass != nullptr);
  if (kIsDebugBuild) {
    StackHandleScope<1> hs(self);
    // Wrap `klass` in a handle so it is updated if poisoning triggers a move.
    HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
    Thread::PoisonObjectPointersIfDebug();
  }

  // For temporary classes we must wait for them to be retired.
  if (init_done_ && klass->IsTemp()) {
    CHECK(!klass->IsResolved());
    if (klass->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(klass);
      return nullptr;
    }
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    ObjectLock<mirror::Class> lock(self, h_class);
    // Loop and wait for the resolving thread to retire this class.
    while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
      lock.WaitIgnoringInterrupts();
    }
    if (h_class->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(h_class.Get());
      return nullptr;
    }
    CHECK(h_class->IsRetired());
    // Get the updated class from class table.
    klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
  }

  // Wait for the class if it has not already been linked.
  size_t index = 0;
  // Maximum number of yield iterations until we start sleeping.
  static const size_t kNumYieldIterations = 1000;
  // How long each sleep is in us.
  static const size_t kSleepDurationUS = 1000;  // 1 ms.
  while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
    {
      ObjectTryLock<mirror::Class> lock(self, h_class);
      // Can not use a monitor wait here since it may block when returning and deadlock if another
      // thread has locked klass.
      if (lock.Acquired()) {
        // Check for circular dependencies between classes, the lock is required for SetStatus.
        if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
          // This thread is already resolving this class further up the stack;
          // bailing out here breaks the cycle.
          ThrowClassCircularityError(h_class.Get());
          mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
          return nullptr;
        }
      }
    }
    {
      // Handle wrapper deals with klass moving.
      ScopedThreadSuspension sts(self, ThreadState::kSuspended);
      // Spin with sched_yield() first, then back off to sleeping, to keep
      // latency low in the common case of a short wait.
      if (index < kNumYieldIterations) {
        sched_yield();
      } else {
        usleep(kSleepDurationUS);
      }
    }
    ++index;
  }

  if (klass->IsErroneousUnresolved()) {
    ThrowEarlierClassFailure(klass);
    return nullptr;
  }
  // Return the loaded class.  No exceptions should be pending.
  CHECK(klass->IsResolved()) << klass->PrettyClass();
  self->AssertNoPendingException();
  return klass;
}
2619 
2620 using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
2621 
2622 // Search a collection of DexFiles for a descriptor
FindInClassPath(const char * descriptor,size_t hash,const std::vector<const DexFile * > & class_path)2623 ClassPathEntry FindInClassPath(const char* descriptor,
2624                                size_t hash, const std::vector<const DexFile*>& class_path) {
2625   for (const DexFile* dex_file : class_path) {
2626     DCHECK(dex_file != nullptr);
2627     const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
2628     if (dex_class_def != nullptr) {
2629       return ClassPathEntry(dex_file, dex_class_def);
2630     }
2631   }
2632   return ClassPathEntry(nullptr, nullptr);
2633 }
2634 
// Helper macro to make sure each class loader lookup call handles the case the
// class loader is not recognized, or the lookup threw an exception.
// Encodes the three ways a lookup step can end the search:
//   - call_ evaluated to false (loader chain not recognized) -> return false;
//   - result_ is non-null (class found)                      -> return true;
//   - thread_ has a pending exception other than the ones filtered by the
//     lookup itself                                          -> return false.
// Otherwise execution falls through so the caller can try the next source.
#define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
do {                                                                          \
  auto local_call = call_;                                                    \
  if (!local_call) {                                                          \
    return false;                                                             \
  }                                                                           \
  auto local_result = result_;                                                \
  if (local_result != nullptr) {                                              \
    return true;                                                              \
  }                                                                           \
  auto local_thread = thread_;                                                \
  if (local_thread->IsExceptionPending()) {                                   \
    /* Pending exception means there was an error other than */               \
    /* ClassNotFound that must be returned to the caller. */                  \
    return false;                                                             \
  }                                                                           \
} while (0)
2654 
FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable & soa,Thread * self,const char * descriptor,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)2655 bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
2656                                              Thread* self,
2657                                              const char* descriptor,
2658                                              size_t hash,
2659                                              Handle<mirror::ClassLoader> class_loader,
2660                                              /*out*/ ObjPtr<mirror::Class>* result) {
2661   ArtField* field =
2662       jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
2663   return FindClassInSharedLibrariesHelper(soa, self, descriptor, hash, class_loader, field, result);
2664 }
2665 
// Searches each class loader stored in `field` (an ObjectArray<ClassLoader>,
// or null if the loader has no shared libraries) for `descriptor`, in array
// order. Returns false if a loader in the chain was unrecognized or a
// non-ClassNotFound exception is pending; otherwise returns true, with
// *result set if the class was found.
bool ClassLinker::FindClassInSharedLibrariesHelper(ScopedObjectAccessAlreadyRunnable& soa,
                                                   Thread* self,
                                                   const char* descriptor,
                                                   size_t hash,
                                                   Handle<mirror::ClassLoader> class_loader,
                                                   ArtField* field,
                                                   /*out*/ ObjPtr<mirror::Class>* result) {
  ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
  if (raw_shared_libraries == nullptr) {
    // No shared libraries: nothing to search, chain still recognized.
    return true;
  }

  StackHandleScope<2> hs(self);
  Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
      hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
  // Reusable handle for each library loader; FindClassInBaseDexClassLoader may
  // allocate and thus cause thread suspension.
  MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
  for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
    temp_loader.Assign(loader);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result),
        *result,
        self);
  }
  return true;
}
2691 
FindClassInSharedLibrariesAfter(ScopedObjectAccessAlreadyRunnable & soa,Thread * self,const char * descriptor,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)2692 bool ClassLinker::FindClassInSharedLibrariesAfter(ScopedObjectAccessAlreadyRunnable& soa,
2693                                                   Thread* self,
2694                                                   const char* descriptor,
2695                                                   size_t hash,
2696                                                   Handle<mirror::ClassLoader> class_loader,
2697                                                   /*out*/ ObjPtr<mirror::Class>* result) {
2698   ArtField* field = jni::DecodeArtField(
2699       WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter);
2700   return FindClassInSharedLibrariesHelper(soa, self, descriptor, hash, class_loader, field, result);
2701 }
2702 
// Natively mirrors the Java-side lookup performed by recognized class loaders
// (boot, Path/Dex/InMemoryDex, DelegateLast), recursing through parents and
// shared libraries. Returns false if any loader in the chain is unrecognized
// (*result set to null) or an unexpected exception is pending; otherwise
// returns true, with *result set if the class was found.
bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
                                                Thread* self,
                                                const char* descriptor,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                /*out*/ ObjPtr<mirror::Class>* result) {
  // Termination case: boot class loader.
  if (IsBootClassLoader(soa, class_loader.Get())) {
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    return true;
  }

  if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
    // For regular path or dex class loader the search order is:
    //    - parent
    //    - shared libraries
    //    - class loader dex files

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  if (IsDelegateLastClassLoader(soa, class_loader)) {
    // For delegate last, the search order is:
    //    - boot class path
    //    - shared libraries
    //    - class loader dex files
    //    - parent
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  // Unsupported class loader.
  *result = nullptr;
  return false;
}
2783 
2784 #undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION
2785 
2786 namespace {
2787 
2788 // Matches exceptions caught in DexFile.defineClass.
MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,ClassLinker * class_linker)2789 ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2790                                                   ClassLinker* class_linker)
2791     REQUIRES_SHARED(Locks::mutator_lock_) {
2792   return
2793       // ClassNotFoundException.
2794       throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2795                                          class_linker))
2796       ||
2797       // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2798       throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2799 }
2800 
2801 // Clear exceptions caught in DexFile.defineClass.
FilterDexFileCaughtExceptions(Thread * self,ClassLinker * class_linker)2802 ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2803     REQUIRES_SHARED(Locks::mutator_lock_) {
2804   if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2805     self->ClearException();
2806   }
2807 }
2808 
2809 }  // namespace
2810 
// Finds the class in the boot class loader.
// If the class is found the method returns the resolved class. Otherwise it returns null.
// Failures from DefineClass that look like ClassNotFound are cleared so the
// caller can continue searching; other exceptions remain pending. Always
// returns true: the boot class loader is a recognized lookup.
bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
                                                      const char* descriptor,
                                                      size_t hash,
                                                      /*out*/ ObjPtr<mirror::Class>* result) {
  ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
  if (pair.second != nullptr) {
    // The descriptor exists on the boot class path. Either the class is
    // already in the table (possibly loaded by another thread) or we define
    // it here from the located ClassDef.
    ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
    if (klass != nullptr) {
      *result = EnsureResolved(self, descriptor, klass);
    } else {
      *result = DefineClass(self,
                            descriptor,
                            hash,
                            ScopedNullHandle<mirror::ClassLoader>(),
                            *pair.first,
                            *pair.second);
    }
    if (*result == nullptr) {
      CHECK(self->IsExceptionPending()) << descriptor;
      // Drop ClassNotFound-style exceptions; keep anything else pending.
      FilterDexFileCaughtExceptions(self, this);
    }
  }
  // The boot classloader is always a known lookup.
  return true;
}
2838 
// Searches only the dex files attached directly to `class_loader` (not its
// parent or shared libraries) for `descriptor`, defining the class if a
// matching ClassDef is found. ClassNotFound-style exceptions from DefineClass
// are cleared; others remain pending. Always returns true: a
// BaseDexClassLoader is a recognized lookup.
bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
    ScopedObjectAccessAlreadyRunnable& soa,
    const char* descriptor,
    size_t hash,
    Handle<mirror::ClassLoader> class_loader,
    /*out*/ ObjPtr<mirror::Class>* result) {
  DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
         IsInMemoryDexClassLoader(soa, class_loader) ||
         IsDelegateLastClassLoader(soa, class_loader))
      << "Unexpected class loader for descriptor " << descriptor;

  const DexFile* dex_file = nullptr;
  const dex::ClassDef* class_def = nullptr;
  ObjPtr<mirror::Class> ret;
  // Visitor that records the first dex file containing a ClassDef for the
  // descriptor and stops the iteration.
  auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
    if (cp_class_def != nullptr) {
      dex_file = cp_dex_file;
      class_def = cp_class_def;
      return false;  // Found a class definition, stop visit.
    }
    return true;  // Continue with the next DexFile.
  };
  VisitClassLoaderDexFiles(soa, class_loader, find_class_def);

  if (class_def != nullptr) {
    *result = DefineClass(soa.Self(), descriptor, hash, class_loader, *dex_file, *class_def);
    if (UNLIKELY(*result == nullptr)) {
      CHECK(soa.Self()->IsExceptionPending()) << descriptor;
      FilterDexFileCaughtExceptions(soa.Self(), this);
    } else {
      DCHECK(!soa.Self()->IsExceptionPending());
    }
  }
  // A BaseDexClassLoader is always a known lookup.
  return true;
}
2876 
// Finds (initiating a load if necessary) the class with the given descriptor
// in the given class loader. Handles, in order: primitive classes, classes
// already in the class table, boot class path classes, array classes, and the
// general case of walking a recognized BaseDexClassLoader chain natively or
// falling back to calling ClassLoader.loadClass() in Java. On success the
// class is inserted into the initiating loader's class table. Returns null
// with a pending exception on failure.
ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
                                             const char* descriptor,
                                             Handle<mirror::ClassLoader> class_loader) {
  DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
  DCHECK(self != nullptr);
  self->AssertNoPendingException();
  self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
  if (descriptor[1] == '\0') {
    // only the descriptors of primitive types should be 1 character long, also avoid class lookup
    // for primitive classes that aren't backed by dex files.
    return FindPrimitiveClass(descriptor[0]);
  }
  const size_t hash = ComputeModifiedUtf8Hash(descriptor);
  // Find the class in the loaded classes table.
  ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
  if (klass != nullptr) {
    return EnsureResolved(self, descriptor, klass);
  }
  // Class is not yet loaded.
  if (descriptor[0] != '[' && class_loader == nullptr) {
    // Non-array class and the boot class loader, search the boot class path.
    ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
    if (pair.second != nullptr) {
      return DefineClass(self,
                         descriptor,
                         hash,
                         ScopedNullHandle<mirror::ClassLoader>(),
                         *pair.first,
                         *pair.second);
    } else {
      // The boot class loader is searched ahead of the application class loader, failures are
      // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
      // trigger the chaining with a proper stack trace.
      ObjPtr<mirror::Throwable> pre_allocated =
          Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
      self->SetException(pre_allocated);
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> result_ptr;
  // Whether the class the loader returned actually has the requested descriptor.
  bool descriptor_equals;
  if (descriptor[0] == '[') {
    result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
    DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
    DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
    descriptor_equals = true;
  } else {
    ScopedObjectAccessUnchecked soa(self);
    bool known_hierarchy =
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
    if (result_ptr != nullptr) {
      // The chain was understood and we found the class. We still need to add the class to
      // the class table to protect from racy programs that can try and redefine the path list
      // which would change the Class<?> returned for subsequent evaluation of const-class.
      DCHECK(known_hierarchy);
      DCHECK(result_ptr->DescriptorEquals(descriptor));
      descriptor_equals = true;
    } else if (!self->IsExceptionPending()) {
      // Either the chain wasn't understood or the class wasn't found.
      // If there is a pending exception we didn't clear, it is a not a ClassNotFoundException and
      // we should return it instead of silently clearing and retrying.
      //
      // If the chain was understood but we did not find the class, let the Java-side
      // rediscover all this and throw the exception with the right stack trace. Note that
      // the Java-side could still succeed for racy programs if another thread is actively
      // modifying the class loader's path list.

      // The runtime is not allowed to call into java from a runtime-thread so just abort.
      if (self->IsRuntimeThread()) {
        // Oops, we can't call into java so we can't run actual class-loader code.
        // This is true for e.g. for the compiler (jit or aot).
        ObjPtr<mirror::Throwable> pre_allocated =
            Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
        self->SetException(pre_allocated);
        return nullptr;
      }

      // Inlined DescriptorToDot(descriptor) with extra validation.
      //
      // Throw NoClassDefFoundError early rather than potentially load a class only to fail
      // the DescriptorEquals() check below and give a confusing error message. For example,
      // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
      // instead of "Ljava/lang/String;", the message below using the "dot" names would be
      // "class loader [...] returned class java.lang.String instead of java.lang.String".
      size_t descriptor_length = strlen(descriptor);
      if (UNLIKELY(descriptor[0] != 'L') ||
          UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
          UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
        ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
        return nullptr;
      }

      // Convert "Lfoo/Bar;" to the binary name "foo.Bar" expected by loadClass().
      std::string class_name_string(descriptor + 1, descriptor_length - 2);
      std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
      if (known_hierarchy &&
          fast_class_not_found_exceptions_ &&
          !Runtime::Current()->IsJavaDebuggable()) {
        // For known hierarchy, we know that the class is going to throw an exception. If we aren't
        // debuggable, optimize this path by throwing directly here without going back to Java
        // language. This reduces how many ClassNotFoundExceptions happen.
        self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
                                 "%s",
                                 class_name_string.c_str());
      } else {
        ScopedLocalRef<jobject> class_loader_object(
            soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
        ScopedLocalRef<jobject> result(soa.Env(), nullptr);
        {
          ScopedThreadStateChange tsc(self, ThreadState::kNative);
          ScopedLocalRef<jobject> class_name_object(
              soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
          if (class_name_object.get() == nullptr) {
            DCHECK(self->IsExceptionPending());  // OOME.
            return nullptr;
          }
          CHECK(class_loader_object.get() != nullptr);
          result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
                                                   WellKnownClasses::java_lang_ClassLoader_loadClass,
                                                   class_name_object.get()));
        }
        if (result.get() == nullptr && !self->IsExceptionPending()) {
          // broken loader - throw NPE to be compatible with Dalvik
          ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
                                                 class_name_string.c_str()).c_str());
          return nullptr;
        }
        result_ptr = soa.Decode<mirror::Class>(result.get());
        // Check the name of the returned class.
        descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
      }
    } else {
      DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
    }
  }

  if (self->IsExceptionPending()) {
    // If the ClassLoader threw or array class allocation failed, pass that exception up.
    // However, to comply with the RI behavior, first check if another thread succeeded.
    result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
    if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
      self->ClearException();
      return EnsureResolved(self, descriptor, result_ptr);
    }
    return nullptr;
  }

  // Try to insert the class to the class table, checking for mismatch.
  ObjPtr<mirror::Class> old;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
    old = class_table->Lookup(descriptor, hash);
    if (old == nullptr) {
      old = result_ptr;  // For the comparison below, after releasing the lock.
      if (descriptor_equals) {
        class_table->InsertWithHash(result_ptr, hash);
        WriteBarrier::ForEveryFieldWrite(class_loader.Get());
      }  // else throw below, after releasing the lock.
    }
  }
  if (UNLIKELY(old != result_ptr)) {
    // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
    // capable class loaders.  (All class loaders are considered parallel capable on Android.)
    ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
    const char* loader_class_name =
        loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
    LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
        << " is not well-behaved; it returned a different Class for racing loadClass(\""
        << DescriptorToDot(descriptor) << "\").";
    return EnsureResolved(self, descriptor, old);
  }
  if (UNLIKELY(!descriptor_equals)) {
    std::string result_storage;
    const char* result_name = result_ptr->GetDescriptor(&result_storage);
    std::string loader_storage;
    const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
    ThrowNoClassDefFoundError(
        "Initiating class loader of type %s returned class %s instead of %s.",
        DescriptorToDot(loader_class_name).c_str(),
        DescriptorToDot(result_name).c_str(),
        DescriptorToDot(descriptor).c_str());
    return nullptr;
  }
  // Success.
  return result_ptr;
}
3063 
// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
// define-class and how many recursive DefineClasses we are at in order to allow for doing  things
// like pausing class definition.
struct ScopedDefiningClass {
 public:
  // Notifies runtime callbacks that a class definition is starting and bumps
  // the per-thread DefineClass depth.
  explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
      : self_(self), returned_(false) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
    self_->IncrDefineClassCount();
  }
  // The CHECK enforces that one of the Finish() overloads was called before
  // this scope was exited.
  ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    CHECK(returned_);
  }

  // Ends the definition: decrements the depth, notifies callbacks, and returns
  // the (possibly null) class so callers can `return sdc.Finish(...)`.
  ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(!returned_);
    self_->DecrDefineClassCount();
    Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
    Thread::PoisonObjectPointersIfDebug();
    returned_ = true;
    return h_klass.Get();
  }

  // Convenience overload: wraps a raw ObjPtr in a handle before finishing.
  ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(self_);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    return Finish(h_klass);
  }

  // Convenience overload for `return sdc.Finish(nullptr)` on failure paths.
  ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedNullHandle<mirror::Class> snh;
    return Finish(snh);
  }

 private:
  Thread* self_;   // Thread performing the definition.
  bool returned_;  // Set once a Finish() overload has run.
};
3107 
// Defines a class from the given dex class_def: allocates the Class object,
// inserts it into the class table, loads its fields/methods and links it.
// Returns the resolved class; if another thread raced us and inserted the
// same descriptor first, returns that thread's (resolved) class instead.
// Returns null with a pending exception on failure. Every exit path goes
// through `sdc.Finish()` so the Begin/EndDefineClass callbacks stay balanced.
ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
                                               const char* descriptor,
                                               size_t hash,
                                               Handle<mirror::ClassLoader> class_loader,
                                               const DexFile& dex_file,
                                               const dex::ClassDef& dex_class_def) {
  ScopedDefiningClass sdc(self);
  StackHandleScope<3> hs(self);
  metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
  auto klass = hs.NewHandle<mirror::Class>(nullptr);

  // Load the class from the dex file.
  if (UNLIKELY(!init_done_)) {
    // finish up init of hand crafted class_roots_
    if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
      klass.Assign(GetClassRoot<mirror::Object>(this));
    } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
      klass.Assign(GetClassRoot<mirror::Class>(this));
    } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass.Assign(GetClassRoot<mirror::String>(this));
    } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
      klass.Assign(GetClassRoot<mirror::Reference>(this));
    } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
      klass.Assign(GetClassRoot<mirror::DexCache>(this));
    } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
      klass.Assign(GetClassRoot<mirror::ClassExt>(this));
    }
  }

  // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
  // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
  // public class path then we prevent the definition of the class.
  //
  // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
  // classpath is not checked.
  if (class_loader == nullptr &&
      Runtime::Current()->IsAotCompiler() &&
      DenyAccessBasedOnPublicSdk(descriptor)) {
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  if (klass == nullptr) {
    // Allocate a class with the status of not ready.
    // Interface object should get the right size here. Regular class will
    // figure out the right size later and be replaced with one of the right
    // size when the class becomes resolved.
    if (CanAllocClass()) {
      klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
    } else {
      return sdc.Finish(nullptr);
    }
  }
  if (UNLIKELY(klass == nullptr)) {
    self->AssertPendingOOMException();
    return sdc.Finish(nullptr);
  }
  // Get the real dex file. This will return the input if there aren't any callbacks or they do
  // nothing.
  DexFile const* new_dex_file = nullptr;
  dex::ClassDef const* new_class_def = nullptr;
  // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
  // will only be called once.
  Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
                                                            klass,
                                                            class_loader,
                                                            dex_file,
                                                            dex_class_def,
                                                            &new_dex_file,
                                                            &new_class_def);
  // Check to see if an exception happened during runtime callbacks. Return if so.
  if (self->IsExceptionPending()) {
    return sdc.Finish(nullptr);
  }
  // Register the (possibly callback-substituted) dex file; this gives us the dex cache.
  ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
  if (dex_cache == nullptr) {
    self->AssertPendingException();
    return sdc.Finish(nullptr);
  }
  klass->SetDexCache(dex_cache);
  SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());

  // Mark the string class by setting its access flag.
  if (UNLIKELY(!init_done_)) {
    if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass->SetStringClass();
    }
  }

  // Hold the class object lock for the remainder of the definition.
  ObjectLock<mirror::Class> lock(self, klass);
  klass->SetClinitThreadId(self->GetTid());
  // Make sure we have a valid empty iftable even if there are errors.
  klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());

  // Add the newly loaded class to the loaded classes table.
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
  if (existing != nullptr) {
    // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
    // this thread to block.
    return sdc.Finish(EnsureResolved(self, descriptor, existing));
  }

  // Load the fields and other things after we are inserted in the table. This is so that we don't
  // end up allocating unfree-able linear alloc resources and then lose the race condition. The
  // other reason is that the field roots are only visited from the class table. So we need to be
  // inserted before we allocate / fill in these fields.
  LoadClass(self, *new_dex_file, *new_class_def, klass);
  if (self->IsExceptionPending()) {
    VLOG(class_linker) << self->GetException()->Dump();
    // An exception occurred during load, set status to erroneous while holding klass' lock in case
    // notification is necessary.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }

  // Finish loading (if necessary) by finding parents
  CHECK(!klass->IsLoaded());
  if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
    // Loading failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  CHECK(klass->IsLoaded());

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);

  // Link the class (if necessary)
  CHECK(!klass->IsResolved());
  // TODO: Use fast jobjects?
  auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);

  MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
  if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
    // Linking failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  self->AssertNoPendingException();
  CHECK(h_new_class != nullptr) << descriptor;
  CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();

  // Instrumentation may have updated entrypoints for all methods of all
  // classes. However it could not update methods of this class while we
  // were loading it. Now the class is resolved, we can update entrypoints
  // as required by instrumentation.
  if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
    // We must be in the kRunnable state to prevent instrumentation from
    // suspending all threads to update entrypoints while we are doing it
    // for this class.
    DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
    Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
  }

  /*
   * We send CLASS_PREPARE events to the debugger from here.  The
   * definition of "preparation" is creating the static fields for a
   * class and initializing them to the standard default values, but not
   * executing any code (that comes later, during "initialization").
   *
   * We did the static preparation in LinkClass.
   *
   * The class has been prepared and resolved but possibly not yet verified
   * at this point.
   */
  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);

  // Notify native debugger of the new class and its layout.
  jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());

  return sdc.Finish(h_new_class);
}
3300 
SizeOfClassWithoutEmbeddedTables(const DexFile & dex_file,const dex::ClassDef & dex_class_def)3301 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
3302                                                        const dex::ClassDef& dex_class_def) {
3303   size_t num_ref = 0;
3304   size_t num_8 = 0;
3305   size_t num_16 = 0;
3306   size_t num_32 = 0;
3307   size_t num_64 = 0;
3308   ClassAccessor accessor(dex_file, dex_class_def);
3309   // We allow duplicate definitions of the same field in a class_data_item
3310   // but ignore the repeated indexes here, b/21868015.
3311   uint32_t last_field_idx = dex::kDexNoIndex;
3312   for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3313     uint32_t field_idx = field.GetIndex();
3314     // Ordering enforced by DexFileVerifier.
3315     DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3316     if (UNLIKELY(field_idx == last_field_idx)) {
3317       continue;
3318     }
3319     last_field_idx = field_idx;
3320     const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
3321     const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3322     char c = descriptor[0];
3323     switch (c) {
3324       case 'L':
3325       case '[':
3326         num_ref++;
3327         break;
3328       case 'J':
3329       case 'D':
3330         num_64++;
3331         break;
3332       case 'I':
3333       case 'F':
3334         num_32++;
3335         break;
3336       case 'S':
3337       case 'C':
3338         num_16++;
3339         break;
3340       case 'B':
3341       case 'Z':
3342         num_8++;
3343         break;
3344       default:
3345         LOG(FATAL) << "Unknown descriptor: " << c;
3346         UNREACHABLE();
3347     }
3348   }
3349   return mirror::Class::ComputeClassSize(false,
3350                                          0,
3351                                          num_8,
3352                                          num_16,
3353                                          num_32,
3354                                          num_64,
3355                                          num_ref,
3356                                          image_pointer_size_);
3357 }
3358 
// Called once `klass` is visibly initialized (see the DCHECK below): links
// the code of static methods that LinkCode skipped, and flushes any
// registered @CriticalNative entrypoints that were parked behind the clinit
// check into the methods' JNI entrypoints.
void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
  size_t num_direct_methods = klass->NumDirectMethods();
  if (num_direct_methods == 0) {
    return;  // No direct methods => no static methods.
  }
  if (UNLIKELY(klass->IsProxyClass())) {
    // Proxy classes have no code to fix up.
    return;
  }
  PointerSize pointer_size = image_pointer_size_;
  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
                  klass->GetDirectMethods(pointer_size).end(),
                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Walk the map entries whose keys fall inside [first_method, last_method],
    // consuming (erasing) each as its entrypoint is installed.
    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
      lb->first->SetEntryPointFromJni(lb->second);
      lb = critical_native_code_with_clinit_check_.erase(lb);
    }
  }
  Runtime* runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // We should not update entrypoints when running the transactional
    // interpreter.
    return;
  }

  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
  // Link the code of methods skipped by LinkCode.
  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
    if (!method->IsStatic()) {
      // Only update static methods.
      continue;
    }
    instrumentation->UpdateMethodsCode(method, instrumentation->GetCodeForInvoke(method));
  }
  // Ignore virtual methods on the iterator.
}
3403 
3404 // Does anything needed to make sure that the compiler will not generate a direct invoke to this
3405 // method. Should only be called on non-invokable methods.
EnsureThrowsInvocationError(ClassLinker * class_linker,ArtMethod * method)3406 inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3407     REQUIRES_SHARED(Locks::mutator_lock_) {
3408   DCHECK(method != nullptr);
3409   DCHECK(!method->IsInvokable());
3410   method->SetEntryPointFromQuickCompiledCodePtrSize(
3411       class_linker->GetQuickToInterpreterBridgeTrampoline(),
3412       class_linker->GetImagePointerSize());
3413 }
3414 
// Initializes the entrypoints of `method` while its class is being loaded.
// No-op for the AOT compiler. Non-invokable methods are pointed at the
// quick-to-interpreter bridge; others get their quick code from the oat
// class (if present) via the instrumentation, and native methods get the
// dlsym lookup JNI stub as their initial JNI entrypoint.
static void LinkCode(ClassLinker* class_linker,
                     ArtMethod* method,
                     const OatFile::OatClass* oat_class,
                     uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  Runtime* const runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // The following code only applies to a non-compiler runtime.
    return;
  }

  // Method shouldn't have already been linked.
  DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
  DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized());  // Actually ClassStatus::Idx.

  if (!method->IsInvokable()) {
    EnsureThrowsInvocationError(class_linker, method);
    return;
  }

  const void* quick_code = nullptr;
  if (oat_class != nullptr) {
    // Every kind of method should at least get an invoke stub from the oat_method.
    // non-abstract methods also get their code pointers.
    const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
    quick_code = oat_method.GetQuickCode();
  }
  // Let instrumentation pick the actual entrypoint (quick code, interpreter,
  // JIT, ...) based on the available code.
  runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code);

  if (method->IsNative()) {
    // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
    // as the extra processing for @CriticalNative is not needed yet.
    method->SetEntryPointFromJni(
        method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
  }
}
3451 
SetupClass(const DexFile & dex_file,const dex::ClassDef & dex_class_def,Handle<mirror::Class> klass,ObjPtr<mirror::ClassLoader> class_loader)3452 void ClassLinker::SetupClass(const DexFile& dex_file,
3453                              const dex::ClassDef& dex_class_def,
3454                              Handle<mirror::Class> klass,
3455                              ObjPtr<mirror::ClassLoader> class_loader) {
3456   CHECK(klass != nullptr);
3457   CHECK(klass->GetDexCache() != nullptr);
3458   CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
3459   const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
3460   CHECK(descriptor != nullptr);
3461 
3462   klass->SetClass(GetClassRoot<mirror::Class>(this));
3463   uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
3464   CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
3465   klass->SetAccessFlagsDuringLinking(access_flags);
3466   klass->SetClassLoader(class_loader);
3467   DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
3468   mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
3469 
3470   klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
3471   klass->SetDexTypeIndex(dex_class_def.class_idx_);
3472 }
3473 
AllocArtFieldArray(Thread * self,LinearAlloc * allocator,size_t length)3474 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3475                                                                LinearAlloc* allocator,
3476                                                                size_t length) {
3477   if (length == 0) {
3478     return nullptr;
3479   }
3480   // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3481   static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3482   size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3483   void* array_storage = allocator->Alloc(self, storage_size);
3484   auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3485   CHECK(ret != nullptr);
3486   std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3487   return ret;
3488 }
3489 
AllocArtMethodArray(Thread * self,LinearAlloc * allocator,size_t length)3490 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3491                                                                  LinearAlloc* allocator,
3492                                                                  size_t length) {
3493   if (length == 0) {
3494     return nullptr;
3495   }
3496   const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3497   const size_t method_size = ArtMethod::Size(image_pointer_size_);
3498   const size_t storage_size =
3499       LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3500   void* array_storage = allocator->Alloc(self, storage_size);
3501   auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3502   CHECK(ret != nullptr);
3503   for (size_t i = 0; i < length; ++i) {
3504     new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3505   }
3506   return ret;
3507 }
3508 
GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)3509 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3510   if (class_loader == nullptr) {
3511     return Runtime::Current()->GetLinearAlloc();
3512   }
3513   LinearAlloc* allocator = class_loader->GetAllocator();
3514   DCHECK(allocator != nullptr);
3515   return allocator;
3516 }
3517 
GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)3518 LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3519   if (class_loader == nullptr) {
3520     return Runtime::Current()->GetLinearAlloc();
3521   }
3522   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3523   LinearAlloc* allocator = class_loader->GetAllocator();
3524   if (allocator == nullptr) {
3525     RegisterClassLoader(class_loader);
3526     allocator = class_loader->GetAllocator();
3527     CHECK(allocator != nullptr);
3528   }
3529   return allocator;
3530 }
3531 
// Loads the fields and methods of `klass` from its class_data_item:
// allocates the ArtField/ArtMethod arrays in the class loader's linear
// alloc, fills them in, and links method code. Runs after the class has
// been inserted into the class table (see DefineClass) so the new field
// and method roots are visible to the GC via the table.
void ClassLinker::LoadClass(Thread* self,
                            const DexFile& dex_file,
                            const dex::ClassDef& dex_class_def,
                            Handle<mirror::Class> klass) {
  ClassAccessor accessor(dex_file,
                         dex_class_def,
                         /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
  if (!accessor.HasClassData()) {
    return;  // Nothing to load (e.g. a marker interface with no members).
  }
  Runtime* const runtime = Runtime::Current();
  {
    // Note: We cannot have thread suspension until the field and method arrays are setup or else
    // Class::VisitFieldRoots may miss some fields or methods.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    // Load static fields.
    // We allow duplicate definitions of the same field in a class_data_item
    // but ignore the repeated indexes here, b/21868015.
    LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
    LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
                                                                allocator,
                                                                accessor.NumStaticFields());
    LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
                                                                allocator,
                                                                accessor.NumInstanceFields());
    size_t num_sfields = 0u;
    size_t num_ifields = 0u;
    uint32_t last_static_field_idx = 0u;
    uint32_t last_instance_field_idx = 0u;

    // Methods
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
    klass->SetMethodsPtr(
        AllocArtMethodArray(self, allocator, accessor.NumMethods()),
        accessor.NumDirectMethods(),
        accessor.NumVirtualMethods());
    size_t class_def_method_index = 0;
    uint32_t last_dex_method_index = dex::kDexNoIndex;
    size_t last_class_def_method_index = 0;

    uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
    // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the
    // methods needs to decode all of the fields.
    accessor.VisitFieldsAndMethods([&](
        // Static field visitor.
        const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
          uint32_t field_idx = field.GetIndex();
          DCHECK_GE(field_idx, last_static_field_idx);  // Ordering enforced by DexFileVerifier.
          if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
            LoadField(field, klass, &sfields->At(num_sfields));
            ++num_sfields;
            last_static_field_idx = field_idx;
          }
        // Instance field visitor.
        }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
          uint32_t field_idx = field.GetIndex();
          DCHECK_GE(field_idx, last_instance_field_idx);  // Ordering enforced by DexFileVerifier.
          if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
            LoadField(field, klass, &ifields->At(num_ifields));
            ++num_ifields;
            last_instance_field_idx = field_idx;
          }
        // Direct method visitor.
        }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
          ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
              image_pointer_size_);
          LoadMethod(dex_file, method, klass.Get(), art_method);
          LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
          uint32_t it_method_index = method.GetIndex();
          if (last_dex_method_index == it_method_index) {
            // duplicate case
            art_method->SetMethodIndex(last_class_def_method_index);
          } else {
            art_method->SetMethodIndex(class_def_method_index);
            last_dex_method_index = it_method_index;
            last_class_def_method_index = class_def_method_index;
          }
          art_method->ResetCounter(hotness_threshold);
          ++class_def_method_index;
        // Virtual method visitor.
        }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
          ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
              class_def_method_index - accessor.NumDirectMethods(),
              image_pointer_size_);
          art_method->ResetCounter(hotness_threshold);
          LoadMethod(dex_file, method, klass.Get(), art_method);
          LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
          ++class_def_method_index;
        });

    if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
      LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
          << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
          << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
          << ")";
      // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
      if (sfields != nullptr) {
        sfields->SetSize(num_sfields);
      }
      if (ifields != nullptr) {
        ifields->SetSize(num_ifields);
      }
    }
    // Set the field arrays.
    klass->SetSFieldsPtr(sfields);
    DCHECK_EQ(klass->NumStaticFields(), num_sfields);
    klass->SetIFieldsPtr(ifields);
    DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
  }
  // Ensure that the card is marked so that remembered sets pick up native roots.
  WriteBarrier::ForEveryFieldWrite(klass.Get());
  self->AllowThreadSuspension();
}
3645 
LoadField(const ClassAccessor::Field & field,Handle<mirror::Class> klass,ArtField * dst)3646 void ClassLinker::LoadField(const ClassAccessor::Field& field,
3647                             Handle<mirror::Class> klass,
3648                             ArtField* dst) {
3649   const uint32_t field_idx = field.GetIndex();
3650   dst->SetDexFieldIndex(field_idx);
3651   dst->SetDeclaringClass(klass.Get());
3652 
3653   // Get access flags from the DexFile and set hiddenapi runtime access flags.
3654   dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
3655 }
3656 
LoadMethod(const DexFile & dex_file,const ClassAccessor::Method & method,ObjPtr<mirror::Class> klass,ArtMethod * dst)3657 void ClassLinker::LoadMethod(const DexFile& dex_file,
3658                              const ClassAccessor::Method& method,
3659                              ObjPtr<mirror::Class> klass,
3660                              ArtMethod* dst) {
3661   ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3662 
3663   const uint32_t dex_method_idx = method.GetIndex();
3664   const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
3665   uint32_t name_utf16_length;
3666   const char* method_name = dex_file.StringDataAndUtf16LengthByIdx(method_id.name_idx_,
3667                                                                    &name_utf16_length);
3668   std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_));
3669 
3670   dst->SetDexMethodIndex(dex_method_idx);
3671   dst->SetDeclaringClass(klass);
3672 
3673   // Get access flags from the DexFile and set hiddenapi runtime access flags.
3674   uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
3675 
3676   auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
3677                                                          size_t length) ALWAYS_INLINE {
3678     DCHECK_EQ(strlen(ascii_name), length);
3679     return length == name_utf16_length &&
3680            method_name[length] == 0 &&  // Is `method_name` an ASCII string?
3681            memcmp(ascii_name, method_name, length) == 0;
3682   };
3683   if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
3684     // Set finalizable flag on declaring class.
3685     if (shorty == "V") {
3686       // Void return type.
3687       if (klass->GetClassLoader() != nullptr) {  // All non-boot finalizer methods are flagged.
3688         klass->SetFinalizable();
3689       } else {
3690         std::string_view klass_descriptor =
3691             dex_file.GetTypeDescriptorView(dex_file.GetTypeId(klass->GetDexTypeIndex()));
3692         // The Enum class declares a "final" finalize() method to prevent subclasses from
3693         // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
3694         // subclasses, so we exclude it here.
3695         // We also want to avoid setting the flag on Object, where we know that finalize() is
3696         // empty.
3697         if (klass_descriptor != "Ljava/lang/Object;" &&
3698             klass_descriptor != "Ljava/lang/Enum;") {
3699           klass->SetFinalizable();
3700         }
3701       }
3702     }
3703   } else if (method_name[0] == '<') {
3704     // Fix broken access flags for initializers. Bug 11157540.
3705     bool is_init = has_ascii_name("<init>", sizeof("<init>") - 1u);
3706     bool is_clinit = has_ascii_name("<clinit>", sizeof("<clinit>") - 1u);
3707     if (UNLIKELY(!is_init && !is_clinit)) {
3708       LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
3709     } else {
3710       if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
3711         LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
3712             << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
3713         access_flags |= kAccConstructor;
3714       }
3715     }
3716   }
3717 
3718   // Check for nterp invoke fast-path based on shorty.
3719   bool all_parameters_are_reference = true;
3720   bool all_parameters_are_reference_or_int = true;
3721   for (size_t i = 1; i < shorty.length(); ++i) {
3722     if (shorty[i] != 'L') {
3723       all_parameters_are_reference = false;
3724       if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
3725         all_parameters_are_reference_or_int = false;
3726         break;
3727       }
3728     }
3729   }
3730   if (all_parameters_are_reference_or_int && shorty[0] != 'F' && shorty[0] != 'D') {
3731     access_flags |= kAccNterpInvokeFastPathFlag;
3732   }
3733 
3734   if (UNLIKELY((access_flags & kAccNative) != 0u)) {
3735     // Check if the native method is annotated with @FastNative or @CriticalNative.
3736     const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
3737     access_flags |=
3738         annotations::GetNativeMethodAnnotationAccessFlags(dex_file, class_def, dex_method_idx);
3739     dst->SetAccessFlags(access_flags);
3740     DCHECK(!dst->IsAbstract());
3741     DCHECK(!dst->HasCodeItem());
3742     DCHECK_EQ(method.GetCodeItemOffset(), 0u);
3743     dst->SetDataPtrSize(nullptr, image_pointer_size_);  // JNI stub/trampoline not linked yet.
3744   } else if ((access_flags & kAccAbstract) != 0u) {
3745     dst->SetAccessFlags(access_flags);
3746     // Must be done after SetAccessFlags since IsAbstract depends on it.
3747     DCHECK(dst->IsAbstract());
3748     if (klass->IsInterface()) {
3749       dst->CalculateAndSetImtIndex();
3750     }
3751     DCHECK(!dst->HasCodeItem());
3752     DCHECK_EQ(method.GetCodeItemOffset(), 0u);
3753     dst->SetDataPtrSize(nullptr, image_pointer_size_);  // Single implementation not set yet.
3754   } else {
3755     // Check for nterp entry fast-path based on shorty.
3756     if (all_parameters_are_reference) {
3757       access_flags |= kAccNterpEntryPointFastPathFlag;
3758     }
3759     const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
3760     if (annotations::MethodIsNeverCompile(dex_file, class_def, dex_method_idx)) {
3761       access_flags |= kAccCompileDontBother;
3762     }
3763     dst->SetAccessFlags(access_flags);
3764     DCHECK(!dst->IsAbstract());
3765     DCHECK(dst->HasCodeItem());
3766     uint32_t code_item_offset = method.GetCodeItemOffset();
3767     DCHECK_NE(code_item_offset, 0u);
3768     if (Runtime::Current()->IsAotCompiler()) {
3769       dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_);
3770     } else {
3771       dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset), dex_file.IsCompactDexFile());
3772     }
3773   }
3774 
3775   if (Runtime::Current()->IsZygote() &&
3776       !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
3777     dst->SetMemorySharedMethod();
3778   }
3779 }
3780 
AppendToBootClassPath(Thread * self,const DexFile * dex_file)3781 void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
3782   ObjPtr<mirror::DexCache> dex_cache =
3783       AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
3784   CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
3785   AppendToBootClassPath(dex_file, dex_cache);
3786 }
3787 
void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache) {
  // Appends `dex_file` to the in-memory boot class path and registers its
  // (already initialized) dex cache. Boot class path implies a null class loader.
  CHECK(dex_file != nullptr);
  CHECK(dex_cache != nullptr) << dex_file->GetLocation();
  CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
  boot_class_path_.push_back(dex_file);
  // RegisterDexFileLocked() requires the dex lock to be held exclusively.
  WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
}
3797 
void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache,
                                        ObjPtr<mirror::ClassLoader> class_loader) {
  // Records `dex_cache` as the registered cache for `dex_file` under
  // `class_loader`. Caller must hold Locks::dex_lock_ exclusively, and
  // `dex_cache` must already be initialized for `dex_file`. Each dex file may
  // be registered at most once (enforced by the CHECK on insertion below).
  Thread* const self = Thread::Current();
  Locks::dex_lock_->AssertExclusiveHeld(self);
  CHECK(dex_cache != nullptr) << dex_file.GetLocation();
  CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
  // For app images, the dex cache location may be a suffix of the dex file location since the
  // dex file location is an absolute path.
  const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
  const size_t dex_cache_length = dex_cache_location.length();
  CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
  std::string dex_file_location = dex_file.GetLocation();
  // The following paths checks don't work on preopt when using boot dex files, where the dex
  // cache location is the one on device, and the dex_file's location is the one on host.
  if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
    CHECK_GE(dex_file_location.length(), dex_cache_length)
        << dex_cache_location << " " << dex_file.GetLocation();
    const std::string dex_file_suffix = dex_file_location.substr(
        dex_file_location.length() - dex_cache_length,
        dex_cache_length);
    // Example dex_cache location is SettingsProvider.apk and
    // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
    CHECK_EQ(dex_cache_location, dex_file_suffix);
  }
  const OatFile* oat_file =
      (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
  // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
  // and we are lazily removing null entries. Also check if we need to initialize OatFile data
  // (.data.bimg.rel.ro and .bss sections) needed for code execution.
  bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
  JavaVMExt* const vm = self->GetJniEnv()->GetVm();
  for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
    const DexCacheData& data = it->second;
    if (self->IsJWeakCleared(data.weak_root)) {
      // The dex cache was collected; drop the stale weak global and map entry.
      vm->DeleteWeakGlobalRef(self, data.weak_root);
      it = dex_caches_.erase(it);
    } else {
      if (initialize_oat_file_data &&
          it->first->GetOatDexFile() != nullptr &&
          it->first->GetOatDexFile()->GetOatFile() == oat_file) {
        initialize_oat_file_data = false;  // Already initialized.
      }
      ++it;
    }
  }
  if (initialize_oat_file_data) {
    // No other registered dex file shares this oat file; set up its relocations.
    oat_file->InitializeRelocations();
  }
  // Let hiddenapi assign a domain to the newly registered dex file.
  hiddenapi::InitializeDexFileDomain(dex_file, class_loader);

  // Track the dex cache via a weak global so unloading can clear it.
  jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
  DexCacheData data;
  data.weak_root = dex_cache_jweak;
  data.class_table = ClassTableForClassLoader(class_loader);
  AddNativeDebugInfoForDex(self, &dex_file);
  DCHECK(data.class_table != nullptr);
  // Make sure to hold the dex cache live in the class table. This case happens for the boot class
  // path dex caches without an image.
  data.class_table->InsertStrongRoot(dex_cache);
  // Make sure that the dex cache holds the classloader live.
  dex_cache->SetClassLoader(class_loader);
  if (class_loader != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
  bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
  CHECK(inserted);
}
3869 
DecodeDexCacheLocked(Thread * self,const DexCacheData * data)3870 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
3871   return data != nullptr
3872       ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
3873       : nullptr;
3874 }
3875 
IsSameClassLoader(ObjPtr<mirror::DexCache> dex_cache,const DexCacheData * data,ObjPtr<mirror::ClassLoader> class_loader)3876 bool ClassLinker::IsSameClassLoader(
3877     ObjPtr<mirror::DexCache> dex_cache,
3878     const DexCacheData* data,
3879     ObjPtr<mirror::ClassLoader> class_loader) {
3880   CHECK(data != nullptr);
3881   DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
3882   return data->class_table == ClassTableForClassLoader(class_loader);
3883 }
3884 
void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader) {
  // Registers an already-constructed dex cache with `class_loader`.
  // Precondition (debug-checked): the cache's dex file has no live
  // registration yet.
  SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  const DexFile* dex_file = dex_cache->GetDexFile();
  DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
  if (kIsDebugBuild) {
    // Verify no live registration exists for this dex file.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
    ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
    DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache thats already "
                                   << "been registered on dex file " << dex_file->GetLocation();
  }
  ClassTable* table;
  {
    // Obtain (or create) the loader's class table before taking the dex lock.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(h_class_loader.Get());
  }
  // Avoid a deadlock between a garbage collecting thread running a checkpoint,
  // a thread holding the dex lock and blocking on a condition variable regarding
  // weak references access, and a thread blocking on the dex lock.
  gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
  WriterMutexLock mu(self, *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
  // Keep the dex cache reachable from the loader's class table.
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
}
3919 
// Raises InternalError on `self`: a dex file may only be registered with a
// single class loader (see RegisterDexFile below).
static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                           "Attempt to register dex file %s with multiple class loaders",
                           dex_file.GetLocation().c_str());
}
3926 
ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
                                                      ObjPtr<mirror::ClassLoader> class_loader) {
  // Returns the dex cache registered for `dex_file` under `class_loader`,
  // allocating, initializing and registering a new one if needed. Returns null
  // with a pending exception if the dex file is already registered with a
  // different class loader, or if dex cache allocation hit OOME.
  Thread* self = Thread::Current();
  ObjPtr<mirror::DexCache> old_dex_cache;
  bool registered_with_another_class_loader = false;
  {
    // Fast path: look for an existing registration under the reader lock.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache != nullptr) {
      if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
        return old_dex_cache;
      } else {
        // TODO This is not very clean looking. Should maybe try to make a way to request exceptions
        // be thrown when it's safe to do so to simplify this.
        registered_with_another_class_loader = true;
      }
    }
  }
  // We need to have released the dex_lock_ to allocate safely.
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
  LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
  DCHECK(linear_alloc != nullptr);
  ClassTable* table;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(class_loader);
  }
  // Don't alloc while holding the lock, since allocation may need to
  // suspend all threads and another thread may need the dex_lock_ to
  // get to a suspend point.
  StackHandleScope<3> hs(self);
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
  {
    // Avoid a deadlock between a garbage collecting thread running a checkpoint,
    // a thread holding the dex lock and blocking on a condition variable regarding
    // weak references access, and a thread blocking on the dex lock.
    gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
    WriterMutexLock mu(self, *Locks::dex_lock_);
    // Re-check under the writer lock: another thread may have registered the
    // dex file while we were allocating.
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
      // Do Initialize while holding dex lock to make sure two threads don't call it
      // at the same time with the same dex cache. Since the .bss is shared this can cause failing
      // DCHECK that the arrays are null.
      h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
      RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
    }
    if (old_dex_cache != nullptr) {
      // Another thread managed to initialize the dex cache faster, so use that DexCache.
      // If this thread encountered OOME, ignore it.
      DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
      self->ClearException();
      // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
      // dex_lock_.
      if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
        return old_dex_cache;
      } else {
        registered_with_another_class_loader = true;
      }
    }
  }
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  if (h_dex_cache == nullptr) {
    // Dex cache allocation failed; propagate the pending OOME.
    self->AssertPendingOOMException();
    return nullptr;
  }
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
  VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
  PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
  return h_dex_cache.Get();
}
4012 
IsDexFileRegistered(Thread * self,const DexFile & dex_file)4013 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4014   ReaderMutexLock mu(self, *Locks::dex_lock_);
4015   return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4016 }
4017 
FindDexCache(Thread * self,const DexFile & dex_file)4018 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4019   ReaderMutexLock mu(self, *Locks::dex_lock_);
4020   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4021   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4022   if (dex_cache != nullptr) {
4023     return dex_cache;
4024   }
4025   // Failure, dump diagnostic and abort.
4026   for (const auto& entry : dex_caches_) {
4027     const DexCacheData& data = entry.second;
4028     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4029       LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4030     }
4031   }
4032   LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4033              << " " << &dex_file;
4034   UNREACHABLE();
4035 }
4036 
FindDexCache(Thread * self,const OatDexFile & oat_dex_file)4037 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
4038   ReaderMutexLock mu(self, *Locks::dex_lock_);
4039   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4040   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4041   if (dex_cache != nullptr) {
4042     return dex_cache;
4043   }
4044   // Failure, dump diagnostic and abort.
4045   for (const auto& entry : dex_caches_) {
4046     const DexCacheData& data = entry.second;
4047     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4048       LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4049     }
4050   }
4051   LOG(FATAL) << "Failed to find DexCache for OatDexFile " << oat_dex_file.GetDexFileLocation()
4052              << " " << &oat_dex_file;
4053   UNREACHABLE();
4054 }
4055 
FindClassTable(Thread * self,ObjPtr<mirror::DexCache> dex_cache)4056 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4057   const DexFile* dex_file = dex_cache->GetDexFile();
4058   DCHECK(dex_file != nullptr);
4059   ReaderMutexLock mu(self, *Locks::dex_lock_);
4060   auto it = dex_caches_.find(dex_file);
4061   if (it != dex_caches_.end()) {
4062     const DexCacheData& data = it->second;
4063     ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4064     if (registered_dex_cache != nullptr) {
4065       CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4066       return data.class_table;
4067     }
4068   }
4069   return nullptr;
4070 }
4071 
FindDexCacheDataLocked(const OatDexFile & oat_dex_file)4072 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
4073     const OatDexFile& oat_dex_file) {
4074   auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4075     return entry.first->GetOatDexFile() == &oat_dex_file;
4076   });
4077   return it != dex_caches_.end() ? &it->second : nullptr;
4078 }
4079 
FindDexCacheDataLocked(const DexFile & dex_file)4080 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4081   auto it = dex_caches_.find(&dex_file);
4082   return it != dex_caches_.end() ? &it->second : nullptr;
4083 }
4084 
void ClassLinker::CreatePrimitiveClass(Thread* self,
                                       Primitive::Type type,
                                       ClassRoot primitive_root) {
  // Allocates and fully initializes the class object for primitive `type`,
  // inserts it into the boot class table and publishes it as class root
  // `primitive_root`. Runs during single-threaded startup.
  ObjPtr<mirror::Class> primitive_class =
      AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
  CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
  // Do not hold lock on the primitive class object, the initialization of
  // primitive classes is done while the process is still single threaded.
  primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
  primitive_class->SetPrimitiveType(type);
  // Primitives share java.lang.Object's interface table.
  primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
  DCHECK_EQ(primitive_class->NumMethods(), 0u);
  // Primitive classes are initialized during single threaded startup, so visibly initialized.
  primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
  const char* descriptor = Primitive::Descriptor(type);
  ObjPtr<mirror::Class> existing = InsertClass(descriptor,
                                               primitive_class,
                                               ComputeModifiedUtf8Hash(descriptor));
  // No race is possible during startup, so insertion must succeed.
  CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
  SetClassRoot(primitive_root, primitive_class);
}
4106 
GetArrayIfTable()4107 inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4108   return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4109 }
4110 
4111 // Create an array class (i.e. the class object for the array, not the
4112 // array itself).  "descriptor" looks like "[C" or "[[[[B" or
4113 // "[Ljava/lang/String;".
4114 //
4115 // If "descriptor" refers to an array of primitives, look up the
4116 // primitive type's internally-generated class object.
4117 //
4118 // "class_loader" is the class loader of the class that's referring to
4119 // us.  It's used to ensure that we're looking for the element type in
4120 // the right context.  It does NOT become the class loader for the
4121 // array class; that always comes from the base element class.
4122 //
4123 // Returns null with an exception raised on failure.
ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
                                                    const char* descriptor,
                                                    size_t hash,
                                                    Handle<mirror::ClassLoader> class_loader) {
  // See the block comment above for the full contract: resolves/loads the
  // component type, then allocates and publishes the array class. Returns null
  // with a pending exception on failure.
  // Identify the underlying component type
  CHECK_EQ('[', descriptor[0]);
  StackHandleScope<2> hs(self);

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return nullptr;
  }

  // `descriptor + 1` strips the leading '[', leaving the component descriptor.
  MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
                                                                     class_loader)));
  if (component_type == nullptr) {
    DCHECK(self->IsExceptionPending());
    // We need to accept erroneous classes as component types.
    const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
    component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
    if (component_type == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    } else {
      self->ClearException();
    }
  }
  if (UNLIKELY(component_type->IsPrimitiveVoid())) {
    ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
    return nullptr;
  }
  // See if the component type is already loaded.  Array classes are
  // always associated with the class loader of their underlying
  // element type -- an array of Strings goes with the loader for
  // java/lang/String -- so we need to look for it there.  (The
  // caller should have checked for the existence of the class
  // before calling here, but they did so with *their* class loader,
  // not the component type's loader.)
  //
  // If we find it, the caller adds "loader" to the class' initiating
  // loader list, which should prevent us from going through this again.
  //
  // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
  // are the same, because our caller (FindClass) just did the
  // lookup.  (Even if we get this wrong we still have correct behavior,
  // because we effectively do this lookup again when we add the new
  // class to the hash table --- necessary because of possible races with
  // other threads.)
  if (class_loader.Get() != component_type->GetClassLoader()) {
    ObjPtr<mirror::Class> new_class =
        LookupClass(self, descriptor, hash, component_type->GetClassLoader());
    if (new_class != nullptr) {
      return new_class;
    }
  }
  // Core array classes, i.e. Object[], Class[], String[] and primitive
  // arrays, have special initialization and they should be found above.
  DCHECK_IMPLIES(component_type->IsObjectClass(),
                 // Guard from false positives for errors before setting superclass.
                 component_type->IsErroneousUnresolved());
  DCHECK(!component_type->IsStringClass());
  DCHECK(!component_type->IsClassClass());
  DCHECK(!component_type->IsPrimitive());

  // Fill out the fields in the Class.
  //
  // It is possible to execute some methods against arrays, because
  // all arrays are subclasses of java_lang_Object_, so we need to set
  // up a vtable.  We can just point at the one in java_lang_Object_.
  //
  // Array classes are simple enough that we don't need to do a full
  // link step.
  size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
  // The visitor initializes the class object in-place during allocation, so
  // the array class is never observed in an uninitialized state.
  auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
                                                          size_t usable_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
    mirror::Class::InitializeClassVisitor init_class(array_class_size);
    init_class(obj, usable_size);
    ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
    klass->SetComponentType(component_type.Get());
    // Do not hold lock for initialization, the fence issued after the visitor
    // returns ensures memory visibility together with the implicit consume
    // semantics (for all supported architectures) for any thread that loads
    // the array class reference from any memory locations afterwards.
    FinishArrayClassSetup(klass);
  };
  auto new_class = hs.NewHandle<mirror::Class>(
      AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
  if (new_class == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }

  ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
  if (existing == nullptr) {
    // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
    // duplicate events in case of races. Array classes don't really follow dedicated
    // load and prepare, anyways.
    Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
    Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);

    jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
    return new_class.Get();
  }
  // Another thread must have loaded the class after we
  // started but before we finished.  Abandon what we've
  // done.
  //
  // (Yes, this happens.)

  return existing;
}
4243 
LookupPrimitiveClass(char type)4244 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4245   ClassRoot class_root;
4246   switch (type) {
4247     case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4248     case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4249     case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4250     case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4251     case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4252     case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4253     case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4254     case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4255     case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4256     default:
4257       return nullptr;
4258   }
4259   return GetClassRoot(class_root, this);
4260 }
4261 
FindPrimitiveClass(char type)4262 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4263   ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4264   if (UNLIKELY(result == nullptr)) {
4265     std::string printable_type(PrintableChar(type));
4266     ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4267   }
4268   return result;
4269 }
4270 
ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
                                               ObjPtr<mirror::Class> klass,
                                               size_t hash) {
  // Inserts `klass` into its class loader's class table under `descriptor`.
  // Returns null on successful insertion, or the previously inserted class if
  // another thread won the race (the caller should use that one instead).
  DCHECK(Thread::Current()->CanLoadClasses());
  if (VLOG_IS_ON(class_linker)) {
    ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
    std::string source;
    if (dex_cache != nullptr) {
      source += " from ";
      source += dex_cache->GetLocation()->ToModifiedUtf8();
    }
    LOG(INFO) << "Loaded class " << descriptor << source;
  }
  {
    WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
    const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
    // Re-check under the lock; a racing thread may have inserted already.
    ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
    if (existing != nullptr) {
      return existing;
    }
    VerifyObject(klass);
    class_table->InsertWithHash(klass, hash);
    if (class_loader != nullptr) {
      // This is necessary because we need to have the card dirtied for remembered sets.
      WriteBarrier::ForEveryFieldWrite(class_loader);
    }
    if (log_new_roots_) {
      new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
    }
  }
  if (kIsDebugBuild) {
    // Test that copied methods correctly can find their holder.
    for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
      CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
    }
  }
  return nullptr;
}
4310 
void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
  // Records `oat_file` in the list of boot oat files with new .bss GC roots
  // (when root logging is enabled), so its roots get revisited.
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  // Callers are expected to invoke this only for oat files that have .bss roots.
  DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
  if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
    new_bss_roots_boot_oat_files_.push_back(oat_file);
  }
}
4318 
4319 // TODO This should really be in mirror::Class.
UpdateClassMethods(ObjPtr<mirror::Class> klass,LengthPrefixedArray<ArtMethod> * new_methods)4320 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
4321                                      LengthPrefixedArray<ArtMethod>* new_methods) {
4322   klass->SetMethodsPtrUnchecked(new_methods,
4323                                 klass->NumDirectMethods(),
4324                                 klass->NumDeclaredVirtualMethods());
4325   // Need to mark the card so that the remembered sets and mod union tables get updated.
4326   WriteBarrier::ForEveryFieldWrite(klass);
4327 }
4328 
LookupClass(Thread * self,const char * descriptor,ObjPtr<mirror::ClassLoader> class_loader)4329 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4330                                                const char* descriptor,
4331                                                ObjPtr<mirror::ClassLoader> class_loader) {
4332   return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4333 }
4334 
LookupClass(Thread * self,const char * descriptor,size_t hash,ObjPtr<mirror::ClassLoader> class_loader)4335 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4336                                                const char* descriptor,
4337                                                size_t hash,
4338                                                ObjPtr<mirror::ClassLoader> class_loader) {
4339   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4340   ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4341   if (class_table != nullptr) {
4342     ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
4343     if (result != nullptr) {
4344       return result;
4345     }
4346   }
4347   return nullptr;
4348 }
4349 
4350 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4351  public:
MoveClassTableToPreZygoteVisitor()4352   MoveClassTableToPreZygoteVisitor() {}
4353 
Visit(ObjPtr<mirror::ClassLoader> class_loader)4354   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4355       REQUIRES(Locks::classlinker_classes_lock_)
4356       REQUIRES_SHARED(Locks::mutator_lock_) override {
4357     ClassTable* const class_table = class_loader->GetClassTable();
4358     if (class_table != nullptr) {
4359       class_table->FreezeSnapshot();
4360     }
4361   }
4362 };
4363 
MoveClassTableToPreZygote()4364 void ClassLinker::MoveClassTableToPreZygote() {
4365   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4366   boot_class_table_->FreezeSnapshot();
4367   MoveClassTableToPreZygoteVisitor visitor;
4368   VisitClassLoaders(&visitor);
4369 }
4370 
4371 // Look up classes by hash and descriptor and put all matching ones in the result array.
4372 class LookupClassesVisitor : public ClassLoaderVisitor {
4373  public:
LookupClassesVisitor(const char * descriptor,size_t hash,std::vector<ObjPtr<mirror::Class>> * result)4374   LookupClassesVisitor(const char* descriptor,
4375                        size_t hash,
4376                        std::vector<ObjPtr<mirror::Class>>* result)
4377      : descriptor_(descriptor),
4378        hash_(hash),
4379        result_(result) {}
4380 
Visit(ObjPtr<mirror::ClassLoader> class_loader)4381   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4382       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
4383     ClassTable* const class_table = class_loader->GetClassTable();
4384     ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
4385     // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4386     if (klass != nullptr && klass->GetClassLoader() == class_loader) {
4387       result_->push_back(klass);
4388     }
4389   }
4390 
4391  private:
4392   const char* const descriptor_;
4393   const size_t hash_;
4394   std::vector<ObjPtr<mirror::Class>>* const result_;
4395 };
4396 
LookupClasses(const char * descriptor,std::vector<ObjPtr<mirror::Class>> & result)4397 void ClassLinker::LookupClasses(const char* descriptor,
4398                                 std::vector<ObjPtr<mirror::Class>>& result) {
4399   result.clear();
4400   Thread* const self = Thread::Current();
4401   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4402   const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4403   ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
4404   if (klass != nullptr) {
4405     DCHECK(klass->GetClassLoader() == nullptr);
4406     result.push_back(klass);
4407   }
4408   LookupClassesVisitor visitor(descriptor, hash, &result);
4409   VisitClassLoaders(&visitor);
4410 }
4411 
// Verify `supertype` (a superclass or default-method interface of `klass`) if
// it has not been verified yet, and check the outcome. Returns true when the
// supertype is usable: verified, or soft-failed during AOT compilation (it
// will be re-verified at runtime). On hard failure: marks `klass` erroneous,
// throws a VerifyError (chaining any pending exception as its cause), notifies
// the compiler callbacks when AOT compiling, and returns false.
bool ClassLinker::AttemptSupertypeVerification(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               Handle<mirror::Class> supertype) {
  DCHECK(self != nullptr);
  DCHECK(klass != nullptr);
  DCHECK(supertype != nullptr);

  if (!supertype->IsVerified() && !supertype->IsErroneous()) {
    VerifyClass(self, verifier_deps, supertype);
  }

  if (supertype->IsVerified()
      || supertype->ShouldVerifyAtRuntime()
      || supertype->IsVerifiedNeedsAccessChecks()) {
    // The supertype is either verified, or we soft failed at AOT time.
    DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
    return true;
  }
  // If we got this far then we have a hard failure.
  std::string error_msg =
      StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
                   klass->PrettyDescriptor().c_str(),
                   supertype->PrettyDescriptor().c_str());
  LOG(WARNING) << error_msg  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
  StackHandleScope<1> hs(self);
  // Save any pending exception so we can re-attach it as the cause below.
  Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
  if (cause != nullptr) {
    // Set during VerifyClass call (if at all).
    self->ClearException();
  }
  // Change into a verify error.
  ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
  if (cause != nullptr) {
    // Chain the original failure onto the freshly thrown VerifyError.
    self->GetException()->SetCause(cause.Get());
  }
  ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
  if (Runtime::Current()->IsAotCompiler()) {
    // Tell the compiler driver this class must not be compiled.
    Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
  }
  // Need to grab the lock to change status.
  ObjectLock<mirror::Class> super_lock(self, klass);
  mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
  return false;
}
4457 
// Verify `klass`, driving its ClassStatus transitions. Waits while another
// thread is verifying the same class, short-circuits when the class is already
// verified or erroneous, verifies the superclass and default-method
// super-interfaces first, then tries to reuse the compile-time verification
// result from the oat/vdex file before falling back to the runtime verifier.
// On hard failure the class is marked erroneous and a VerifyError is thrown;
// when AOT compiling, soft failures are recorded for retry at runtime.
verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               verifier::HardFailLogMode log_level) {
  {
    // TODO: assert that the monitor on the Class is held
    ObjectLock<mirror::Class> lock(self, klass);

    // Is somebody verifying this now?
    ClassStatus old_status = klass->GetStatus();
    while (old_status == ClassStatus::kVerifying) {
      lock.WaitIgnoringInterrupts();
      // WaitIgnoringInterrupts can still receive an interrupt and return early, in this
      // case we may see the same status again. b/62912904. This is why the check is
      // greater or equal.
      CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
          << "Class '" << klass->PrettyClass()
          << "' performed an illegal verification state transition from " << old_status
          << " to " << klass->GetStatus();
      old_status = klass->GetStatus();
    }

    // The class might already be erroneous, for example at compile time if we attempted to verify
    // this class as a parent to another.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get());
      return verifier::FailureKind::kHardFailure;
    }

    // Don't attempt to re-verify if already verified.
    if (klass->IsVerified()) {
      if (verifier_deps != nullptr &&
          verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
          !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
          !Runtime::Current()->IsAotCompiler()) {
        // If the klass is verified, but `verifier_deps` did not record it, this
        // means we are running background verification of a secondary dex file.
        // Re-run the verifier to populate `verifier_deps`.
        // No need to run the verification when running on the AOT Compiler, as
        // the driver handles those multithreaded cases already.
        std::string error_msg;
        verifier::FailureKind failure =
            PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
        // We could have soft failures, so just check that we don't have a hard
        // failure.
        DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
      }
      return verifier::FailureKind::kNoFailure;
    }

    if (klass->IsVerifiedNeedsAccessChecks()) {
      if (!Runtime::Current()->IsAotCompiler()) {
        // Mark the class as having a verification attempt to avoid re-running
        // the verifier.
        mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      }
      return verifier::FailureKind::kAccessChecksFailure;
    }

    // For AOT, don't attempt to re-verify if we have already found we should
    // verify at runtime.
    if (klass->ShouldVerifyAtRuntime()) {
      CHECK(Runtime::Current()->IsAotCompiler());
      return verifier::FailureKind::kSoftFailure;
    }

    // Claim the class for this thread: other threads hitting the kVerifying
    // status above will wait on the class monitor.
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
    mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);

    // Skip verification if disabled.
    if (!Runtime::Current()->IsVerificationEnabled()) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
      return verifier::FailureKind::kNoFailure;
    }
  }

  // NOTE(review): the class monitor is released here; the class stays in
  // kVerifying while supertypes are processed below.
  VLOG(class_linker) << "Beginning verification for class: "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();

  // Verify super class.
  StackHandleScope<2> hs(self);
  MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
  // If we have a superclass and we get a hard verification failure we can return immediately.
  if (supertype != nullptr &&
      !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
    CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
    return verifier::FailureKind::kHardFailure;
  }

  // Verify all default super-interfaces.
  //
  // (1) Don't bother if the superclass has already had a soft verification failure.
  //
  // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
  //     recursive initialization by themselves. This is because when an interface is initialized
  //     directly it must not initialize its superinterfaces. We are allowed to verify regardless
  //     but choose not to for an optimization. If the interfaces is being verified due to a class
  //     initialization (which would need all the default interfaces to be verified) the class code
  //     will trigger the recursive verification anyway.
  if ((supertype == nullptr || supertype->IsVerified())  // See (1)
      && !klass->IsInterface()) {                              // See (2)
    int32_t iftable_count = klass->GetIfTableCount();
    MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
    // Loop through all interfaces this class has defined. It doesn't matter the order.
    for (int32_t i = 0; i < iftable_count; i++) {
      iface.Assign(klass->GetIfTable()->GetInterface(i));
      DCHECK(iface != nullptr);
      // We only care if we have default interfaces and can skip if we are already verified...
      if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
        continue;
      } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
        // We had a hard failure while verifying this interface. Just return immediately.
        CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
        return verifier::FailureKind::kHardFailure;
      } else if (UNLIKELY(!iface->IsVerified())) {
        // We softly failed to verify the iface. Stop checking and clean up.
        // Put the iface into the supertype handle so we know what caused us to fail.
        supertype.Assign(iface.Get());
        break;
      }
    }
  }

  // At this point if verification failed, then supertype is the "first" supertype that failed
  // verification (without a specific order). If verification succeeded, then supertype is either
  // null or the original superclass of klass and is verified.
  DCHECK(supertype == nullptr ||
         supertype.Get() == klass->GetSuperClass() ||
         !supertype->IsVerified());

  // Try to use verification information from the oat file, otherwise do runtime verification.
  const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
  ClassStatus oat_file_class_status(ClassStatus::kNotReady);
  bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);

  VLOG(class_linker) << "Class preverified status for class "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                     << ": "
                     << preverified
                     << "( " << oat_file_class_status << ")";

  // If the oat file says the class had an error, re-run the verifier. That way we will either:
  // 1) Be successful at runtime, or
  // 2) Get a precise error message.
  DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);

  std::string error_msg;
  verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
  if (!preverified) {
    verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
  } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
    verifier_failure = verifier::FailureKind::kAccessChecksFailure;
  }

  // Verification is done, grab the lock again.
  ObjectLock<mirror::Class> lock(self, klass);
  self->AssertNoPendingException();

  if (verifier_failure == verifier::FailureKind::kHardFailure) {
    VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
                  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                  << " because: " << error_msg;
    ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
    mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
    return verifier_failure;
  }

  // Make sure all classes referenced by catch blocks are resolved.
  ResolveClassExceptionHandlerTypes(klass);

  if (Runtime::Current()->IsAotCompiler()) {
    if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
      // Regardless of our own verification result, we need to verify the class
      // at runtime if the super class is not verified. This is required in case
      // we generate an app/boot image.
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
    } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
               verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else {
      mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
    }
    // Notify the compiler about the verification status, in case the class
    // was verified implicitly (eg super class of a compiled class). When the
    // compiler unloads dex file after compilation, we still want to keep
    // verification states.
    Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
        ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
  } else {
    mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
  }

  UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
  return verifier_failure;
}
4658 
PerformClassVerification(Thread * self,verifier::VerifierDeps * verifier_deps,Handle<mirror::Class> klass,verifier::HardFailLogMode log_level,std::string * error_msg)4659 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
4660                                                             verifier::VerifierDeps* verifier_deps,
4661                                                             Handle<mirror::Class> klass,
4662                                                             verifier::HardFailLogMode log_level,
4663                                                             std::string* error_msg) {
4664   Runtime* const runtime = Runtime::Current();
4665   StackHandleScope<2> hs(self);
4666   Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
4667   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
4668   return verifier::ClassVerifier::VerifyClass(self,
4669                                               verifier_deps,
4670                                               dex_cache->GetDexFile(),
4671                                               klass,
4672                                               dex_cache,
4673                                               class_loader,
4674                                               *klass->GetClassDef(),
4675                                               runtime->GetCompilerCallbacks(),
4676                                               log_level,
4677                                               Runtime::Current()->GetTargetSdkVersion(),
4678                                               error_msg);
4679 }
4680 
// Try to establish the verification status of `klass` from its backing
// oat/vdex file instead of running the verifier. Returns true when a usable
// compile-time status was found and stores it in `oat_file_class_status`;
// returns false when the class must be verified at runtime (no oat file,
// compile-time error, or status not recorded).
bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
                                          const DexFile& dex_file,
                                          Handle<mirror::Class> klass,
                                          ClassStatus& oat_file_class_status) {
  // If we're compiling, we can only verify the class using the oat file if
  // we are not compiling the image or if the class we're verifying is not part of
  // the compilation unit (app - dependencies). We will let the compiler callback
  // tell us about the latter.
  if (Runtime::Current()->IsAotCompiler()) {
    CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
    // We are compiling an app (not the image).
    if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
      return false;
    }
  }

  const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
  // In case we run without an image there won't be a backing oat file.
  if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
    return false;
  }

  uint16_t class_def_index = klass->GetDexClassDefIndex();
  oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
  if (oat_file_class_status >= ClassStatus::kVerified) {
    return true;
  }
  if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
    // We return that the class has already been verified, and the caller should
    // check the class status to ensure we run with access checks.
    return true;
  }

  // Check the class status with the vdex file.
  const OatFile* oat_file = oat_dex_file->GetOatFile();
  if (oat_file != nullptr) {
    ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
    if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
      VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
      oat_file_class_status = vdex_status;
      return true;
    }
  }

  // If we only verified a subset of the classes at compile time, we can end up with classes that
  // were resolved by the verifier.
  if (oat_file_class_status == ClassStatus::kResolved) {
    return false;
  }
  // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
  CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
      << klass->PrettyClass() << " " << dex_file.GetLocation();

  if (mirror::Class::IsErroneous(oat_file_class_status)) {
    // Compile time verification failed with a hard error. We'll re-run
    // verification, which might be successful at runtime.
    return false;
  }
  if (oat_file_class_status == ClassStatus::kNotReady) {
    // Status is uninitialized if we couldn't determine the status at compile time, for example,
    // not loading the class.
    // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
    // isn't a problem and this case shouldn't occur
    return false;
  }
  // Any other status is unexpected coming out of an oat file; fail loudly.
  std::string temp;
  LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
             << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
             << klass->GetDescriptor(&temp);
  UNREACHABLE();
}
4752 
ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass)4753 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
4754   for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
4755     ResolveMethodExceptionHandlerTypes(&method);
4756   }
4757 }
4758 
ResolveMethodExceptionHandlerTypes(ArtMethod * method)4759 void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
4760   // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
4761   CodeItemDataAccessor accessor(method->DexInstructionData());
4762   if (!accessor.HasCodeItem()) {
4763     return;  // native or abstract method
4764   }
4765   if (accessor.TriesSize() == 0) {
4766     return;  // nothing to process
4767   }
4768   const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
4769   uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
4770   for (uint32_t idx = 0; idx < handlers_size; idx++) {
4771     CatchHandlerIterator iterator(handlers_ptr);
4772     for (; iterator.HasNext(); iterator.Next()) {
4773       // Ensure exception types are resolved so that they don't need resolution to be delivered,
4774       // unresolved exception types will be ignored by exception delivery
4775       if (iterator.GetHandlerTypeIndex().IsValid()) {
4776         ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
4777         if (exception_type == nullptr) {
4778           DCHECK(Thread::Current()->IsExceptionPending());
4779           Thread::Current()->ClearException();
4780         }
4781       }
4782     }
4783     handlers_ptr = iterator.EndDataPointer();
4784   }
4785 }
4786 
CreateProxyClass(ScopedObjectAccessAlreadyRunnable & soa,jstring name,jobjectArray interfaces,jobject loader,jobjectArray methods,jobjectArray throws)4787 ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
4788                                                     jstring name,
4789                                                     jobjectArray interfaces,
4790                                                     jobject loader,
4791                                                     jobjectArray methods,
4792                                                     jobjectArray throws) {
4793   Thread* self = soa.Self();
4794 
4795   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4796   // code to be executed. We put it up here so we can avoid all the allocations associated with
4797   // creating the class. This can happen with (eg) jit-threads.
4798   if (!self->CanLoadClasses()) {
4799     // Make sure we don't try to load anything, potentially causing an infinite loop.
4800     ObjPtr<mirror::Throwable> pre_allocated =
4801         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4802     self->SetException(pre_allocated);
4803     return nullptr;
4804   }
4805 
4806   StackHandleScope<12> hs(self);
4807   MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
4808       AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
4809   if (temp_klass == nullptr) {
4810     CHECK(self->IsExceptionPending());  // OOME.
4811     return nullptr;
4812   }
4813   DCHECK(temp_klass->GetClass() != nullptr);
4814   temp_klass->SetObjectSize(sizeof(mirror::Proxy));
4815   // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
4816   // the methods.
4817   temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
4818   temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
4819   DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
4820   temp_klass->SetName(soa.Decode<mirror::String>(name));
4821   temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
4822   // Object has an empty iftable, copy it for that reason.
4823   temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4824   mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
4825   std::string storage;
4826   const char* descriptor = temp_klass->GetDescriptor(&storage);
4827   const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4828 
4829   // Needs to be before we insert the class so that the allocator field is set.
4830   LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
4831 
4832   // Insert the class before loading the fields as the field roots
4833   // (ArtField::declaring_class_) are only visited from the class
4834   // table. There can't be any suspend points between inserting the
4835   // class and setting the field arrays below.
4836   ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
4837   CHECK(existing == nullptr);
4838 
4839   // Instance fields are inherited, but we add a couple of static fields...
4840   const size_t num_fields = 2;
4841   LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
4842   temp_klass->SetSFieldsPtr(sfields);
4843 
4844   // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
4845   // our proxy, so Class.getInterfaces doesn't return the flattened set.
4846   ArtField& interfaces_sfield = sfields->At(0);
4847   interfaces_sfield.SetDexFieldIndex(0);
4848   interfaces_sfield.SetDeclaringClass(temp_klass.Get());
4849   interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
4850 
4851   // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
4852   ArtField& throws_sfield = sfields->At(1);
4853   throws_sfield.SetDexFieldIndex(1);
4854   throws_sfield.SetDeclaringClass(temp_klass.Get());
4855   throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
4856 
4857   // Proxies have 1 direct method, the constructor
4858   const size_t num_direct_methods = 1;
4859 
4860   // The array we get passed contains all methods, including private and static
4861   // ones that aren't proxied. We need to filter those out since only interface
4862   // methods (non-private & virtual) are actually proxied.
4863   Handle<mirror::ObjectArray<mirror::Method>> h_methods =
4864       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
4865   DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
4866       << mirror::Class::PrettyClass(h_methods->GetClass());
4867   // List of the actual virtual methods this class will have.
4868   std::vector<ArtMethod*> proxied_methods;
4869   std::vector<size_t> proxied_throws_idx;
4870   proxied_methods.reserve(h_methods->GetLength());
4871   proxied_throws_idx.reserve(h_methods->GetLength());
4872   // Filter out to only the non-private virtual methods.
4873   for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
4874     ArtMethod* m = mirror->GetArtMethod();
4875     if (!m->IsPrivate() && !m->IsStatic()) {
4876       proxied_methods.push_back(m);
4877       proxied_throws_idx.push_back(idx);
4878     }
4879   }
4880   const size_t num_virtual_methods = proxied_methods.size();
4881   // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
4882   // contains an array of all the classes each function is declared to throw.
4883   // This is used to wrap unexpected exceptions in a
4884   // UndeclaredThrowableException exception. This array is in the same order as
4885   // the methods array and like the methods array must be filtered to remove any
4886   // non-proxied methods.
4887   const bool has_filtered_methods =
4888       static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
4889   MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
4890       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
4891   MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
4892       hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
4893           (has_filtered_methods)
4894               ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
4895                     self, original_proxied_throws->GetClass(), num_virtual_methods)
4896               : original_proxied_throws.Get()));
4897   if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
4898     self->AssertPendingOOMException();
4899     return nullptr;
4900   }
4901   if (has_filtered_methods) {
4902     for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
4903       DCHECK_LE(new_idx, orig_idx);
4904       proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
4905     }
4906   }
4907 
4908   // Create the methods array.
4909   LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
4910         self, allocator, num_direct_methods + num_virtual_methods);
4911   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
4912   // want to throw OOM in the future.
4913   if (UNLIKELY(proxy_class_methods == nullptr)) {
4914     self->AssertPendingOOMException();
4915     return nullptr;
4916   }
4917   temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
4918 
4919   // Create the single direct method.
4920   CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
4921 
4922   // Create virtual method using specified prototypes.
4923   // TODO These should really use the iterators.
4924   for (size_t i = 0; i < num_virtual_methods; ++i) {
4925     auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
4926     auto* prototype = proxied_methods[i];
4927     CreateProxyMethod(temp_klass, prototype, virtual_method);
4928     DCHECK(virtual_method->GetDeclaringClass() != nullptr);
4929     DCHECK(prototype->GetDeclaringClass() != nullptr);
4930   }
4931 
4932   // The super class is java.lang.reflect.Proxy
4933   temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
4934   // Now effectively in the loaded state.
4935   mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
4936   self->AssertNoPendingException();
4937 
4938   // At this point the class is loaded. Publish a ClassLoad event.
4939   // Note: this may be a temporary class. It is a listener's responsibility to handle this.
4940   Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
4941 
4942   MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
4943   {
4944     // Must hold lock on object when resolved.
4945     ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
4946     // Link the fields and virtual methods, creating vtable and iftables.
4947     // The new class will replace the old one in the class table.
4948     Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
4949         hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
4950     if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
4951       if (!temp_klass->IsErroneous()) {
4952         mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
4953       }
4954       return nullptr;
4955     }
4956   }
4957   CHECK(temp_klass->IsRetired());
4958   CHECK_NE(temp_klass.Get(), klass.Get());
4959 
4960   CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
4961   interfaces_sfield.SetObject<false>(
4962       klass.Get(),
4963       soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
4964   CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
4965   throws_sfield.SetObject<false>(
4966       klass.Get(),
4967       proxied_throws.Get());
4968 
4969   Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
4970 
4971   // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
4972   // See also ClassLinker::EnsureInitialized().
4973   if (kBitstringSubtypeCheckEnabled) {
4974     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
4975     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
4976     // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
4977   }
4978 
4979   VisiblyInitializedCallback* callback = nullptr;
4980   {
4981     // Lock on klass is released. Lock new class object.
4982     ObjectLock<mirror::Class> initialization_lock(self, klass);
4983     // Conservatively go through the ClassStatus::kInitialized state.
4984     callback = MarkClassInitialized(self, klass);
4985   }
4986   if (callback != nullptr) {
4987     callback->MakeVisible(self);
4988   }
4989 
4990   // Consistency checks.
4991   if (kIsDebugBuild) {
4992     CHECK(klass->GetIFieldsPtr() == nullptr);
4993     CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
4994 
4995     for (size_t i = 0; i < num_virtual_methods; ++i) {
4996       auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
4997       CheckProxyMethod(virtual_method, proxied_methods[i]);
4998     }
4999 
5000     StackHandleScope<1> hs2(self);
5001     Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
5002     std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
5003                                                    decoded_name->ToModifiedUtf8().c_str()));
5004     CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
5005 
5006     std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
5007                                                decoded_name->ToModifiedUtf8().c_str()));
5008     CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
5009 
5010     CHECK_EQ(klass.Get()->GetProxyInterfaces(),
5011              soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5012     CHECK_EQ(klass.Get()->GetProxyThrows(),
5013              proxied_throws.Get());
5014   }
5015   return klass.Get();
5016 }
5017 
CreateProxyConstructor(Handle<mirror::Class> klass,ArtMethod * out)5018 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5019   // Create constructor for Proxy that must initialize the method.
5020   ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5021   CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5022 
5023   // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5024   // on which front-end compiler was used to build the libcore DEX files.
5025   ArtMethod* proxy_constructor =
5026       jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
5027   DCHECK(proxy_constructor != nullptr)
5028       << "Could not find <init> method in java.lang.reflect.Proxy";
5029 
5030   // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5031   // code_ too)
5032   DCHECK(out != nullptr);
5033   out->CopyFrom(proxy_constructor, image_pointer_size_);
5034   // Make this constructor public and fix the class to be our Proxy version.
5035   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5036   // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5037   out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5038                       kAccPublic |
5039                       kAccCompileDontBother);
5040   out->SetDeclaringClass(klass.Get());
5041 
5042   // Set the original constructor method.
5043   out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5044 }
5045 
CheckProxyConstructor(ArtMethod * constructor) const5046 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5047   CHECK(constructor->IsConstructor());
5048   auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5049   CHECK_STREQ(np->GetName(), "<init>");
5050   CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5051   DCHECK(constructor->IsPublic());
5052 }
5053 
CreateProxyMethod(Handle<mirror::Class> klass,ArtMethod * prototype,ArtMethod * out)5054 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5055                                     ArtMethod* out) {
5056   // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
5057   // as necessary
5058   DCHECK(out != nullptr);
5059   out->CopyFrom(prototype, image_pointer_size_);
5060 
5061   // Set class to be the concrete proxy class.
5062   out->SetDeclaringClass(klass.Get());
5063   // Clear the abstract and default flags to ensure that defaults aren't picked in
5064   // preference to the invocation handler.
5065   const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
5066   // Make the method final.
5067   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5068   const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5069   out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5070 
5071   // Set the original interface method.
5072   out->SetDataPtrSize(prototype, image_pointer_size_);
5073 
5074   // At runtime the method looks like a reference and argument saving method, clone the code
5075   // related parameters from this method.
5076   out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
5077 }
5078 
CheckProxyMethod(ArtMethod * method,ArtMethod * prototype) const5079 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5080   // Basic consistency checks.
5081   CHECK(!prototype->IsFinal());
5082   CHECK(method->IsFinal());
5083   CHECK(method->IsInvokable());
5084 
5085   // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5086   // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
5087   CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5088   CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5089 }
5090 
CanWeInitializeClass(ObjPtr<mirror::Class> klass,bool can_init_statics,bool can_init_parents)5091 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
5092                                        bool can_init_parents) {
5093   if (can_init_statics && can_init_parents) {
5094     return true;
5095   }
5096   if (!can_init_statics) {
5097     // Check if there's a class initializer.
5098     ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5099     if (clinit != nullptr) {
5100       return false;
5101     }
5102     // Check if there are encoded static values needing initialization.
5103     if (klass->NumStaticFields() != 0) {
5104       const dex::ClassDef* dex_class_def = klass->GetClassDef();
5105       DCHECK(dex_class_def != nullptr);
5106       if (dex_class_def->static_values_off_ != 0) {
5107         return false;
5108       }
5109     }
5110   }
5111   // If we are a class we need to initialize all interfaces with default methods when we are
5112   // initialized. Check all of them.
5113   if (!klass->IsInterface()) {
5114     size_t num_interfaces = klass->GetIfTableCount();
5115     for (size_t i = 0; i < num_interfaces; i++) {
5116       ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5117       if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5118         if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5119           return false;
5120         }
5121       }
5122     }
5123   }
5124   if (klass->IsInterface() || !klass->HasSuperClass()) {
5125     return true;
5126   }
5127   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5128   if (super_class->IsInitialized()) {
5129     return true;
5130   }
5131   return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5132 }
5133 
// Drives `klass` through initialization: verification (if still needed),
// superclass and default-method-interface initialization, encoded static field
// values, and the <clinit> invocation, following the JLS 12.4.2 locking
// protocol on the class object's monitor. Returns true when the class ends up
// initialized (possibly by another thread); returns false on failure or when
// the required work is not permitted by `can_init_statics`/`can_init_parents`
// (the CanWeInitializeClass fast-fail path returns false without throwing).
bool ClassLinker::InitializeClass(Thread* self,
                                  Handle<mirror::Class> klass,
                                  bool can_init_statics,
                                  bool can_init_parents) {
  // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol

  // Are we already initialized and therefore done?
  // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
  // an initialized class will never change its state.
  if (klass->IsInitialized()) {
    return true;
  }

  // Fast fail if initialization requires a full runtime. Not part of the JLS.
  if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
    return false;
  }

  self->AllowThreadSuspension();
  Runtime* const runtime = Runtime::Current();
  const bool stats_enabled = runtime->HasStatsEnabled();
  // t0/t1 bracket this call's wall time; t_sub (below) accumulates time spent
  // initializing supertypes so it can be subtracted from this class's stats.
  uint64_t t0;
  {
    ObjectLock<mirror::Class> lock(self, klass);

    // Re-check under the lock in case another thread initialized ahead of us.
    if (klass->IsInitialized()) {
      return true;
    }

    // Was the class already found to be erroneous? Done under the lock to match the JLS.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
      VlogClassInitializationFailure(klass);
      return false;
    }

    CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
        << klass->PrettyClass() << ": state=" << klass->GetStatus();

    if (!klass->IsVerified()) {
      VerifyClass(self, /*verifier_deps= */ nullptr, klass);
      if (!klass->IsVerified()) {
        // We failed to verify, expect either the klass to be erroneous or verification failed at
        // compile time.
        if (klass->IsErroneous()) {
          // The class is erroneous. This may be a verifier error, or another thread attempted
          // verification and/or initialization and failed. We can distinguish those cases by
          // whether an exception is already pending.
          if (self->IsExceptionPending()) {
            // Check that it's a VerifyError.
            DCHECK(IsVerifyError(self->GetException()));
          } else {
            // Check that another thread attempted initialization.
            DCHECK_NE(0, klass->GetClinitThreadId());
            DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
            // Need to rethrow the previous failure now.
            ThrowEarlierClassFailure(klass.Get(), true);
          }
          VlogClassInitializationFailure(klass);
        } else {
          CHECK(Runtime::Current()->IsAotCompiler());
          CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
          self->AssertNoPendingException();
          self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
        }
        self->AssertPendingException();
        return false;
      } else {
        self->AssertNoPendingException();
      }

      // A separate thread could have moved us all the way to initialized. A "simple" example
      // involves a subclass of the current class being initialized at the same time (which
      // will implicitly initialize the superclass, if scheduled that way). b/28254258
      DCHECK(!klass->IsErroneous()) << klass->GetStatus();
      if (klass->IsInitialized()) {
        return true;
      }
    }

    // If the class is ClassStatus::kInitializing, either this thread is
    // initializing higher up the stack or another thread has beat us
    // to initializing and we need to wait. Either way, this
    // invocation of InitializeClass will not be responsible for
    // running <clinit> and will return.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      // Could have got an exception during verification.
      if (self->IsExceptionPending()) {
        VlogClassInitializationFailure(klass);
        return false;
      }
      // We caught somebody else in the act; was it us?
      if (klass->GetClinitThreadId() == self->GetTid()) {
        // Yes. That's fine. Return so we can continue initializing.
        return true;
      }
      // No. That's fine. Wait for another thread to finish initializing.
      return WaitForInitializeClass(klass, self, lock);
    }

    // Try to get the oat class's status for this class if the oat file is present. The compiler
    // tries to validate superclass descriptors, and writes the result into the oat file.
    // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
    // is different at runtime than it was at compile time, the oat file is rejected. So if the
    // oat file is present, the classpaths must match, and the runtime time check can be skipped.
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
        !ValidateSuperClassDescriptors(klass)) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    self->AllowThreadSuspension();

    CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
        << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();

    // From here out other threads may observe that we're initializing and so changes of state
    // require the a notification.
    klass->SetClinitThreadId(self->GetTid());
    mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);

    t0 = stats_enabled ? NanoTime() : 0u;
  }
  // Lock released: superclass/interface initialization below recurses and must
  // not hold this class's monitor while doing so.

  uint64_t t_sub = 0;

  // Initialize super classes, must be done while initializing for the JLS.
  if (!klass->IsInterface() && klass->HasSuperClass()) {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (!super_class->IsInitialized()) {
      CHECK(!super_class->IsInterface());
      CHECK(can_init_parents);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
      uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
      bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
      uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
      if (!super_initialized) {
        // The super class was verified ahead of entering initializing, we should only be here if
        // the super class became erroneous due to initialization.
        // For the case of aot compiler, the super class might also be initializing but we don't
        // want to process circular dependencies in pre-compile.
        CHECK(self->IsExceptionPending())
            << "Super class initialization failed for "
            << handle_scope_super->PrettyDescriptor()
            << " that has unexpected status " << handle_scope_super->GetStatus()
            << "\nPending exception:\n"
            << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
        ObjectLock<mirror::Class> lock(self, klass);
        // Initialization failed because the super-class is erroneous.
        mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
        return false;
      }
      t_sub = super_t1 - super_t0;
    }
  }

  if (!klass->IsInterface()) {
    // Initialize interfaces with default methods for the JLS.
    size_t num_direct_interfaces = klass->NumDirectInterfaces();
    // Only setup the (expensive) handle scope if we actually need to.
    if (UNLIKELY(num_direct_interfaces > 0)) {
      StackHandleScope<1> hs_iface(self);
      MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
      for (size_t i = 0; i < num_direct_interfaces; i++) {
        handle_scope_iface.Assign(klass->GetDirectInterface(i));
        CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
        CHECK(handle_scope_iface->IsInterface());
        if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
          // We have already done this for this interface. Skip it.
          continue;
        }
        // We cannot just call initialize class directly because we need to ensure that ALL
        // interfaces with default methods are initialized. Non-default interface initialization
        // will not affect other non-default super-interfaces.
        // This is not very precise, misses all walking.
        uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
        bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
                                                                     handle_scope_iface,
                                                                     can_init_statics,
                                                                     can_init_parents);
        uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
        if (!iface_initialized) {
          ObjectLock<mirror::Class> lock(self, klass);
          // Initialization failed because one of our interfaces with default methods is erroneous.
          mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
          return false;
        }
        t_sub += inf_t1 - inf_t0;
      }
    }
  }

  // Resolve all static fields and apply any encoded (compile-time constant)
  // initial values before running <clinit>.
  const size_t num_static_fields = klass->NumStaticFields();
  if (num_static_fields > 0) {
    const dex::ClassDef* dex_class_def = klass->GetClassDef();
    CHECK(dex_class_def != nullptr);
    StackHandleScope<3> hs(self);
    Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));

    // Eagerly fill in static fields so that the we don't have to do as many expensive
    // Class::FindStaticField in ResolveField.
    for (size_t i = 0; i < num_static_fields; ++i) {
      ArtField* field = klass->GetStaticField(i);
      const uint32_t field_idx = field->GetDexFieldIndex();
      ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
      if (resolved_field == nullptr) {
        // Populating cache of a dex file which defines `klass` should always be allowed.
        DCHECK(!hiddenapi::ShouldDenyAccessToMember(
            field,
            hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
            hiddenapi::AccessMethod::kNone));
        dex_cache->SetResolvedField(field_idx, field);
      } else {
        DCHECK_EQ(field, resolved_field);
      }
    }

    annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
                                                                 class_loader,
                                                                 this,
                                                                 *dex_class_def);
    const DexFile& dex_file = *dex_cache->GetDexFile();

    if (value_it.HasNext()) {
      ClassAccessor accessor(dex_file, *dex_class_def);
      CHECK(can_init_statics);
      // The encoded values are stored in static-field declaration order; walk
      // the fields and values in lock-step.
      for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
        if (!value_it.HasNext()) {
          break;
        }
        ArtField* art_field = ResolveField(field.GetIndex(),
                                           dex_cache,
                                           class_loader,
                                           /* is_static= */ true);
        if (Runtime::Current()->IsActiveTransaction()) {
          value_it.ReadValueToField<true>(art_field);
        } else {
          value_it.ReadValueToField<false>(art_field);
        }
        if (self->IsExceptionPending()) {
          break;
        }
        value_it.Next();
      }
      DCHECK(self->IsExceptionPending() || !value_it.HasNext());
    }
  }


  // Run <clinit> only if nothing above failed; a pending exception falls
  // through to the error handling below.
  if (!self->IsExceptionPending()) {
    ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
    if (clinit != nullptr) {
      CHECK(can_init_statics);
      JValue result;
      clinit->Invoke(self, nullptr, 0, &result, "V");
    }
  }
  self->AllowThreadSuspension();
  uint64_t t1 = stats_enabled ? NanoTime() : 0u;

  VisiblyInitializedCallback* callback = nullptr;
  bool success = true;
  {
    ObjectLock<mirror::Class> lock(self, klass);

    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      success = false;
    } else if (Runtime::Current()->IsTransactionAborted()) {
      // The exception thrown when the transaction aborted has been caught and cleared
      // so we need to throw it again now.
      VLOG(compiler) << "Return from class initializer of "
                     << mirror::Class::PrettyDescriptor(klass.Get())
                     << " without exception while transaction was aborted: re-throw it now.";
      runtime->ThrowTransactionAbortError(self);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      success = false;
    } else {
      if (stats_enabled) {
        RuntimeStats* global_stats = runtime->GetStats();
        RuntimeStats* thread_stats = self->GetStats();
        ++global_stats->class_init_count;
        ++thread_stats->class_init_count;
        // Charge only this class's own time: subtract supertype init time.
        global_stats->class_init_time_ns += (t1 - t0 - t_sub);
        thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
      }
      // Set the class as initialized except if failed to initialize static fields.
      callback = MarkClassInitialized(self, klass);
      if (VLOG_IS_ON(class_linker)) {
        std::string temp;
        LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
            klass->GetLocation();
      }
    }
  }
  // The callback is invoked outside the class monitor.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  return success;
}
5441 
5442 // We recursively run down the tree of interfaces. We need to do this in the order they are declared
5443 // and perform the initialization only on those interfaces that contain default methods.
InitializeDefaultInterfaceRecursive(Thread * self,Handle<mirror::Class> iface,bool can_init_statics,bool can_init_parents)5444 bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5445                                                       Handle<mirror::Class> iface,
5446                                                       bool can_init_statics,
5447                                                       bool can_init_parents) {
5448   CHECK(iface->IsInterface());
5449   size_t num_direct_ifaces = iface->NumDirectInterfaces();
5450   // Only create the (expensive) handle scope if we need it.
5451   if (UNLIKELY(num_direct_ifaces > 0)) {
5452     StackHandleScope<1> hs(self);
5453     MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5454     // First we initialize all of iface's super-interfaces recursively.
5455     for (size_t i = 0; i < num_direct_ifaces; i++) {
5456       ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
5457       CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
5458       if (!super_iface->HasBeenRecursivelyInitialized()) {
5459         // Recursive step
5460         handle_super_iface.Assign(super_iface);
5461         if (!InitializeDefaultInterfaceRecursive(self,
5462                                                  handle_super_iface,
5463                                                  can_init_statics,
5464                                                  can_init_parents)) {
5465           return false;
5466         }
5467       }
5468     }
5469   }
5470 
5471   bool result = true;
5472   // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5473   // initialize if we don't have default methods.
5474   if (iface->HasDefaultMethods()) {
5475     result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5476   }
5477 
5478   // Mark that this interface has undergone recursive default interface initialization so we know we
5479   // can skip it on any later class initializations. We do this even if we are not a default
5480   // interface since we can still avoid the traversal. This is purely a performance optimization.
5481   if (result) {
5482     // TODO This should be done in a better way
5483     // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5484     //       interface. It is bad (Java) style, but not impossible. Marking the recursive
5485     //       initialization is a performance optimization (to avoid another idempotent visit
5486     //       for other implementing classes/interfaces), and can be revisited later.
5487     ObjectTryLock<mirror::Class> lock(self, iface);
5488     if (lock.Acquired()) {
5489       iface->SetRecursivelyInitialized();
5490     }
5491   }
5492   return result;
5493 }
5494 
// Waits on the class object's monitor (`lock`, constructed and held by the
// caller) until another thread finishes initializing `klass`. Returns true if
// the class ends up initialized, false if initialization failed elsewhere
// (synthesizing a NoClassDefFoundError for the erroneous case), the wait was
// interrupted by a pending exception, or compile-time initialization failed
// during AOT compilation.
bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
                                         Thread* self,
                                         ObjectLock<mirror::Class>& lock)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Loop until the class reaches a terminal state (initialized or erroneous).
  while (true) {
    self->AssertNoPendingException();
    CHECK(!klass->IsInitialized());
    lock.WaitIgnoringInterrupts();

    // When we wake up, repeat the test for init-in-progress.  If
    // there's an exception pending (only possible if
    // we were not using WaitIgnoringInterrupts), bail out.
    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    // Spurious wakeup? Go back to waiting.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      continue;
    }
    // During AOT compilation a class left at kVerified means its compile-time
    // initialization attempt failed.
    if (klass->GetStatus() == ClassStatus::kVerified &&
        Runtime::Current()->IsAotCompiler()) {
      // Compile time initialization failed.
      return false;
    }
    if (klass->IsErroneous()) {
      // The caller wants an exception, but it was thrown in a
      // different thread.  Synthesize one here.
      ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
                                klass->PrettyDescriptor().c_str());
      VlogClassInitializationFailure(klass);
      return false;
    }
    if (klass->IsInitialized()) {
      return true;
    }
    // Any other status after a wakeup is a protocol violation.
    LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
        << klass->GetStatus();
  }
  UNREACHABLE();
}
5537 
ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m)5538 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5539                                                           Handle<mirror::Class> super_klass,
5540                                                           ArtMethod* method,
5541                                                           ArtMethod* m)
5542     REQUIRES_SHARED(Locks::mutator_lock_) {
5543   DCHECK(Thread::Current()->IsExceptionPending());
5544   DCHECK(!m->IsProxyMethod());
5545   const DexFile* dex_file = m->GetDexFile();
5546   const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5547   const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
5548   dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
5549   std::string return_type = dex_file->PrettyType(return_type_idx);
5550   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5551   ThrowWrappedLinkageError(klass.Get(),
5552                            "While checking class %s method %s signature against %s %s: "
5553                            "Failed to resolve return type %s with %s",
5554                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5555                            ArtMethod::PrettyMethod(method).c_str(),
5556                            super_klass->IsInterface() ? "interface" : "superclass",
5557                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5558                            return_type.c_str(), class_loader.c_str());
5559 }
5560 
ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m,uint32_t index,dex::TypeIndex arg_type_idx)5561 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5562                                                    Handle<mirror::Class> super_klass,
5563                                                    ArtMethod* method,
5564                                                    ArtMethod* m,
5565                                                    uint32_t index,
5566                                                    dex::TypeIndex arg_type_idx)
5567     REQUIRES_SHARED(Locks::mutator_lock_) {
5568   DCHECK(Thread::Current()->IsExceptionPending());
5569   DCHECK(!m->IsProxyMethod());
5570   const DexFile* dex_file = m->GetDexFile();
5571   std::string arg_type = dex_file->PrettyType(arg_type_idx);
5572   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5573   ThrowWrappedLinkageError(klass.Get(),
5574                            "While checking class %s method %s signature against %s %s: "
5575                            "Failed to resolve arg %u type %s with %s",
5576                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5577                            ArtMethod::PrettyMethod(method).c_str(),
5578                            super_klass->IsInterface() ? "interface" : "superclass",
5579                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5580                            index, arg_type.c_str(), class_loader.c_str());
5581 }
5582 
ThrowSignatureMismatch(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,const std::string & error_msg)5583 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5584                                    Handle<mirror::Class> super_klass,
5585                                    ArtMethod* method,
5586                                    const std::string& error_msg)
5587     REQUIRES_SHARED(Locks::mutator_lock_) {
5588   ThrowLinkageError(klass.Get(),
5589                     "Class %s method %s resolves differently in %s %s: %s",
5590                     mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5591                     ArtMethod::PrettyMethod(method).c_str(),
5592                     super_klass->IsInterface() ? "interface" : "superclass",
5593                     mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5594                     error_msg.c_str());
5595 }
5596 
HasSameSignatureWithDifferentClassLoaders(Thread * self,Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method1,ArtMethod * method2)5597 static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
5598                                                       Handle<mirror::Class> klass,
5599                                                       Handle<mirror::Class> super_klass,
5600                                                       ArtMethod* method1,
5601                                                       ArtMethod* method2)
5602     REQUIRES_SHARED(Locks::mutator_lock_) {
5603   {
5604     StackHandleScope<1> hs(self);
5605     Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
5606     if (UNLIKELY(return_type == nullptr)) {
5607       ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
5608       return false;
5609     }
5610     ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
5611     if (UNLIKELY(other_return_type == nullptr)) {
5612       ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
5613       return false;
5614     }
5615     if (UNLIKELY(other_return_type != return_type.Get())) {
5616       ThrowSignatureMismatch(klass, super_klass, method1,
5617                              StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
5618                                           return_type->PrettyClassAndClassLoader().c_str(),
5619                                           return_type.Get(),
5620                                           other_return_type->PrettyClassAndClassLoader().c_str(),
5621                                           other_return_type.Ptr()));
5622       return false;
5623     }
5624   }
5625   const dex::TypeList* types1 = method1->GetParameterTypeList();
5626   const dex::TypeList* types2 = method2->GetParameterTypeList();
5627   if (types1 == nullptr) {
5628     if (types2 != nullptr && types2->Size() != 0) {
5629       ThrowSignatureMismatch(klass, super_klass, method1,
5630                              StringPrintf("Type list mismatch with %s",
5631                                           method2->PrettyMethod(true).c_str()));
5632       return false;
5633     }
5634     return true;
5635   } else if (UNLIKELY(types2 == nullptr)) {
5636     if (types1->Size() != 0) {
5637       ThrowSignatureMismatch(klass, super_klass, method1,
5638                              StringPrintf("Type list mismatch with %s",
5639                                           method2->PrettyMethod(true).c_str()));
5640       return false;
5641     }
5642     return true;
5643   }
5644   uint32_t num_types = types1->Size();
5645   if (UNLIKELY(num_types != types2->Size())) {
5646     ThrowSignatureMismatch(klass, super_klass, method1,
5647                            StringPrintf("Type list mismatch with %s",
5648                                         method2->PrettyMethod(true).c_str()));
5649     return false;
5650   }
5651   for (uint32_t i = 0; i < num_types; ++i) {
5652     StackHandleScope<1> hs(self);
5653     dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
5654     Handle<mirror::Class> param_type(hs.NewHandle(
5655         method1->ResolveClassFromTypeIndex(param_type_idx)));
5656     if (UNLIKELY(param_type == nullptr)) {
5657       ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5658                                              method1, i, param_type_idx);
5659       return false;
5660     }
5661     dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
5662     ObjPtr<mirror::Class> other_param_type =
5663         method2->ResolveClassFromTypeIndex(other_param_type_idx);
5664     if (UNLIKELY(other_param_type == nullptr)) {
5665       ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5666                                              method2, i, other_param_type_idx);
5667       return false;
5668     }
5669     if (UNLIKELY(param_type.Get() != other_param_type)) {
5670       ThrowSignatureMismatch(klass, super_klass, method1,
5671                              StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
5672                                           i,
5673                                           param_type->PrettyClassAndClassLoader().c_str(),
5674                                           param_type.Get(),
5675                                           other_param_type->PrettyClassAndClassLoader().c_str(),
5676                                           other_param_type.Ptr()));
5677       return false;
5678     }
5679   }
5680   return true;
5681 }
5682 
5683 
// Verifies that every vtable/iftable slot of `klass` that is overridden
// relative to a superclass or interface loaded by a *different* class loader
// still agrees on the resolved signature types. Interfaces are exempt (their
// methods are checked when an implementing class is validated). Returns
// false with a pending exception on the first mismatch or resolution failure.
bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
  if (klass->IsInterface()) {
    return true;
  }
  // Begin with the methods local to the superclass.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  // Reused mutable handle: first for the superclass, then for each interface.
  MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
  if (klass->HasSuperClass() &&
      klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
    super_klass.Assign(klass->GetSuperClass());
    for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
      auto* m = klass->GetVTableEntry(i, image_pointer_size_);
      auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
      // Only slots that `klass` actually overrides need a signature re-check.
      if (m != super_m) {
        if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                klass,
                                                                super_klass,
                                                                m,
                                                                super_m))) {
          self->AssertPendingException();
          return false;
        }
      }
    }
  }
  // Then check each interface's methods against the implementations in the
  // corresponding iftable method array.
  for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
    super_klass.Assign(klass->GetIfTable()->GetInterface(i));
    if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
      uint32_t num_methods = super_klass->NumVirtualMethods();
      for (uint32_t j = 0; j < num_methods; ++j) {
        auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
            j, image_pointer_size_);
        auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
        if (m != super_m) {
          if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                  klass,
                                                                  super_klass,
                                                                  m,
                                                                  super_m))) {
            self->AssertPendingException();
            return false;
          }
        }
      }
    }
  }
  return true;
}
5733 
// Ensures that class `c` is initialized, running <clinit> if needed.
// Returns true if the class is (already or newly) initialized. When
// `can_init_fields`/`can_init_parents` are false and initialization could not
// proceed, returns false with any pending exception cleared; when both are
// true, a false return always has an exception pending.
bool ClassLinker::EnsureInitialized(Thread* self,
                                    Handle<mirror::Class> c,
                                    bool can_init_fields,
                                    bool can_init_parents) {
  DCHECK(c != nullptr);

  if (c->IsInitialized()) {
    // If we've seen an initialized but not visibly initialized class
    // many times, request visible initialization.
    if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
      // Thanks to the x86 memory model classes skip the initialized status.
      DCHECK(c->IsVisiblyInitialized());
    } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
      if (self->IncrementMakeVisiblyInitializedCounter()) {
        MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
      }
    }
    return true;
  }
  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  //
  // Ensure the bitstring is initialized before any of the class initialization
  // logic occurs. Once a class initializer starts running, objects can
  // escape into the heap and use the subtype checking code.
  //
  // Note: A class whose SubtypeCheckInfo is at least Initialized means it
  // can be used as a source for the IsSubClass check, and that all ancestors
  // of the class are Assigned (can be used as a target for IsSubClass check)
  // or Overflowed (can be used as a source for IsSubClass check).
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
  }
  const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
  if (!success) {
    if (can_init_fields && can_init_parents) {
      CHECK(self->IsExceptionPending()) << c->PrettyClass();
    } else {
      // There may or may not be an exception pending. If there is, clear it.
      // We propagate the exception only if we can initialize fields and parents.
      self->ClearException();
    }
  } else {
    self->AssertNoPendingException();
  }
  return success;
}
5782 
FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,ObjPtr<mirror::Class> new_class)5783 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
5784                                                ObjPtr<mirror::Class> new_class) {
5785   DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
5786   for (ArtField& field : new_class->GetIFields()) {
5787     if (field.GetDeclaringClass() == temp_class) {
5788       field.SetDeclaringClass(new_class);
5789     }
5790   }
5791 
5792   DCHECK_EQ(temp_class->NumStaticFields(), 0u);
5793   for (ArtField& field : new_class->GetSFields()) {
5794     if (field.GetDeclaringClass() == temp_class) {
5795       field.SetDeclaringClass(new_class);
5796     }
5797   }
5798 
5799   DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
5800   DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
5801   for (auto& method : new_class->GetMethods(image_pointer_size_)) {
5802     if (method.GetDeclaringClass() == temp_class) {
5803       method.SetDeclaringClass(new_class);
5804     }
5805   }
5806 
5807   // Make sure the remembered set and mod-union tables know that we updated some of the native
5808   // roots.
5809   WriteBarrier::ForEveryFieldWrite(new_class);
5810 }
5811 
RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader)5812 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5813   CHECK(class_loader->GetAllocator() == nullptr);
5814   CHECK(class_loader->GetClassTable() == nullptr);
5815   Thread* const self = Thread::Current();
5816   ClassLoaderData data;
5817   data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
5818   // Create and set the class table.
5819   data.class_table = new ClassTable;
5820   class_loader->SetClassTable(data.class_table);
5821   // Create and set the linear allocator.
5822   data.allocator = Runtime::Current()->CreateLinearAlloc();
5823   class_loader->SetAllocator(data.allocator);
5824   // Add to the list so that we know to free the data later.
5825   class_loaders_.push_back(data);
5826 }
5827 
InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)5828 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5829   if (class_loader == nullptr) {
5830     return boot_class_table_.get();
5831   }
5832   ClassTable* class_table = class_loader->GetClassTable();
5833   if (class_table == nullptr) {
5834     RegisterClassLoader(class_loader);
5835     class_table = class_loader->GetClassTable();
5836     DCHECK(class_table != nullptr);
5837   }
5838   return class_table;
5839 }
5840 
ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)5841 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5842   return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
5843 }
5844 
FindSuperImt(ObjPtr<mirror::Class> klass,PointerSize pointer_size)5845 static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
5846     REQUIRES_SHARED(Locks::mutator_lock_) {
5847   while (klass->HasSuperClass()) {
5848     klass = klass->GetSuperClass();
5849     if (klass->ShouldHaveImt()) {
5850       return klass->GetImt(pointer_size);
5851     }
5852   }
5853   return nullptr;
5854 }
5855 
// Links a loaded class: resolves its superclass, methods (vtable/iftable/IMT)
// and fields, then transitions it to kResolved. If the temporary class turns
// out to need embedded tables of a different size, it is retired and replaced
// by a correctly sized copy; in either case *h_new_class_out receives the
// class object callers should use from now on. Returns false with a pending
// exception on failure.
bool ClassLinker::LinkClass(Thread* self,
                            const char* descriptor,
                            Handle<mirror::Class> klass,
                            Handle<mirror::ObjectArray<mirror::Class>> interfaces,
                            MutableHandle<mirror::Class>* h_new_class_out) {
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  if (!LinkSuperClass(klass)) {
    return false;
  }
  ArtMethod* imt_data[ImTable::kSize];
  // If there are any new conflicts compared to super class.
  bool new_conflict = false;
  // Pre-fill all IMT slots with the unimplemented-method stub; LinkMethods
  // overwrites the slots it resolves.
  std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
  if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
    return false;
  }
  if (!LinkInstanceFields(self, klass)) {
    return false;
  }
  size_t class_size;
  if (!LinkStaticFields(self, klass, &class_size)) {
    return false;
  }
  CreateReferenceInstanceOffsets(klass);
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  ImTable* imt = nullptr;
  if (klass->ShouldHaveImt()) {
    // If there are any new conflicts compared to the super class we can not make a copy. There
    // can be cases where both will have a conflict method at the same slot without having the same
    // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
    // will possibly create a table that is incorrect for either of the classes.
    // Same IMT with new_conflict does not happen very often.
    if (!new_conflict) {
      ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
      if (super_imt != nullptr) {
        bool imt_equals = true;
        for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
          imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
        }
        if (imt_equals) {
          // Share the superclass's IMT instead of allocating a duplicate.
          imt = super_imt;
        }
      }
    }
    if (imt == nullptr) {
      LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
      imt = reinterpret_cast<ImTable*>(
          allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
      if (imt == nullptr) {
        return false;
      }
      imt->Populate(imt_data, image_pointer_size_);
    }
  }

  if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
    // We don't need to retire this class as it has no embedded tables or it was created the
    // correct size during class linker initialization.
    CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();

    if (klass->ShouldHaveEmbeddedVTable()) {
      klass->PopulateEmbeddedVTable(image_pointer_size_);
    }
    if (klass->ShouldHaveImt()) {
      klass->SetImt(imt, image_pointer_size_);
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(klass);
    }

    // This will notify waiters on klass that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
    h_new_class_out->Assign(klass.Get());
  } else {
    CHECK(!klass->IsResolved());
    // Retire the temporary class and create the correctly sized resolved class.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_new_class =
        hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
    // Set arrays to null since we don't want to have multiple classes with the same ArtField or
    // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
    // may not see any references to the target space and clean the card for a class if another
    // class had the same array pointer.
    klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
    klass->SetSFieldsPtrUnchecked(nullptr);
    klass->SetIFieldsPtrUnchecked(nullptr);
    if (UNLIKELY(h_new_class == nullptr)) {
      self->AssertPendingOOMException();
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
      return false;
    }

    CHECK_EQ(h_new_class->GetClassSize(), class_size);
    ObjectLock<mirror::Class> lock(self, h_new_class);
    FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());

    if (LIKELY(descriptor != nullptr)) {
      // Swap the temporary class for the real one in the loader's class table.
      WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
      const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
      ClassTable* const table = InsertClassTableForClassLoader(class_loader);
      const ObjPtr<mirror::Class> existing =
          table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
      if (class_loader != nullptr) {
        // We updated the class in the class table, perform the write barrier so that the GC knows
        // about the change.
        WriteBarrier::ForEveryFieldWrite(class_loader);
      }
      CHECK_EQ(existing, klass.Get());
      if (log_new_roots_) {
        new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
      }
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(h_new_class);
    }

    // This will notify waiters on temp class that saw the not yet resolved class in the
    // class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);

    CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
    // This will notify waiters on new_class that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
    // Return the new class.
    h_new_class_out->Assign(h_new_class.Get());
  }
  return true;
}
5996 
// Resolves the superclass and directly implemented interfaces of `klass`
// (status kIdx) from its dex class_def, checking accessibility and the
// trivial self-extension cycle, then advances the status to kLoaded.
// Returns false with a pending exception on any resolution or access failure.
bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
  CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
  const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
  dex::TypeIndex super_class_idx = class_def.superclass_idx_;
  if (super_class_idx.IsValid()) {
    // Check that a class does not inherit from itself directly.
    //
    // TODO: This is a cheap check to detect the straightforward case
    // of a class extending itself (b/28685551), but we should do a
    // proper cycle detection on loaded classes, to detect all cases
    // of class circularity errors (b/28830038).
    if (super_class_idx == class_def.class_idx_) {
      ThrowClassCircularityError(klass.Get(),
                                 "Class %s extends itself",
                                 klass->PrettyDescriptor().c_str());
      return false;
    }

    ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
    if (super_class == nullptr) {
      DCHECK(Thread::Current()->IsExceptionPending());
      return false;
    }
    // Verify that the superclass is accessible from this class.
    if (!klass->CanAccess(super_class)) {
      ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
                              super_class->PrettyDescriptor().c_str(),
                              klass->PrettyDescriptor().c_str());
      return false;
    }
    CHECK(super_class->IsResolved());
    klass->SetSuperClass(super_class);
  }
  const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
  if (interfaces != nullptr) {
    for (size_t i = 0; i < interfaces->Size(); i++) {
      dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
      ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
      if (interface == nullptr) {
        DCHECK(Thread::Current()->IsExceptionPending());
        return false;
      }
      // Verify that the interface is accessible from this class.
      if (!klass->CanAccess(interface)) {
        // TODO: the RI seemed to ignore this in my testing.
        ThrowIllegalAccessError(klass.Get(),
                                "Interface %s implemented by class %s is inaccessible",
                                interface->PrettyDescriptor().c_str(),
                                klass->PrettyDescriptor().c_str());
        return false;
      }
    }
  }
  // Mark the class as loaded.
  mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
  return true;
}
6054 
// Validates the superclass relationship of `klass` and propagates inherited
// flags (finalizable, class-loader, reference flags) from the superclass.
// Throws the appropriate error (ClassFormatError, LinkageError, VerifyError,
// IncompatibleClassChangeError or IllegalAccessError) and returns false when
// the hierarchy is malformed.
bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
  CHECK(!klass->IsPrimitive());
  ObjPtr<mirror::Class> super = klass->GetSuperClass();
  ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
  if (klass.Get() == object_class) {
    // java.lang.Object is the hierarchy root and must have no superclass.
    if (super != nullptr) {
      ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
      return false;
    }
    return true;
  }
  if (super == nullptr) {
    ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
                      klass->PrettyDescriptor().c_str());
    return false;
  }
  // Verify the superclass relationship is well formed.
  if (klass->IsInterface() && super != object_class) {
    ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
    return false;
  }
  if (super->IsFinal()) {
    ThrowVerifyError(klass.Get(),
                     "Superclass %s of %s is declared final",
                     super->PrettyDescriptor().c_str(),
                     klass->PrettyDescriptor().c_str());
    return false;
  }
  if (super->IsInterface()) {
    ThrowIncompatibleClassChangeError(klass.Get(),
                                      "Superclass %s of %s is an interface",
                                      super->PrettyDescriptor().c_str(),
                                      klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!klass->CanAccess(super)) {
    ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
                            super->PrettyDescriptor().c_str(),
                            klass->PrettyDescriptor().c_str());
    return false;
  }

  // Inherit kAccClassIsFinalizable from the superclass in case this
  // class doesn't override finalize.
  if (super->IsFinalizable()) {
    klass->SetFinalizable();
  }

  // Inherit class loader flag from super class.
  if (super->IsClassLoaderClass()) {
    klass->SetClassLoaderClass();
  }

  // Inherit reference flags (if any) from the superclass.
  uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
  if (reference_flags != 0) {
    CHECK_EQ(klass->GetClassFlags(), 0u);
    klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
  }
  // Disallow custom direct subclasses of java.lang.ref.Reference.
  if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
    ThrowLinkageError(klass.Get(),
                      "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
                      klass->PrettyDescriptor().c_str());
    return false;
  }

  if (kIsDebugBuild) {
    // Ensure super classes are fully resolved prior to resolving fields.
    while (super != nullptr) {
      CHECK(super->IsResolved());
      super = super->GetSuperClass();
    }
  }
  return true;
}
6131 
6132 // Comparator for name and signature of a method, used in finding overriding methods. Implementation
6133 // avoids the use of handles, if it didn't then rather than compare dex files we could compare dex
6134 // caches in the implementation below.
6135 class MethodNameAndSignatureComparator final : public ValueObject {
6136  public:
6137   explicit MethodNameAndSignatureComparator(ArtMethod* method)
REQUIRES_SHARED(Locks::mutator_lock_)6138       REQUIRES_SHARED(Locks::mutator_lock_) :
6139       dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
6140       name_view_() {
6141     DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
6142   }
6143 
GetNameView()6144   ALWAYS_INLINE std::string_view GetNameView() {
6145     if (name_view_.empty()) {
6146       name_view_ = dex_file_->StringViewByIdx(mid_->name_idx_);
6147     }
6148     return name_view_;
6149   }
6150 
HasSameNameAndSignature(ArtMethod * other)6151   bool HasSameNameAndSignature(ArtMethod* other)
6152       REQUIRES_SHARED(Locks::mutator_lock_) {
6153     DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
6154     const DexFile* other_dex_file = other->GetDexFile();
6155     const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
6156     if (dex_file_ == other_dex_file) {
6157       return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6158     }
6159     return GetNameView() == other_dex_file->StringViewByIdx(other_mid.name_idx_) &&
6160            dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
6161   }
6162 
6163  private:
6164   // Dex file for the method to compare against.
6165   const DexFile* const dex_file_;
6166   // MethodId for the method to compare against.
6167   const dex::MethodId* const mid_;
6168   // Lazily computed name from the dex file's strings.
6169   std::string_view name_view_;
6170 };
6171 
// Grows the IMT conflict table of `conflict_method` by one entry mapping
// `interface_method` to `method`, returning the runtime method that now holds
// the enlarged table. If `conflict_method` is the globally shared conflict
// method, a fresh per-slot conflict method is created instead of mutating the
// shared one. On allocation failure the original `conflict_method` is returned
// unchanged.
ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
                                                 ArtMethod* conflict_method,
                                                 ArtMethod* interface_method,
                                                 ArtMethod* method) {
  // Conflict methods are runtime methods, so their table is read with the
  // runtime pointer size rather than image_pointer_size_.
  ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
  Runtime* const runtime = Runtime::Current();
  // Allocate from the class loader's allocator so the table lives as long as the class.
  LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());

  // Create a new entry if the existing one is the shared conflict method.
  ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
      ? runtime->CreateImtConflictMethod(linear_alloc)
      : conflict_method;

  // Allocate a new table. Note that we will leak this table at the next conflict,
  // but that's a tradeoff compared to making the table fixed size.
  void* data = linear_alloc->Alloc(
      Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
                                                                       image_pointer_size_));
  if (data == nullptr) {
    LOG(ERROR) << "Failed to allocate conflict table";
    return conflict_method;
  }
  // Placement-new copies the old table's entries and appends the new pair.
  ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
                                                            interface_method,
                                                            method,
                                                            image_pointer_size_);

  // Do a fence to ensure threads see the data in the table before it is assigned
  // to the conflict method.
  // Note that there is a race in the presence of multiple threads and we may leak
  // memory from the LinearAlloc, but that's a tradeoff compared to using
  // atomic operations.
  std::atomic_thread_fence(std::memory_order_release);
  new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
  return new_conflict_method;
}
6208 
SetIMTRef(ArtMethod * unimplemented_method,ArtMethod * imt_conflict_method,ArtMethod * current_method,bool * new_conflict,ArtMethod ** imt_ref)6209 void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6210                             ArtMethod* imt_conflict_method,
6211                             ArtMethod* current_method,
6212                             /*out*/bool* new_conflict,
6213                             /*out*/ArtMethod** imt_ref) {
6214   // Place method in imt if entry is empty, place conflict otherwise.
6215   if (*imt_ref == unimplemented_method) {
6216     *imt_ref = current_method;
6217   } else if (!(*imt_ref)->IsRuntimeMethod()) {
6218     // If we are not a conflict and we have the same signature and name as the imt
6219     // entry, it must be that we overwrote a superclass vtable entry.
6220     // Note that we have checked IsRuntimeMethod, as there may be multiple different
6221     // conflict methods.
6222     MethodNameAndSignatureComparator imt_comparator(
6223         (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
6224     if (imt_comparator.HasSameNameAndSignature(
6225           current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6226       *imt_ref = current_method;
6227     } else {
6228       *imt_ref = imt_conflict_method;
6229       *new_conflict = true;
6230     }
6231   } else {
6232     // Place the default conflict method. Note that there may be an existing conflict
6233     // method in the IMT, but it could be one tailored to the super class, with a
6234     // specific ImtConflictTable.
6235     *imt_ref = imt_conflict_method;
6236     *new_conflict = true;
6237   }
6238 }
6239 
// Builds the final IMT for `klass` (including conflict tables) from its iftable.
// If the resulting IMT is entry-for-entry equivalent to the superclass IMT
// (treating equal conflict tables as equivalent), the superclass IMT object is
// shared instead of populating a new one.
void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
  DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
  DCHECK(!klass->IsTemp()) << klass->PrettyClass();
  // Scratch IMT, initially all slots "unimplemented".
  ArtMethod* imt_data[ImTable::kSize];
  Runtime* const runtime = Runtime::Current();
  ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
  ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
  std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
  if (klass->GetIfTable() != nullptr) {
    bool new_conflict = false;
    FillIMTFromIfTable(klass->GetIfTable(),
                       unimplemented_method,
                       conflict_method,
                       klass,
                       /*create_conflict_tables=*/true,
                       /*ignore_copied_methods=*/false,
                       &new_conflict,
                       &imt_data[0]);
  }
  // Compare the IMT with the super class including the conflict methods. If they are equivalent,
  // we can just use the same pointer.
  ImTable* imt = nullptr;
  ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
  if (super_class != nullptr && super_class->ShouldHaveImt()) {
    ImTable* super_imt = super_class->GetImt(image_pointer_size_);
    bool same = true;
    for (size_t i = 0; same && i < ImTable::kSize; ++i) {
      ArtMethod* method = imt_data[i];
      ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
      if (method != super_method) {
        // A runtime method that is neither the unimplemented nor the shared
        // conflict marker is a per-slot conflict method carrying a table.
        bool is_conflict_table = method->IsRuntimeMethod() &&
                                 method != unimplemented_method &&
                                 method != conflict_method;
        // Verify conflict contents.
        bool super_conflict_table = super_method->IsRuntimeMethod() &&
                                    super_method != unimplemented_method &&
                                    super_method != conflict_method;
        if (!is_conflict_table || !super_conflict_table) {
          same = false;
        } else {
          // Both slots are conflicts; they match only if the tables have
          // identical contents.
          ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
          ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
          same = same && table1->Equals(table2, image_pointer_size_);
        }
      }
    }
    if (same) {
      imt = super_imt;
    }
  }
  if (imt == nullptr) {
    // Not shareable: populate this class's own IMT from the scratch array.
    imt = klass->GetImt(image_pointer_size_);
    DCHECK(imt != nullptr);
    imt->Populate(imt_data, image_pointer_size_);
  } else {
    // Shareable: point this class at the superclass IMT.
    klass->SetImt(imt, image_pointer_size_);
  }
}
6298 
CreateImtConflictTable(size_t count,LinearAlloc * linear_alloc,PointerSize image_pointer_size)6299 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
6300                                                       LinearAlloc* linear_alloc,
6301                                                       PointerSize image_pointer_size) {
6302   void* data = linear_alloc->Alloc(Thread::Current(),
6303                                    ImtConflictTable::ComputeSize(count,
6304                                                                  image_pointer_size));
6305   return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
6306 }
6307 
// Convenience overload using the class linker's configured image pointer size.
ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
  return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
}
6311 
// Fills the `imt` array (of ImTable::kSize slots) from the class's iftable.
// First pass: assigns each interface method's implementation to its IMT slot via
// SetIMTRef, counting methods per slot. Second pass (if `create_conflict_tables`):
// replaces conflicting slots with fresh conflict methods carrying conflict tables
// and fills those tables with the (interface method, implementation) pairs.
void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
                                     ArtMethod* unimplemented_method,
                                     ArtMethod* imt_conflict_method,
                                     ObjPtr<mirror::Class> klass,
                                     bool create_conflict_tables,
                                     bool ignore_copied_methods,
                                     /*out*/bool* new_conflict,
                                     /*out*/ArtMethod** imt) {
  // Number of interface methods landing in each IMT slot; used to size conflict tables.
  uint32_t conflict_counts[ImTable::kSize] = {};
  for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
    ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
    const size_t num_virtuals = interface->NumVirtualMethods();
    const size_t method_array_count = if_table->GetMethodArrayCount(i);
    // Virtual methods can be larger than the if table methods if there are default methods.
    DCHECK_GE(num_virtuals, method_array_count);
    if (kIsDebugBuild) {
      if (klass->IsInterface()) {
        DCHECK_EQ(method_array_count, 0u);
      } else {
        DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
      }
    }
    if (method_array_count == 0) {
      continue;
    }
    ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
    for (size_t j = 0; j < method_array_count; ++j) {
      ArtMethod* implementation_method =
          method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
      if (ignore_copied_methods && implementation_method->IsCopied()) {
        continue;
      }
      DCHECK(implementation_method != nullptr);
      // Miranda methods cannot be used to implement an interface method, but they are safe to put
      // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
      // or interface methods in the IMT here they will not create extra conflicts since we compare
      // names and signatures in SetIMTRef.
      ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
      const uint32_t imt_index = interface_method->GetImtIndex();

      // There is only any conflicts if all of the interface methods for an IMT slot don't have
      // the same implementation method, keep track of this to avoid creating a conflict table in
      // this case.

      // Conflict table size for each IMT slot.
      ++conflict_counts[imt_index];

      SetIMTRef(unimplemented_method,
                imt_conflict_method,
                implementation_method,
                /*out*/new_conflict,
                /*out*/&imt[imt_index]);
    }
  }

  if (create_conflict_tables) {
    // Create the conflict tables.
    LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
    for (size_t i = 0; i < ImTable::kSize; ++i) {
      size_t conflicts = conflict_counts[i];
      if (imt[i] == imt_conflict_method) {
        // Slot is in conflict: give it its own conflict method with a table
        // sized for all methods counted for this slot.
        ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
        if (new_table != nullptr) {
          ArtMethod* new_conflict_method =
              Runtime::Current()->CreateImtConflictMethod(linear_alloc);
          new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
          imt[i] = new_conflict_method;
        } else {
          // Allocation failure: fall back to the shared conflict method.
          LOG(ERROR) << "Failed to allocate conflict table";
          imt[i] = imt_conflict_method;
        }
      } else {
        DCHECK_NE(imt[i], imt_conflict_method);
      }
    }

    // Second pass: append each (interface method, implementation) pair to the
    // conflict table of its slot, skipping slots that did not end up in conflict.
    for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
      ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
      const size_t method_array_count = if_table->GetMethodArrayCount(i);
      // Virtual methods can be larger than the if table methods if there are default methods.
      if (method_array_count == 0) {
        continue;
      }
      ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
      for (size_t j = 0; j < method_array_count; ++j) {
        ArtMethod* implementation_method =
            method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
        if (ignore_copied_methods && implementation_method->IsCopied()) {
          continue;
        }
        DCHECK(implementation_method != nullptr);
        ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
        const uint32_t imt_index = interface_method->GetImtIndex();
        // Only slots holding a per-slot conflict method (a runtime method that is
        // neither the unimplemented marker nor the shared conflict method) get entries.
        if (!imt[imt_index]->IsRuntimeMethod() ||
            imt[imt_index] == unimplemented_method ||
            imt[imt_index] == imt_conflict_method) {
          continue;
        }
        ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
        const size_t num_entries = table->NumEntries(image_pointer_size_);
        table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
        table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
      }
    }
  }
}
6418 
6419 namespace {
6420 
6421 // Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
6422 // set.
NotSubinterfaceOfAny(const ScopedArenaHashSet<mirror::Class * > & classes,ObjPtr<mirror::Class> val)6423 static bool NotSubinterfaceOfAny(
6424     const ScopedArenaHashSet<mirror::Class*>& classes,
6425     ObjPtr<mirror::Class> val)
6426     REQUIRES(Roles::uninterruptible_)
6427     REQUIRES_SHARED(Locks::mutator_lock_) {
6428   DCHECK(val != nullptr);
6429   for (ObjPtr<mirror::Class> c : classes) {
6430     if (val->IsAssignableFrom(c)) {
6431       return false;
6432     }
6433   }
6434   return true;
6435 }
6436 
// We record new interfaces by the index of the direct interface and the index in the
// direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
struct NewInterfaceReference {
  // Index of the direct interface in the class's declared interface list.
  uint32_t direct_interface_index;
  // Index within that direct interface's `IfTable`, or `dex::kDexNoIndex` when
  // the reference denotes the direct interface itself.
  uint32_t direct_interface_iftable_index;
};
6443 
// Accessor for the direct interfaces of a proxy class, which are supplied as an
// explicit object array rather than read from a dex file type list.
class ProxyInterfacesAccessor {
 public:
  explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : interfaces_(interfaces) {}

  // Returns the number of direct interfaces.
  size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
    return interfaces_->GetLength();
  }

  // Returns the interface at `index`; `index` must be less than GetLength().
  ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(index, GetLength());
    return interfaces_->GetWithoutChecks(index);
  }

 private:
  Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
};
6462 
// Accessor for the direct interfaces of a regular (non-proxy) class, read from
// the class's dex file interface type list.
class NonProxyInterfacesAccessor {
 public:
  NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : interfaces_(klass->GetInterfaceTypeList()),
        class_linker_(class_linker),
        klass_(klass) {
    DCHECK(!klass->IsProxyClass());
  }

  // Returns the number of direct interfaces (0 if the class declares none).
  size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
    return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
  }

  // Returns the interface at `index` by looking up its resolved type.
  // NOTE(review): assumes the interface types were resolved earlier in linking,
  // since LookupResolvedType does not resolve — confirm with callers.
  ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(index, GetLength());
    dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
    return class_linker_->LookupResolvedType(type_index, klass_.Get());
  }

 private:
  // May be null when the class declares no interfaces.
  const dex::TypeList* interfaces_;
  ClassLinker* class_linker_;
  Handle<mirror::Class> klass_;
};
6488 
6489 // Finds new interfaces to add to the interface table in addition to superclass interfaces.
6490 //
6491 // Interfaces in the interface table must satisfy the following constraint:
6492 //     all I, J: Interface | I <: J implies J precedes I
6493 // (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
6494 // to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
6495 //
6496 // This function returns a list of references for all interfaces in the transitive
6497 // closure of the direct interfaces that are not in the superclass interfaces.
6498 // The entries in the list are ordered to satisfy the interface table ordering
6499 // constraint and therefore the interface table formed by appending them to the
6500 // superclass interface table shall also satisfy that constraint.
// See the block comment above: collects references to all interfaces in the
// transitive closure of the direct interfaces that are not already in the
// superclass iftable, ordered so that supertypes precede subtypes. Results are
// written into `initial_storage`, spilling into `supplemental_storage` if they
// do not fit; the returned ArrayRef points at whichever storage ended up used.
template <typename InterfaceAccessor>
ALWAYS_INLINE
static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
    ObjPtr<mirror::IfTable> super_iftable,
    size_t super_ifcount,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces,
    ArrayRef<NewInterfaceReference> initial_storage,
    /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Raw mirror::Class* pointers below are safe only while suspension is disallowed.
  ScopedAssertNoThreadSuspension nts(__FUNCTION__);

  // This is the set of all classes already in the iftable. Used to make checking
  // if a class has already been added quicker.
  constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
  mirror::Class* buffer[kBufferSize];
  ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
  // The first super_ifcount elements are from the superclass. We note that they are already added.
  for (size_t i = 0; i < super_ifcount; i++) {
    ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
    DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
    classes_in_iftable.Put(iface.Ptr());
  }

  ArrayRef<NewInterfaceReference> current_storage = initial_storage;
  DCHECK_NE(current_storage.size(), 0u);
  size_t num_new_interfaces = 0u;
  // Appends one reference, doubling into `supplemental_storage` when the current
  // storage is full. The copy is skipped when we are already in the vector
  // (resize preserves existing elements in that case).
  auto insert_reference = [&](uint32_t direct_interface_index,
                              uint32_t direct_interface_iface_index) {
    if (UNLIKELY(num_new_interfaces == current_storage.size())) {
      bool copy = current_storage.data() != supplemental_storage->data();
      supplemental_storage->resize(2u * num_new_interfaces);
      if (copy) {
        std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
      }
      current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
    }
    current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
    ++num_new_interfaces;
  };

  for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);

    // Let us call the first filled_ifcount elements of iftable the current-iface-list.
    // At this point in the loop current-iface-list has the invariant that:
    //    for every pair of interfaces I,J within it:
    //      if index_of(I) < index_of(J) then I is not a subtype of J

    // If we have already seen this element then all of its super-interfaces must already be in the
    // current-iface-list so we can skip adding it.
    if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
      // We haven't seen this interface so add all of its super-interfaces onto the
      // current-iface-list, skipping those already on it.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
          DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
          classes_in_iftable.Put(super_interface.Ptr());
          insert_reference(i, j);
        }
      }
      // Add this interface reference after all of its super-interfaces.
      DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
      classes_in_iftable.Put(interface.Ptr());
      insert_reference(i, dex::kDexNoIndex);
    } else if (kIsDebugBuild) {
      // Check all super-interfaces are already in the list.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
            << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
            << ", a superinterface of " << interface->PrettyClass();
      }
    }
  }
  return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
}
6581 
// Builds the interface lookup table (IfTable) for `klass` from its superclass's
// iftable plus any newly implemented interfaces. Returns the superclass iftable
// unchanged when no new interfaces are added; returns null with a pending
// exception on error (non-interface in the implements list, or OOM).
template <typename InterfaceAccessor>
static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
    Thread* self,
    Handle<mirror::Class> klass,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass->HasSuperClass());
  ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
  DCHECK(super_iftable != nullptr);
  const size_t num_interfaces = interfaces.GetLength();

  // If there are no new interfaces, return the interface table from superclass.
  // If any implementation methods are overridden, we shall copy the table and
  // the method arrays that contain any differences (copy-on-write).
  if (num_interfaces == 0) {
    return super_iftable;
  }

  // Check that every class being implemented is an interface.
  for (size_t i = 0; i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
    DCHECK(interface != nullptr);
    if (UNLIKELY(!interface->IsInterface())) {
      ThrowIncompatibleClassChangeError(klass.Get(),
                                        "Class %s implements non-interface class %s",
                                        klass->PrettyDescriptor().c_str(),
                                        interface->PrettyDescriptor().c_str());
      return nullptr;
    }
  }

  // Stack storage for the common case; FindNewIfTableInterfaces spills into the
  // arena-backed vector if more than kMaxStackReferences interfaces are new.
  static constexpr size_t kMaxStackReferences = 16;
  NewInterfaceReference initial_storage[kMaxStackReferences];
  ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
  const size_t super_ifcount = super_iftable->Count();
  ArrayRef<const NewInterfaceReference> new_interface_references =
      FindNewIfTableInterfaces(
          super_iftable,
          super_ifcount,
          allocator,
          interfaces,
          ArrayRef<NewInterfaceReference>(initial_storage),
          &supplemental_storage);

  // If all declared interfaces were already present in superclass interface table,
  // return the interface table from superclass. See above.
  if (UNLIKELY(new_interface_references.empty())) {
    return super_iftable;
  }

  // Create the interface table.
  size_t ifcount = super_ifcount + new_interface_references.size();
  ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
  if (UNLIKELY(iftable == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Fill in table with superclass's iftable.
  if (super_ifcount != 0) {
    // Reload `super_iftable` as it may have been clobbered by the allocation.
    super_iftable = klass->GetSuperClass()->GetIfTable();
    for (size_t i = 0; i != super_ifcount; i++) {
      ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
      DCHECK(super_interface != nullptr);
      iftable->SetInterface(i, super_interface);
      ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
      if (method_array != nullptr) {
        iftable->SetMethodArray(i, method_array);
      }
    }
  }
  // Fill in the table with additional interfaces.
  size_t current_index = super_ifcount;
  for (NewInterfaceReference ref : new_interface_references) {
    // A reference is either a direct interface itself (kDexNoIndex) or an entry
    // from a direct interface's own iftable (a transitive super-interface).
    ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
    ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
        ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
        : direct_interface;
    iftable->SetInterface(current_index, new_interface);
    ++current_index;
  }
  DCHECK_EQ(current_index, ifcount);

  if (kIsDebugBuild) {
    // Check that the iftable is ordered correctly.
    for (size_t i = 0; i < ifcount; i++) {
      ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
      for (size_t j = i + 1; j < ifcount; j++) {
        ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
        // !(if_a <: if_b)
        CHECK(!if_b->IsAssignableFrom(if_a))
            << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
            << ") extends "
            << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
            << "interface list.";
      }
    }
  }

  return iftable;
}
6684 
6685 // Check that all vtable entries are present in this class's virtuals or are the same as a
6686 // superclasses vtable entry.
CheckClassOwnsVTableEntries(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)6687 void CheckClassOwnsVTableEntries(Thread* self,
6688                                  Handle<mirror::Class> klass,
6689                                  PointerSize pointer_size)
6690     REQUIRES_SHARED(Locks::mutator_lock_) {
6691   StackHandleScope<2> hs(self);
6692   Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
6693   ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
6694   Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
6695   int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
6696   for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
6697     ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
6698     CHECK(m != nullptr);
6699 
6700     if (m->GetMethodIndexDuringLinking() != i) {
6701       LOG(WARNING) << m->PrettyMethod()
6702                    << " has an unexpected method index for its spot in the vtable for class"
6703                    << klass->PrettyClass();
6704     }
6705     ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
6706     auto is_same_method = [m] (const ArtMethod& meth) {
6707       return &meth == m;
6708     };
6709     if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
6710           std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
6711       LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
6712                    << klass->PrettyClass() << " or any of its superclasses!";
6713     }
6714   }
6715 }
6716 
6717 // Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
6718 // method is overridden in a subclass.
6719 template <PointerSize kPointerSize>
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass)6720 void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
6721     REQUIRES_SHARED(Locks::mutator_lock_) {
6722   StackHandleScope<1> hs(self);
6723   Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
6724   int32_t num_entries = vtable->GetLength();
6725 
6726   // Observations:
6727   //   * The older implementation was O(n^2) and got too expensive for apps with larger classes.
6728   //   * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
6729   //     for many classes outside of libcore a cross-dexfile check has to be run anyways.
6730   //   * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
6731   //     to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
6732   //   * The single-pass algorithm will trade memory for speed, but that is OK.
6733 
6734   CHECK_GT(num_entries, 0);
6735 
6736   auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
6737     ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
6738     ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
6739     LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
6740                  << klass->PrettyClass() << " in method " << m1->PrettyMethod()
6741                 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m2) << ") and "
6742                 << m2->PrettyMethod() << "  (0x" << std::hex
6743                 << reinterpret_cast<uintptr_t>(m2) << ")";
6744   };
6745   struct BaseHashType {
6746     static size_t HashCombine(size_t seed, size_t val) {
6747       return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
6748     }
6749   };
6750 
6751   // Check assuming all entries come from the same dex file.
6752   {
6753     // Find the first interesting method and its dex file.
6754     int32_t start = 0;
6755     for (; start < num_entries; ++start) {
6756       ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
6757       // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
6758       // maybe).
6759       if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6760                                   vtable_entry->GetAccessFlags())) {
6761         continue;
6762       }
6763       break;
6764     }
6765     if (start == num_entries) {
6766       return;
6767     }
6768     const DexFile* dex_file =
6769         vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
6770             GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
6771 
6772     // Helper function to avoid logging if we have to run the cross-file checks.
6773     auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
6774       // Use a map to store seen entries, as the storage space is too large for a bitvector.
6775       using PairType = std::pair<uint32_t, uint16_t>;
6776       struct PairHash : BaseHashType {
6777         size_t operator()(const PairType& key) const {
6778           return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
6779         }
6780       };
6781       HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
6782       seen.reserve(2 * num_entries);
6783       bool need_slow_path = false;
6784       bool found_dup = false;
6785       for (int i = start; i < num_entries; ++i) {
6786         // Can use Unchecked here as the start loop already ensured that the arrays are correct
6787         // wrt/ kPointerSize.
6788         ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
6789         if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6790                                     vtable_entry->GetAccessFlags())) {
6791           continue;
6792         }
6793         ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
6794         if (dex_file != m->GetDexFile()) {
6795           need_slow_path = true;
6796           break;
6797         }
6798         const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
6799         PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
6800         auto it = seen.find(pair);
6801         if (it != seen.end()) {
6802           found_dup = true;
6803           if (log_warn) {
6804             log_fn(it->second, i);
6805           }
6806         } else {
6807           seen.insert(std::make_pair(pair, i));
6808         }
6809       }
6810       return std::make_pair(need_slow_path, found_dup);
6811     };
6812     std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
6813     if (!result.first) {
6814       if (result.second) {
6815         check_fn(/* log_warn= */ true);
6816       }
6817       return;
6818     }
6819   }
6820 
6821   // Need to check across dex files.
6822   struct Entry {
6823     size_t cached_hash = 0;
6824     uint32_t name_len = 0;
6825     const char* name = nullptr;
6826     Signature signature = Signature::NoSignature();
6827 
6828     Entry() = default;
6829     Entry(const Entry& other) = default;
6830     Entry& operator=(const Entry& other) = default;
6831 
6832     Entry(const DexFile* dex_file, const dex::MethodId& mid)
6833         : name_len(0),  // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
6834           // This call writes `name_len` and it is therefore necessary that the
6835           // initializer for `name_len` comes before it, otherwise the value
6836           // from the call would be overwritten by that initializer.
6837           name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
6838           signature(dex_file->GetMethodSignature(mid)) {
6839       // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
6840       if (name[name_len] != 0) {
6841         name_len += strlen(name + name_len);
6842       }
6843     }
6844 
6845     bool operator==(const Entry& other) const {
6846       return name_len == other.name_len &&
6847              memcmp(name, other.name, name_len) == 0 &&
6848              signature == other.signature;
6849     }
6850   };
6851   struct EntryHash {
6852     size_t operator()(const Entry& key) const {
6853       return key.cached_hash;
6854     }
6855   };
6856   HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
6857   for (int32_t i = 0; i < num_entries; ++i) {
6858     // Can use Unchecked here as the first loop already ensured that the arrays are correct
6859     // wrt/ kPointerSize.
6860     ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
6861     // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
6862     // maybe).
6863     if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6864                                 vtable_entry->GetAccessFlags())) {
6865       continue;
6866     }
6867     ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
6868     const DexFile* dex_file = m->GetDexFile();
6869     const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
6870 
6871     Entry e(dex_file, mid);
6872 
6873     size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
6874     size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
6875     e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
6876                                               sig_hash);
6877 
6878     auto it = map.find(e);
6879     if (it != map.end()) {
6880       log_fn(it->second, i);
6881     } else {
6882       map.insert(std::make_pair(e, i));
6883     }
6884   }
6885 }
6886 
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)6887 void CheckVTableHasNoDuplicates(Thread* self,
6888                                 Handle<mirror::Class> klass,
6889                                 PointerSize pointer_size)
6890     REQUIRES_SHARED(Locks::mutator_lock_) {
6891   switch (pointer_size) {
6892     case PointerSize::k64:
6893       CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
6894       break;
6895     case PointerSize::k32:
6896       CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
6897       break;
6898   }
6899 }
6900 
// Sanity checks on a vtable during linking: verify that the class owns its vtable
// entries and that no two entries share a method name and signature.
static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CheckClassOwnsVTableEntries(self, klass, pointer_size);
  CheckVTableHasNoDuplicates(self, klass, pointer_size);
}
6906 
6907 }  // namespace
6908 
6909 template <PointerSize kPointerSize>
6910 class ClassLinker::LinkMethodsHelper {
6911  public:
  // Construct a helper for linking the virtual and interface methods of `klass`.
  // Sets up the arena-backed scratch state (`stack_`/`allocator_`) and the buffer
  // for copied-method records used for the duration of the linking operation.
  LinkMethodsHelper(ClassLinker* class_linker,
                    Handle<mirror::Class> klass,
                    Thread* self,
                    Runtime* runtime)
      : class_linker_(class_linker),
        klass_(klass),
        self_(self),
        runtime_(runtime),
        stack_(runtime->GetLinearAlloc()->GetArenaPool()),
        allocator_(&stack_),
        copied_method_records_(copied_method_records_initial_buffer_,
                               kCopiedMethodRecordInitialBufferSize,
                               allocator_.Adapter()),
        num_new_copied_methods_(0u) {
  }
6927 
  // Links the virtual and interface methods for the given class.
  //
  // Arguments:
  // * self - The current thread.
  // * klass - class, whose vtable will be filled in.
  // * interfaces - implemented interfaces for a proxy class, otherwise null.
  // * out_new_conflict - whether there is a new conflict compared to the superclass.
  // * out_imt - interface method table to fill.
  bool LinkMethods(
      Thread* self,
      Handle<mirror::Class> klass,
      Handle<mirror::ObjectArray<mirror::Class>> interfaces,
      bool* out_new_conflict,
      ArtMethod** out_imt)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Allocate a pointer array.
  static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Allocate method arrays for interfaces.
  bool AllocateIfTableMethodArrays(Thread* self,
                                   Handle<mirror::Class> klass,
                                   Handle<mirror::IfTable> iftable)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Assign vtable indexes to declared virtual methods for a non-interface class other
  // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
  // This function also assigns vtable indexes for interface methods in new interfaces
  // and records data for copied methods which shall be referenced by the vtable.
  size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
                             ObjPtr<mirror::Class> super_class,
                             bool is_super_abstract,
                             size_t num_virtual_methods,
                             ObjPtr<mirror::IfTable> iftable)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Record the copied methods needed when `klass` is itself an interface.
  // NOTE(review): inferred from the name and parameters; confirm against the definition.
  bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
                                     size_t num_virtual_methods,
                                     ObjPtr<mirror::IfTable> iftable)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Special-cased linking for `java.lang.Object` itself; COLD_ATTR because this
  // path is rarely taken. NOTE(review): inferred from the name; see definition.
  bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;

  // Reallocate the class' method array to add space for copied methods.
  // NOTE(review): inferred from the name; see definition for details.
  void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);

  // Fill in the iftable method arrays and IMT, reporting any new IMT conflict
  // through `out_new_conflict`. NOTE(review): inferred from parameter names; see definition.
  bool FinalizeIfTable(Handle<mirror::Class> klass,
                       MutableHandle<mirror::IfTable> iftable,
                       Handle<mirror::PointerArray> vtable,
                       bool is_klass_abstract,
                       bool is_super_abstract,
                       bool* out_new_conflict,
                       ArtMethod** out_imt)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Debug-build helper: overwrite the old, now-unused method array with 0xFE
  // bytes so that any stale pointer into it is noticed quickly. No-op in
  // release builds, when there was no old array, or when it was reused in place.
  void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
                         LengthPrefixedArray<ArtMethod>* methods) {
    if (kIsDebugBuild && old_methods != nullptr) {
      CHECK(methods != nullptr);
      // Put some random garbage in old methods to help find stale pointers.
      if (methods != old_methods) {
        // Need to make sure the GC is not running since it could be scanning the methods we are
        // about to overwrite.
        // Note: the thread-state change must precede entering the GC critical section.
        ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
        gc::ScopedGCCriticalSection gcs(self_,
                                        gc::kGcCauseClassLinker,
                                        gc::kCollectorTypeClassLinker);
        const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
                                                                            kMethodSize,
                                                                            kMethodAlignment);
        memset(old_methods, 0xFEu, old_size);
      }
    }
  }
7003 
7004   NO_INLINE
LogNewVirtuals(LengthPrefixedArray<ArtMethod> * methods) const7005   void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
7006       REQUIRES_SHARED(Locks::mutator_lock_) {
7007     ObjPtr<mirror::Class> klass = klass_.Get();
7008     size_t num_new_copied_methods = num_new_copied_methods_;
7009     size_t old_method_count = methods->size() - num_new_copied_methods;
7010     size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7011     size_t num_miranda_methods = 0u;
7012     size_t num_overriding_default_methods = 0u;
7013     size_t num_default_methods = 0u;
7014     size_t num_overriding_default_conflict_methods = 0u;
7015     size_t num_default_conflict_methods = 0u;
7016     for (size_t i = 0; i != num_new_copied_methods; ++i) {
7017       ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7018       if (m.IsDefault()) {
7019         if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7020           ++num_overriding_default_methods;
7021         } else {
7022           ++num_default_methods;
7023         }
7024       } else if (m.IsDefaultConflicting()) {
7025         if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7026           ++num_overriding_default_conflict_methods;
7027         } else {
7028           ++num_default_conflict_methods;
7029         }
7030       } else {
7031         DCHECK(m.IsMiranda());
7032         ++num_miranda_methods;
7033       }
7034     }
7035     VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7036                        << " default_methods=" << num_default_methods
7037                        << " overriding_default_methods=" << num_overriding_default_methods
7038                        << " default_conflict_methods=" << num_default_conflict_methods
7039                        << " overriding_default_conflict_methods="
7040                        << num_overriding_default_conflict_methods;
7041   }
7042 
7043   class MethodIndexEmptyFn {
7044    public:
MakeEmpty(uint32_t & item) const7045     void MakeEmpty(uint32_t& item) const {
7046       item = dex::kDexNoIndex;
7047     }
IsEmpty(const uint32_t & item) const7048     bool IsEmpty(const uint32_t& item) const {
7049       return item == dex::kDexNoIndex;
7050     }
7051   };
7052 
  // Debug-build index checker: CHECKs that a vtable index is in bounds.
  class VTableIndexCheckerDebug {
   protected:
    explicit VTableIndexCheckerDebug(size_t vtable_length)
        : vtable_length_(vtable_length) {}

    void CheckIndex(uint32_t index) const {
      CHECK_LT(index, vtable_length_);
    }

   private:
    uint32_t vtable_length_;
  };

  // Release-build counterpart: the check compiles to nothing.
  class VTableIndexCheckerRelease {
   protected:
    explicit VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED) {}
    void CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const {}
  };

  // Bounds checking of vtable indexes is active only in debug builds.
  using VTableIndexChecker =
      std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7074 
  // Reads vtable entries from a raw byte buffer using the template pointer size
  // `kPointerSize`. Index bounds are checked in debug builds via VTableIndexChecker.
  class VTableAccessor : private VTableIndexChecker {
   public:
    VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : VTableIndexChecker(vtable_length),
          raw_vtable_(raw_vtable) {}

    // Decode the `ArtMethod*` stored at `index` with the appropriate pointer width.
    ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
      this->CheckIndex(index);
      uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
      if (kPointerSize == PointerSize::k64) {
        return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
      } else {
        return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
      }
    }

   private:
    // Non-owning pointer into the vtable's element storage.
    uint8_t* raw_vtable_;
  };
7095 
  // Hash functor for a set of vtable indexes keyed by method signature. Can hash
  // either an `ArtMethod*` directly or a vtable index (by loading the entry first).
  class VTableSignatureHash {
   public:
    explicit VTableSignatureHash(VTableAccessor accessor)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : accessor_(accessor) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(accessor_.GetVTableEntry(index));
    }

   private:
    VTableAccessor accessor_;
  };
7115 
  // Equality functor for the vtable signature set: entries compare equal when
  // their method name and signature match (see MethodSignatureEquals).
  class VTableSignatureEqual {
   public:
    explicit VTableSignatureEqual(VTableAccessor accessor)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : accessor_(accessor) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
      return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
    }

   private:
    VTableAccessor accessor_;
  };

  // Set of vtable indexes, deduplicated by method name and signature.
  using VTableSignatureSet =
      ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
7138 
  // Hash functor over indexes into `klass_`'s declared virtual methods, hashing
  // by method name and signature (proxies resolved to their interface method).
  class DeclaredVirtualSignatureHash {
   public:
    explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : klass_(klass) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
      ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
      return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
    }

   private:
    ObjPtr<mirror::Class> klass_;
  };

  // Equality functor over indexes into `klass_`'s declared virtual methods.
  class DeclaredVirtualSignatureEqual {
   public:
    explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : klass_(klass) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
      ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
      return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    // Index-to-index comparison is plain equality — presumably because two distinct
    // declared methods of one class never share a signature; TODO confirm.
    bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
      DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
      return lhs_index == rhs_index;
    }

   private:
    ObjPtr<mirror::Class> klass_;
  };

  // Set of declared-virtual-method indexes, deduplicated by name and signature.
  using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
                                                         MethodIndexEmptyFn,
                                                         DeclaredVirtualSignatureHash,
                                                         DeclaredVirtualSignatureEqual>;
7189 
  // Helper class to keep records for determining the correct copied method to create.
  class CopiedMethodRecord {
   public:
    enum class State : uint32_t {
      // Note: The `*Single` values are used when we know that there is only one interface
      // method with the given signature that's not masked; that method is the main method.
      // We use this knowledge for faster masking check, otherwise we need to search for
      // a masking method through methods of all interfaces that could potentially mask it.
      kAbstractSingle,
      kDefaultSingle,
      kAbstract,
      kDefault,
      kDefaultConflict,
      kUseSuperMethod,
    };

    // Default record: no main method yet, state starts as `kAbstractSingle`.
    CopiedMethodRecord()
        : main_method_(nullptr),
          method_index_(0u),
          state_(State::kAbstractSingle) {}

    // Record with `main_method` destined for vtable slot `vtable_index`.
    CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
        : main_method_(main_method),
          method_index_(vtable_index),
          state_(State::kAbstractSingle) {}

    // Set main method. The new main method must be more specific implementation.
    void SetMainMethod(ArtMethod* main_method) {
      DCHECK(main_method_ != nullptr);
      main_method_ = main_method;
    }

    // The main method is the first encountered default method if any,
    // otherwise the first encountered abstract method.
    ArtMethod* GetMainMethod() const {
      return main_method_;
    }

    // `dex::kDexNoIndex` is the empty-slot marker, so it is never a valid method index.
    void SetMethodIndex(size_t method_index) {
      DCHECK_NE(method_index, dex::kDexNoIndex);
      method_index_ = method_index;
    }

    size_t GetMethodIndex() const {
      DCHECK_NE(method_index_, dex::kDexNoIndex);
      return method_index_;
    }

    void SetState(State state) {
      state_ = state;
    }

    State GetState() const {
      return state_;
    }
7245 
    // Update this record's state for a default `interface_method` declared by the
    // interface at iftable position `index`. The only possible transition here is
    // kDefault -> kDefaultConflict, taken when the new default method is not masked
    // by an overriding method in a later subinterface.
    ALWAYS_INLINE
    void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
                                 ArtMethod* interface_method,
                                 ObjPtr<mirror::IfTable> iftable,
                                 size_t ifcount,
                                 size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      DCHECK(iface == interface_method->GetDeclaringClass());
      DCHECK(iface == iftable->GetInterface(index));
      DCHECK(interface_method->IsDefault());
      if (GetState() != State::kDefaultConflict) {
        DCHECK(GetState() == State::kDefault);
        // We do not record all overriding methods, so we need to walk over all
        // interfaces that could mask the `interface_method`.
        if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
          return;  // Found an overriding method that masks `interface_method`.
        }
        // We have a new default method that's not masked by any other method.
        SetState(State::kDefaultConflict);
      }
    }
7269 
    // Update this record's state for `interface_method` found at vtable slot
    // `vtable_index`. Defers to `UpdateStateImpl()` with a slow-path predicate
    // that searches iftable entries after `index` for a masking implementation.
    ALWAYS_INLINE
    void UpdateState(ObjPtr<mirror::Class> iface,
                     ArtMethod* interface_method,
                     size_t vtable_index,
                     ObjPtr<mirror::IfTable> iftable,
                     size_t ifcount,
                     size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      if (kIsDebugBuild) {
        if (interface_method->IsCopied()) {
          // Called from `FinalizeState()` for a default method from superclass.
          // The `index` points to the last interface inherited from the superclass
          // as we need to search only the new interfaces for masking methods.
          DCHECK(interface_method->IsDefault());
        } else {
          DCHECK(iface == interface_method->GetDeclaringClass());
          DCHECK(iface == iftable->GetInterface(index));
        }
      }
      DCHECK_EQ(vtable_index, method_index_);
      auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
        return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
      };
      UpdateStateImpl(iface, interface_method, slow_is_masked);
    }
7297 
    // Finalize this record's state given the copied `super_method` inherited from
    // the superclass for the same vtable slot. Decides whether the superclass'
    // copied method can be reused (`kUseSuperMethod`) or a new copied method is
    // needed because new interfaces changed which defaults apply.
    ALWAYS_INLINE
    void FinalizeState(ArtMethod* super_method,
                       size_t vtable_index,
                       ObjPtr<mirror::IfTable> iftable,
                       size_t ifcount,
                       ObjPtr<mirror::IfTable> super_iftable,
                       size_t super_ifcount)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(super_method->IsCopied());
      DCHECK_EQ(vtable_index, method_index_);
      DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
      DCHECK_NE(super_ifcount, 0u);
      if (super_method->IsDefault()) {
        if (UNLIKELY(super_method->IsDefaultConflicting())) {
          // Some of the default methods that contributed to the conflict in the superclass
          // may be masked by new interfaces. Walk over all the interfaces and update state
          // as long as the current state is not `kDefaultConflict`.
          size_t i = super_ifcount;
          while (GetState() != State::kDefaultConflict && i != 0u) {
            --i;
            ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
            DCHECK(iface == super_iftable->GetInterface(i));
            auto [found, index] =
                MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
            if (found) {
              ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
              auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
                // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
                // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
                // use the `super_iftable` filled with implementation methods for that range.
                return ContainsImplementingMethod(
                           super_iftable, i + 1u, super_ifcount, iface, super_method) ||
                       ContainsImplementingMethod(
                           iftable, super_ifcount, ifcount, iface, vtable_index);
              };
              UpdateStateImpl(iface, interface_method, slow_is_masked);
            }
          }
          if (GetState() == State::kDefaultConflict) {
            SetState(State::kUseSuperMethod);
          }
        } else {
          // There was exactly one default method in superclass interfaces that was
          // not masked by subinterfaces. Use `UpdateState()` to process it and pass
          // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
          ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
          UpdateState(
              iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
          if (GetMainMethod() == super_method) {
            DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
            SetState(State::kUseSuperMethod);
          }
        }
      } else {
        DCHECK(super_method->IsMiranda());
        // Any default methods with this signature in superclass interfaces have been
        // masked by subinterfaces. Check if we can reuse the miranda method.
        if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
          SetState(State::kUseSuperMethod);
        }
      }
    }
7360 
7361    private:
    // Core state machine for the record. Given a newly seen `interface_method`
    // from `iface`, advance the state, using `slow_is_masked` to decide whether
    // the method is masked by an overriding method in another interface.
    template <typename Predicate>
    ALWAYS_INLINE
    void UpdateStateImpl(ObjPtr<mirror::Class> iface,
                         ArtMethod* interface_method,
                         Predicate&& slow_is_masked)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      bool have_default = false;
      switch (GetState()) {
        case State::kDefaultSingle:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstractSingle:
          // In the `*Single` states the main method is the only unmasked method,
          // so the masking check is a cheap Implements() test.
          if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
            return;  // The main method masks the `interface_method`.
          }
          if (!interface_method->IsDefault()) {
            SetState(have_default ? State::kDefault : State::kAbstract);
            return;
          }
          break;
        case State::kDefault:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstract:
          if (!interface_method->IsDefault()) {
            return;  // Keep the same state. We do not need to check for masking.
          }
          // We do not record all overriding methods, so we need to walk over all
          // interfaces that could mask the `interface_method`. The provided
          // predicate `slow_is_masked()` does that.
          if (slow_is_masked()) {
            return;  // Found an overriding method that masks `interface_method`.
          }
          break;
        case State::kDefaultConflict:
          return;  // The state cannot change anymore.
        default:
          LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
          UNREACHABLE();
      }
      // We have a new default method that's not masked by any other method.
      DCHECK(interface_method->IsDefault());
      if (have_default) {
        SetState(State::kDefaultConflict);
      } else {
        SetMainMethod(interface_method);
        SetState(State::kDefault);
      }
    }
7411 
7412     // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7413     // that declares a method with the same name and signature as 'interface_method'.
7414     //
7415     // Arguments
7416     // - iftable: The iftable we are searching for an overriding method.
7417     // - begin:   The start of the range to search.
7418     // - end:     The end of the range to search.
7419     // - iface:   The interface we are checking to see if anything overrides.
7420     // - interface_method:
7421     //            The interface method providing a name and signature we're searching for.
7422     //
7423     // Returns whether an overriding method was found in any subinterface of `iface`.
ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,size_t begin,size_t end,ObjPtr<mirror::Class> iface,ArtMethod * interface_method)7424     static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
7425                                            size_t begin,
7426                                            size_t end,
7427                                            ObjPtr<mirror::Class> iface,
7428                                            ArtMethod* interface_method)
7429         REQUIRES_SHARED(Locks::mutator_lock_) {
7430       for (size_t i = begin; i != end; ++i) {
7431         ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
7432         for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
7433           if (MethodSignatureEquals(&current_method, interface_method)) {
7434             // Check if the i'th interface is a subtype of this one.
7435             if (current_iface->Implements(iface)) {
7436               return true;
7437             }
7438             break;
7439           }
7440         }
7441       }
7442       return false;
7443     }
7444 
7445     // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7446     // that declares a method implemented by 'target'. This is an optimized version of
7447     // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
7448     // of comparing signatures for declared interface methods.
7449     //
7450     // Arguments
7451     // - iftable: The iftable we are searching for an overriding method.
7452     // - begin:   The start of the range to search.
7453     // - end:     The end of the range to search.
7454     // - iface:   The interface we are checking to see if anything overrides.
7455     // - target:  The implementation method we're searching for.
7456     //            Note that the new `iftable` is filled with vtable indexes for new interfaces,
7457     //            so this needs to be the vtable index if we're searching that range.
7458     //
7459     // Returns whether the `target` was found in a method array for any subinterface of `iface`.
7460     template <typename TargetType>
ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,size_t begin,size_t end,ObjPtr<mirror::Class> iface,TargetType target)7461     static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
7462                                            size_t begin,
7463                                            size_t end,
7464                                            ObjPtr<mirror::Class> iface,
7465                                            TargetType target)
7466         REQUIRES_SHARED(Locks::mutator_lock_) {
7467       for (size_t i = begin; i != end; ++i) {
7468         if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
7469             iftable->GetInterface(i)->Implements(iface)) {
7470           return true;
7471         }
7472       }
7473       return false;
7474     }
7475 
7476     template <typename TargetType>
MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,TargetType target)7477     static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
7478                                                        TargetType target)
7479         REQUIRES_SHARED(Locks::mutator_lock_) {
7480       size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
7481       for (size_t j = 0; j != num_methods; ++j) {
7482         if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
7483           return {true, j};
7484         }
7485       }
7486       return {false, 0};
7487     }
7488 
7489     ArtMethod* main_method_;
7490     uint32_t method_index_;
7491     State state_;
7492   };
7493 
7494   class CopiedMethodRecordEmptyFn {
7495    public:
MakeEmpty(CopiedMethodRecord & item) const7496     void MakeEmpty(CopiedMethodRecord& item) const {
7497       item = CopiedMethodRecord();
7498     }
IsEmpty(const CopiedMethodRecord & item) const7499     bool IsEmpty(const CopiedMethodRecord& item) const {
7500       return item.GetMainMethod() == nullptr;
7501     }
7502   };
7503 
7504   class CopiedMethodRecordHash {
7505    public:
7506     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(ArtMethod * method) const7507     size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7508       DCHECK(method != nullptr);
7509       return ComputeMethodHash(method);
7510     }
7511 
7512     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(const CopiedMethodRecord & record) const7513     size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
7514       return (*this)(record.GetMainMethod());
7515     }
7516   };
7517 
7518   class CopiedMethodRecordEqual {
7519    public:
7520     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(const CopiedMethodRecord & lhs_record,ArtMethod * rhs) const7521     bool operator()(const CopiedMethodRecord& lhs_record,
7522                     ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7523       ArtMethod* lhs = lhs_record.GetMainMethod();
7524       DCHECK(lhs != nullptr);
7525       DCHECK(rhs != nullptr);
7526       return MethodSignatureEquals(lhs, rhs);
7527     }
7528 
7529     // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
operator ()(const CopiedMethodRecord & lhs_record,const CopiedMethodRecord & rhs_record) const7530     bool operator()(const CopiedMethodRecord& lhs_record,
7531                     const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
7532       return (*this)(lhs_record, rhs_record.GetMainMethod());
7533     }
7534   };
7535 
  // Set of copied method records, deduplicated by method name and signature
  // (see `CopiedMethodRecordHash` and `CopiedMethodRecordEqual`).
  using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
                                                   CopiedMethodRecordEmptyFn,
                                                   CopiedMethodRecordHash,
                                                   CopiedMethodRecordEqual>;

  // Size and alignment of an `ArtMethod` for the pointer size this helper is
  // instantiated for; used when (re)allocating method arrays.
  static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
  static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);

  // Linking context: the owning class linker, the class being linked,
  // the current thread and the runtime.
  ClassLinker* class_linker_;
  Handle<mirror::Class> klass_;
  Thread* const self_;
  Runtime* const runtime_;

  // These are allocated on the heap to begin, we then transfer to linear alloc when we re-create
  // the virtual methods array.
  // Need to use low 4GB arenas for compiler or else the pointers wont fit in 32 bit method array
  // during cross compilation.
  // Use the linear alloc pool since this one is in the low 4gb for the compiler.
  ArenaStack stack_;
  ScopedArenaAllocator allocator_;

  // If there are multiple methods with the same signature in the superclass vtable
  // (which can happen with a new virtual method having the same signature as an
  // inaccessible package-private method from another package in the superclass),
  // we keep singly-linked lists in this single array that maps vtable index to the
  // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
  ArrayRef<uint32_t> same_signature_vtable_lists_;

  // Avoid large allocation for a few copied method records.
  // Keep the initial buffer on the stack to avoid arena allocations
  // if there are no special cases (the first arena allocation is costly).
  static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
  CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
  CopiedMethodRecordSet copied_method_records_;
  // Number of records not in state `kUseSuperMethod`, i.e. methods that shall
  // actually be appended to the class method array (see `ReallocMethods()`).
  size_t num_new_copied_methods_;
};
7572 
// Reallocate the `klass` method array to append the collected copied methods
// (mirandas, copied defaults and default conflicts) from `copied_method_records_`,
// set their access flags and entry points according to the recorded state,
// and install the new array in the class.
template <PointerSize kPointerSize>
NO_INLINE
void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
  // There should be no thread suspension in this function,
  // native allocations do not cause thread suspension.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  size_t num_new_copied_methods = num_new_copied_methods_;
  DCHECK_NE(num_new_copied_methods, 0u);
  const size_t old_method_count = klass->NumMethods();
  const size_t new_method_count = old_method_count + num_new_copied_methods;

  // Attempt to realloc to save RAM if possible.
  LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
  // The Realloced virtual methods aren't visible from the class roots, so there is no issue
  // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
  // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
  // CopyFrom has internal read barriers.
  //
  // TODO We should maybe move some of this into mirror::Class or at least into another method.
  const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
  auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
      class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader())->Realloc(
          self_, old_methods, old_methods_ptr_size, new_size));
  CHECK(methods != nullptr);  // Native allocation failure aborts.

  if (methods != old_methods) {
    StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
    // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
    // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
    for (auto& m : klass->GetMethods(kPointerSize)) {
      out->CopyFrom(&m, kPointerSize);
      ++out;
    }
  }

  // Collect and sort copied method records by the vtable index. This places overriding
  // copied methods first, sorted by the vtable index already assigned in the superclass,
  // followed by copied methods with new signatures in the order in which we encountered
  // them when going over virtual methods of new interfaces.
  // This order is deterministic but implementation-defined.
  //
  // Avoid arena allocation for a few records (the first arena allocation is costly).
  constexpr size_t kSortedRecordsBufferSize = 16;
  CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
  CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
      ? sorted_records_buffer
      : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
  size_t filled_sorted_records = 0u;
  for (CopiedMethodRecord& record : copied_method_records_) {
    // Records in state `kUseSuperMethod` need no new method; skip them.
    if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
      DCHECK_LT(filled_sorted_records, num_new_copied_methods);
      sorted_records[filled_sorted_records] = &record;
      ++filled_sorted_records;
    }
  }
  DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
  std::sort(sorted_records,
            sorted_records + num_new_copied_methods,
            [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
              return lhs->GetMethodIndex() < rhs->GetMethodIndex();
            });

  if (klass->IsInterface()) {
    // Some records may have been pruned. Update method indexes in collected records.
    size_t interface_method_index = klass->NumDeclaredVirtualMethods();
    for (size_t i = 0; i != num_new_copied_methods; ++i) {
      CopiedMethodRecord* record = sorted_records[i];
      DCHECK_LE(interface_method_index, record->GetMethodIndex());
      record->SetMethodIndex(interface_method_index);
      ++interface_method_index;
    }
  }

  // Add copied methods.
  methods->SetSize(new_method_count);
  for (size_t i = 0; i != num_new_copied_methods; ++i) {
    const CopiedMethodRecord* record = sorted_records[i];
    ArtMethod* interface_method = record->GetMainMethod();
    DCHECK(!interface_method->IsCopied());
    ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
    new_method.CopyFrom(interface_method, kPointerSize);
    new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
    // Fix up access flags (and for conflicts, the entry point) based on the record state.
    switch (record->GetState()) {
      case CopiedMethodRecord::State::kAbstractSingle:
      case CopiedMethodRecord::State::kAbstract: {
        DCHECK(!klass->IsInterface());  // We do not create miranda methods for interfaces.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
            << "Miranda method should be abstract but not intrinsic or default!";
        new_method.SetAccessFlags(access_flags | kAccCopied);
        break;
      }
      case CopiedMethodRecord::State::kDefaultSingle:
      case CopiedMethodRecord::State::kDefault: {
        DCHECK(!klass->IsInterface());  // We do not copy default methods for interfaces.
        // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks.
        // TODO This is rather arbitrary. We should maybe support classes where only some of its
        // methods are skip_access_checks.
        DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
        constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
        new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
        break;
      }
      case CopiedMethodRecord::State::kDefaultConflict: {
        // This is a type of default method (there are default method impls, just a conflict)
        // so mark this as a default. We use the `kAccAbstract` flag to distinguish it from
        // invokable copied default method without using a separate access flag but the default
        // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
        // return false. Also clear the kAccSkipAccessChecks bit since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks. Also clear
        // potential kAccSingleImplementation to avoid CHA trying to inline the default method.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
        constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
        new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
        DCHECK(new_method.IsDefaultConflicting());
        DCHECK(!new_method.IsAbstract());
        // The actual method might or might not be marked abstract since we just copied it from
        // a (possibly default) interface method. We need to set its entry point to be the bridge
        // so that the compiler will not invoke the implementation of whatever method we copied
        // from.
        EnsureThrowsInvocationError(class_linker_, &new_method);
        break;
      }
      default:
        LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
        UNREACHABLE();
    }
  }

  if (VLOG_IS_ON(class_linker)) {
    LogNewVirtuals(methods);
  }

  class_linker_->UpdateClassMethods(klass, methods);
}
7719 
// Finish the `iftable` by filling method arrays with actual implementation methods:
// - For interfaces inherited from the superclass, translate each superclass
//   implementation through its vtable index to the (possibly overriding) method
//   in the new `vtable`, copying shared method arrays on first write.
// - For new interfaces, the method arrays currently hold vtable indexes;
//   replace each index with the corresponding vtable method.
// For non-abstract classes, also update `out_imt`/`out_new_conflict` for
// implementation methods that require it.
// Returns false on allocation failure.
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
    Handle<mirror::Class> klass,
    MutableHandle<mirror::IfTable> iftable,
    Handle<mirror::PointerArray> vtable,
    bool is_klass_abstract,
    bool is_super_abstract,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  size_t ifcount = iftable->Count();
  // We do not need a read barrier here as the length is constant, both from-space and
  // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
  size_t super_ifcount =
      klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();

  ClassLinker* class_linker = nullptr;
  ArtMethod* unimplemented_method = nullptr;
  ArtMethod* imt_conflict_method = nullptr;
  uintptr_t imt_methods_begin = 0u;
  size_t imt_methods_size = 0u;
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
  if (!is_klass_abstract) {
    class_linker = class_linker_;
    unimplemented_method = runtime_->GetImtUnimplementedMethod();
    imt_conflict_method = runtime_->GetImtConflictMethod();
    if (is_super_abstract) {
      // There was no IMT in superclass to copy to `out_imt[]`, so we need
      // to fill it with all implementation methods from superclass.
      DCHECK_EQ(imt_methods_begin, 0u);
      imt_methods_size = std::numeric_limits<size_t>::max();  // No method at the last byte.
    } else {
      // If the superclass has IMT, we have already copied it to `out_imt[]` and
      // we do not need to call `SetIMTRef()` for interfaces from superclass when
      // the implementation method is already in the superclass, only for new methods.
      // For simplicity, use the entire method array including direct methods.
      LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
      if (new_methods != nullptr) {
        DCHECK_NE(new_methods->size(), 0u);
        imt_methods_begin = reinterpret_cast<uintptr_t>(&new_methods->At(0));
        imt_methods_size = new_methods->size() * kMethodSize;
      }
    }
  }

  auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // Place method in imt if entry is empty, place conflict otherwise.
    ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
    class_linker->SetIMTRef(unimplemented_method,
                            imt_conflict_method,
                            implementation,
                            /*out*/out_new_conflict,
                            /*out*/imt_ptr);
  };

  // For interfaces inherited from superclass, the new method arrays are empty,
  // so use vtable indexes from implementation methods from the superclass method array.
  for (size_t i = 0; i != super_ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    size_t j = 0;
    // First loop has method array shared with the super class.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      // Check if we need to update IMT with this method, see above.
      // (Unsigned subtraction makes this a single range check against the new methods.)
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
      if (implementation != super_implementation) {
        // Copy-on-write and move to the next loop.
        Thread* self = self_;
        StackHandleScope<2u> hs(self);
        Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
        HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
        // If the iftable itself is still shared with the superclass, copy it first.
        if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
          ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
              mirror::ObjectArray<mirror::Object>::CopyOf(
                  iftable, self, ifcount * mirror::IfTable::kMax));
          if (new_iftable == nullptr) {
            return false;
          }
          iftable.Assign(new_iftable);
        }
        method_array = ObjPtr<mirror::PointerArray>::DownCast(
            mirror::Array::CopyOf(old_method_array, self, num_methods));
        if (method_array == nullptr) {
          return false;
        }
        iftable->SetMethodArray(i, method_array);
        method_array->SetElementPtrSize(j, implementation, kPointerSize);
        ++j;
        break;
      }
    }
    // Second loop (if non-empty) has method array different from the superclass.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      // Check if we need to update IMT with this method, see above.
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
    }
  }

  // New interface method arrays contain vtable indexes. Translate them to methods.
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  for (size_t i = super_ifcount; i != ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    for (size_t j = 0; j != num_methods; ++j) {
      size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      if (!is_klass_abstract) {
        update_imt(iface, j, implementation);
      }
    }
  }

  return true;
}
7861 
7862 NO_INLINE
ThrowIllegalAccessErrorForImplementingMethod(ObjPtr<mirror::Class> klass,ArtMethod * vtable_method,ArtMethod * interface_method)7863 static void ThrowIllegalAccessErrorForImplementingMethod(ObjPtr<mirror::Class> klass,
7864                                                          ArtMethod* vtable_method,
7865                                                          ArtMethod* interface_method)
7866     REQUIRES_SHARED(Locks::mutator_lock_) {
7867   DCHECK(!vtable_method->IsAbstract());
7868   DCHECK(!vtable_method->IsPublic());
7869   ThrowIllegalAccessError(
7870       klass,
7871       "Method '%s' implementing interface method '%s' is not public",
7872       vtable_method->PrettyMethod().c_str(),
7873       interface_method->PrettyMethod().c_str());
7874 }
7875 
7876 template <PointerSize kPointerSize>
AllocPointerArray(Thread * self,size_t length)7877 ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
7878     Thread* self, size_t length) {
7879   using PointerArrayType = std::conditional_t<
7880       kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
7881   ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
7882   return ObjPtr<mirror::PointerArray>::DownCast(array);
7883 }
7884 
7885 template <PointerSize kPointerSize>
AllocateIfTableMethodArrays(Thread * self,Handle<mirror::Class> klass,Handle<mirror::IfTable> iftable)7886 bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
7887     Thread* self,
7888     Handle<mirror::Class> klass,
7889     Handle<mirror::IfTable> iftable) {
7890   DCHECK(!klass->IsInterface());
7891   DCHECK(klass_->HasSuperClass());
7892   const size_t ifcount = iftable->Count();
7893   // We do not need a read barrier here as the length is constant, both from-space and
7894   // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
7895   size_t super_ifcount =
7896       klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
7897   if (ifcount == super_ifcount) {
7898     DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
7899     return true;
7900   }
7901 
7902   if (kIsDebugBuild) {
7903     // The method array references for superclass interfaces have been copied.
7904     // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
7905     ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
7906     for (size_t i = 0; i != super_ifcount; ++i) {
7907       CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
7908       CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
7909     }
7910   }
7911 
7912   for (size_t i = super_ifcount; i < ifcount; ++i) {
7913     size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
7914     if (num_methods > 0) {
7915       ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
7916       if (UNLIKELY(method_array == nullptr)) {
7917         self->AssertPendingOOMException();
7918         return false;
7919       }
7920       iftable->SetMethodArray(i, method_array);
7921     }
7922   }
7923   return true;
7924 }
7925 
7926 template <PointerSize kPointerSize>
AssignVTableIndexes(ObjPtr<mirror::Class> klass,ObjPtr<mirror::Class> super_class,bool is_super_abstract,size_t num_virtual_methods,ObjPtr<mirror::IfTable> iftable)7927 size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
7928     ObjPtr<mirror::Class> klass,
7929     ObjPtr<mirror::Class> super_class,
7930     bool is_super_abstract,
7931     size_t num_virtual_methods,
7932     ObjPtr<mirror::IfTable> iftable) {
7933   DCHECK(!klass->IsInterface());
7934   DCHECK(klass->HasSuperClass());
7935   DCHECK(klass->GetSuperClass() == super_class);
7936 
7937   // There should be no thread suspension unless we want to throw an exception.
7938   // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
7939   std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
7940 
7941   // Prepare a hash table with virtual methods from the superclass.
7942   // For the unlikely cases that there are multiple methods with the same signature
7943   // but different vtable indexes, keep an array with indexes of the previous
7944   // methods with the same signature (walked as singly-linked lists).
7945   uint8_t* raw_super_vtable;
7946   size_t super_vtable_length;
7947   if (is_super_abstract) {
7948     DCHECK(!super_class->ShouldHaveEmbeddedVTable());
7949     ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
7950     DCHECK(super_vtable != nullptr);
7951     raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
7952                        mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
7953     super_vtable_length = super_vtable->GetLength();
7954   } else {
7955     DCHECK(super_class->ShouldHaveEmbeddedVTable());
7956     raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
7957                        mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
7958     super_vtable_length = super_class->GetEmbeddedVTableLength();
7959   }
7960   VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
7961   static constexpr double kMinLoadFactor = 0.3;
7962   static constexpr double kMaxLoadFactor = 0.5;
7963   static constexpr size_t kMaxStackBuferSize = 256;
7964   const size_t super_vtable_buffer_size = super_vtable_length * 3;
7965   const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
7966   const size_t total_buffer_size = super_vtable_buffer_size + declared_virtuals_buffer_size;
7967   uint32_t* super_vtable_buffer_ptr = (total_buffer_size <= kMaxStackBuferSize)
7968       ? reinterpret_cast<uint32_t*>(alloca(total_buffer_size * sizeof(uint32_t)))
7969       : allocator_.AllocArray<uint32_t>(total_buffer_size);
7970   uint32_t* declared_virtuals_buffer_ptr = super_vtable_buffer_ptr + super_vtable_buffer_size;
7971   VTableSignatureSet super_vtable_signatures(
7972       kMinLoadFactor,
7973       kMaxLoadFactor,
7974       VTableSignatureHash(super_vtable_accessor),
7975       VTableSignatureEqual(super_vtable_accessor),
7976       super_vtable_buffer_ptr,
7977       super_vtable_buffer_size,
7978       allocator_.Adapter());
7979   ArrayRef<uint32_t> same_signature_vtable_lists;
7980   // Insert the first `mirror::Object::kVTableLength` indexes with pre-calculated hashes.
7981   DCHECK_GE(super_vtable_length, mirror::Object::kVTableLength);
7982   for (uint32_t i = 0; i != mirror::Object::kVTableLength; ++i) {
7983     size_t hash = class_linker_->object_virtual_method_hashes_[i];
7984     // There are no duplicate signatures in `java.lang.Object`, so use `HashSet<>::PutWithHash()`.
7985     // This avoids equality comparison for the three `java.lang.Object.wait()` overloads.
7986     super_vtable_signatures.PutWithHash(i, hash);
7987   }
7988   // Insert the remaining indexes, check for duplicate signatures.
7989   if (super_vtable_length > mirror::Object::kVTableLength) {
7990     for (size_t i = mirror::Object::kVTableLength; i < super_vtable_length; ++i) {
7991       // Use `super_vtable_accessor` for getting the method for hash calculation.
7992       // Letting `HashSet<>::insert()` use the internal accessor copy in the hash
7993       // function prevents the compiler from optimizing this properly because the
7994       // compiler cannot prove that the accessor copy is immutable.
7995       size_t hash = ComputeMethodHash(super_vtable_accessor.GetVTableEntry(i));
7996       auto [it, inserted] = super_vtable_signatures.InsertWithHash(i, hash);
7997       if (UNLIKELY(!inserted)) {
7998         if (same_signature_vtable_lists.empty()) {
7999           same_signature_vtable_lists = ArrayRef<uint32_t>(
8000               allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
8001           std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
8002           same_signature_vtable_lists_ = same_signature_vtable_lists;
8003         }
8004         DCHECK_LT(*it, i);
8005         same_signature_vtable_lists[i] = *it;
8006         *it = i;
8007       }
8008     }
8009   }
8010 
8011   // For each declared virtual method, look for a superclass virtual method
8012   // to override and assign a new vtable index if no method was overridden.
8013   DeclaredVirtualSignatureSet declared_virtual_signatures(
8014       kMinLoadFactor,
8015       kMaxLoadFactor,
8016       DeclaredVirtualSignatureHash(klass),
8017       DeclaredVirtualSignatureEqual(klass),
8018       declared_virtuals_buffer_ptr,
8019       declared_virtuals_buffer_size,
8020       allocator_.Adapter());
8021   const bool is_proxy_class = klass->IsProxyClass();
8022   size_t vtable_length = super_vtable_length;
8023   for (size_t i = 0; i != num_virtual_methods; ++i) {
8024     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8025     DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8026     ArtMethod* signature_method = UNLIKELY(is_proxy_class)
8027         ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
8028         : virtual_method;
8029     size_t hash = ComputeMethodHash(signature_method);
8030     declared_virtual_signatures.PutWithHash(i, hash);
8031     auto it = super_vtable_signatures.FindWithHash(signature_method, hash);
8032     if (it != super_vtable_signatures.end()) {
8033       size_t super_index = *it;
8034       DCHECK_LT(super_index, super_vtable_length);
8035       ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(super_index);
8036       // Historical note: Before Android 4.1, an inaccessible package-private
8037       // superclass method would have been incorrectly overridden.
8038       bool overrides = klass->CanAccessMember(super_method->GetDeclaringClass(),
8039                                               super_method->GetAccessFlags());
8040       if (overrides && super_method->IsFinal()) {
8041         sants.reset();
8042         ThrowLinkageError(klass, "Method %s overrides final method in class %s",
8043                           virtual_method->PrettyMethod().c_str(),
8044                           super_method->GetDeclaringClassDescriptor());
8045         return 0u;
8046       }
8047       if (UNLIKELY(!same_signature_vtable_lists.empty())) {
8048         // We may override more than one method according to JLS, see b/211854716 .
8049         // We record the highest overridden vtable index here so that we can walk
8050         // the list to find other overridden methods when constructing the vtable.
8051         // However, we walk all the methods to check for final method overriding.
8052         size_t current_index = super_index;
8053         while (same_signature_vtable_lists[current_index] != dex::kDexNoIndex) {
8054           DCHECK_LT(same_signature_vtable_lists[current_index], current_index);
8055           current_index = same_signature_vtable_lists[current_index];
8056           ArtMethod* current_method = super_vtable_accessor.GetVTableEntry(current_index);
8057           if (klass->CanAccessMember(current_method->GetDeclaringClass(),
8058                                      current_method->GetAccessFlags())) {
8059             if (current_method->IsFinal()) {
8060               sants.reset();
8061               ThrowLinkageError(klass, "Method %s overrides final method in class %s",
8062                                 virtual_method->PrettyMethod().c_str(),
8063                                 current_method->GetDeclaringClassDescriptor());
8064               return 0u;
8065             }
8066             if (!overrides) {
8067               overrides = true;
8068               super_index = current_index;
8069               super_method = current_method;
8070             }
8071           }
8072         }
8073       }
8074       if (overrides) {
8075         virtual_method->SetMethodIndex(super_index);
8076         continue;
8077       }
8078     }
8079     // The method does not override any method from superclass, so it needs a new vtable index.
8080     virtual_method->SetMethodIndex(vtable_length);
8081     ++vtable_length;
8082   }
8083 
8084   // Assign vtable indexes for interface methods in new interfaces and store them
8085   // in implementation method arrays. These shall be replaced by actual method
8086   // pointers later. We do not need to do this for superclass interfaces as we can
8087   // get these vtable indexes from implementation methods in superclass iftable.
8088   // Record data for copied methods which shall be referenced by the vtable.
8089   const size_t ifcount = iftable->Count();
8090   ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
8091   const size_t super_ifcount = super_iftable->Count();
8092   for (size_t i = ifcount; i != super_ifcount; ) {
8093     --i;
8094     DCHECK_LT(i, ifcount);
8095     ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8096     ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8097     size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
8098     for (size_t j = 0; j != num_methods; ++j) {
8099       ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8100       size_t hash = ComputeMethodHash(interface_method);
8101       ArtMethod* vtable_method = nullptr;
8102       bool found = false;
8103       auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8104       if (it1 != declared_virtual_signatures.end()) {
8105         vtable_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8106         found = true;
8107       } else {
8108         auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
8109         if (it2 != super_vtable_signatures.end()) {
8110           // If there are multiple vtable methods with the same signature, the one with
8111           // the highest vtable index is not nessarily the one in most-derived class.
8112           // Find the most-derived method. See b/211854716 .
8113           vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
8114           if (UNLIKELY(!same_signature_vtable_lists.empty())) {
8115             size_t current_index = *it2;
8116             while (same_signature_vtable_lists[current_index] != dex::kDexNoIndex) {
8117               DCHECK_LT(same_signature_vtable_lists[current_index], current_index);
8118               current_index = same_signature_vtable_lists[current_index];
8119               ArtMethod* current_method = super_vtable_accessor.GetVTableEntry(current_index);
8120               ObjPtr<mirror::Class> current_class = current_method->GetDeclaringClass();
8121               if (current_class->IsSubClass(vtable_method->GetDeclaringClass())) {
8122                 vtable_method = current_method;
8123               }
8124             }
8125           }
8126           found = true;
8127         }
8128       }
8129       uint32_t vtable_index = vtable_length;
8130       if (found) {
8131         DCHECK(vtable_method != nullptr);
8132         if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
8133           // FIXME: Delay the exception until we actually try to call the method. b/211854716
8134           sants.reset();
8135           ThrowIllegalAccessErrorForImplementingMethod(klass, vtable_method, interface_method);
8136           return 0u;
8137         }
8138         vtable_index = vtable_method->GetMethodIndexDuringLinking();
8139         if (!vtable_method->IsOverridableByDefaultMethod()) {
8140           method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
8141           continue;
8142         }
8143       }
8144 
8145       auto [it, inserted] = copied_method_records_.InsertWithHash(
8146           CopiedMethodRecord(interface_method, vtable_index), hash);
8147       if (found) {
8148         DCHECK_EQ(vtable_index, it->GetMethodIndex());
8149       } else if (inserted) {
8150         DCHECK_EQ(vtable_index, it->GetMethodIndex());
8151         DCHECK_EQ(vtable_index, vtable_length);
8152         ++vtable_length;
8153       } else {
8154         vtable_index = it->GetMethodIndex();
8155       }
8156       method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
8157       if (inserted) {
8158         it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
8159                                                     : CopiedMethodRecord::State::kDefaultSingle);
8160       } else {
8161         it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
8162       }
8163     }
8164   }
8165   // Finalize copied method records and check if we can reuse some methods from superclass vtable.
8166   size_t num_new_copied_methods = copied_method_records_.size();
8167   for (CopiedMethodRecord& record : copied_method_records_) {
8168     uint32_t vtable_index = record.GetMethodIndex();
8169     if (vtable_index < super_vtable_length) {
8170       ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
8171       DCHECK(super_method->IsOverridableByDefaultMethod());
8172       record.FinalizeState(
8173           super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
8174       if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
8175         --num_new_copied_methods;
8176       }
8177     }
8178   }
8179   num_new_copied_methods_ = num_new_copied_methods;
8180 
8181   if (UNLIKELY(!IsUint<16>(vtable_length))) {
8182     sants.reset();
8183     ThrowClassFormatError(klass, "Too many methods defined on class: %zd", vtable_length);
8184     return 0u;
8185   }
8186 
8187   return vtable_length;
8188 }
8189 
8190 template <PointerSize kPointerSize>
FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,size_t num_virtual_methods,ObjPtr<mirror::IfTable> iftable)8191 bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
8192     ObjPtr<mirror::Class> klass,
8193     size_t num_virtual_methods,
8194     ObjPtr<mirror::IfTable> iftable) {
8195   DCHECK(klass->IsInterface());
8196   DCHECK(klass->HasSuperClass());
8197   DCHECK(klass->GetSuperClass()->IsObjectClass());
8198   DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);
8199 
8200   // There should be no thread suspension unless we want to throw an exception.
8201   // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
8202   std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8203 
8204   // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
8205   // from superinterfaces, so we can filter out matching superinterface methods.
8206   static constexpr double kMinLoadFactor = 0.3;
8207   static constexpr double kMaxLoadFactor = 0.5;
8208   static constexpr size_t kMaxStackBuferSize = 256;
8209   const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8210   uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBuferSize)
8211       ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
8212       : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
8213   DeclaredVirtualSignatureSet declared_virtual_signatures(
8214       kMinLoadFactor,
8215       kMaxLoadFactor,
8216       DeclaredVirtualSignatureHash(klass),
8217       DeclaredVirtualSignatureEqual(klass),
8218       declared_virtuals_buffer_ptr,
8219       declared_virtuals_buffer_size,
8220       allocator_.Adapter());
8221   for (size_t i = 0; i != num_virtual_methods; ++i) {
8222     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8223     DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8224     size_t hash = ComputeMethodHash(virtual_method);
8225     declared_virtual_signatures.PutWithHash(i, hash);
8226   }
8227 
8228   // We do not create miranda methods for interface classes, so we do not need to track
8229   // non-default (abstract) interface methods. The downside is that we cannot use the
8230   // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
8231   // we do not fill method arrays for interfaces, the method search actually has to
8232   // compare signatures instead of searching for the implementing method.
8233   const size_t ifcount = iftable->Count();
8234   size_t new_method_index = num_virtual_methods;
8235   for (size_t i = ifcount; i != 0u; ) {
8236     --i;
8237     DCHECK_LT(i, ifcount);
8238     ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8239     if (!iface->HasDefaultMethods()) {
8240       continue;  // No default methods to process.
8241     }
8242     size_t num_methods = iface->NumDeclaredVirtualMethods();
8243     for (size_t j = 0; j != num_methods; ++j) {
8244       ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8245       if (!interface_method->IsDefault()) {
8246         continue;  // Do not process this non-default method.
8247       }
8248       size_t hash = ComputeMethodHash(interface_method);
8249       auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8250       if (it1 != declared_virtual_signatures.end()) {
8251         ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8252         if (!virtual_method->IsAbstract() && !virtual_method->IsPublic()) {
8253           sants.reset();
8254           ThrowIllegalAccessErrorForImplementingMethod(klass, virtual_method, interface_method);
8255           return false;
8256         }
8257         continue;  // This default method is masked by a method declared in this interface.
8258       }
8259 
8260       CopiedMethodRecord new_record(interface_method, new_method_index);
8261       auto it = copied_method_records_.FindWithHash(new_record, hash);
8262       if (it == copied_method_records_.end()) {
8263         // Pretend that there is another default method and try to update the state.
8264         // If the `interface_method` is not masked, the state shall change to
8265         // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
8266         new_record.SetState(CopiedMethodRecord::State::kDefault);
8267         new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8268         if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
8269           // Insert the new record with the state `kDefault`.
8270           new_record.SetState(CopiedMethodRecord::State::kDefault);
8271           copied_method_records_.PutWithHash(new_record, hash);
8272           DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
8273           ++new_method_index;
8274         }
8275       } else {
8276         it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8277       }
8278     }
8279   }
8280 
8281   // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
8282   // We do not copy normal default methods to subinterfaces, instead we find the
8283   // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
8284   size_t num_new_copied_methods = copied_method_records_.size();
8285   for (CopiedMethodRecord& record : copied_method_records_) {
8286     if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
8287       DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
8288       record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
8289       --num_new_copied_methods;
8290     }
8291   }
8292   num_new_copied_methods_ = num_new_copied_methods;
8293 
8294   return true;
8295 }
8296 
8297 
// Links the methods of `klass`, handling three cases:
//   1. `klass` is an interface: assign method indexes, mark default methods and
//      set up the interface lookup table; interfaces get no vtable.
//   2. `klass` has a superclass: build the iftable, assign vtable indexes,
//      allocate and fill the vtable, and update the IMT through `out_imt` and
//      `out_new_conflict`.
//   3. Otherwise `klass` is java.lang.Object: see `LinkJavaLangObjectMethods()`.
// Returns false with a pending exception on failure.
template <PointerSize kPointerSize>
FLATTEN
bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
    Thread* self,
    Handle<mirror::Class> klass,
    Handle<mirror::ObjectArray<mirror::Class>> interfaces,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  const size_t num_virtual_methods = klass->NumVirtualMethods();
  if (klass->IsInterface()) {
    // No vtable.
    if (!IsUint<16>(num_virtual_methods)) {
      // Method indexes are 16-bit, so more virtual methods cannot be represented.
      ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
      return false;
    }
    bool has_defaults = false;
    // Assign each method an IMT index and set the default flag.
    for (size_t i = 0; i < num_virtual_methods; ++i) {
      ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
      m->SetMethodIndex(i);
      if (!m->IsAbstract()) {
        // If the dex file does not support default methods, throw ClassFormatError.
        // This check is necessary to protect from odd cases, such as native default
        // methods, that the dex file verifier permits for old dex file versions. b/157170505
        // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
        // currently running CTS tests for default methods with dex file version 035 which
        // does not support default methods. So, we limit this to native methods. b/157718952
        if (m->IsNative()) {
          DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
          ThrowClassFormatError(klass.Get(),
                                "Dex file does not support default method '%s'",
                                m->PrettyMethod().c_str());
          return false;
        }
        if (!m->IsPublic()) {
          // The verifier should have caught the non-public method for dex version 37.
          // Just warn and skip it since this is from before default-methods so we don't
          // really need to care that it has code.
          LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
                       << "This will be a fatal error in subsequent versions of android. "
                       << "Continuing anyway.";
        }
        m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
        has_defaults = true;
      }
    }
    // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
    // during initialization. This is a performance optimization. We could simply traverse the
    // virtual_methods_ array again during initialization.
    if (has_defaults) {
      klass->SetHasDefaultMethods();
    }
    ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
        self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
    if (UNLIKELY(iftable == nullptr)) {
      self->AssertPendingException();
      return false;
    }
    // Copied-method records are only needed if some superinterface has defaults.
    size_t ifcount = iftable->Count();
    bool have_super_with_defaults = false;
    for (size_t i = 0; i != ifcount; ++i) {
      if (iftable->GetInterface(i)->HasDefaultMethods()) {
        have_super_with_defaults = true;
        break;
      }
    }
    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    if (have_super_with_defaults) {
      if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
        self->AssertPendingException();
        return false;
      }
      if (num_new_copied_methods_ != 0u) {
        // Re-check the number of methods.
        size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
        if (!IsUint<16>(final_num_virtual_methods)) {
          ThrowClassFormatError(
              klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
          return false;
        }
        ReallocMethods(klass.Get());
      }
    }
    klass->SetIfTable(iftable);
    if (kIsDebugBuild) {
      // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
      ClobberOldMethods(old_methods, klass->GetMethodsPtr());
    }
    return true;
  } else if (LIKELY(klass->HasSuperClass())) {
    // We set up the interface lookup table now because we need it to determine if we need
    // to update any vtable entries with new default method implementations.
    StackHandleScope<3> hs(self);
    MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
        ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
        : SetupInterfaceLookupTable(
              self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
    if (UNLIKELY(iftable == nullptr)) {
      self->AssertPendingException();
      return false;
    }

    // Copy the IMT from superclass if present and needed. Update with new methods later.
    Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
    bool is_klass_abstract = klass->IsAbstract();
    bool is_super_abstract = super_class->IsAbstract();
    DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
    DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
    if (!is_klass_abstract && !is_super_abstract) {
      ImTable* super_imt = super_class->GetImt(kPointerSize);
      for (size_t i = 0; i < ImTable::kSize; ++i) {
        out_imt[i] = super_imt->Get(i, kPointerSize);
      }
    }

    // If there are no new virtual methods and no new interfaces, we can simply reuse
    // the vtable from superclass. We may need to make a copy if it's embedded.
    const size_t super_vtable_length = super_class->GetVTableLength();
    if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
      DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
      if (is_super_abstract) {
        DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
        // Abstract superclass has a non-embedded vtable which can be shared directly.
        ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
        CHECK(super_vtable != nullptr) << super_class->PrettyClass();
        klass->SetVTable(super_vtable);
        // No IMT in the super class, we need to reconstruct it from the iftable.
        if (!is_klass_abstract && iftable->Count() != 0) {
          class_linker_->FillIMTFromIfTable(iftable.Get(),
                                            runtime_->GetImtUnimplementedMethod(),
                                            runtime_->GetImtConflictMethod(),
                                            klass.Get(),
                                            /*create_conflict_tables=*/false,
                                            /*ignore_copied_methods=*/false,
                                            out_new_conflict,
                                            out_imt);
        }
      } else {
        // Non-abstract superclass has an embedded vtable; copy it into a new array.
        ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
        if (UNLIKELY(vtable == nullptr)) {
          self->AssertPendingOOMException();
          return false;
        }
        for (size_t i = 0; i < super_vtable_length; i++) {
          vtable->SetElementPtrSize(
              i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
        }
        klass->SetVTable(vtable);
        // The IMT was already copied from superclass if `klass` is not abstract.
      }
      klass->SetIfTable(iftable.Get());
      return true;
    }

    // Allocate method arrays, so that we can link interface methods without thread suspension,
    // otherwise GC could miss visiting newly allocated copied methods.
    // TODO: Do not allocate copied methods during linking, store only records about what
    // we need to allocate and allocate it at the end. Start with superclass iftable and
    // perform copy-on-write when needed to facilitate maximum memory sharing.
    if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
      self->AssertPendingOOMException();
      return false;
    }

    // Assign vtable indexes to declared virtual methods; a return value of 0
    // signals failure (a pending exception was set).
    size_t final_vtable_size = AssignVTableIndexes(
        klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
    if (final_vtable_size == 0u) {
      self->AssertPendingException();
      return false;
    }
    DCHECK(IsUint<16>(final_vtable_size));

    // Allocate the new vtable.
    Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
    if (UNLIKELY(vtable == nullptr)) {
      self->AssertPendingOOMException();
      return false;
    }

    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    if (num_new_copied_methods_ != 0u) {
      ReallocMethods(klass.Get());
    }

    // Store new virtual methods in the new vtable.
    ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
    for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
      uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
      vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
      if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
        // We may override more than one method according to JLS, see b/211854716 .
        // If we do, arbitrarily update the method index to the lowest overridden vtable index.
        while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
          DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
          vtable_index = same_signature_vtable_lists[vtable_index];
          ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
          if (klass->CanAccessMember(current_method->GetDeclaringClass(),
                                     current_method->GetAccessFlags())) {
            DCHECK(!current_method->IsFinal());
            vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
            virtual_method.SetMethodIndex(vtable_index);
          }
        }
      }
    }

    // For non-overridden vtable slots, copy a method from `super_class`.
    for (size_t j = 0; j != super_vtable_length; ++j) {
      if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
        ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
        vtable->SetElementPtrSize(j, super_method, kPointerSize);
      }
    }

    // Update the `iftable` (and IMT) with finalized virtual methods.
    if (!FinalizeIfTable(klass,
                         iftable,
                         vtable,
                         is_klass_abstract,
                         is_super_abstract,
                         out_new_conflict,
                         out_imt)) {
      self->AssertPendingOOMException();
      return false;
    }

    klass->SetVTable(vtable.Get());
    klass->SetIfTable(iftable.Get());
    if (kIsDebugBuild) {
      CheckVTable(self, klass, kPointerSize);
      ClobberOldMethods(old_methods, klass->GetMethodsPtr());
    }
    return true;
  } else {
    // No superclass: `klass` is java.lang.Object.
    return LinkJavaLangObjectMethods(self, klass);
  }
}
8534 
8535 template <PointerSize kPointerSize>
LinkJavaLangObjectMethods(Thread * self,Handle<mirror::Class> klass)8536 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
8537     Thread* self,
8538     Handle<mirror::Class> klass) {
8539   DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
8540   DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
8541   static_assert(IsUint<16>(mirror::Object::kVTableLength));
8542   ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
8543   if (UNLIKELY(vtable == nullptr)) {
8544     self->AssertPendingOOMException();
8545     return false;
8546   }
8547   for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
8548     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8549     vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
8550     virtual_method->SetMethodIndex(i);
8551   }
8552   klass->SetVTable(vtable);
8553   InitializeObjectVirtualMethodHashes(
8554       klass.Get(),
8555       kPointerSize,
8556       ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
8557   // The interface table is already allocated but there are no interface methods to link.
8558   DCHECK(klass->GetIfTable() != nullptr);
8559   DCHECK_EQ(klass->GetIfTableCount(), 0);
8560   return true;
8561 }
8562 
8563 // Populate the class vtable and itable. Compute return type indices.
LinkMethods(Thread * self,Handle<mirror::Class> klass,Handle<mirror::ObjectArray<mirror::Class>> interfaces,bool * out_new_conflict,ArtMethod ** out_imt)8564 bool ClassLinker::LinkMethods(Thread* self,
8565                               Handle<mirror::Class> klass,
8566                               Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8567                               bool* out_new_conflict,
8568                               ArtMethod** out_imt) {
8569   self->AllowThreadSuspension();
8570   // Link virtual methods then interface methods.
8571   Runtime* const runtime = Runtime::Current();
8572   if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
8573     LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
8574     return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
8575   } else {
8576     constexpr PointerSize kOtherPointerSize =
8577         (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
8578     LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
8579     return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
8580   }
8581 }
8582 
8583 class ClassLinker::LinkFieldsHelper {
8584  public:
8585   static bool LinkFields(ClassLinker* class_linker,
8586                          Thread* self,
8587                          Handle<mirror::Class> klass,
8588                          bool is_static,
8589                          size_t* class_size)
8590       REQUIRES_SHARED(Locks::mutator_lock_);
8591 
8592  private:
8593   enum class FieldTypeOrder : uint16_t;
8594   class FieldGaps;
8595 
8596   struct FieldTypeOrderAndIndex {
8597     FieldTypeOrder field_type_order;
8598     uint16_t field_index;
8599   };
8600 
8601   static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);
8602 
8603   template <size_t kSize>
8604   static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
8605       REQUIRES_SHARED(Locks::mutator_lock_);
8606 };
8607 
8608 // We use the following order of field types for assigning offsets.
8609 // Some fields can be shuffled forward to fill gaps, see `ClassLinker::LinkFields()`.
8610 enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
8611   kReference = 0u,
8612   kLong,
8613   kDouble,
8614   kInt,
8615   kFloat,
8616   kChar,
8617   kShort,
8618   kBoolean,
8619   kByte,
8620 
8621   kLast64BitType = kDouble,
8622   kLast32BitType = kFloat,
8623   kLast16BitType = kShort,
8624 };
8625 
8626 ALWAYS_INLINE
8627 ClassLinker::LinkFieldsHelper::FieldTypeOrder
FieldTypeOrderFromFirstDescriptorCharacter(char first_char)8628 ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
8629   switch (first_char) {
8630     case 'J':
8631       return FieldTypeOrder::kLong;
8632     case 'D':
8633       return FieldTypeOrder::kDouble;
8634     case 'I':
8635       return FieldTypeOrder::kInt;
8636     case 'F':
8637       return FieldTypeOrder::kFloat;
8638     case 'C':
8639       return FieldTypeOrder::kChar;
8640     case 'S':
8641       return FieldTypeOrder::kShort;
8642     case 'Z':
8643       return FieldTypeOrder::kBoolean;
8644     case 'B':
8645       return FieldTypeOrder::kByte;
8646     default:
8647       DCHECK(first_char == 'L' || first_char == '[') << first_char;
8648       return FieldTypeOrder::kReference;
8649   }
8650 }
8651 
8652 // Gaps where we can insert fields in object layout.
8653 class ClassLinker::LinkFieldsHelper::FieldGaps {
8654  public:
8655   template <uint32_t kSize>
AlignFieldOffset(MemberOffset field_offset)8656   ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
8657     static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
8658     if (!IsAligned<kSize>(field_offset.Uint32Value())) {
8659       uint32_t gap_start = field_offset.Uint32Value();
8660       field_offset = MemberOffset(RoundUp(gap_start, kSize));
8661       AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
8662     }
8663     return field_offset;
8664   }
8665 
8666   template <uint32_t kSize>
HasGap() const8667   bool HasGap() const {
8668     static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
8669     return (kSize == 1u && gap1_offset_ != kNoOffset) ||
8670            (kSize <= 2u && gap2_offset_ != kNoOffset) ||
8671            gap4_offset_ != kNoOffset;
8672   }
8673 
8674   template <uint32_t kSize>
ReleaseGap()8675   MemberOffset ReleaseGap() {
8676     static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
8677     uint32_t result;
8678     if (kSize == 1u && gap1_offset_ != kNoOffset) {
8679       DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
8680       DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
8681       result = gap1_offset_;
8682       gap1_offset_ = kNoOffset;
8683     } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
8684       DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
8685       result = gap2_offset_;
8686       gap2_offset_ = kNoOffset;
8687       if (kSize < 2u) {
8688         AddGaps<1u>(result + kSize, result + 2u);
8689       }
8690     } else {
8691       DCHECK_NE(gap4_offset_, kNoOffset);
8692       result = gap4_offset_;
8693       gap4_offset_ = kNoOffset;
8694       if (kSize < 4u) {
8695         AddGaps<kSize | 2u>(result + kSize, result + 4u);
8696       }
8697     }
8698     return MemberOffset(result);
8699   }
8700 
8701  private:
8702   template <uint32_t kGapsToCheck>
AddGaps(uint32_t gap_start,uint32_t gap_end)8703   void AddGaps(uint32_t gap_start, uint32_t gap_end) {
8704     if ((kGapsToCheck & 1u) != 0u) {
8705       DCHECK_LT(gap_start, gap_end);
8706       DCHECK_ALIGNED(gap_end, 2u);
8707       if ((gap_start & 1u) != 0u) {
8708         DCHECK_EQ(gap1_offset_, kNoOffset);
8709         gap1_offset_ = gap_start;
8710         gap_start += 1u;
8711         if (kGapsToCheck == 1u || gap_start == gap_end) {
8712           DCHECK_EQ(gap_start, gap_end);
8713           return;
8714         }
8715       }
8716     }
8717 
8718     if ((kGapsToCheck & 2u) != 0u) {
8719       DCHECK_LT(gap_start, gap_end);
8720       DCHECK_ALIGNED(gap_start, 2u);
8721       DCHECK_ALIGNED(gap_end, 4u);
8722       if ((gap_start & 2u) != 0u) {
8723         DCHECK_EQ(gap2_offset_, kNoOffset);
8724         gap2_offset_ = gap_start;
8725         gap_start += 2u;
8726         if (kGapsToCheck <= 3u || gap_start == gap_end) {
8727           DCHECK_EQ(gap_start, gap_end);
8728           return;
8729         }
8730       }
8731     }
8732 
8733     if ((kGapsToCheck & 4u) != 0u) {
8734       DCHECK_LT(gap_start, gap_end);
8735       DCHECK_ALIGNED(gap_start, 4u);
8736       DCHECK_ALIGNED(gap_end, 8u);
8737       DCHECK_EQ(gap_start + 4u, gap_end);
8738       DCHECK_EQ(gap4_offset_, kNoOffset);
8739       gap4_offset_ = gap_start;
8740       return;
8741     }
8742 
8743     DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
8744         << " after checking " << kGapsToCheck;
8745   }
8746 
8747   static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);
8748 
8749   uint32_t gap4_offset_ = kNoOffset;
8750   uint32_t gap2_offset_ = kNoOffset;
8751   uint32_t gap1_offset_ = kNoOffset;
8752 };
8753 
8754 template <size_t kSize>
8755 ALWAYS_INLINE
AssignFieldOffset(ArtField * field,MemberOffset field_offset)8756 MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
8757                                                               MemberOffset field_offset) {
8758   DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
8759   DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
8760   field->SetOffset(field_offset);
8761   return MemberOffset(field_offset.Uint32Value() + kSize);
8762 }
8763 
LinkFields(ClassLinker * class_linker,Thread * self,Handle<mirror::Class> klass,bool is_static,size_t * class_size)8764 bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
8765                                                Thread* self,
8766                                                Handle<mirror::Class> klass,
8767                                                bool is_static,
8768                                                size_t* class_size) {
8769   self->AllowThreadSuspension();
8770   const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
8771   LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
8772       klass->GetIFieldsPtr();
8773 
8774   // Initialize field_offset
8775   MemberOffset field_offset(0);
8776   if (is_static) {
8777     field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
8778         class_linker->GetImagePointerSize());
8779   } else {
8780     ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8781     if (super_class != nullptr) {
8782       CHECK(super_class->IsResolved())
8783           << klass->PrettyClass() << " " << super_class->PrettyClass();
8784       field_offset = MemberOffset(super_class->GetObjectSize());
8785     }
8786   }
8787 
8788   CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();
8789 
8790   // we want a relatively stable order so that adding new fields
8791   // minimizes disruption of C++ version such as Class and Method.
8792   //
8793   // The overall sort order order is:
8794   // 1) All object reference fields, sorted alphabetically.
8795   // 2) All java long (64-bit) integer fields, sorted alphabetically.
8796   // 3) All java double (64-bit) floating point fields, sorted alphabetically.
8797   // 4) All java int (32-bit) integer fields, sorted alphabetically.
8798   // 5) All java float (32-bit) floating point fields, sorted alphabetically.
8799   // 6) All java char (16-bit) integer fields, sorted alphabetically.
8800   // 7) All java short (16-bit) integer fields, sorted alphabetically.
8801   // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
8802   // 9) All java byte (8-bit) integer fields, sorted alphabetically.
8803   //
8804   // (References are first to increase the chance of reference visiting
8805   // being able to take a fast path using a bitmap of references at the
8806   // start of the object, see `Class::reference_instance_offsets_`.)
8807   //
8808   // Once the fields are sorted in this order we will attempt to fill any gaps
8809   // that might be present in the memory layout of the structure.
8810   // Note that we shall not fill gaps between the superclass fields.
8811 
8812   // Collect fields and their "type order index" (see numbered points above).
8813   const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
8814       "Using plain ArtField references");
8815   constexpr size_t kStackBufferEntries = 64;  // Avoid allocations for small number of fields.
8816   FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
8817   std::vector<FieldTypeOrderAndIndex> heap_buffer;
8818   ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
8819   if (num_fields <= kStackBufferEntries) {
8820     sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
8821   } else {
8822     heap_buffer.resize(num_fields);
8823     sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
8824   }
8825   size_t num_reference_fields = 0;
8826   size_t primitive_fields_start = num_fields;
8827   DCHECK_LE(num_fields, 1u << 16);
8828   for (size_t i = 0; i != num_fields; ++i) {
8829     ArtField* field = &fields->At(i);
8830     const char* descriptor = field->GetTypeDescriptor();
8831     FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
8832     uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
8833     // Insert references to the start, other fields to the end.
8834     DCHECK_LT(num_reference_fields, primitive_fields_start);
8835     if (field_type_order == FieldTypeOrder::kReference) {
8836       sorted_fields[num_reference_fields] = { field_type_order, field_index };
8837       ++num_reference_fields;
8838     } else {
8839       --primitive_fields_start;
8840       sorted_fields[primitive_fields_start] = { field_type_order, field_index };
8841     }
8842   }
8843   DCHECK_EQ(num_reference_fields, primitive_fields_start);
8844 
8845   // Reference fields are already sorted by field index (and dex field index).
8846   DCHECK(std::is_sorted(
8847       sorted_fields.begin(),
8848       sorted_fields.begin() + num_reference_fields,
8849       [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8850         ArtField* lhs_field = &fields->At(lhs.field_index);
8851         ArtField* rhs_field = &fields->At(rhs.field_index);
8852         CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8853         CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8854         CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
8855                  lhs.field_index < rhs.field_index);
8856         return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
8857       }));
8858   // Primitive fields were stored in reverse order of their field index (and dex field index).
8859   DCHECK(std::is_sorted(
8860       sorted_fields.begin() + primitive_fields_start,
8861       sorted_fields.end(),
8862       [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8863         ArtField* lhs_field = &fields->At(lhs.field_index);
8864         ArtField* rhs_field = &fields->At(rhs.field_index);
8865         CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8866         CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8867         CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
8868                  lhs.field_index > rhs.field_index);
8869         return lhs.field_index > rhs.field_index;
8870       }));
8871   // Sort the primitive fields by the field type order, then field index.
8872   std::sort(sorted_fields.begin() + primitive_fields_start,
8873             sorted_fields.end(),
8874             [](const auto& lhs, const auto& rhs) {
8875               if (lhs.field_type_order != rhs.field_type_order) {
8876                 return lhs.field_type_order < rhs.field_type_order;
8877               } else {
8878                 return lhs.field_index < rhs.field_index;
8879               }
8880             });
8881   // Primitive fields are now sorted by field size (descending), then type, then field index.
8882   DCHECK(std::is_sorted(
8883       sorted_fields.begin() + primitive_fields_start,
8884       sorted_fields.end(),
8885       [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8886         ArtField* lhs_field = &fields->At(lhs.field_index);
8887         ArtField* rhs_field = &fields->At(rhs.field_index);
8888         Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
8889         CHECK_NE(lhs_type, Primitive::kPrimNot);
8890         Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
8891         CHECK_NE(rhs_type, Primitive::kPrimNot);
8892         if (lhs_type != rhs_type) {
8893           size_t lhs_size = Primitive::ComponentSize(lhs_type);
8894           size_t rhs_size = Primitive::ComponentSize(rhs_type);
8895           return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
8896         } else {
8897           return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
8898         }
8899       }));
8900 
8901   // Process reference fields.
8902   FieldGaps field_gaps;
8903   size_t index = 0u;
8904   if (num_reference_fields != 0u) {
8905     constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
8906     field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
8907     for (; index != num_reference_fields; ++index) {
8908       ArtField* field = &fields->At(sorted_fields[index].field_index);
8909       field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
8910     }
8911   }
8912   // Process 64-bit fields.
8913   if (index != num_fields &&
8914       sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
8915     field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
8916     while (index != num_fields &&
8917            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
8918       ArtField* field = &fields->At(sorted_fields[index].field_index);
8919       field_offset = AssignFieldOffset<8u>(field, field_offset);
8920       ++index;
8921     }
8922   }
8923   // Process 32-bit fields.
8924   if (index != num_fields &&
8925       sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
8926     field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
8927     if (field_gaps.HasGap<4u>()) {
8928       ArtField* field = &fields->At(sorted_fields[index].field_index);
8929       AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>());  // Ignore return value.
8930       ++index;
8931       DCHECK(!field_gaps.HasGap<4u>());  // There can be only one gap for a 32-bit field.
8932     }
8933     while (index != num_fields &&
8934            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
8935       ArtField* field = &fields->At(sorted_fields[index].field_index);
8936       field_offset = AssignFieldOffset<4u>(field, field_offset);
8937       ++index;
8938     }
8939   }
8940   // Process 16-bit fields.
8941   if (index != num_fields &&
8942       sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
8943     field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
8944     while (index != num_fields &&
8945            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
8946            field_gaps.HasGap<2u>()) {
8947       ArtField* field = &fields->At(sorted_fields[index].field_index);
8948       AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>());  // Ignore return value.
8949       ++index;
8950     }
8951     while (index != num_fields &&
8952            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
8953       ArtField* field = &fields->At(sorted_fields[index].field_index);
8954       field_offset = AssignFieldOffset<2u>(field, field_offset);
8955       ++index;
8956     }
8957   }
8958   // Process 8-bit fields.
8959   for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
8960     ArtField* field = &fields->At(sorted_fields[index].field_index);
8961     AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>());  // Ignore return value.
8962   }
8963   for (; index != num_fields; ++index) {
8964     ArtField* field = &fields->At(sorted_fields[index].field_index);
8965     field_offset = AssignFieldOffset<1u>(field, field_offset);
8966   }
8967 
8968   self->EndAssertNoThreadSuspension(old_no_suspend_cause);
8969 
8970   // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
8971   DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
8972   if (!is_static &&
8973       UNLIKELY(!class_linker->init_done_) &&
8974       klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
8975     // We know there are no non-reference fields in the Reference classes, and we know
8976     // that 'referent' is alphabetically last, so this is easy...
8977     CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
8978     CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
8979         << klass->PrettyClass();
8980     --num_reference_fields;
8981   }
8982 
8983   size_t size = field_offset.Uint32Value();
8984   // Update klass
8985   if (is_static) {
8986     klass->SetNumReferenceStaticFields(num_reference_fields);
8987     *class_size = size;
8988   } else {
8989     klass->SetNumReferenceInstanceFields(num_reference_fields);
8990     ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8991     if (num_reference_fields == 0 || super_class == nullptr) {
8992       // object has one reference field, klass, but we ignore it since we always visit the class.
8993       // super_class is null iff the class is java.lang.Object.
8994       if (super_class == nullptr ||
8995           (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
8996         klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
8997       }
8998     }
8999     if (kIsDebugBuild) {
9000       DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
9001       size_t total_reference_instance_fields = 0;
9002       ObjPtr<mirror::Class> cur_super = klass.Get();
9003       while (cur_super != nullptr) {
9004         total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
9005         cur_super = cur_super->GetSuperClass();
9006       }
9007       if (super_class == nullptr) {
9008         CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
9009       } else {
9010         // Check that there is at least num_reference_fields other than Object.class.
9011         CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
9012             << klass->PrettyClass();
9013       }
9014     }
9015     if (!klass->IsVariableSize()) {
9016       std::string temp;
9017       DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
9018       size_t previous_size = klass->GetObjectSize();
9019       if (previous_size != 0) {
9020         // Make sure that we didn't originally have an incorrect size.
9021         CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
9022       }
9023       klass->SetObjectSize(size);
9024     }
9025   }
9026 
9027   if (kIsDebugBuild) {
9028     // Make sure that the fields array is ordered by name but all reference
9029     // offsets are at the beginning as far as alignment allows.
9030     MemberOffset start_ref_offset = is_static
9031         ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
9032         : klass->GetFirstReferenceInstanceFieldOffset();
9033     MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
9034                                 num_reference_fields *
9035                                     sizeof(mirror::HeapReference<mirror::Object>));
9036     MemberOffset current_ref_offset = start_ref_offset;
9037     for (size_t i = 0; i < num_fields; i++) {
9038       ArtField* field = &fields->At(i);
9039       VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
9040           << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
9041           << " offset=" << field->GetOffsetDuringLinking();
9042       if (i != 0) {
9043         ArtField* const prev_field = &fields->At(i - 1);
9044         // NOTE: The field names can be the same. This is not possible in the Java language
9045         // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
9046         DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
9047       }
9048       Primitive::Type type = field->GetTypeAsPrimitiveType();
9049       bool is_primitive = type != Primitive::kPrimNot;
9050       if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
9051           strcmp("referent", field->GetName()) == 0) {
9052         is_primitive = true;  // We lied above, so we have to expect a lie here.
9053       }
9054       MemberOffset offset = field->GetOffsetDuringLinking();
9055       if (is_primitive) {
9056         if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
9057           // Shuffled before references.
9058           size_t type_size = Primitive::ComponentSize(type);
9059           CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
9060           CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
9061           CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
9062           CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
9063         }
9064       } else {
9065         CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
9066         current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
9067                                           sizeof(mirror::HeapReference<mirror::Object>));
9068       }
9069     }
9070     CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
9071   }
9072   return true;
9073 }
9074 
LinkInstanceFields(Thread * self,Handle<mirror::Class> klass)9075 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9076   CHECK(klass != nullptr);
9077   return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9078 }
9079 
LinkStaticFields(Thread * self,Handle<mirror::Class> klass,size_t * class_size)9080 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9081   CHECK(klass != nullptr);
9082   return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9083 }
9084 
9085 //  Set the bitmap of reference instance field offsets.
CreateReferenceInstanceOffsets(Handle<mirror::Class> klass)9086 void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
9087   uint32_t reference_offsets = 0;
9088   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9089   // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
9090   if (super_class != nullptr) {
9091     reference_offsets = super_class->GetReferenceInstanceOffsets();
9092     // Compute reference offsets unless our superclass overflowed.
9093     if (reference_offsets != mirror::Class::kClassWalkSuper) {
9094       size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
9095       if (num_reference_fields != 0u) {
9096         // All of the fields that contain object references are guaranteed be grouped in memory
9097         // starting at an appropriately aligned address after super class object data.
9098         uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
9099                                         sizeof(mirror::HeapReference<mirror::Object>));
9100         uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
9101             sizeof(mirror::HeapReference<mirror::Object>);
9102         if (start_bit + num_reference_fields > 32) {
9103           reference_offsets = mirror::Class::kClassWalkSuper;
9104         } else {
9105           reference_offsets |= (0xffffffffu << start_bit) &
9106                                (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
9107         }
9108       }
9109     }
9110   }
9111   klass->SetReferenceInstanceOffsets(reference_offsets);
9112 }
9113 
DoResolveString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)9114 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9115                                                     ObjPtr<mirror::DexCache> dex_cache) {
9116   StackHandleScope<1> hs(Thread::Current());
9117   Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
9118   return DoResolveString(string_idx, h_dex_cache);
9119 }
9120 
DoResolveString(dex::StringIndex string_idx,Handle<mirror::DexCache> dex_cache)9121 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9122                                                     Handle<mirror::DexCache> dex_cache) {
9123   const DexFile& dex_file = *dex_cache->GetDexFile();
9124   uint32_t utf16_length;
9125   const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
9126   ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
9127   if (string != nullptr) {
9128     dex_cache->SetResolvedString(string_idx, string);
9129   }
9130   return string;
9131 }
9132 
DoLookupString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)9133 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
9134                                                    ObjPtr<mirror::DexCache> dex_cache) {
9135   DCHECK(dex_cache != nullptr);
9136   const DexFile& dex_file = *dex_cache->GetDexFile();
9137   uint32_t utf16_length;
9138   const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
9139   ObjPtr<mirror::String> string =
9140       intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
9141   if (string != nullptr) {
9142     dex_cache->SetResolvedString(string_idx, string);
9143   }
9144   return string;
9145 }
9146 
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::Class> referrer)9147 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9148                                                         ObjPtr<mirror::Class> referrer) {
9149   return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
9150 }
9151 
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)9152 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9153                                                         ObjPtr<mirror::DexCache> dex_cache,
9154                                                         ObjPtr<mirror::ClassLoader> class_loader) {
9155   DCHECK(dex_cache->GetClassLoader() == class_loader);
9156   const DexFile& dex_file = *dex_cache->GetDexFile();
9157   const char* descriptor = dex_file.StringByTypeIdx(type_idx);
9158   ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
9159   if (type != nullptr) {
9160     DCHECK(type->IsResolved());
9161     dex_cache->SetResolvedType(type_idx, type);
9162   }
9163   return type;
9164 }
9165 
LookupResolvedType(const char * descriptor,ObjPtr<mirror::ClassLoader> class_loader)9166 ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
9167                                                       ObjPtr<mirror::ClassLoader> class_loader) {
9168   DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
9169   ObjPtr<mirror::Class> type = nullptr;
9170   if (descriptor[1] == '\0') {
9171     // only the descriptors of primitive types should be 1 character long, also avoid class lookup
9172     // for primitive classes that aren't backed by dex files.
9173     type = LookupPrimitiveClass(descriptor[0]);
9174   } else {
9175     Thread* const self = Thread::Current();
9176     DCHECK(self != nullptr);
9177     const size_t hash = ComputeModifiedUtf8Hash(descriptor);
9178     // Find the class in the loaded classes table.
9179     type = LookupClass(self, descriptor, hash, class_loader);
9180   }
9181   return (type != nullptr && type->IsResolved()) ? type : nullptr;
9182 }
9183 
9184 template <typename RefType>
DoResolveType(dex::TypeIndex type_idx,RefType referrer)9185 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
9186   StackHandleScope<2> hs(Thread::Current());
9187   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9188   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9189   return DoResolveType(type_idx, dex_cache, class_loader);
9190 }
9191 
9192 // Instantiate the above.
9193 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9194                                                           ArtField* referrer);
9195 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9196                                                           ArtMethod* referrer);
9197 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9198                                                           ObjPtr<mirror::Class> referrer);
9199 
// Resolves the type with index `type_idx` using the given dex cache / class
// loader pair. On success the result is recorded in the dex cache and
// returned. On failure, a pending ClassNotFoundException is converted to a
// NoClassDefFoundError with the original exception installed as its cause;
// any other pending exception is left as-is. Returns null on failure with an
// exception pending.
ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                 Handle<mirror::DexCache> dex_cache,
                                                 Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  Thread* self = Thread::Current();
  const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
  ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
  if (resolved != nullptr) {
    // TODO: we used to throw here if resolved's class loader was not the
    //       boot class loader. This was to permit different classes with the
    //       same name to be loaded simultaneously by different loaders
    dex_cache->SetResolvedType(type_idx, resolved);
  } else {
    CHECK(self->IsExceptionPending())
        << "Expected pending exception for failed resolution of: " << descriptor;
    // Convert a ClassNotFoundException to a NoClassDefFoundError.
    StackHandleScope<1> hs(self);
    Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
    if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
      DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
      self->ClearException();
      ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
      self->GetException()->SetCause(cause.Get());
    }
  }
  // The message stream is only evaluated when the check fails, i.e. when
  // `resolved` is non-null but not resolved, so the dereference is safe.
  DCHECK((resolved == nullptr) || resolved->IsResolved())
      << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
  return resolved;
}
9229 
// Finds the method with index `method_idx` on `klass` (interface search for
// interface classes, class search otherwise) and applies hidden-api checks.
// If the directly found method is denied by hidden-api, an accessible
// interface method with the same signature is accepted as a substitute
// justification; otherwise the method is treated as not found (with a
// kLinking warning logged). On success the result is cached in `dex_cache`.
// Returns null if the method is not found or is hidden.
ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           uint32_t method_idx) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  // Search for the method using dex_cache and method_idx. The Class::Find*Method()
  // functions can optimize the search if the dex_cache is the same as the DexCache
  // of the class, with fall-back to name and signature search otherwise.
  ArtMethod* resolved = nullptr;
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
  }
  DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
  if (resolved != nullptr &&
      // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
      // access, as we'll be looking if the method can be accessed through an
      // interface.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kNone)) {
    // The resolved method that we have found cannot be accessed due to
    // hiddenapi (typically it is declared up the hierarchy and is not an SDK
    // method). Try to find an interface method from the implemented interfaces which is
    // part of the SDK.
    ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
    if (itf_method == nullptr) {
      // No interface method. Call ShouldDenyAccessToMember again but this time
      // with AccessMethod::kLinking to ensure that an appropriate warning is
      // logged.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking);
      resolved = nullptr;
    } else {
      // We found an interface method that is accessible, continue with the resolved method.
    }
  }
  if (resolved != nullptr) {
    // In case of jmvti, the dex file gets verified before being registered, so first
    // check if it's registered before checking class tables.
    const DexFile& dex_file = *dex_cache->GetDexFile();
    DCHECK_IMPLIES(
        IsDexFileRegistered(Thread::Current(), dex_file),
        FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
        << "DexFile referrer: " << dex_file.GetLocation()
        << " ClassLoader: " << DescribeLoaders(class_loader, "");
    // Be a good citizen and update the dex cache to speed subsequent calls.
    dex_cache->SetResolvedMethod(method_idx, resolved);
    // Disable the following invariant check as the verifier breaks it. b/73760543
    // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
    // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
    //    << "Method: " << resolved->PrettyMethod() << ", "
    //    << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
    //    << "DexFile referrer: " << dex_file.GetLocation();
  }
  return resolved;
}
9289 
9290 // Returns true if `method` is either null or hidden.
9291 // Does not print any warnings if it is hidden.
CheckNoSuchMethod(ArtMethod * method,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)9292 static bool CheckNoSuchMethod(ArtMethod* method,
9293                               ObjPtr<mirror::DexCache> dex_cache,
9294                               ObjPtr<mirror::ClassLoader> class_loader)
9295       REQUIRES_SHARED(Locks::mutator_lock_) {
9296   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
9297   return method == nullptr ||
9298          hiddenapi::ShouldDenyAccessToMember(method,
9299                                              hiddenapi::AccessContext(class_loader, dex_cache),
9300                                              hiddenapi::AccessMethod::kNone);  // no warnings
9301 }
9302 
FindIncompatibleMethod(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t method_idx)9303 ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
9304                                                ObjPtr<mirror::DexCache> dex_cache,
9305                                                ObjPtr<mirror::ClassLoader> class_loader,
9306                                                uint32_t method_idx) {
9307   DCHECK(dex_cache->GetClassLoader() == class_loader);
9308   if (klass->IsInterface()) {
9309     ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9310     return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
9311   } else {
9312     // If there was an interface method with the same signature, we would have
9313     // found it in the "copied" methods. Only DCHECK that the interface method
9314     // really does not exist.
9315     if (kIsDebugBuild) {
9316       ArtMethod* method =
9317           klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
9318       CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
9319             (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
9320     }
9321     return nullptr;
9322   }
9323 }
9324 
// Resolves a method when the invoke type of the call site is not known:
// first consults the dex cache, then resolves the declaring class and
// searches it (interface search for interface classes, class search
// otherwise). Applies a hidden-api check that logs a warning on denial
// (kLinking). Returns null if the declaring class cannot be resolved
// (exception pending) or if the method is not found or hidden.
ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
                                                       Handle<mirror::DexCache> dex_cache,
                                                       Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
  Thread::PoisonObjectPointersIfDebug();
  if (resolved != nullptr) {
    // Cache hit: the cached entry must be a real method with a declaring class.
    DCHECK(!resolved->IsRuntimeMethod());
    DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
    return resolved;
  }
  // Fail, get the declaring class.
  const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
  ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
  if (klass == nullptr) {
    Thread::Current()->AssertPendingException();
    return nullptr;
  }
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
  }
  // A hidden method behaves as if it does not exist; kLinking logs the warning.
  if (resolved != nullptr &&
      hiddenapi::ShouldDenyAccessToMember(
          resolved,
          hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
          hiddenapi::AccessMethod::kLinking)) {
    resolved = nullptr;
  }
  return resolved;
}
9357 
LookupResolvedField(uint32_t field_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,bool is_static)9358 ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
9359                                            ObjPtr<mirror::DexCache> dex_cache,
9360                                            ObjPtr<mirror::ClassLoader> class_loader,
9361                                            bool is_static) {
9362   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
9363   const DexFile& dex_file = *dex_cache->GetDexFile();
9364   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9365   ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
9366   if (klass == nullptr) {
9367     klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
9368   }
9369   if (klass == nullptr) {
9370     // The class has not been resolved yet, so the field is also unresolved.
9371     return nullptr;
9372   }
9373   DCHECK(klass->IsResolved());
9374 
9375   return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
9376 }
9377 
// Resolves a field without the caller specifying whether it is static or an
// instance field (JLS-style lookup). Consults the dex cache first, then
// resolves the declaring class and searches it. Throws NoSuchFieldError if
// the field cannot be found; returns null with an exception pending on
// failure.
ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
                                       Handle<mirror::DexCache> dex_cache,
                                       Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  ArtField* resolved = dex_cache->GetResolvedField(field_idx);
  Thread::PoisonObjectPointersIfDebug();
  if (resolved != nullptr) {
    return resolved;
  }
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
  ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
  if (klass == nullptr) {
    // Class resolution failed; propagate the pending exception.
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
  if (resolved == nullptr) {
    const char* name = dex_file.GetFieldName(field_id);
    const char* type = dex_file.GetFieldTypeDescriptor(field_id);
    ThrowNoSuchFieldError("", klass, type, name);
  }
  return resolved;
}
9404 
FindResolvedField(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t field_idx,bool is_static)9405 ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9406                                          ObjPtr<mirror::DexCache> dex_cache,
9407                                          ObjPtr<mirror::ClassLoader> class_loader,
9408                                          uint32_t field_idx,
9409                                          bool is_static) {
9410   DCHECK(dex_cache->GetClassLoader() == class_loader);
9411   ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
9412                                  : klass->FindInstanceField(dex_cache, field_idx);
9413   if (resolved != nullptr &&
9414       hiddenapi::ShouldDenyAccessToMember(resolved,
9415                                           hiddenapi::AccessContext(class_loader, dex_cache),
9416                                           hiddenapi::AccessMethod::kLinking)) {
9417     resolved = nullptr;
9418   }
9419 
9420   if (resolved != nullptr) {
9421     dex_cache->SetResolvedField(field_idx, resolved);
9422   }
9423 
9424   return resolved;
9425 }
9426 
FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t field_idx)9427 ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
9428                                             ObjPtr<mirror::DexCache> dex_cache,
9429                                             ObjPtr<mirror::ClassLoader> class_loader,
9430                                             uint32_t field_idx) {
9431   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
9432   ArtField* resolved = klass->FindField(dex_cache, field_idx);
9433 
9434   if (resolved != nullptr &&
9435       hiddenapi::ShouldDenyAccessToMember(resolved,
9436                                           hiddenapi::AccessContext(class_loader, dex_cache),
9437                                           hiddenapi::AccessMethod::kLinking)) {
9438     resolved = nullptr;
9439   }
9440 
9441   if (resolved != nullptr) {
9442     dex_cache->SetResolvedField(field_idx, resolved);
9443   }
9444 
9445   return resolved;
9446 }
9447 
// Resolves a java.lang.invoke.MethodType for the given proto index: resolves
// the return type and every parameter type of the proto, builds the
// MethodType object, and caches it in the dex cache. Returns null with an
// exception pending if any type resolution or allocation fails.
ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
    Thread* self,
    dex::ProtoIndex proto_idx,
    Handle<mirror::DexCache> dex_cache,
    Handle<mirror::ClassLoader> class_loader) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());

  // Fast path: already resolved and cached.
  ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
  if (resolved != nullptr) {
    return resolved;
  }

  StackHandleScope<4> hs(self);

  // First resolve the return type.
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
  Handle<mirror::Class> return_type(hs.NewHandle(
      ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
  if (return_type == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Then resolve the argument types.
  //
  // TODO: Is there a better way to figure out the number of method arguments
  // other than by looking at the shorty ?
  const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;

  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
  if (method_params == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Resolve each parameter type; a MutableHandle is reused across iterations
  // to keep the current class alive across the allocating Set() call.
  DexFileParameterIterator it(dex_file, proto_id);
  int32_t i = 0;
  MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
  for (; it.HasNext(); it.Next()) {
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
    if (param_class == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }

    method_params->Set(i++, param_class.Get());
  }

  DCHECK(!it.HasNext());

  // Create the MethodType and cache it on success.
  Handle<mirror::MethodType> type = hs.NewHandle(
      mirror::MethodType::Create(self, return_type, method_params));
  if (type != nullptr) {
    dex_cache->SetResolvedMethodType(proto_idx, type.Get());
  }

  return type.Get();
}
9512 
ResolveMethodType(Thread * self,dex::ProtoIndex proto_idx,ArtMethod * referrer)9513 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
9514                                                           dex::ProtoIndex proto_idx,
9515                                                           ArtMethod* referrer) {
9516   StackHandleScope<2> hs(self);
9517   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9518   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9519   return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
9520 }
9521 
ResolveMethodHandleForField(Thread * self,const dex::MethodHandleItem & method_handle,ArtMethod * referrer)9522 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
9523     Thread* self,
9524     const dex::MethodHandleItem& method_handle,
9525     ArtMethod* referrer) {
9526   DexFile::MethodHandleType handle_type =
9527       static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9528   mirror::MethodHandle::Kind kind;
9529   bool is_put;
9530   bool is_static;
9531   int32_t num_params;
9532   switch (handle_type) {
9533     case DexFile::MethodHandleType::kStaticPut: {
9534       kind = mirror::MethodHandle::Kind::kStaticPut;
9535       is_put = true;
9536       is_static = true;
9537       num_params = 1;
9538       break;
9539     }
9540     case DexFile::MethodHandleType::kStaticGet: {
9541       kind = mirror::MethodHandle::Kind::kStaticGet;
9542       is_put = false;
9543       is_static = true;
9544       num_params = 0;
9545       break;
9546     }
9547     case DexFile::MethodHandleType::kInstancePut: {
9548       kind = mirror::MethodHandle::Kind::kInstancePut;
9549       is_put = true;
9550       is_static = false;
9551       num_params = 2;
9552       break;
9553     }
9554     case DexFile::MethodHandleType::kInstanceGet: {
9555       kind = mirror::MethodHandle::Kind::kInstanceGet;
9556       is_put = false;
9557       is_static = false;
9558       num_params = 1;
9559       break;
9560     }
9561     case DexFile::MethodHandleType::kInvokeStatic:
9562     case DexFile::MethodHandleType::kInvokeInstance:
9563     case DexFile::MethodHandleType::kInvokeConstructor:
9564     case DexFile::MethodHandleType::kInvokeDirect:
9565     case DexFile::MethodHandleType::kInvokeInterface:
9566       UNREACHABLE();
9567   }
9568 
9569   ArtField* target_field =
9570       ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
9571   if (LIKELY(target_field != nullptr)) {
9572     ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
9573     ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9574     if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
9575       ThrowIllegalAccessErrorField(referring_class, target_field);
9576       return nullptr;
9577     }
9578     if (UNLIKELY(is_put && target_field->IsFinal())) {
9579       ThrowIllegalAccessErrorField(referring_class, target_field);
9580       return nullptr;
9581     }
9582   } else {
9583     DCHECK(Thread::Current()->IsExceptionPending());
9584     return nullptr;
9585   }
9586 
9587   StackHandleScope<4> hs(self);
9588   ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
9589   Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9590       mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9591   if (UNLIKELY(method_params == nullptr)) {
9592     DCHECK(self->IsExceptionPending());
9593     return nullptr;
9594   }
9595 
9596   Handle<mirror::Class> constructor_class;
9597   Handle<mirror::Class> return_type;
9598   switch (handle_type) {
9599     case DexFile::MethodHandleType::kStaticPut: {
9600       method_params->Set(0, target_field->ResolveType());
9601       return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
9602       break;
9603     }
9604     case DexFile::MethodHandleType::kStaticGet: {
9605       return_type = hs.NewHandle(target_field->ResolveType());
9606       break;
9607     }
9608     case DexFile::MethodHandleType::kInstancePut: {
9609       method_params->Set(0, target_field->GetDeclaringClass());
9610       method_params->Set(1, target_field->ResolveType());
9611       return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
9612       break;
9613     }
9614     case DexFile::MethodHandleType::kInstanceGet: {
9615       method_params->Set(0, target_field->GetDeclaringClass());
9616       return_type = hs.NewHandle(target_field->ResolveType());
9617       break;
9618     }
9619     case DexFile::MethodHandleType::kInvokeStatic:
9620     case DexFile::MethodHandleType::kInvokeInstance:
9621     case DexFile::MethodHandleType::kInvokeConstructor:
9622     case DexFile::MethodHandleType::kInvokeDirect:
9623     case DexFile::MethodHandleType::kInvokeInterface:
9624       UNREACHABLE();
9625   }
9626 
9627   for (int32_t i = 0; i < num_params; ++i) {
9628     if (UNLIKELY(method_params->Get(i) == nullptr)) {
9629       DCHECK(self->IsExceptionPending());
9630       return nullptr;
9631     }
9632   }
9633 
9634   if (UNLIKELY(return_type.IsNull())) {
9635     DCHECK(self->IsExceptionPending());
9636     return nullptr;
9637   }
9638 
9639   Handle<mirror::MethodType>
9640       method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9641   if (UNLIKELY(method_type.IsNull())) {
9642     DCHECK(self->IsExceptionPending());
9643     return nullptr;
9644   }
9645 
9646   uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9647   return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9648 }
9649 
// Resolves a constant method handle that refers to a method invoker
// (invoke-static/instance/constructor/direct/interface). Resolves the target
// method according to the handle kind, checks member access against the
// referrer, builds the invoker's MethodType from the method proto (plus a
// receiver for instance kinds), and creates the MethodHandle. Constructors
// are special-cased through MethodHandles.Lookup.findConstructor. Returns
// null with an exception pending on failure.
ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  uint32_t receiver_count = 0;
  ArtMethod* target_method = nullptr;
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      // Field-kind handles are resolved by ResolveMethodHandleForField().
      UNREACHABLE();
    case DexFile::MethodHandleType::kInvokeStatic: {
      kind = mirror::MethodHandle::Kind::kInvokeStatic;
      receiver_count = 0;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kStatic);
      break;
    }
    case DexFile::MethodHandleType::kInvokeInstance: {
      kind = mirror::MethodHandle::Kind::kInvokeVirtual;
      receiver_count = 1;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kVirtual);
      break;
    }
    case DexFile::MethodHandleType::kInvokeConstructor: {
      // Constructors are currently implemented as a transform. They
      // are special cased later in this method.
      kind = mirror::MethodHandle::Kind::kInvokeTransform;
      receiver_count = 0;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kDirect);
      break;
    }
    case DexFile::MethodHandleType::kInvokeDirect: {
      kind = mirror::MethodHandle::Kind::kInvokeDirect;
      receiver_count = 1;
      StackHandleScope<2> hs(self);
      // A constant method handle with type kInvokeDirect can refer to
      // a method that is private or to a method in a super class. To
      // disambiguate the two options, we resolve the method ignoring
      // the invocation type to determine if the method is private. We
      // then resolve again specifying the intended invocation type to
      // force the appropriate checks.
      target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
                                                     hs.NewHandle(referrer->GetDexCache()),
                                                     hs.NewHandle(referrer->GetClassLoader()));
      if (UNLIKELY(target_method == nullptr)) {
        break;
      }

      if (target_method->IsPrivate()) {
        kind = mirror::MethodHandle::Kind::kInvokeDirect;
        target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                              method_handle.field_or_method_idx_,
                                                              referrer,
                                                              InvokeType::kDirect);
      } else {
        kind = mirror::MethodHandle::Kind::kInvokeSuper;
        target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                              method_handle.field_or_method_idx_,
                                                              referrer,
                                                              InvokeType::kSuper);
        if (UNLIKELY(target_method == nullptr)) {
          break;
        }
        // Find the method specified in the parent in referring class
        // so invoke-super invokes the method in the parent of the
        // referrer.
        target_method =
            referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
                                                                       kRuntimePointerSize);
      }
      break;
    }
    case DexFile::MethodHandleType::kInvokeInterface: {
      kind = mirror::MethodHandle::Kind::kInvokeInterface;
      receiver_count = 1;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kInterface);
      break;
    }
  }

  if (UNLIKELY(target_method == nullptr)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  // Check that the referrer may access the resolved method.
  ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  uint32_t access_flags = target_method->GetAccessFlags();
  if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
    ThrowIllegalAccessErrorMethod(referring_class, target_method);
    return nullptr;
  }

  // Calculate the number of parameters from the method shorty. We add the
  // receiver count (0 or 1) and deduct one for the return value.
  uint32_t shorty_length;
  target_method->GetShorty(&shorty_length);
  int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);

  StackHandleScope<5> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (method_params.Get() == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  const DexFile* dex_file = referrer->GetDexFile();
  const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
  int32_t index = 0;
  if (receiver_count != 0) {
    // Insert receiver. Use the class identified in the method handle rather than the declaring
    // class of the resolved method which may be super class or default interface method
    // (b/115964401).
    ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
    // receiver_class should have been resolved when resolving the target method.
    DCHECK(receiver_class != nullptr);
    method_params->Set(index++, receiver_class);
  }

  // Resolve each parameter type from the method proto.
  const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
  DexFileParameterIterator it(*dex_file, proto_id);
  while (it.HasNext()) {
    DCHECK_LT(index, num_params);
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
    if (nullptr == klass) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    method_params->Set(index++, klass);
    it.Next();
  }

  Handle<mirror::Class> return_type =
      hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Constructors go through the default MethodHandles.Lookup to build a
  // transform that allocates and initializes the instance.
  if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
    Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
    Handle<mirror::MethodHandlesLookup> lookup =
        hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
    return lookup->FindConstructor(self, constructor_class, method_type);
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}
9825 
// Resolves the MethodHandleItem at `method_handle_idx` in the referrer's dex
// file to a mirror::MethodHandle, dispatching on the handle kind: the four
// field-accessor kinds go to the field resolver, the five invocation kinds to
// the method resolver.
ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
                                                              uint32_t method_handle_idx,
                                                              ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const DexFile* const dex_file = referrer->GetDexFile();
  const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
  switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      return ResolveMethodHandleForField(self, method_handle, referrer);
    case DexFile::MethodHandleType::kInvokeStatic:
    case DexFile::MethodHandleType::kInvokeInstance:
    case DexFile::MethodHandleType::kInvokeConstructor:
    case DexFile::MethodHandleType::kInvokeDirect:
    case DexFile::MethodHandleType::kInvokeInterface:
      return ResolveMethodHandleForMethod(self, method_handle, referrer);
  }
  // The switch is exhaustive over DexFile::MethodHandleType; no fall-through
  // return is needed (and adding one would mask missing-case warnings).
}
9846 
IsQuickResolutionStub(const void * entry_point) const9847 bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9848   return (entry_point == GetQuickResolutionStub()) ||
9849       (quick_resolution_trampoline_ == entry_point);
9850 }
9851 
IsQuickToInterpreterBridge(const void * entry_point) const9852 bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9853   return (entry_point == GetQuickToInterpreterBridge()) ||
9854       (quick_to_interpreter_bridge_trampoline_ == entry_point);
9855 }
9856 
IsQuickGenericJniStub(const void * entry_point) const9857 bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9858   return (entry_point == GetQuickGenericJniStub()) ||
9859       (quick_generic_jni_trampoline_ == entry_point);
9860 }
9861 
IsJniDlsymLookupStub(const void * entry_point) const9862 bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
9863   return entry_point == GetJniDlsymLookupStub() ||
9864       (jni_dlsym_lookup_trampoline_ == entry_point);
9865 }
9866 
IsJniDlsymLookupCriticalStub(const void * entry_point) const9867 bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
9868   return entry_point == GetJniDlsymLookupCriticalStub() ||
9869       (jni_dlsym_lookup_critical_trampoline_ == entry_point);
9870 }
9871 
// The runtime-internal generic JNI stub is simply the canonical one.
const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
  return GetQuickGenericJniStub();
}
9875 
SetEntryPointsForObsoleteMethod(ArtMethod * method) const9876 void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9877   DCHECK(method->IsObsolete());
9878   // We cannot mess with the entrypoints of native methods because they are used to determine how
9879   // large the method's quick stack frame is. Without this information we cannot walk the stacks.
9880   if (!method->IsNative()) {
9881     method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
9882   }
9883 }
9884 
// Writes a human-readable summary of loaded classes and registered class
// loaders to `os` (used for SIGQUIT / ANR dumps). Holds the class-linker
// classes lock and the dex lock (both shared) for the duration.
void ClassLinker::DumpForSigQuit(std::ostream& os) {
  ScopedObjectAccess soa(Thread::Current());
  ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
  os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
     << NumNonZygoteClasses() << "\n";
  // dex_lock_ protects dex_caches_, which is iterated below to list the dex
  // files belonging to each class loader.
  ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
  os << "Dumping registered class loaders\n";
  size_t class_loader_index = 0;
  for (const ClassLoaderData& class_loader : class_loaders_) {
    // DecodeJObject yields null for cleared JNI weak globals, so unloaded
    // class loaders are skipped.
    ObjPtr<mirror::ClassLoader> loader =
        ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
    if (loader != nullptr) {
      os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
      bool saw_one_dex_file = false;
      // Print the locations of all dex files whose dex cache shares this
      // loader's class table, ':'-separated.
      for (const auto& entry : dex_caches_) {
        const DexCacheData& dex_cache = entry.second;
        if (dex_cache.class_table == class_loader.class_table) {
          if (saw_one_dex_file) {
            os << ":";
          }
          saw_one_dex_file = true;
          os << entry.first->GetLocation();
        }
      }
      os << "]";
      // Cross-reference the parent loader by its index within class_loaders_,
      // falling back to its type name if the parent is not registered here.
      bool found_parent = false;
      if (loader->GetParent() != nullptr) {
        size_t parent_index = 0;
        for (const ClassLoaderData& class_loader2 : class_loaders_) {
          ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
              soa.Self()->DecodeJObject(class_loader2.weak_root));
          if (loader2 == loader->GetParent()) {
            os << ", parent #" << parent_index;
            found_parent = true;
            break;
          }
          parent_index++;
        }
        if (!found_parent) {
          os << ", unregistered parent of type "
             << loader->GetParent()->GetClass()->PrettyDescriptor();
        }
      } else {
        os << ", no parent";
      }
      os << "\n";
    }
  }
  os << "Done dumping class loaders\n";
  Runtime* runtime = Runtime::Current();
  os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
     << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
}
9938 
9939 class CountClassesVisitor : public ClassLoaderVisitor {
9940  public:
CountClassesVisitor()9941   CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
9942 
Visit(ObjPtr<mirror::ClassLoader> class_loader)9943   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
9944       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
9945     ClassTable* const class_table = class_loader->GetClassTable();
9946     if (class_table != nullptr) {
9947       num_zygote_classes += class_table->NumZygoteClasses(class_loader);
9948       num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
9949     }
9950   }
9951 
9952   size_t num_zygote_classes;
9953   size_t num_non_zygote_classes;
9954 };
9955 
NumZygoteClasses() const9956 size_t ClassLinker::NumZygoteClasses() const {
9957   CountClassesVisitor visitor;
9958   VisitClassLoaders(&visitor);
9959   return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
9960 }
9961 
NumNonZygoteClasses() const9962 size_t ClassLinker::NumNonZygoteClasses() const {
9963   CountClassesVisitor visitor;
9964   VisitClassLoaders(&visitor);
9965   return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
9966 }
9967 
// Returns the number of classes loaded since the zygote forked.
size_t ClassLinker::NumLoadedClasses() {
  ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  // Only return non-zygote classes, since these are the ones apps care about.
  return NumNonZygoteClasses();
}
9973 
// Tid of the thread holding the class-linker classes lock exclusively.
pid_t ClassLinker::GetClassesLockOwner() {
  return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
}
9977 
// Tid of the thread holding the dex lock exclusively.
pid_t ClassLinker::GetDexLockOwner() {
  return Locks::dex_lock_->GetExclusiveOwnerTid();
}
9981 
// Records `klass` as the canonical class for `class_root` in the class-roots
// array. Only legal during early startup (before init_done_), only for
// boot-class-loader classes, and each slot may be written exactly once.
void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
  DCHECK(!init_done_);

  DCHECK(klass != nullptr);
  // Class roots must come from the boot class loader (null loader).
  DCHECK(klass->GetClassLoader() == nullptr);

  mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
  DCHECK(class_roots != nullptr);
  DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
  int32_t index = static_cast<int32_t>(class_root);
  // Each root slot is write-once.
  DCHECK(class_roots->Get(index) == nullptr);
  // Set<false>: presumably the non-transactional write path — confirm against
  // ObjectArray::Set's template parameter.
  class_roots->Set<false>(index, klass);
}
9995 
// Builds a well-known class loader (e.g. PathClassLoader) over `dex_files`
// without running any Java code: the DexPathList, its Element array, and the
// per-dex-file dalvik.system.DexFile objects are all allocated and their
// fields poked directly. Used by gtests and the compiler only (see comments
// below); the resulting objects deliberately skip constructor invariants.
// Returns the new loader; `parent_loader` null means "use a fresh
// BootClassLoader instance as parent".
ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
    Thread* self,
    const std::vector<const DexFile*>& dex_files,
    Handle<mirror::Class> loader_class,
    Handle<mirror::ClassLoader> parent_loader,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {

  StackHandleScope<5> hs(self);

  ArtField* dex_elements_field =
      jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);

  // Element[] type is taken from the field's own declared type.
  Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
  DCHECK(dex_elements_class != nullptr);
  DCHECK(dex_elements_class->IsArrayClass());
  Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
      mirror::ObjectArray<mirror::Object>::Alloc(self,
                                                 dex_elements_class.Get(),
                                                 dex_files.size())));
  Handle<mirror::Class> h_dex_element_class =
      hs.NewHandle(dex_elements_class->GetComponentType());

  ArtField* element_file_field =
      jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
  DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());

  ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
  DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
  DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  // Fill the elements array: one DexPathList$Element wrapping one
  // dalvik.system.DexFile per ART DexFile.
  int32_t index = 0;
  for (const DexFile* dex_file : dex_files) {
    StackHandleScope<4> hs2(self);

    // CreateWellKnownClassLoader is only used by gtests and compiler.
    // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
    Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
        self,
        kDexFileIndexStart + 1));
    DCHECK(h_long_array != nullptr);
    // The "cookie" encodes the native DexFile pointer for later retrieval.
    h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));

    // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
    // FinalizerReference which will never get cleaned up without a started runtime.
    Handle<mirror::Object> h_dex_file = hs2.NewHandle(
        cookie_field->GetDeclaringClass()->AllocObject(self));
    DCHECK(h_dex_file != nullptr);
    cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());

    Handle<mirror::String> h_file_name = hs2.NewHandle(
        mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
    DCHECK(h_file_name != nullptr);
    file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());

    Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
    DCHECK(h_element != nullptr);
    element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());

    h_dex_elements->Set(index, h_element.Get());
    index++;
  }
  DCHECK_EQ(index, h_dex_elements->GetLength());

  // Create DexPathList.
  Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
      dex_elements_field->GetDeclaringClass()->AllocObject(self));
  DCHECK(h_dex_path_list != nullptr);
  // Set elements.
  dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
  // Create an empty List for the "nativeLibraryDirectories," required for native tests.
  // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
  //       elements.
  {
    ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
        FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
    DCHECK(native_lib_dirs != nullptr);
    ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
    DCHECK(list_class != nullptr);
    {
      // EnsureInitialized may suspend, so re-read the class through a handle.
      StackHandleScope<1> h_list_scope(self);
      Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
      bool list_init = EnsureInitialized(self, h_list_class, true, true);
      DCHECK(list_init);
      list_class = h_list_class.Get();
    }
    ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
    // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
    //       is fine for testing. While it violates a Java-code invariant (the elementData field is
    //       normally never null), as long as one does not try to add elements, this will still
    //       work.
    native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
  }

  // Create the class loader..
  Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
      ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
  DCHECK(h_class_loader != nullptr);
  // Set DexPathList.
  ArtField* path_list_field =
      jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
  DCHECK(path_list_field != nullptr);
  path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());

  // Make a pretend boot-classpath.
  // TODO: Should we scan the image?
  ArtField* const parent_field =
      jni::DecodeArtField(WellKnownClasses::java_lang_ClassLoader_parent);
  DCHECK(parent_field != nullptr);
  if (parent_loader.Get() == nullptr) {
    ScopedObjectAccessUnchecked soa(self);
    ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
        WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
    parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
  } else {
    parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
  }

  ArtField* shared_libraries_field =
      jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
  DCHECK(shared_libraries_field != nullptr);
  shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());

  ArtField* shared_libraries_after_field =
        jni::DecodeArtField(
        WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter);
  DCHECK(shared_libraries_after_field != nullptr);
  shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
                                                 shared_libraries_after.Get());
  return h_class_loader.Get();
}
10130 
// JNI-facing wrapper around the ObjPtr overload above: decodes the jobject
// arguments into handles, builds the loader, and returns it as a new JNI
// global reference. `loader_class` must be one of the three well-known
// loader classes checked below.
jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
                                                const std::vector<const DexFile*>& dex_files,
                                                jclass loader_class,
                                                jobject parent_loader,
                                                jobject shared_libraries,
                                                jobject shared_libraries_after) {
  CHECK(self->GetJniEnv()->IsSameObject(loader_class,
                                        WellKnownClasses::dalvik_system_PathClassLoader) ||
        self->GetJniEnv()->IsSameObject(loader_class,
                                        WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
        self->GetJniEnv()->IsSameObject(loader_class,
                                        WellKnownClasses::dalvik_system_InMemoryDexClassLoader));

  // SOAAlreadyRunnable is protected, and we need something to add a global reference.
  // We could move the jobject to the callers, but all call-sites do this...
  ScopedObjectAccessUnchecked soa(self);

  // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
  StackHandleScope<5> hs(self);

  Handle<mirror::Class> h_loader_class =
      hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
  Handle<mirror::ClassLoader> h_parent =
      hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
  Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
      hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
  Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries_after =
        hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries_after));

  ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
      self,
      dex_files,
      h_loader_class,
      h_parent,
      h_shared_libraries,
      h_shared_libraries_after);

  // Make it a global ref and return.
  ScopedLocalRef<jobject> local_ref(
      soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
  return soa.Env()->NewGlobalRef(local_ref.get());
}
10173 
// Convenience wrapper: creates a PathClassLoader over `dex_files` with a null
// parent (the callee substitutes a BootClassLoader). The remaining arguments
// take their declared defaults from the overload's declaration.
jobject ClassLinker::CreatePathClassLoader(Thread* self,
                                           const std::vector<const DexFile*>& dex_files) {
  return CreateWellKnownClassLoader(self,
                                    dex_files,
                                    WellKnownClasses::dalvik_system_PathClassLoader,
                                    nullptr);
}
10181 
DropFindArrayClassCache()10182 void ClassLinker::DropFindArrayClassCache() {
10183   std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
10184   find_array_class_cache_next_victim_ = 0;
10185 }
10186 
VisitClassLoaders(ClassLoaderVisitor * visitor) const10187 void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
10188   Thread* const self = Thread::Current();
10189   for (const ClassLoaderData& data : class_loaders_) {
10190     // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10191     ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
10192         self->DecodeJObject(data.weak_root));
10193     if (class_loader != nullptr) {
10194       visitor->Visit(class_loader);
10195     }
10196   }
10197 }
10198 
VisitAllocators(AllocatorVisitor * visitor) const10199 void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
10200   for (const ClassLoaderData& data : class_loaders_) {
10201     LinearAlloc* alloc = data.allocator;
10202     if (alloc != nullptr && !visitor->Visit(alloc)) {
10203         break;
10204     }
10205   }
10206 }
10207 
// Pins `dex_file` (a dalvik.system.DexFile mirror object) as a strong root in
// `class_loader`'s class table so it is kept alive with the loader.
void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
                                               ObjPtr<mirror::ClassLoader> class_loader) {
  DCHECK(dex_file != nullptr);
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  // Null class_loader maps to the boot class table.
  ClassTable* const table = ClassTableForClassLoader(class_loader);
  DCHECK(table != nullptr);
  if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
    // It was not already inserted, perform the write barrier to let the GC know the class loader's
    // class table was modified.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
}
10221 
// Removes and frees the bookkeeping for class loaders whose Java objects have
// been garbage collected (their JNI weak globals read as null).
void ClassLinker::CleanupClassLoaders() {
  Thread* const self = Thread::Current();
  std::vector<ClassLoaderData> to_delete;
  // Do the delete outside the lock to avoid lock violation in jit code cache.
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
      const ClassLoaderData& data = *it;
      // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
      ObjPtr<mirror::ClassLoader> class_loader =
          ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
      if (class_loader != nullptr) {
        ++it;  // Still alive; keep it.
      } else {
        VLOG(class_linker) << "Freeing class loader";
        to_delete.push_back(data);
        it = class_loaders_.erase(it);
      }
    }
  }
  // Actual teardown happens lock-free, per the comment above.
  for (ClassLoaderData& data : to_delete) {
    // CHA unloading analysis and SingleImplementaion cleanups are required.
    DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
  }
}
10247 
// Class visitor that finds the class whose virtual-method slice contains a
// given ArtMethod; used to locate the holder of a copied method.
class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
 public:
  FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
      : method_(method),
        pointer_size_(pointer_size) {}

  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
    if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
      holder_ = klass;
    }
    // Returning false stops the visitation, so stop as soon as a holder is
    // found (i.e. keep going only while holder_ is still null).
    return holder_ == nullptr;
  }

  // Result: the class containing method_, or null if not found.
  ObjPtr<mirror::Class> holder_ = nullptr;
  const ArtMethod* const method_;
  const PointerSize pointer_size_;
};
10266 
// Returns the class whose virtual-method table holds the copied `method`, by
// scanning all loaded classes (slow — hence the trace below).
ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
  ScopedTrace trace(__FUNCTION__);  // Since this function is slow, have a trace to notify people.
  CHECK(method->IsCopied());
  FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
  VisitClasses(&visitor);
  return visitor.holder_;
}
10274 
// Deliberately aborts: public-SDK access checks exist only in AotClassLinker.
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10281 
// Deliberately aborts: public-SDK access checks exist only in AotClassLinker.
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10288 
// Deliberately aborts: public-SDK access checks exist only in AotClassLinker.
bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10294 
// Deliberately aborts: public-SDK access checks exist only in AotClassLinker.
void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10300 
RemoveDexFromCaches(const DexFile & dex_file)10301 void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
10302   ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
10303 
10304   auto it = dex_caches_.find(&dex_file);
10305   if (it != dex_caches_.end()) {
10306       dex_caches_.erase(it);
10307   }
10308 }
10309 
// Explicitly instantiate both variants of ClassLinker::AllocClass so their
// definitions (in this translation unit) are emitted for out-of-TU callers.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
10319 
10320 }  // namespace art
10321