/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "class_linker.h"

#include <unistd.h>

#include <algorithm>
#include <deque>
#include <forward_list>
#include <iostream>
#include <iterator>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>
#include <vector>

#include "android-base/stringprintf.h"
#include "android-base/strings.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "barrier.h"
#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/casts.h"
#include "base/file_utils.h"
#include "base/hash_map.h"
#include "base/hash_set.h"
#include "base/leb128.h"
#include "base/logging.h"
#include "base/mem_map_arena_pool.h"
#include "base/metrics/metrics.h"
#include "base/mutex-inl.h"
#include "base/os.h"
#include "base/quasi_atomic.h"
#include "base/scoped_arena_containers.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/string_view_cpp20.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "base/value_object.h"
#include "cha.h"
#include "class_linker-inl.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "class_table-inl.h"
#include "compiler_callbacks.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/class_accessor-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_annotations.h"
#include "dex/dex_file_exception_helpers.h"
#include "dex/dex_file_loader.h"
#include "dex/signature-inl.h"
#include "dex/utf.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "experimental_flags.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "gc_root-inl.h"
#include "handle_scope-inl.h"
#include "hidden_api.h"
#include "image-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "intern_table-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/mterp/nterp.h"
#include "jit/debugger_interface.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
#include "linear_alloc-inl.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site.h"
#include "mirror/class-alloc-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/field.h"
#include "mirror/iftable-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/method_type.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_reference-inl.h"
#include "mirror/object_reference.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
#include "mirror/string-inl.h"
#include "mirror/throwable.h"
#include "mirror/var_handle.h"
#include "native/dalvik_system_DexFile.h"
#include "nativehelper/scoped_local_ref.h"
#include "nterp_helpers.h"
#include "oat.h"
#include "oat_file-inl.h"
#include "oat_file.h"
#include "oat_file_assistant.h"
#include "oat_file_manager.h"
#include "object_lock.h"
#include "profile/profile_compilation_info.h"
#include "runtime.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "startup_completed_task.h"
#include "thread-inl.h"
#include "thread.h"
#include "thread_list.h"
#include "trace.h"
#include "transaction.h"
#include "vdex_file.h"
#include "verifier/class_verifier.h"
#include "verifier/verifier_deps.h"
#include "well_known_classes.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;

static void ThrowNoClassDefFoundError(const char* fmt, ...)
    __attribute__((__format__(__printf__, 1, 2)))
    REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  Thread* self = Thread::Current();
  self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
  va_end(args);
}

static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ClassExt> ext(c->GetExtData());
  if (ext == nullptr) {
    return nullptr;
  } else {
    return ext->GetErroneousStateError();
  }
}

static bool IsVerifyError(ObjPtr<mirror::Object> obj)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This is slow, but we only use it for rethrowing an error, and for DCHECK.
  return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
}

// Helper for ThrowEarlierClassFailure. Throws the stored error.
static void HandleEarlierErroneousStateError(Thread* self,
                                             ClassLinker* class_linker,
                                             ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
  DCHECK(obj != nullptr);
  self->AssertNoPendingException();
  DCHECK(!obj->IsClass());
  ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
  ObjPtr<mirror::Class> error_class = obj->GetClass();
  CHECK(throwable_class->IsAssignableFrom(error_class));
  self->SetException(obj->AsThrowable());
  self->AssertPendingException();
}

static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
                                         PointerSize pointer_size,
                                         verifier::FailureKind failure_kind)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
    klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
  }

  // Now that the class has passed verification, try to set nterp entrypoints
  // to methods that currently use the switch interpreter.
  if (interpreter::CanRuntimeUseNterp()) {
    for (ArtMethod& m : klass->GetMethods(pointer_size)) {
      if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
        runtime->GetInstrumentation()->InitializeMethodsCode(&m, /*aot_code=*/nullptr);
      }
    }
  }
}

// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  void MakeVisible(Thread* self) {
    DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
    size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
    AdjustThreadVisibilityCounter(self, count);
  }

  void Run(Thread* self) override {
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      {
        ScopedObjectAccess soa(self);
        StackHandleScope<1u> hs(self);
        MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
        JavaVMExt* vm = self->GetJniEnv()->GetVm();
        for (size_t i = 0, num = num_classes_; i != num; ++i) {
          klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
          vm->DeleteWeakGlobalRef(self, classes_[i]);
          if (klass != nullptr) {
            mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
            class_linker_->FixupStaticTrampolines(self, klass.Get());
          }
        }
        num_classes_ = 0u;
      }
      class_linker_->VisiblyInitializedCallbackDone(self, this);
    }
  }

  // Making classes visibly initialized in bigger batches helps with app startup
  // for apps that initialize a lot of classes, because fewer checkpoints need to run.
  // (On the other hand, bigger batches make class initialization checks more
  // likely to take a slow path, but that is mitigated by making partially
  // filled buffers visibly initialized if we take the slow path many times.
  // See `Thread::kMakeVisiblyInitializedCounterTriggerCount`.)
  static constexpr size_t kMaxClasses = 48;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
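
// Illustrative lifecycle sketch (editorial comment, not part of the original source):
// a batch is filled via MarkClassInitialized() until IsFull(), published with
// MakeVisible() (which runs a checkpoint on all threads), and once every thread
// has executed Run(), AdjustThreadVisibilityCounter() hits zero, the classes are
// marked kVisiblyInitialized, and VisiblyInitializedCallbackDone() recycles the
// callback. A hypothetical call sequence for one class `klass`:
//
//   VisiblyInitializedCallback* cb = class_linker->MarkClassInitialized(self, klass);
//   if (cb != nullptr) {
//     cb->MakeVisible(self);  // Completion happens asynchronously at the checkpoint.
//   }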

void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model, classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    Locks::mutator_lock_->AssertNotHeld(self);
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}

void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}

void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
  ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
  if (cb != nullptr) {
    cb->MakeVisible(self);
  }
  ScopedThreadSuspension sts(self, ThreadState::kSuspended);
  MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}

ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
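
// Editorial note: on architectures other than x86/x86-64, a class therefore moves
// through kInitialized -> (checkpoint on all threads) -> kVisiblyInitialized, and
// only after the second step does FixupStaticTrampolines() install the fast
// entrypoints; the x86 memory model and active transactions take the immediate
// path above instead.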

const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
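
// Illustrative sketch (editorial, not part of the original source): this is the
// runtime half of JNI `RegisterNatives()`. A hypothetical caller:
//
//   jclass clazz = env->FindClass("com/example/Foo");  // hypothetical class
//   const JNINativeMethod methods[] = {
//       {"nativeBar", "()I", reinterpret_cast<void*>(nativeBar)},
//   };
//   env->RegisterNatives(clazz, methods, 1);
//
// For a @CriticalNative method whose declaring class is not yet visibly
// initialized, the entrypoint is parked in `critical_native_code_with_clinit_check_`
// and installed by FixupStaticTrampolines() once the class becomes visibly
// initialized.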

void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  // Restore stub to lookup native pointer via dlsym.
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
  } else {
    method->SetEntryPointFromJni(GetJniDlsymLookupStub());
  }
}

const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      return it->second;
    }
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
  } else {
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
  }
}
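
// Editorial note: for native methods the JNI entrypoint doubles as a sentinel:
// while it still points at the dlsym lookup stub (critical or normal), nothing
// has been registered yet, hence the nullptr results above.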

void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO: This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}

static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (VLOG_IS_ON(class_linker)) {
    std::string temp;
    LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
              << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
  }
}

static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();

  ObjPtr<mirror::Throwable> cause = self->GetException();
  CHECK(cause != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening, but only
    // if no AsyncExceptions are being thrown around, since those could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!cause->IsError()) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}

ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA being disabled during AOT compilation, see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}

void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
  ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
  if (c2 == nullptr) {
    LOG(FATAL) << "Could not find class " << descriptor;
    UNREACHABLE();
  }
  if (c1.Get() != c2) {
    std::ostringstream os1, os2;
    c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
    c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
    LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
               << ". This is most likely the result of a broken build. Make sure that "
               << "libcore and art projects match.\n\n"
               << os1.str() << "\n\n" << os2.str();
    UNREACHABLE();
  }
}

ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
                                     size_t ifcount,
                                     ObjPtr<mirror::Class> iftable_class)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(iftable_class->IsArrayClass());
  DCHECK(iftable_class->GetComponentType()->IsObjectClass());
  return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
      mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
}
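
// Editorial note: `mirror::IfTable` is a plain Object[] in which each interface
// entry occupies `IfTable::kMax` consecutive slots (the interface class plus its
// method array), which is why the allocation above asks for `ifcount * kMax`
// elements rather than `ifcount`.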

bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
                                   std::string* error_msg) {
  VLOG(startup) << "ClassLinker::Init";

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
  CHECK(!init_done_);

  // Use the pointer size from the runtime since we are probably creating the image.
  image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());

  // java_lang_Class comes first; it's needed for AllocClass.
  // The GC can't handle an object with a null class since we can't get the size of this object.
  heap->IncrementDisableMovingGC(self);
  StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
  auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
  // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
  // the incorrect result when comparing to-space vs from-space.
  Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
      heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
  CHECK(java_lang_Class != nullptr);
  java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
  java_lang_Class->SetClass(java_lang_Class.Get());
  if (kUseBakerReadBarrier) {
    java_lang_Class->AssertReadBarrierState();
  }
  java_lang_Class->SetClassSize(class_class_size);
  java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
  heap->DecrementDisableMovingGC(self);
  // AllocClass(ObjPtr<mirror::Class>) can now be used.

  // Class[] is used for reflection support.
  auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
  Handle<mirror::Class> class_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
  class_array_class->SetComponentType(java_lang_Class.Get());

  // java_lang_Object comes next so that object_array_class can be created.
  Handle<mirror::Class> java_lang_Object(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
  CHECK(java_lang_Object != nullptr);
  // Backfill Object as the super class of Class.
  java_lang_Class->SetSuperClass(java_lang_Object.Get());
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);

  java_lang_Object->SetObjectSize(sizeof(mirror::Object));
  // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
  // cleared without triggering the read barrier and unintentionally marking the sentinel alive.
  runtime->SetSentinel(heap->AllocNonMovableObject(self,
                                                   java_lang_Object.Get(),
                                                   java_lang_Object->GetObjectSize(),
                                                   VoidFunctor()));

  // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
  if (kBitstringSubtypeCheckEnabled) {
    // It might seem the lock here is unnecessary, however all the SubtypeCheck
    // functions are annotated to require locks all the way down.
    //
    // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
  }

  // Object[] next to hold class roots.
  Handle<mirror::Class> object_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
  object_array_class->SetComponentType(java_lang_Object.Get());

  // Setup java.lang.String.
  //
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  Handle<mirror::Class> java_lang_String(hs.NewHandle(
      AllocClass</* kMovable= */ false>(
          self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
  java_lang_String->SetStringClass();
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);

  // Setup java.lang.ref.Reference.
  Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
  java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);

  // Create storage for root classes, save away our work so far (requires descriptors).
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                object_array_class.Get(),
                                                static_cast<int32_t>(ClassRoot::kMax)));
  CHECK(!class_roots_.IsNull());
  SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
  SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
  SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
  SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
  SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());

  // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
  java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));

  // Create array interface entries to populate once we can load system classes.
  object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
  DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());

  // Setup the primitive type classes.
  CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
  CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
  CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
  CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
  CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
  CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
  CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
  CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
  CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);

  // Allocate the primitive array classes. We need only the native pointer
  // array at this point (int[] or long[], depending on architecture) but
  // we shall perform the same setup steps for all primitive array classes.
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);

  // Now that these are registered, we can use AllocClass() and AllocObjectArray.

  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
  Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
  java_lang_DexCache->SetDexCacheClass();
  java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);

  // Setup dalvik.system.ClassExt.
  Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);

  // Set up array classes for string, field, method.
  Handle<mirror::Class> object_array_string(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
  object_array_string->SetComponentType(java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());

  LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
  // Create runtime resolution and imt conflict methods.
  runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
  runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
  runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));

  // Set up boot_class_path_ and register class_path now that we can use AllocObjectArray to
  // create DexCache instances. Needs to be after the String, Field, Method arrays since
  // AllocDexCache uses these roots.
  if (boot_class_path.empty()) {
    *error_msg = "Boot classpath is empty.";
    return false;
  }
  for (auto& dex_file : boot_class_path) {
    if (dex_file == nullptr) {
      *error_msg = "Null dex file.";
      return false;
    }
    AppendToBootClassPath(self, dex_file.get());
    boot_dex_files_.push_back(std::move(dex_file));
  }

  // Now we can use FindSystemClass.

  // Set up the GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
  // we do not need friend classes or a publicly exposed setter.
  quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
  if (!runtime->IsAotCompiler()) {
    // We need to set up the generic trampolines since we don't have an image.
    jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
    jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
    quick_resolution_trampoline_ = GetQuickResolutionStub();
    quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
    quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
    quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
    nterp_trampoline_ = interpreter::GetNterpEntryPoint();
  }

  // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init.
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
  CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
  CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
  CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
  CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());

  // Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
  // in class_table_.
  CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");

  // Set up the core array classes, i.e. Object[], Class[], String[] and the primitive
  // arrays; this can't be done until Object has a vtable and component classes are loaded.
  FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);

  // Set up the single, global copy of "iftable".
  auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
  CHECK(java_lang_Cloneable != nullptr);
  auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
  CHECK(java_io_Serializable != nullptr);
  // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
  // crawl up and explicitly list all of the supers as well.
  object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
  object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());

  // Check Class[] and Object[]'s interfaces.
  CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
  CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));

  CHECK_EQ(object_array_string.Get(),
           FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));

  // End of special init trickery; all subsequent classes may be loaded via FindSystemClass.

  // Create java.lang.reflect.Proxy root.
  SetClassRoot(ClassRoot::kJavaLangReflectProxy,
               FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));

  // Create java.lang.reflect.Field.class root.
  ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);

  // Create java.lang.reflect.Field array root.
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);

  // Create java.lang.reflect.Constructor.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);

  // Create java.lang.reflect.Method.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);

  // Create java.lang.invoke.CallSite.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);

  // Create java.lang.invoke.MethodType.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);

  // Create java.lang.invoke.MethodHandleImpl.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());

  // Create java.lang.invoke.MethodHandles.Lookup.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);

  // Create java.lang.invoke.VarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);

  // Create java.lang.invoke.FieldVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);

  // Create java.lang.invoke.StaticFieldVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);

  // Create java.lang.invoke.ArrayElementVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);

  // Create java.lang.invoke.ByteArrayViewVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);

  // Create java.lang.invoke.ByteBufferViewVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);

  class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);

  // The java.lang.ref classes need to be specially flagged, but otherwise are normal classes.
  // Finish initializing the Reference class.
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
  CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
  CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
           mirror::Reference::ClassSize(image_pointer_size_));
  class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);

  // Set up the ClassLoader, verifying the object_size_.
  class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
  class_root->SetClassLoaderClass();
  CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
  SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);

  // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
  // java.lang.StackTraceElement as a convenience.
  SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
  SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
               FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
               FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
               FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
               FindSystemClass(self, "[Ljava/lang/ClassLoader;"));

  // Create conflict tables that depend on the class linker.
  runtime->FixupConflictTables();

  FinishInit(self);

  VLOG(startup) << "ClassLinker::InitWithoutImage exiting";

  return true;
}
1012
CreateStringInitBindings(Thread * self,ClassLinker * class_linker)1013 static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1014 REQUIRES_SHARED(Locks::mutator_lock_) {
1015 // Find String.<init> -> StringFactory bindings.
1016 ObjPtr<mirror::Class> string_factory_class =
1017 class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1018 CHECK(string_factory_class != nullptr);
1019 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
1020 WellKnownClasses::InitStringInit(string_class, string_factory_class);
1021 // Update the primordial thread.
1022 self->InitStringEntryPoints();
1023 }
1024
FinishInit(Thread * self)1025 void ClassLinker::FinishInit(Thread* self) {
1026 VLOG(startup) << "ClassLinker::FinishInit entering";
1027
1028 CreateStringInitBindings(self, this);
1029
1030 // Let the heap know some key offsets into java.lang.ref instances
1031 // Note: we hard code the field indexes here rather than using FindInstanceField
1032 // as the types of the field can't be resolved prior to the runtime being
1033 // fully initialized
1034 StackHandleScope<3> hs(self);
1035 Handle<mirror::Class> java_lang_ref_Reference =
1036 hs.NewHandle(GetClassRoot<mirror::Reference>(this));
1037 Handle<mirror::Class> java_lang_ref_FinalizerReference =
1038 hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
1039
1040 ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
1041 CHECK_STREQ(pendingNext->GetName(), "pendingNext");
1042 CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
1043
1044 ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
1045 CHECK_STREQ(queue->GetName(), "queue");
1046 CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
1047
1048 ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
1049 CHECK_STREQ(queueNext->GetName(), "queueNext");
1050 CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
1051
1052 ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
1053 CHECK_STREQ(referent->GetName(), "referent");
1054 CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
1055
1056 ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
1057 CHECK_STREQ(zombie->GetName(), "zombie");
1058 CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
1059
1060 // ensure all class_roots_ are initialized
1061 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1062 ClassRoot class_root = static_cast<ClassRoot>(i);
1063 ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
1064 CHECK(klass != nullptr);
1065 DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
1066 // note SetClassRoot does additional validation.
1067 // if possible add new checks there to catch errors early
1068 }
1069
1070 CHECK(GetArrayIfTable() != nullptr);
1071
1072 // disable the slow paths in FindClass and CreatePrimitiveClass now
1073 // that Object, Class, and Object[] are setup
1074 init_done_ = true;
1075
1076 // Under sanitization, the small carve-out to handle stack overflow might not be enough to
1077 // initialize the StackOverflowError class (as it might require running the verifier). Instead,
1078 // ensure that the class will be initialized.
1079 if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
1080 verifier::ClassVerifier::Init(this); // Need to prepare the verifier.
1081
1082 ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
1083 if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
1084 // Strange, but don't crash.
1085 LOG(WARNING) << "Could not prepare StackOverflowError.";
1086 self->ClearException();
1087 }
1088 }
1089
1090 VLOG(startup) << "ClassLinker::FinishInit exiting";
1091 }
1092
EnsureRootInitialized(ClassLinker * class_linker,Thread * self,ObjPtr<mirror::Class> klass)1093 static void EnsureRootInitialized(ClassLinker* class_linker,
1094 Thread* self,
1095 ObjPtr<mirror::Class> klass)
1096 REQUIRES_SHARED(Locks::mutator_lock_) {
1097 if (!klass->IsVisiblyInitialized()) {
1098 DCHECK(!klass->IsArrayClass());
1099 DCHECK(!klass->IsPrimitive());
1100 StackHandleScope<1> hs(self);
1101 Handle<mirror::Class> h_class(hs.NewHandle(klass));
1102 if (!class_linker->EnsureInitialized(
1103 self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true)) {
1104 LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1105 << ": " << self->GetException()->Dump();
1106 }
1107 }
1108 }
1109
RunEarlyRootClinits(Thread * self)1110 void ClassLinker::RunEarlyRootClinits(Thread* self) {
1111 StackHandleScope<1u> hs(self);
1112 Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1113 EnsureRootInitialized(this, self, GetClassRoot<mirror::Class>(class_roots.Get()));
1114 EnsureRootInitialized(this, self, GetClassRoot<mirror::String>(class_roots.Get()));
1115 // `Field` class is needed for register_java_net_InetAddress in libcore, b/28153851.
1116 EnsureRootInitialized(this, self, GetClassRoot<mirror::Field>(class_roots.Get()));
1117
1118 WellKnownClasses::Init(self->GetJniEnv());
1119
1120 // `FinalizerReference` class is needed for initialization of `java.net.InetAddress`.
1121 // (Indirectly by constructing a `ObjectStreamField` which uses a `StringBuilder`
1122 // and, when resizing, initializes the `System` class for `System.arraycopy()`
1123 // and `System.<clinit> creates a finalizable object.)
1124 EnsureRootInitialized(
1125 this, self, WellKnownClasses::java_lang_ref_FinalizerReference_add->GetDeclaringClass());
1126 }
1127
RunRootClinits(Thread * self)1128 void ClassLinker::RunRootClinits(Thread* self) {
1129 StackHandleScope<1u> hs(self);
1130 Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1131 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1132 EnsureRootInitialized(this, self, GetClassRoot(ClassRoot(i), class_roots.Get()));
1133 }
1134
1135 // Make sure certain well-known classes are initialized. Note that well-known
1136 // classes are always in the boot image, so this code is primarily intended
1137 // for running without boot image but may be needed for boot image if the
1138 // AOT-initialization fails due to introduction of new code to `<clinit>`.
1139 ArtMethod* methods_of_classes_to_initialize[] = {
1140 // Initialize primitive boxing classes (avoid check at runtime).
1141 WellKnownClasses::java_lang_Boolean_valueOf,
1142 WellKnownClasses::java_lang_Byte_valueOf,
1143 WellKnownClasses::java_lang_Character_valueOf,
1144 WellKnownClasses::java_lang_Double_valueOf,
1145 WellKnownClasses::java_lang_Float_valueOf,
1146 WellKnownClasses::java_lang_Integer_valueOf,
1147 WellKnownClasses::java_lang_Long_valueOf,
1148 WellKnownClasses::java_lang_Short_valueOf,
1149 // Initialize `StackOverflowError`.
1150 WellKnownClasses::java_lang_StackOverflowError_init,
1151 // Ensure class loader classes are initialized (avoid check at runtime).
1152 // Superclass `ClassLoader` is a class root and already initialized above.
1153 // Superclass `BaseDexClassLoader` is initialized implicitly.
1154 WellKnownClasses::dalvik_system_DelegateLastClassLoader_init,
1155 WellKnownClasses::dalvik_system_DexClassLoader_init,
1156 WellKnownClasses::dalvik_system_InMemoryDexClassLoader_init,
1157 WellKnownClasses::dalvik_system_PathClassLoader_init,
1158 WellKnownClasses::java_lang_BootClassLoader_init,
1159 // Ensure `Daemons` class is initialized (avoid check at runtime).
1160 WellKnownClasses::java_lang_Daemons_start,
1161 // Ensure `Thread` and `ThreadGroup` classes are initialized (avoid check at runtime).
1162 WellKnownClasses::java_lang_Thread_init,
1163 WellKnownClasses::java_lang_ThreadGroup_add,
1164 // Ensure reference classes are initialized (avoid check at runtime).
1165 // The `FinalizerReference` class was initialized in `RunEarlyRootClinits()`.
1166 WellKnownClasses::java_lang_ref_ReferenceQueue_add,
1167 // Ensure `InvocationTargetException` class is initialized (avoid check at runtime).
1168 WellKnownClasses::java_lang_reflect_InvocationTargetException_init,
1169 // Ensure `Parameter` class is initialized (avoid check at runtime).
1170 WellKnownClasses::java_lang_reflect_Parameter_init,
1171 // Ensure `MethodHandles` class is initialized (avoid check at runtime).
1172 WellKnownClasses::java_lang_invoke_MethodHandles_lookup,
1173 // Ensure `DirectByteBuffer` class is initialized (avoid check at runtime).
1174 WellKnownClasses::java_nio_DirectByteBuffer_init,
1175 // Ensure `FloatingDecimal` class is initialized (avoid check at runtime).
1176 WellKnownClasses::jdk_internal_math_FloatingDecimal_getBinaryToASCIIConverter_D,
1177 // Ensure reflection annotation classes are initialized (avoid check at runtime).
1178 WellKnownClasses::libcore_reflect_AnnotationFactory_createAnnotation,
1179 WellKnownClasses::libcore_reflect_AnnotationMember_init,
1180 // We're suppressing exceptions from `DdmServer` and we do not want to repeatedly
1181 // suppress class initialization errors (say, due to OOM), so initialize it early.
1182 WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch,
1183 };
1184 for (ArtMethod* method : methods_of_classes_to_initialize) {
1185 EnsureRootInitialized(this, self, method->GetDeclaringClass());
1186 }
1187 ArtField* fields_of_classes_to_initialize[] = {
1188 // Ensure classes used by class loaders are initialized (avoid check at runtime).
1189 WellKnownClasses::dalvik_system_DexFile_cookie,
1190 WellKnownClasses::dalvik_system_DexPathList_dexElements,
1191 WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
1192 // Ensure `VMRuntime` is initialized (avoid check at runtime).
1193 WellKnownClasses::dalvik_system_VMRuntime_nonSdkApiUsageConsumer,
1194 // Initialize empty arrays needed by `StackOverflowError`.
1195 WellKnownClasses::java_util_Collections_EMPTY_LIST,
1196 WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT,
1197 };
1198 for (ArtField* field : fields_of_classes_to_initialize) {
1199 EnsureRootInitialized(this, self, field->GetDeclaringClass());
1200 }
1201 }
1202
1203 ALWAYS_INLINE
1204 static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1205 DCHECK(!method->IsRuntimeMethod());
1206 DCHECK(!method->IsProxyMethod());
1207 DCHECK(!method->IsObsolete());
1208 // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1209 // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1210 const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1211 const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1212 std::string_view name = dex_file.GetMethodNameView(method_id);
1213 return ComputeModifiedUtf8Hash(name);
1214 }
1215
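// Editorial sketch: the hash above ultimately reduces to the Java-style 31-based
// recurrence over the Modified-UTF-8 bytes of the method name. A self-contained
// analogue over std::string_view (an illustrative assumption about the dex/utf.h
// behavior, not a copy of it):
[[maybe_unused]] static uint32_t HashModifiedUtf8Bytes(std::string_view name) {
  uint32_t hash = 0u;
  for (char c : name) {
    hash = hash * 31u + static_cast<uint8_t>(c);  // Same recurrence as String.hashCode().
  }
  return hash;
}
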
1216 ALWAYS_INLINE
1217 static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1218 REQUIRES_SHARED(Locks::mutator_lock_) {
1219 DCHECK(!lhs->IsRuntimeMethod());
1220 DCHECK(!lhs->IsProxyMethod());
1221 DCHECK(!lhs->IsObsolete());
1222 DCHECK(!rhs->IsRuntimeMethod());
1223 DCHECK(!rhs->IsProxyMethod());
1224 DCHECK(!rhs->IsObsolete());
1225 // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1226 // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1227 const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1228 const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1229 const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1230 const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
1231 if (&lhs_dex_file == &rhs_dex_file) {
1232 return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1233 lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1234 } else {
1235 return
1236 lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1237 lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1238 }
1239 }
1240
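// Editorial sketch: the same-dex-file fast path above is a general symbol-table
// idiom -- two references into the same table are equal iff their indices are
// equal, and only cross-table comparisons must materialize the underlying
// strings. A self-contained analogue with a hypothetical SymbolRef type (not an
// ART API):
struct SymbolRef {
  const std::vector<std::string>* table;  // The owning symbol table.
  size_t index;                           // Position within that table.
};

[[maybe_unused]] static bool SymbolRefEquals(const SymbolRef& lhs, const SymbolRef& rhs) {
  if (lhs.table == rhs.table) {
    return lhs.index == rhs.index;  // Same table: comparing indices suffices.
  }
  // Different tables: fall back to comparing the referenced contents.
  return (*lhs.table)[lhs.index] == (*rhs.table)[rhs.index];
}
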
1241 static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1242 PointerSize pointer_size,
1243 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1244 REQUIRES_SHARED(Locks::mutator_lock_) {
1245 ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1246 DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1247 for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
1248 virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
1249 }
1250 }
1251
1252 struct TrampolineCheckData {
1253 const void* quick_resolution_trampoline;
1254 const void* quick_imt_conflict_trampoline;
1255 const void* quick_generic_jni_trampoline;
1256 const void* quick_to_interpreter_bridge_trampoline;
1257 const void* nterp_trampoline;
1258 PointerSize pointer_size;
1259 ArtMethod* m;
1260 bool error;
1261 };
1262
1263 bool ClassLinker::InitFromBootImage(std::string* error_msg) {
1264 VLOG(startup) << __FUNCTION__ << " entering";
1265 CHECK(!init_done_);
1266
1267 Runtime* const runtime = Runtime::Current();
1268 Thread* const self = Thread::Current();
1269 gc::Heap* const heap = runtime->GetHeap();
1270 std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
1271 CHECK(!spaces.empty());
1272 const ImageHeader& image_header = spaces[0]->GetImageHeader();
1273 uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
1274 if (!ValidPointerSize(pointer_size_unchecked)) {
1275 *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
1276 return false;
1277 }
1278 image_pointer_size_ = image_header.GetPointerSize();
1279 if (!runtime->IsAotCompiler()) {
1280 // Only the AOT compiler supports having an image with a different pointer size than the
1281 // runtime. This happens on the host for compiling 32-bit tests since we use a 64-bit
1282 // libart compiler. We may also use 32-bit dex2oat on a system with 64-bit apps.
1283 if (image_pointer_size_ != kRuntimePointerSize) {
1284 *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
1285 static_cast<size_t>(image_pointer_size_),
1286 sizeof(void*));
1287 return false;
1288 }
1289 }
1290 DCHECK(!runtime->HasResolutionMethod());
1291 runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
1292 runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
1293 runtime->SetImtUnimplementedMethod(
1294 image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
1295 runtime->SetCalleeSaveMethod(
1296 image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
1297 CalleeSaveType::kSaveAllCalleeSaves);
1298 runtime->SetCalleeSaveMethod(
1299 image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
1300 CalleeSaveType::kSaveRefsOnly);
1301 runtime->SetCalleeSaveMethod(
1302 image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
1303 CalleeSaveType::kSaveRefsAndArgs);
1304 runtime->SetCalleeSaveMethod(
1305 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
1306 CalleeSaveType::kSaveEverything);
1307 runtime->SetCalleeSaveMethod(
1308 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
1309 CalleeSaveType::kSaveEverythingForClinit);
1310 runtime->SetCalleeSaveMethod(
1311 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
1312 CalleeSaveType::kSaveEverythingForSuspendCheck);
1313
1314 std::vector<const OatFile*> oat_files =
1315 runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
1316 DCHECK(!oat_files.empty());
1317 const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
1318 jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
1319 jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
1320 quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1321 quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1322 quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1323 quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
1324 nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
1325 if (kIsDebugBuild) {
1326 // Check that the other images use the same trampolines.
1327 for (size_t i = 1; i < oat_files.size(); ++i) {
1328 const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
1329 const void* ith_jni_dlsym_lookup_trampoline_ =
1330 ith_oat_header.GetJniDlsymLookupTrampoline();
1331 const void* ith_jni_dlsym_lookup_critical_trampoline_ =
1332 ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
1333 const void* ith_quick_resolution_trampoline =
1334 ith_oat_header.GetQuickResolutionTrampoline();
1335 const void* ith_quick_imt_conflict_trampoline =
1336 ith_oat_header.GetQuickImtConflictTrampoline();
1337 const void* ith_quick_generic_jni_trampoline =
1338 ith_oat_header.GetQuickGenericJniTrampoline();
1339 const void* ith_quick_to_interpreter_bridge_trampoline =
1340 ith_oat_header.GetQuickToInterpreterBridge();
1341 const void* ith_nterp_trampoline =
1342 ith_oat_header.GetNterpTrampoline();
1343 if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
1344 ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
1345 ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
1346 ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1347 ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
1348 ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
1349 ith_nterp_trampoline != nterp_trampoline_) {
1350 // Make sure that all methods in this image do not contain those trampolines as
1351 // entrypoints. Otherwise the class-linker won't be able to work with a single set.
1352 TrampolineCheckData data;
1353 data.error = false;
1354 data.pointer_size = GetImagePointerSize();
1355 data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1356 data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1357 data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1358 data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
1359 data.nterp_trampoline = ith_nterp_trampoline;
1360 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
1361 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1362 if (obj->IsClass()) {
1363 ObjPtr<mirror::Class> klass = obj->AsClass();
1364 for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1365 const void* entrypoint =
1366 m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1367 if (entrypoint == data.quick_resolution_trampoline ||
1368 entrypoint == data.quick_imt_conflict_trampoline ||
1369 entrypoint == data.quick_generic_jni_trampoline ||
1370 entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1371 data.m = &m;
1372 data.error = true;
1373 return;
1374 }
1375 }
1376 }
1377 };
1378 spaces[i]->GetLiveBitmap()->Walk(visitor);
1379 if (data.error) {
1380 ArtMethod* m = data.m;
1381 LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
1382 *error_msg = "Found an ArtMethod with a bad entrypoint";
1383 return false;
1384 }
1385 }
1386 }
1387 }
1388
1389 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
1390 ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
1391 image_header.GetImageRoot(ImageHeader::kClassRoots)));
1392 DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
1393
1394 DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
1395 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
1396 ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
1397 image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
1398 runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
1399 DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
1400
1401 // Boot class loader, use a null handle.
1402 if (!AddImageSpaces(ArrayRef<gc::space::ImageSpace*>(spaces),
1403 ScopedNullHandle<mirror::ClassLoader>(),
1404 /*context=*/nullptr,
1405 &boot_dex_files_,
1406 error_msg)) {
1407 return false;
1408 }
1409 InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
1410 image_pointer_size_,
1411 ArrayRef<uint32_t>(object_virtual_method_hashes_));
1412 FinishInit(self);
1413
1414 VLOG(startup) << __FUNCTION__ << " exiting";
1415 return true;
1416 }
1417
1418 void ClassLinker::AddExtraBootDexFiles(
1419 Thread* self,
1420 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1421 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1422 AppendToBootClassPath(self, dex_file.get());
1423 if (kIsDebugBuild) {
1424 for (const auto& boot_dex_file : boot_dex_files_) {
1425 DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1426 }
1427 }
1428 boot_dex_files_.push_back(std::move(dex_file));
1429 }
1430 }
1431
1432 bool ClassLinker::IsBootClassLoader(ObjPtr<mirror::Object> class_loader) {
1433 return class_loader == nullptr ||
1434 WellKnownClasses::java_lang_BootClassLoader == class_loader->GetClass();
1435 }
1436
1437 class CHAOnDeleteUpdateClassVisitor {
1438 public:
1439 explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1440 : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1441 pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1442 self_(Thread::Current()) {}
1443
1444 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1445 // This class is going to be unloaded. Tell CHA about it.
1446 cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1447 return true;
1448 }
1449 private:
1450 const LinearAlloc* allocator_;
1451 const ClassHierarchyAnalysis* cha_;
1452 const PointerSize pointer_size_;
1453 const Thread* self_;
1454 };
1455
1456 /*
1457 * A visitor used to ensure that all references to strings interned in an AppImage have been
1458 * properly recorded in the interned references list; it is only ever run in debug builds.
1459 */
1460 class CountInternedStringReferencesVisitor {
1461 public:
1462 CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1463 const InternTable::UnorderedSet& image_interns)
1464 : space_(space),
1465 image_interns_(image_interns),
1466 count_(0u) {}
1467
1468 void TestObject(ObjPtr<mirror::Object> referred_obj) const
1469 REQUIRES_SHARED(Locks::mutator_lock_) {
1470 if (referred_obj != nullptr &&
1471 space_.HasAddress(referred_obj.Ptr()) &&
1472 referred_obj->IsString()) {
1473 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
1474 uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
1475 // All image strings have the hash code calculated, even if they are not interned.
1476 DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
1477 auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
1478 if (it != image_interns_.end() && it->Read() == referred_str) {
1479 ++count_;
1480 }
1481 }
1482 }
1483
1484 void VisitRootIfNonNull(
1485 mirror::CompressedReference<mirror::Object>* root) const
1486 REQUIRES_SHARED(Locks::mutator_lock_) {
1487 if (!root->IsNull()) {
1488 VisitRoot(root);
1489 }
1490 }
1491
1492 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1493 REQUIRES_SHARED(Locks::mutator_lock_) {
1494 TestObject(root->AsMirrorPtr());
1495 }
1496
1497 // Visit Class Fields
1498 void operator()(ObjPtr<mirror::Object> obj,
1499 MemberOffset offset,
1500 bool is_static ATTRIBUTE_UNUSED) const
1501 REQUIRES_SHARED(Locks::mutator_lock_) {
1502 // References within image or across images don't need a read barrier.
1503 ObjPtr<mirror::Object> referred_obj =
1504 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1505 TestObject(referred_obj);
1506 }
1507
1508 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1509 ObjPtr<mirror::Reference> ref) const
1510 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
1511 operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
1512 }
1513
1514 size_t GetCount() const {
1515 return count_;
1516 }
1517
1518 private:
1519 const gc::space::ImageSpace& space_;
1520 const InternTable::UnorderedSet& image_interns_;
1521 mutable size_t count_; // Modified from the `const` callbacks.
1522 };
1523
1524 /*
1525 * This function counts references to strings interned in the AppImage.
1526 * It is used in debug builds to check against the number of recorded references.
1527 */
1528 size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1529 const InternTable::UnorderedSet& image_interns)
1530 REQUIRES_SHARED(Locks::mutator_lock_) {
1531 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1532 const ImageHeader& image_header = space.GetImageHeader();
1533 const uint8_t* target_base = space.GetMemMap()->Begin();
1534 const ImageSection& objects_section = image_header.GetObjectsSection();
1535
1536 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1537 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
1538
1539 CountInternedStringReferencesVisitor visitor(space, image_interns);
1540 bitmap->VisitMarkedRange(objects_begin,
1541 objects_end,
1542 [&space, &visitor](mirror::Object* obj)
1543 REQUIRES_SHARED(Locks::mutator_lock_) {
1544 if (space.HasAddress(obj)) {
1545 if (obj->IsDexCache()) {
1546 obj->VisitReferences</* kVisitNativeRoots= */ true,
1547 kVerifyNone,
1548 kWithoutReadBarrier>(visitor, visitor);
1549 } else {
1550 // Don't visit native roots for non-dex-cache objects as they can't contain
1551 // native references to strings. This is verified during compilation
1552 // by ImageWriter::VerifyNativeGCRootInvariants.
1553 obj->VisitReferences</* kVisitNativeRoots= */ false,
1554 kVerifyNone,
1555 kWithoutReadBarrier>(visitor, visitor);
1556 }
1557 }
1558 });
1559 return visitor.GetCount();
1560 }
1561
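// Editorial sketch: `VisitMarkedRange()` above enumerates live objects whose
// addresses fall inside [objects_begin, objects_end). A toy model with one mark
// bit per fixed-size slot starting at `base`, assuming base <= begin <= end
// (hypothetical and far simpler than the real ContinuousSpaceBitmap):
template <typename Visitor>
[[maybe_unused]] static void VisitMarkedSlots(const std::vector<bool>& mark_bits,
                                              uintptr_t base,
                                              size_t slot_size,
                                              uintptr_t begin,
                                              uintptr_t end,
                                              Visitor&& visit) {
  for (size_t i = (begin - base) / slot_size; i < mark_bits.size(); ++i) {
    uintptr_t address = base + i * slot_size;
    if (address >= end) {
      break;  // Past the requested range.
    }
    if (mark_bits[i]) {
      visit(address);  // Address of a marked (live) slot.
    }
  }
}
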
1562 template <typename Visitor>
1563 static void VisitInternedStringReferences(
1564 gc::space::ImageSpace* space,
1565 const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1566 const uint8_t* target_base = space->Begin();
1567 const ImageSection& sro_section =
1568 space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1569 const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1570
1571 VLOG(image)
1572 << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1573 << num_string_offsets;
1574
1575 const auto* sro_base =
1576 reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1577
1578 for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1579 uint32_t base_offset = sro_base[offset_index].first;
1580
1581 uint32_t raw_member_offset = sro_base[offset_index].second;
1582 DCHECK_ALIGNED(base_offset, 2);
1583
1584 ObjPtr<mirror::Object> obj_ptr =
1585 reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1586 if (obj_ptr->IsDexCache() && raw_member_offset >= sizeof(mirror::DexCache)) {
1587 // Special case for strings referenced from dex cache array: the offset is
1588 // actually decoded as an index into the dex cache string array.
1589 uint32_t index = raw_member_offset - sizeof(mirror::DexCache);
1590 mirror::GcRootArray<mirror::String>* array = obj_ptr->AsDexCache()->GetStringsArray();
1591 // The array could be concurrently set to null. See `StartupCompletedTask`.
1592 if (array != nullptr) {
1593 ObjPtr<mirror::String> referred_string = array->Get(index);
1594 DCHECK(referred_string != nullptr);
1595 ObjPtr<mirror::String> visited = visitor(referred_string);
1596 if (visited != referred_string) {
1597 array->Set(index, visited.Ptr());
1598 }
1599 }
1600 } else {
1601 DCHECK_ALIGNED(raw_member_offset, 2);
1602 MemberOffset member_offset(raw_member_offset);
1603 ObjPtr<mirror::String> referred_string =
1604 obj_ptr->GetFieldObject<mirror::String,
1605 kVerifyNone,
1606 kWithoutReadBarrier,
1607 /* kIsVolatile= */ false>(member_offset);
1608 DCHECK(referred_string != nullptr);
1609
1610 ObjPtr<mirror::String> visited = visitor(referred_string);
1611 if (visited != referred_string) {
1612 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1613 /* kCheckTransaction= */ false,
1614 kVerifyNone,
1615 /* kIsVolatile= */ false>(member_offset, visited);
1616 }
1617 }
1618 }
1619 }
1620
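// Editorial sketch: each string-reference-offset entry consumed above is a pair
// of 32-bit words: the first locates the holding object relative to the image
// base, the second is either a plain field offset or, for dex caches, a string
// array index biased by sizeof(mirror::DexCache). A minimal model of that
// decoding (hypothetical Entry type, not the real AppImageReferenceOffsetInfo):
struct Entry {
  uint32_t object_offset;  // Offset of the holding object from the image base.
  uint32_t member_word;    // Field offset, or biased dex-cache string index.
};

template <typename VisitField, typename VisitIndex>
[[maybe_unused]] static void DecodeEntry(const Entry& entry,
                                         bool holder_is_dex_cache,
                                         size_t dex_cache_size,
                                         VisitField&& visit_field,
                                         VisitIndex&& visit_index) {
  if (holder_is_dex_cache && entry.member_word >= dex_cache_size) {
    visit_index(entry.member_word - dex_cache_size);  // Unbias to a string-array index.
  } else {
    visit_field(entry.member_word);  // Plain member offset within the object.
  }
}
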
1621 static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1622 REQUIRES_SHARED(Locks::mutator_lock_) {
1623 InternTable::UnorderedSet image_interns;
1624 const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1625 if (section.Size() > 0) {
1626 size_t read_count;
1627 const uint8_t* data = space->Begin() + section.Offset();
1628 InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1629 image_set.swap(image_interns);
1630 }
1631 size_t num_recorded_refs = 0u;
1632 VisitInternedStringReferences(
1633 space,
1634 [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1635 REQUIRES_SHARED(Locks::mutator_lock_) {
1636 auto it = image_interns.find(GcRoot<mirror::String>(str));
1637 CHECK(it != image_interns.end());
1638 CHECK(it->Read() == str);
1639 ++num_recorded_refs;
1640 return str;
1641 });
1642 size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1643 CHECK_EQ(num_recorded_refs, num_found_refs);
1644 }
1645
1646 // new_class_set is the set of classes that were read from the class table section in the image.
1647 // If there was no class table section, it is null.
1648 // Note: using a class here to avoid having to make ClassLinker internals public.
1649 class AppImageLoadingHelper {
1650 public:
1651 static void Update(
1652 ClassLinker* class_linker,
1653 gc::space::ImageSpace* space,
1654 Handle<mirror::ClassLoader> class_loader,
1655 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
1656 REQUIRES(!Locks::dex_lock_)
1657 REQUIRES_SHARED(Locks::mutator_lock_);
1658
1659 static void HandleAppImageStrings(gc::space::ImageSpace* space)
1660 REQUIRES_SHARED(Locks::mutator_lock_);
1661 };
1662
1663 void AppImageLoadingHelper::Update(
1664 ClassLinker* class_linker,
1665 gc::space::ImageSpace* space,
1666 Handle<mirror::ClassLoader> class_loader,
1667 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
1668 REQUIRES(!Locks::dex_lock_)
1669 REQUIRES_SHARED(Locks::mutator_lock_) {
1670 ScopedTrace app_image_timing("AppImage:Updating");
1671
1672 if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
1673 // In debug build, verify the string references before applying
1674 // the Runtime::LoadAppImageStartupCache() option.
1675 VerifyInternedStringReferences(space);
1676 }
1677
1678 Thread* const self = Thread::Current();
1679 Runtime* const runtime = Runtime::Current();
1680 gc::Heap* const heap = runtime->GetHeap();
1681 const ImageHeader& header = space->GetImageHeader();
1682 int32_t number_of_dex_cache_arrays_cleared = 0;
1683 {
1684 // Register dex caches with the class loader.
1685 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
1686 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
1687 const DexFile* const dex_file = dex_cache->GetDexFile();
1688 {
1689 WriterMutexLock mu2(self, *Locks::dex_lock_);
1690 CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
1691 if (runtime->GetStartupCompleted()) {
1692 number_of_dex_cache_arrays_cleared++;
1693 // Free up dex cache arrays that we would only allocate at startup.
1694 // We do this here before registering and within the lock to be
1695 // consistent with `StartupCompletedTask`.
1696 dex_cache->UnlinkStartupCaches();
1697 }
1698 class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
1699 }
1700 }
1701 }
1702 if (number_of_dex_cache_arrays_cleared == dex_caches->GetLength()) {
1703 // Free up dex cache arrays that we would only allocate at startup.
1704 // If `number_of_dex_cache_arrays_cleared` isn't the number of dex caches in
1705 // the image, then there is a race with the `StartupCompletedTask`, which
1706 // will release the space instead.
1707 space->ReleaseMetadata();
1708 }
1709
1710 if (ClassLinker::kAppImageMayContainStrings) {
1711 HandleAppImageStrings(space);
1712 }
1713
1714 if (kVerifyArtMethodDeclaringClasses) {
1715 ScopedTrace timing("AppImage:VerifyDeclaringClasses");
1716 ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
1717 gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1718 header.VisitPackedArtMethods([&](ArtMethod& method)
1719 REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1720 ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1721 if (klass != nullptr) {
1722 CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1723 }
1724 }, space->Begin(), kRuntimePointerSize);
1725 }
1726 }
1727
1728 void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
1729 // Iterate over the string reference offsets stored in the image and intern
1730 // the strings they point to.
1731 ScopedTrace timing("AppImage:InternString");
1732
1733 Runtime* const runtime = Runtime::Current();
1734 InternTable* const intern_table = runtime->GetInternTable();
1735
1736 // Add the image's interned strings to the runtime intern table, removing any conflicts.
1737 // For conflicts, store the new address in a map for faster lookup.
1738 // TODO: Optimize with a bitmap or bloom filter
1739 SafeMap<mirror::String*, mirror::String*> intern_remap;
1740 auto func = [&](InternTable::UnorderedSet& interns)
1741 REQUIRES_SHARED(Locks::mutator_lock_)
1742 REQUIRES(Locks::intern_table_lock_) {
1743 const size_t non_boot_image_strings = intern_table->CountInterns(
1744 /*visit_boot_images=*/false,
1745 /*visit_non_boot_images=*/true);
1746 VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
1747 VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1748 // Visit the smaller of the two sets to compute the intersection.
1749 if (interns.size() < non_boot_image_strings) {
1750 for (auto it = interns.begin(); it != interns.end(); ) {
1751 ObjPtr<mirror::String> string = it->Read();
1752 ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1753 if (existing == nullptr) {
1754 existing = intern_table->LookupStrongLocked(string);
1755 }
1756 if (existing != nullptr) {
1757 intern_remap.Put(string.Ptr(), existing.Ptr());
1758 it = interns.erase(it);
1759 } else {
1760 ++it;
1761 }
1762 }
1763 } else {
1764 intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1765 REQUIRES_SHARED(Locks::mutator_lock_)
1766 REQUIRES(Locks::intern_table_lock_) {
1767 auto it = interns.find(root);
1768 if (it != interns.end()) {
1769 ObjPtr<mirror::String> existing = root.Read();
1770 intern_remap.Put(it->Read(), existing.Ptr());
1771 it = interns.erase(it);
1772 }
1773 }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1774 }
1775 // Consistency check to ensure correctness.
1776 if (kIsDebugBuild) {
1777 for (GcRoot<mirror::String>& root : interns) {
1778 ObjPtr<mirror::String> string = root.Read();
1779 CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1780 CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
1781 }
1782 }
1783 };
1784 intern_table->AddImageStringsToTable(space, func);
1785 if (!intern_remap.empty()) {
1786 VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
1787 VisitInternedStringReferences(
1788 space,
1789 [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
1790 auto it = intern_remap.find(str.Ptr());
1791 if (it != intern_remap.end()) {
1792 return ObjPtr<mirror::String>(it->second);
1793 }
1794 return str;
1795 });
1796 }
1797 }
1798
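// Editorial sketch: HandleAppImageStrings() above is an intersect-and-remap
// between the image's interned strings and the runtime intern table, always
// iterating the smaller side. A self-contained analogue over std:: containers
// (hypothetical stand-ins; identity is modeled by pointer, as with
// mirror::String):
[[maybe_unused]] static std::map<const std::string*, const std::string*> BuildInternRemap(
    const std::vector<const std::string*>& image_interns,
    const std::map<std::string, const std::string*>& runtime_interns) {
  std::map<const std::string*, const std::string*> remap;
  for (const std::string* s : image_interns) {
    auto it = runtime_interns.find(*s);
    if (it != runtime_interns.end() && it->second != s) {
      // Conflict: keep the runtime's canonical instance and record a forwarding
      // entry, mirroring the `intern_remap` table built above.
      remap.emplace(s, it->second);
    }
  }
  return remap;
}
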
1799 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1800 const char* location,
1801 std::string* error_msg)
1802 REQUIRES_SHARED(Locks::mutator_lock_) {
1803 DCHECK(error_msg != nullptr);
1804 std::unique_ptr<const DexFile> dex_file;
1805 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
1806 if (oat_dex_file == nullptr) {
1807 return std::unique_ptr<const DexFile>();
1808 }
1809 std::string inner_error_msg;
1810 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1811 if (dex_file == nullptr) {
1812 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1813 location,
1814 oat_file->GetLocation().c_str(),
1815 inner_error_msg.c_str());
1816 return std::unique_ptr<const DexFile>();
1817 }
1818
1819 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1820 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1821 location,
1822 dex_file->GetLocationChecksum(),
1823 oat_dex_file->GetDexFileLocationChecksum());
1824 return std::unique_ptr<const DexFile>();
1825 }
1826 return dex_file;
1827 }
1828
1829 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1830 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1831 std::string* error_msg) {
1832 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1833 const ImageHeader& header = space->GetImageHeader();
1834 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1835 DCHECK(dex_caches_object != nullptr);
1836 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1837 dex_caches_object->AsObjectArray<mirror::DexCache>();
1838 const OatFile* oat_file = space->GetOatFile();
1839 for (auto dex_cache : dex_caches->Iterate()) {
1840 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1841 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1842 dex_file_location.c_str(),
1843 error_msg);
1844 if (dex_file == nullptr) {
1845 return false;
1846 }
1847 dex_cache->SetDexFile(dex_file.get());
1848 out_dex_files->push_back(std::move(dex_file));
1849 }
1850 return true;
1851 }
1852
1853 bool ClassLinker::OpenAndInitImageDexFiles(
1854 const gc::space::ImageSpace* space,
1855 Handle<mirror::ClassLoader> class_loader,
1856 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1857 std::string* error_msg) {
1858 DCHECK(out_dex_files != nullptr);
1859 const bool app_image = class_loader != nullptr;
1860 const ImageHeader& header = space->GetImageHeader();
1861 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1862 DCHECK(dex_caches_object != nullptr);
1863 Thread* const self = Thread::Current();
1864 StackHandleScope<3> hs(self);
1865 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
1866 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
1867 const OatFile* oat_file = space->GetOatFile();
1868 if (oat_file->GetOatHeader().GetDexFileCount() !=
1869 static_cast<uint32_t>(dex_caches->GetLength())) {
1870 *error_msg =
1871 "Dex cache count and dex file count mismatch while trying to initialize from image";
1872 return false;
1873 }
1874
1875 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
1876 std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
1877 std::unique_ptr<const DexFile> dex_file =
1878 OpenOatDexFile(oat_file, dex_file_location.c_str(), error_msg);
1879 if (dex_file == nullptr) {
1880 return false;
1881 }
1882
1883 {
1884 // Native fields are all null. Initialize them.
1885 WriterMutexLock mu(self, *Locks::dex_lock_);
1886 dex_cache->Initialize(dex_file.get(), class_loader.Get());
1887 }
1888 if (!app_image) {
1889 // Register dex files, keep track of existing ones that are conflicts.
1890 AppendToBootClassPath(dex_file.get(), dex_cache);
1891 }
1892 out_dex_files->push_back(std::move(dex_file));
1893 }
1894 return true;
1895 }
1896
1897 // Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1898 // together and caches some intermediate results.
1899 template <PointerSize kPointerSize>
1900 class ImageChecker final {
1901 public:
1902 static void CheckObjects(gc::Heap* heap, gc::space::ImageSpace* space)
1903 REQUIRES_SHARED(Locks::mutator_lock_) {
1904 // There can be no GC during boot image initialization, so we do not need read barriers.
1905 ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
1906
1907 CHECK_EQ(kPointerSize, space->GetImageHeader().GetPointerSize());
1908 const ImageSection& objects_section = space->GetImageHeader().GetObjectsSection();
1909 uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
1910 uintptr_t objects_begin = space_begin + objects_section.Offset();
1911 uintptr_t objects_end = objects_begin + objects_section.Size();
1912 ImageChecker ic(heap);
1913 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1914 DCHECK(obj != nullptr);
1915 mirror::Class* obj_klass = obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
1916 CHECK(obj_klass != nullptr) << "Null class in object " << obj;
1917 mirror::Class* class_class = obj_klass->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
1918 CHECK(class_class != nullptr) << "Null class class " << obj;
1919 if (obj_klass == class_class) {
1920 auto klass = obj->AsClass();
1921 for (ArtField& field : klass->GetIFields()) {
1922 CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
1923 }
1924 for (ArtField& field : klass->GetSFields()) {
1925 CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
1926 }
1927 for (ArtMethod& m : klass->GetMethods(kPointerSize)) {
1928 ic.CheckArtMethod(&m, klass);
1929 }
1930 ObjPtr<mirror::PointerArray> vtable =
1931 klass->GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
1932 if (vtable != nullptr) {
1933 ic.CheckArtMethodPointerArray(vtable);
1934 }
1935 if (klass->ShouldHaveImt()) {
1936 ImTable* imt = klass->GetImt(kPointerSize);
1937 for (size_t i = 0; i < ImTable::kSize; ++i) {
1938 ic.CheckArtMethod(imt->Get(i, kPointerSize), /*expected_class=*/ nullptr);
1939 }
1940 }
1941 if (klass->ShouldHaveEmbeddedVTable()) {
1942 for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
1943 ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, kPointerSize),
1944 /*expected_class=*/ nullptr);
1945 }
1946 }
1947 ObjPtr<mirror::IfTable> iftable =
1948 klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
1949 int32_t iftable_count = (iftable != nullptr) ? iftable->Count() : 0;
1950 for (int32_t i = 0; i < iftable_count; ++i) {
1951 ObjPtr<mirror::PointerArray> method_array =
1952 iftable->GetMethodArrayOrNull<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
1953 if (method_array != nullptr) {
1954 ic.CheckArtMethodPointerArray(method_array);
1955 }
1956 }
1957 }
1958 };
1959 space->GetLiveBitmap()->VisitMarkedRange(objects_begin, objects_end, visitor);
1960 }
1961
1962 private:
1963 explicit ImageChecker(gc::Heap* heap) {
1964 ArrayRef<gc::space::ImageSpace* const> spaces(heap->GetBootImageSpaces());
1965 space_begin_.reserve(spaces.size());
1966 for (gc::space::ImageSpace* space : spaces) {
1967 CHECK_EQ(static_cast<const void*>(space->Begin()), &space->GetImageHeader());
1968 space_begin_.push_back(space->Begin());
1969 }
1970 }
1971
1972 void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
1973 REQUIRES_SHARED(Locks::mutator_lock_) {
1974 ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
1975 if (m->IsRuntimeMethod()) {
1976 CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
1977 } else if (m->IsCopied()) {
1978 CHECK(declaring_class != nullptr) << m->PrettyMethod();
1979 } else if (expected_class != nullptr) {
1980 CHECK_EQ(declaring_class, expected_class) << m->PrettyMethod();
1981 }
1982 bool contains = false;
1983 for (const uint8_t* begin : space_begin_) {
1984 const size_t offset = reinterpret_cast<uint8_t*>(m) - begin;
1985 const ImageHeader* header = reinterpret_cast<const ImageHeader*>(begin);
1986 if (header->GetMethodsSection().Contains(offset) ||
1987 header->GetRuntimeMethodsSection().Contains(offset)) {
1988 contains = true;
1989 break;
1990 }
1991 }
1992 CHECK(contains) << m << " not found";
1993 }
1994
1995 void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
1996 REQUIRES_SHARED(Locks::mutator_lock_) {
1997 CHECK(arr != nullptr);
1998 for (int32_t j = 0; j < arr->GetLength(); ++j) {
1999 auto* method = arr->GetElementPtrSize<ArtMethod*>(j, kPointerSize);
2000 CHECK(method != nullptr);
2001 CheckArtMethod(method, /*expected_class=*/ nullptr);
2002 }
2003 }
2004
2005 std::vector<const uint8_t*> space_begin_;
2006 };
2007
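// Editorial sketch: CheckArtMethod() above accepts a method pointer only if its
// byte offset from some image base lands in that image's methods or
// runtime-methods section. With unsigned arithmetic the containment test is a
// single half-open range check (hypothetical Section type mirroring the
// ImageSection::Contains() idiom):
struct Section {
  size_t offset;  // Section start, relative to the image base.
  size_t size;    // Section size in bytes.
  bool Contains(size_t byte_offset) const {
    // Underflow wraps to a huge value, so this also rejects byte_offset < offset.
    return byte_offset - offset < size;
  }
};
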
2008 static void VerifyAppImage(const ImageHeader& header,
2009 const Handle<mirror::ClassLoader>& class_loader,
2010 ClassTable* class_table,
2011 gc::space::ImageSpace* space)
2012 REQUIRES_SHARED(Locks::mutator_lock_) {
2013 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2014 ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
2015 if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2016 CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
2017 << mirror::Class::PrettyClass(klass);
2018 }
2019 }, space->Begin(), kRuntimePointerSize);
2020 {
2021 // Verify that all direct interfaces of classes in the class table are also resolved.
2022 std::vector<ObjPtr<mirror::Class>> classes;
2023 auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
2024 REQUIRES_SHARED(Locks::mutator_lock_) {
2025 if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
2026 classes.push_back(klass);
2027 }
2028 return true;
2029 };
2030 class_table->Visit(verify_direct_interfaces_in_table);
2031 for (ObjPtr<mirror::Class> klass : classes) {
2032 for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
2033 CHECK(klass->GetDirectInterface(i) != nullptr)
2034 << klass->PrettyDescriptor() << " iface #" << i;
2035 }
2036 }
2037 }
2038 }
2039
2040 bool ClassLinker::AddImageSpace(gc::space::ImageSpace* space,
2041 Handle<mirror::ClassLoader> class_loader,
2042 ClassLoaderContext* context,
2043 const std::vector<std::unique_ptr<const DexFile>>& dex_files,
2044 std::string* error_msg) {
2045 DCHECK(error_msg != nullptr);
2046 const uint64_t start_time = NanoTime();
2047 const bool app_image = class_loader != nullptr;
2048 const ImageHeader& header = space->GetImageHeader();
2049 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
2050 DCHECK(dex_caches_object != nullptr);
2051 Runtime* const runtime = Runtime::Current();
2052 gc::Heap* const heap = runtime->GetHeap();
2053 Thread* const self = Thread::Current();
2054 // Check that the image is what we are expecting.
2055 if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
2056 *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
2057 static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
2058 image_pointer_size_);
2059 return false;
2060 }
2061 size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
2062 if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
2063 *error_msg = StringPrintf("Expected %zu image roots but got %d",
2064 expected_image_roots,
2065 header.GetImageRoots()->GetLength());
2066 return false;
2067 }
2068 StackHandleScope<3> hs(self);
2069 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
2070 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
2071 Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
2072 header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
2073 MutableHandle<mirror::Object> special_root(hs.NewHandle(
2074 app_image ? header.GetImageRoot(ImageHeader::kSpecialRoots) : nullptr));
2075 DCHECK(class_roots != nullptr);
2076 if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
2077 *error_msg = StringPrintf("Expected %d class roots but got %d",
2078 static_cast<int32_t>(ClassRoot::kMax),
2079 class_roots->GetLength());
2080 return false;
2081 }
2082 // Check against existing class roots to make sure they match the ones in the boot image.
2083 ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
2084 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
2085 if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
2086 *error_msg = "App image class roots must have pointer equality with runtime ones.";
2087 return false;
2088 }
2089 }
2090 const OatFile* oat_file = space->GetOatFile();
2091
2092 if (app_image) {
2093 ScopedAssertNoThreadSuspension sants("Checking app image");
2094 if (special_root == nullptr) {
2095 *error_msg = "Unexpected null special root in app image";
2096 return false;
2097 } else if (special_root->IsByteArray()) {
2098 OatHeader* oat_header = reinterpret_cast<OatHeader*>(special_root->AsByteArray()->GetData());
2099 if (!oat_header->IsValid()) {
2100 *error_msg = "Invalid oat header in special root";
2101 return false;
2102 }
2103 if (oat_file->GetVdexFile()->GetNumberOfDexFiles() != oat_header->GetDexFileCount()) {
2104 *error_msg = "Checksum count does not match";
2105 return false;
2106 }
2107 if (oat_header->IsConcurrentCopying() != gUseReadBarrier) {
2108 *error_msg = "GCs do not match";
2109 return false;
2110 }
2111
2112 // Check if the dex checksums match the dex files that we just loaded.
2113 uint32_t* checksums = reinterpret_cast<uint32_t*>(
2114 reinterpret_cast<uint8_t*>(oat_header) + oat_header->GetHeaderSize());
2115 for (uint32_t i = 0; i < oat_header->GetDexFileCount(); ++i) {
2116 uint32_t dex_checksum = dex_files.at(i)->GetHeader().checksum_;
2117 if (checksums[i] != dex_checksum) {
2118 *error_msg = StringPrintf(
2119 "Image and dex file checksums did not match for %s: image has %d, dex file has %d",
2120 dex_files.at(i)->GetLocation().c_str(),
2121 checksums[i],
2122 dex_checksum);
2123 return false;
2124 }
2125 }
2126
2127 // Validate the class loader context.
2128 const char* stored_context = oat_header->GetStoreValueByKey(OatHeader::kClassPathKey);
2129 if (stored_context == nullptr) {
2130 *error_msg = "Missing class loader context in special root";
2131 return false;
2132 }
2133 if (context->VerifyClassLoaderContextMatch(stored_context) ==
2134 ClassLoaderContext::VerificationResult::kMismatch) {
2135 *error_msg = StringPrintf("Class loader contexts don't match: %s", stored_context);
2136 return false;
2137 }
2138
2139 // Validate the apex versions.
2140 if (!gc::space::ImageSpace::ValidateApexVersions(*oat_header,
2141 runtime->GetApexVersions(),
2142 space->GetImageLocation(),
2143 error_msg)) {
2144 return false;
2145 }
2146
2147 // Validate the boot classpath.
2148 const char* bcp = oat_header->GetStoreValueByKey(OatHeader::kBootClassPathKey);
2149 if (bcp == nullptr) {
2150 *error_msg = "Missing boot classpath in special root";
2151 return false;
2152 }
2153 std::string runtime_bcp = android::base::Join(runtime->GetBootClassPathLocations(), ':');
2154 if (strcmp(bcp, runtime_bcp.c_str()) != 0) {
2155 *error_msg = StringPrintf("Mismatch boot classpath: image has %s, runtime has %s",
2156 bcp,
2157 runtime_bcp.c_str());
2158 return false;
2159 }
2160
2161 // Validate the dex checksums of the boot classpath.
2162 const char* bcp_checksums =
2163 oat_header->GetStoreValueByKey(OatHeader::kBootClassPathChecksumsKey);
2164 if (bcp_checksums == nullptr) {
2165 *error_msg = "Missing boot classpath checksums in special root";
2166 return false;
2167 }
2168 if (strcmp(bcp_checksums, runtime->GetBootClassPathChecksums().c_str()) != 0) {
2169 *error_msg = StringPrintf("Mismatch boot classpath checksums: image has %s, runtime has %s",
2170 bcp_checksums,
2171 runtime->GetBootClassPathChecksums().c_str());
2172 return false;
2173 }
2174 } else if (IsBootClassLoader(special_root.Get())) {
2175 *error_msg = "Unexpected BootClassLoader in app image";
2176 return false;
2177 } else if (!special_root->IsClassLoader()) {
2178 *error_msg = "Unexpected special root in app image";
2179 return false;
2180 }
2181 }
2182
2183 if (kCheckImageObjects) {
2184 if (!app_image) {
2185 if (image_pointer_size_ == PointerSize::k64) {
2186 ImageChecker<PointerSize::k64>::CheckObjects(heap, space);
2187 } else {
2188 ImageChecker<PointerSize::k32>::CheckObjects(heap, space);
2189 }
2190 }
2191 }
2192
2193 // Set entry point to interpreter if in InterpretOnly mode.
2194 if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
2195 // Set image methods' entry point to interpreter.
2196 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2197 if (!method.IsRuntimeMethod()) {
2198 DCHECK(method.GetDeclaringClass() != nullptr);
2199 if (!method.IsNative() && !method.IsResolutionMethod()) {
2200 method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
2201 image_pointer_size_);
2202 }
2203 }
2204 }, space->Begin(), image_pointer_size_);
2205 }
2206
2207 if (!runtime->IsAotCompiler()) {
2208 // If we are profiling the boot classpath, disable the shared memory for
2209 // boot image method optimization. We need to disable it before doing
2210 // ResetCounter below, as counters of shared memory methods always hold the
2211 // "hot" value.
2212 if (runtime->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
2213 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2214 method.ClearMemorySharedMethod();
2215 }, space->Begin(), image_pointer_size_);
2216 }
2217
2218 ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
2219 bool can_use_nterp = interpreter::CanRuntimeUseNterp();
2220 uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
2221 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2222 // In the image, the `data` pointer field of the ArtMethod contains the code
2223 // item offset. Change this to the actual pointer to the code item.
2224 if (method.HasCodeItem()) {
2225 const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
2226 reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
2227 method.SetCodeItem(code_item, method.GetDexFile()->IsCompactDexFile());
2228 // The hotness counter may have changed since we compiled the image, so
2229 // reset it with the runtime value.
2230 method.ResetCounter(hotness_threshold);
2231 }
2232 if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
2233 if (can_use_nterp) {
2234 // Set image methods' entry points that point to the nterp trampoline to the
2235 // nterp entry point. This allows taking the fast path when doing an
2236 // nterp->nterp call.
2237 DCHECK(!method.StillNeedsClinitCheck());
2238 method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
2239 } else {
2240 method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
2241 }
2242 }
2243 }, space->Begin(), image_pointer_size_);
2244 }
2245
2246 if (runtime->IsVerificationSoftFail()) {
2247 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2248 if (method.IsManagedAndInvokable()) {
2249 method.ClearSkipAccessChecks();
2250 }
2251 }, space->Begin(), image_pointer_size_);
2252 }
2253
2254 ClassTable* class_table = nullptr;
2255 {
2256 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2257 class_table = InsertClassTableForClassLoader(class_loader.Get());
2258 }
2259 // If we have a class table section, read it and use it for verification in
2260 // UpdateAppImageClassLoadersAndDexCaches.
2261 ClassTable::ClassSet temp_set;
2262 const ImageSection& class_table_section = header.GetClassTableSection();
2263 const bool added_class_table = class_table_section.Size() > 0u;
2264 if (added_class_table) {
2265 const uint64_t start_time2 = NanoTime();
2266 size_t read_count = 0;
2267 temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2268 /*make copy*/false,
2269 &read_count);
2270 VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
2271 }
2272 if (app_image) {
2273 AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
2274
2275 {
2276 ScopedTrace trace("AppImage:UpdateClassLoaders");
2277 // Update class loader and resolved strings. If added_class_table is false, the resolved
2278 // strings were forwarded in UpdateAppImageClassLoadersAndDexCaches.
2279 ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
2280 for (const ClassTable::TableSlot& root : temp_set) {
2281 // Note: We probably don't need the read barrier unless we copy the app image objects into
2282 // the region space.
2283 ObjPtr<mirror::Class> klass(root.Read());
2284 // Do not update class loader for boot image classes where the app image
2285 // class loader is only the initiating loader but not the defining loader.
2286 if (space->HasAddress(klass.Ptr())) {
2287 klass->SetClassLoader(loader);
2288 } else {
2289 DCHECK(klass->IsBootStrapClassLoaded());
2290 DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass.Ptr()));
2291 }
2292 }
2293 }
2294
2295 if (kBitstringSubtypeCheckEnabled) {
2296 // Every class in the app image initially has its SubtypeCheckInfo in the
2297 // kUninitialized state.
2298 //
2299 // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least kInitialized
2300 // after class initialization is complete. The app image classes almost all have
2301 // ClassStatus::Initialized as-is, so being in the
2302 // SubtypeCheckInfo::kUninitialized state would violate that invariant.
2303 //
2304 // Force every app image class's SubtypeCheck to be at least kInitialized.
2305 //
2306 // See also ImageWriter::FixupClass.
2307 ScopedTrace trace("AppImage:RecalculateSubtypeCheckBitstrings");
2308 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2309 for (const ClassTable::TableSlot& root : temp_set) {
2310 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
2311 }
2312 }
2313 }
2314 if (!oat_file->GetBssGcRoots().empty()) {
2315 // Insert oat file to class table for visiting .bss GC roots.
2316 class_table->InsertOatFile(oat_file);
2317 }
2318
2319 if (added_class_table) {
2320 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2321 class_table->AddClassSet(std::move(temp_set));
2322 }
2323
2324 if (kIsDebugBuild && app_image) {
2325 // This verification needs to happen after the classes have been added to the class
2326 // loader, since it ensures classes are in the class table.
2327 ScopedTrace trace("AppImage:Verify");
2328 VerifyAppImage(header, class_loader, class_table, space);
2329 }
2330
2331 VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
2332 return true;
2333 }
2334
2335 bool ClassLinker::AddImageSpaces(ArrayRef<gc::space::ImageSpace*> spaces,
2336 Handle<mirror::ClassLoader> class_loader,
2337 ClassLoaderContext* context,
2338 /*out*/ std::vector<std::unique_ptr<const DexFile>>* dex_files,
2339 /*out*/ std::string* error_msg) {
2340 std::vector<std::vector<std::unique_ptr<const DexFile>>> dex_files_by_space_index;
2341 for (const gc::space::ImageSpace* space : spaces) {
2342 std::vector<std::unique_ptr<const DexFile>> space_dex_files;
2343 if (!OpenAndInitImageDexFiles(space, class_loader, /*out*/ &space_dex_files, error_msg)) {
2344 return false;
2345 }
2346 dex_files_by_space_index.push_back(std::move(space_dex_files));
2347 }
2348 // This must be done in a separate loop after all dex files are initialized because there can be
2349 // references from an image space to another image space that comes after it.
2350 for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
2351 std::vector<std::unique_ptr<const DexFile>>& space_dex_files = dex_files_by_space_index[i];
2352 if (!AddImageSpace(spaces[i], class_loader, context, space_dex_files, error_msg)) {
2353 return false;
2354 }
2355 // Append opened dex files at the end.
2356 std::move(space_dex_files.begin(), space_dex_files.end(), std::back_inserter(*dex_files));
2357 }
2358 return true;
2359 }
2360
2361 void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
2362 // Acquire tracing_enabled before locking the class linker lock to prevent a lock order
2363 // violation. Since enabling tracing requires the mutator lock, there are no race conditions here.
2364 const bool tracing_enabled = Trace::IsTracingEnabled();
2365 Thread* const self = Thread::Current();
2366 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2367 if (gUseReadBarrier) {
2368 // We do not track new roots for CC.
2369 DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
2370 kVisitRootFlagClearRootLog |
2371 kVisitRootFlagStartLoggingNewRoots |
2372 kVisitRootFlagStopLoggingNewRoots));
2373 }
2374 if ((flags & kVisitRootFlagAllRoots) != 0) {
2375 // Argument for how root visiting deals with ArtField and ArtMethod roots.
2376 // There are 3 GC cases to handle:
2377 // Non moving concurrent:
2378 // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
2379 // live by the class and class roots.
2380 //
2381 // Moving non-concurrent:
2382 // This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
2383 // To prevent missing roots, this case needs to ensure that there are no
2384 // suspend points between the point where we allocate ArtMethod arrays and place them in a
2385 // class which is in the class table.
2386 //
2387 // Moving concurrent:
2388 // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
2389 // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
2390 //
2391 // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
2392 // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
2393 // these objects.
2394 UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
2395 boot_class_table_->VisitRoots(root_visitor);
2396 // If tracing is enabled, then mark all the class loaders to prevent unloading.
2397 if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
2398 gc::Heap* const heap = Runtime::Current()->GetHeap();
2399 // Don't visit class-loaders if compacting with userfaultfd GC as these
2400 // weaks are updated using Runtime::SweepSystemWeaks() and the GC doesn't
2401 // tolerate double updates.
2402 if (!heap->IsPerformingUffdCompaction()) {
2403 for (const ClassLoaderData& data : class_loaders_) {
2404 GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
2405 root.VisitRoot(visitor, RootInfo(kRootVMInternal));
2406 }
2407 } else {
2408 DCHECK_EQ(heap->CurrentCollectorType(), gc::CollectorType::kCollectorTypeCMC);
2409 }
2410 }
2411 } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
2412 for (auto& root : new_class_roots_) {
2413 ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
2414 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2415 ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
2416 // Concurrent moving GC marked new roots through the to-space invariant.
2417 CHECK_EQ(new_ref, old_ref);
2418 }
2419 for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
2420 for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
2421 ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2422 if (old_ref != nullptr) {
2423 DCHECK(old_ref->IsClass());
2424 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2425 ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2426 // Concurrent moving GC marked new roots through the to-space invariant.
2427 CHECK_EQ(new_ref, old_ref);
2428 }
2429 }
2430 }
2431 }
2432 if (!gUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
2433 new_class_roots_.clear();
2434 new_bss_roots_boot_oat_files_.clear();
2435 }
2436 if (!gUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
2437 log_new_roots_ = true;
2438 } else if (!gUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
2439 log_new_roots_ = false;
2440 }
2441 // We deliberately ignore the class roots in the image since we
2442 // handle image roots by using the MS/CMS rescanning of dirty cards.
2443 }
2444
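// An illustrative usage sketch (not part of the original source): counting all
// class roots via the API above. It assumes the `SingleRootVisitor` helper with
// a single-root `VisitRoot` hook; if that helper differs, subclass RootVisitor
// directly instead.
//
//   class CountingRootVisitor : public SingleRootVisitor {
//    public:
//     void VisitRoot(mirror::Object* root, const RootInfo& info) override {
//       UNUSED(root, info);
//       ++count_;  // Count every class root reported by the class linker.
//     }
//     size_t count_ = 0;
//   };
//
//   CountingRootVisitor v;
//   Runtime::Current()->GetClassLinker()->VisitClassRoots(&v, kVisitRootFlagAllRoots);
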
// Keep in sync with InitCallback. Anything we visit, we need to
// reinit references to when reinitializing a ClassLinker from a
// mapped image.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  VisitClassRoots(visitor, flags);
  // Instead of visiting the find_array_class_cache_, drop it so that it doesn't prevent class
  // unloading if we are marking roots.
  DropFindArrayClassCache();
}

class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
      : visitor_(visitor),
        done_(false) {}

  void Visit(ObjPtr<mirror::ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
    ClassTable* const class_table = class_loader->GetClassTable();
    if (!done_ && class_table != nullptr) {
      DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
      if (!class_table->Visit(visitor)) {
        // If the ClassTable visit returns false it means that we don't need to continue.
        done_ = true;
      }
    }
  }

 private:
  // Class visitor that limits the class visits from a ClassTable to the classes with
  // the provided defining class loader. This filter is used to avoid multiple visits
  // of the same class which can be recorded for multiple initiating class loaders.
  class DefiningClassLoaderFilterVisitor : public ClassVisitor {
   public:
    DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
                                     ClassVisitor* visitor)
        : defining_class_loader_(defining_class_loader), visitor_(visitor) { }

    bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (klass->GetClassLoader() != defining_class_loader_) {
        return true;
      }
      return (*visitor_)(klass);
    }

    const ObjPtr<mirror::ClassLoader> defining_class_loader_;
    ClassVisitor* const visitor_;
  };

  ClassVisitor* const visitor_;
  // If done is true then we don't need to do any more visiting.
  bool done_;
};

void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
  if (boot_class_table_->Visit(*visitor)) {
    VisitClassLoaderClassesVisitor loader_visitor(visitor);
    VisitClassLoaders(&loader_visitor);
  }
}

void ClassLinker::VisitClasses(ClassVisitor* visitor) {
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
  // Not safe to have thread suspension when we are holding a lock.
  if (self != nullptr) {
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    VisitClassesInternal(visitor);
  } else {
    VisitClassesInternal(visitor);
  }
}

class GetClassesInToVector : public ClassVisitor {
 public:
  bool operator()(ObjPtr<mirror::Class> klass) override {
    classes_.push_back(klass);
    return true;
  }
  std::vector<ObjPtr<mirror::Class>> classes_;
};

class GetClassInToObjectArray : public ClassVisitor {
 public:
  explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
      : arr_(arr), index_(0) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
    ++index_;
    if (index_ <= arr_->GetLength()) {
      arr_->Set(index_ - 1, klass);
      return true;
    }
    return false;
  }

  bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return index_ <= arr_->GetLength();
  }

 private:
  mirror::ObjectArray<mirror::Class>* const arr_;
  int32_t index_;
};

void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
  // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
  // is avoiding duplicates.
  if (!kMovingClasses) {
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    GetClassesInToVector accumulator;
    VisitClasses(&accumulator);
    for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
      if (!visitor->operator()(klass)) {
        return;
      }
    }
  } else {
    Thread* const self = Thread::Current();
    StackHandleScope<1> hs(self);
    auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
    // We size the array assuming classes won't be added to the class table during the visit.
    // If this assumption fails we iterate again.
    while (true) {
      size_t class_table_size;
      {
        ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
        // Add 100 in case new classes get loaded when we are filling in the object array.
        class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
      }
      ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
      classes.Assign(
          mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
      CHECK(classes != nullptr);  // OOME.
      GetClassInToObjectArray accumulator(classes.Get());
      VisitClasses(&accumulator);
      if (accumulator.Succeeded()) {
        break;
      }
    }
    for (int32_t i = 0; i < classes->GetLength(); ++i) {
      // If the class table shrank during creation of the classes array we expect null elements. If
      // the class table grew then the loop repeats. If classes are created after the loop has
      // finished then we don't visit.
      ObjPtr<mirror::Class> klass = classes->Get(i);
      if (klass != nullptr && !visitor->operator()(klass)) {
        return;
      }
    }
  }
}

ClassLinker::~ClassLinker() {
  Thread* const self = Thread::Current();
  for (const ClassLoaderData& data : class_loaders_) {
    // CHA unloading analysis is not needed. No negative consequences are expected because
    // all the classloaders are deleted at the same time.
    DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
  }
  class_loaders_.clear();
  while (!running_visibly_initialized_callbacks_.empty()) {
    std::unique_ptr<VisiblyInitializedCallback> callback(
        std::addressof(running_visibly_initialized_callbacks_.front()));
    running_visibly_initialized_callbacks_.pop_front();
  }
}

void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
  Runtime* const runtime = Runtime::Current();
  JavaVMExt* const vm = runtime->GetJavaVM();
  vm->DeleteWeakGlobalRef(self, data.weak_root);
  // Notify the JIT that we need to remove the methods and/or profiling info.
  if (runtime->GetJit() != nullptr) {
    jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
    if (code_cache != nullptr) {
      // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
      code_cache->RemoveMethodsIn(self, *data.allocator);
    }
  } else if (cha_ != nullptr) {
    // If we don't have a JIT, we need to manually remove the CHA dependencies.
    cha_->RemoveDependenciesForLinearAlloc(self, data.allocator);
  }
  // Cleanup references to single implementation ArtMethods that will be deleted.
  if (cleanup_cha) {
    CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
    data.class_table->Visit<kWithoutReadBarrier>(visitor);
  }
  {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto end = critical_native_code_with_clinit_check_.end();
    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
      if (data.allocator->ContainsUnsafe(it->first)) {
        it = critical_native_code_with_clinit_check_.erase(it);
      } else {
        ++it;
      }
    }
  }

  delete data.allocator;
  delete data.class_table;
}

ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
  return ObjPtr<mirror::PointerArray>::DownCast(
      image_pointer_size_ == PointerSize::k64
          ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
          : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
}

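// For illustration (a sketch, not in the original source): a table of three
// method pointers allocated here is backed by a LongArray on a 64-bit image
// and by an IntArray on a 32-bit image, so each slot is one image pointer wide.
//
//   ObjPtr<mirror::PointerArray> table = AllocPointerArray(self, /*length=*/ 3u);
//   if (table == nullptr) {
//     self->AssertPendingOOMException();  // Allocation failure leaves an OOME pending.
//   }
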
ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
  StackHandleScope<1> hs(self);
  auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
      GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
  if (dex_cache == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Use InternWeak() so that the location String can be collected when the ClassLoader
  // with this DexCache is collected.
  ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
  if (location == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  dex_cache->SetLocation(location);
  return dex_cache.Get();
}

ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
    Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
  StackHandleScope<1> hs(self);
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
  if (dex_cache != nullptr) {
    WriterMutexLock mu(self, *Locks::dex_lock_);
    dex_cache->Initialize(&dex_file, h_class_loader.Get());
  }
  return dex_cache;
}

template <bool kMovable, typename PreFenceVisitor>
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
                                              ObjPtr<mirror::Class> java_lang_Class,
                                              uint32_t class_size,
                                              const PreFenceVisitor& pre_fence_visitor) {
  DCHECK_GE(class_size, sizeof(mirror::Class));
  gc::Heap* heap = Runtime::Current()->GetHeap();
  ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
      heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
      heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
  if (UNLIKELY(k == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  return k->AsClass();
}

template <bool kMovable>
ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
                                              ObjPtr<mirror::Class> java_lang_Class,
                                              uint32_t class_size) {
  mirror::Class::InitializeClassVisitor visitor(class_size);
  return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
}

ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
  return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
}

void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
                                           ClassRoot primitive_root,
                                           ClassRoot array_root) {
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
      self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
  ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
  DCHECK(component_type->IsPrimitive());
  array_class->SetComponentType(component_type);
  SetClassRoot(array_root, array_class);
}

void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
  ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
  array_class->SetSuperClass(java_lang_Object);
  array_class->SetVTable(java_lang_Object->GetVTable());
  array_class->SetPrimitiveType(Primitive::kPrimNot);
  ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
  array_class->SetClassFlags(component_type->IsPrimitive()
                                 ? mirror::kClassFlagNoReferenceFields
                                 : mirror::kClassFlagObjectArray);
  array_class->SetClassLoader(component_type->GetClassLoader());
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
  array_class->PopulateEmbeddedVTable(image_pointer_size_);
  ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
  array_class->SetImt(object_imt, image_pointer_size_);
  DCHECK_EQ(array_class->NumMethods(), 0u);

  // don't need to set new_class->SetObjectSize(..)
  // because Object::SizeOf delegates to Array::SizeOf

  // All arrays have java/lang/Cloneable and java/io/Serializable as
  // interfaces. We need to set that up here, so that stuff like
  // "instanceof" works right.

  // Use the single, global copies of "interfaces" and "iftable"
  // (remember not to free them for arrays).
  {
    ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
    CHECK(array_iftable != nullptr);
    array_class->SetIfTable(array_iftable);
  }

  // Inherit access flags from the component type.
  int access_flags = component_type->GetAccessFlags();
  // Lose any implementation detail flags; in particular, arrays aren't finalizable.
  access_flags &= kAccJavaFlagsMask;
  // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
  // and remove "interface".
  access_flags |= kAccAbstract | kAccFinal;
  access_flags &= ~kAccInterface;

  array_class->SetAccessFlagsDuringLinking(access_flags);

  // Array classes are fully initialized either during single threaded startup,
  // or from a pre-fence visitor, so visibly initialized.
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
}

void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
  // Do not hold lock on the array class object, the initialization of
  // core array classes is done while the process is still single threaded.
  ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
  FinishArrayClassSetup(array_class);

  std::string temp;
  const char* descriptor = array_class->GetDescriptor(&temp);
  size_t hash = ComputeModifiedUtf8Hash(descriptor);
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
  CHECK(existing == nullptr);
}

ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
    Thread* self,
    size_t length) {
  return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
      self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
}

ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
                                                  const char* descriptor,
                                                  ObjPtr<mirror::Class> klass) {
  DCHECK(klass != nullptr);
  if (kIsDebugBuild) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
    Thread::PoisonObjectPointersIfDebug();
  }

  // For temporary classes we must wait for them to be retired.
  if (init_done_ && klass->IsTemp()) {
    CHECK(!klass->IsResolved());
    if (klass->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(klass);
      return nullptr;
    }
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    ObjectLock<mirror::Class> lock(self, h_class);
    // Loop and wait for the resolving thread to retire this class.
    while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
      lock.WaitIgnoringInterrupts();
    }
    if (h_class->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(h_class.Get());
      return nullptr;
    }
    CHECK(h_class->IsRetired());
    // Get the updated class from the class table.
    klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
  }

  // Wait for the class if it has not already been linked.
  size_t index = 0;
  // Maximum number of yield iterations until we start sleeping.
  static const size_t kNumYieldIterations = 1000;
  // How long each sleep is in us.
  static const size_t kSleepDurationUS = 1000;  // 1 ms.
  while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
    {
      ObjectTryLock<mirror::Class> lock(self, h_class);
      // Cannot use a monitor wait here since it may block when returning and deadlock if another
      // thread has locked klass.
      if (lock.Acquired()) {
        // Check for circular dependencies between classes, the lock is required for SetStatus.
        if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
          ThrowClassCircularityError(h_class.Get());
          mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
          return nullptr;
        }
      }
    }
    {
      // Handle wrapper deals with klass moving.
      ScopedThreadSuspension sts(self, ThreadState::kSuspended);
      if (index < kNumYieldIterations) {
        sched_yield();
      } else {
        usleep(kSleepDurationUS);
      }
    }
    ++index;
  }

  if (klass->IsErroneousUnresolved()) {
    ThrowEarlierClassFailure(klass);
    return nullptr;
  }
  // Return the loaded class. No exceptions should be pending.
  CHECK(klass->IsResolved()) << klass->PrettyClass();
  self->AssertNoPendingException();
  return klass;
}

using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;

// Search a collection of DexFiles for a descriptor.
ClassPathEntry FindInClassPath(const char* descriptor,
                               size_t hash,
                               const std::vector<const DexFile*>& class_path) {
  for (const DexFile* dex_file : class_path) {
    DCHECK(dex_file != nullptr);
    const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
    if (dex_class_def != nullptr) {
      return ClassPathEntry(dex_file, dex_class_def);
    }
  }
  return ClassPathEntry(nullptr, nullptr);
}

// Helper macro to make sure each class loader lookup call handles the case where the
// class loader is not recognized, or the lookup threw an exception.
#define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
  do {                                                                        \
    auto local_call = call_;                                                  \
    if (!local_call) {                                                        \
      return false;                                                           \
    }                                                                         \
    auto local_result = result_;                                              \
    if (local_result != nullptr) {                                            \
      return true;                                                            \
    }                                                                         \
    auto local_thread = thread_;                                              \
    if (local_thread->IsExceptionPending()) {                                 \
      /* Pending exception means there was an error other than */             \
      /* ClassNotFound that must be returned to the caller. */                \
      return false;                                                           \
    }                                                                         \
  } while (0)

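// To make the macro's tri-state contract concrete, a single use such as
//
//   RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
//       FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
//       *result,
//       self);
//
// behaves like the following (equivalent expansion, shown for illustration):
//
//   if (!FindClassInSharedLibraries(self, descriptor, hash, class_loader, result)) {
//     return false;  // Class loader chain not recognized.
//   }
//   if (*result != nullptr) {
//     return true;   // Class found.
//   }
//   if (self->IsExceptionPending()) {
//     return false;  // Error other than ClassNotFound; report it to the caller.
//   }
//   // Otherwise fall through to the next lookup step.
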
bool ClassLinker::FindClassInSharedLibraries(Thread* self,
                                             const char* descriptor,
                                             size_t hash,
                                             Handle<mirror::ClassLoader> class_loader,
                                             /*out*/ ObjPtr<mirror::Class>* result) {
  ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
  return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
}

bool ClassLinker::FindClassInSharedLibrariesHelper(Thread* self,
                                                   const char* descriptor,
                                                   size_t hash,
                                                   Handle<mirror::ClassLoader> class_loader,
                                                   ArtField* field,
                                                   /*out*/ ObjPtr<mirror::Class>* result) {
  ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
  if (raw_shared_libraries == nullptr) {
    return true;
  }

  StackHandleScope<2> hs(self);
  Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
      hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
  MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
  for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
    temp_loader.Assign(loader);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, hash, temp_loader, result),
        *result,
        self);
  }
  return true;
}

bool ClassLinker::FindClassInSharedLibrariesAfter(Thread* self,
                                                  const char* descriptor,
                                                  size_t hash,
                                                  Handle<mirror::ClassLoader> class_loader,
                                                  /*out*/ ObjPtr<mirror::Class>* result) {
  ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
  return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
}

bool ClassLinker::FindClassInBaseDexClassLoader(Thread* self,
                                                const char* descriptor,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                /*out*/ ObjPtr<mirror::Class>* result) {
  // Termination case: boot class loader.
  if (IsBootClassLoader(class_loader.Get())) {
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    return true;
  }

  if (IsPathOrDexClassLoader(class_loader) || IsInMemoryDexClassLoader(class_loader)) {
    // For regular path or dex class loader the search order is:
    // - parent
    // - shared libraries
    // - class loader dex files

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  if (IsDelegateLastClassLoader(class_loader)) {
    // For delegate last, the search order is:
    // - boot class path
    // - shared libraries
    // - class loader dex files
    // - parent
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
        *result,
        self);

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  // Unsupported class loader.
  *result = nullptr;
  return false;
}

#undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION

namespace {

// Matches exceptions caught in DexFile.defineClass.
ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
                                                  ClassLinker* class_linker)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return
      // ClassNotFoundException.
      throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
                                         class_linker))
      ||
      // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
      throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
}

// Clear exceptions caught in DexFile.defineClass.
ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
    self->ClearException();
  }
}

}  // namespace

// Finds the class in the boot class loader.
// If the class is found, the resolved class is stored in `*result`; otherwise `*result` is null.
bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
                                                      const char* descriptor,
                                                      size_t hash,
                                                      /*out*/ ObjPtr<mirror::Class>* result) {
  ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
  if (pair.second != nullptr) {
    ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
    if (klass != nullptr) {
      *result = EnsureResolved(self, descriptor, klass);
    } else {
      *result = DefineClass(self,
                            descriptor,
                            hash,
                            ScopedNullHandle<mirror::ClassLoader>(),
                            *pair.first,
                            *pair.second);
    }
    if (*result == nullptr) {
      CHECK(self->IsExceptionPending()) << descriptor;
      FilterDexFileCaughtExceptions(self, this);
    }
  }
  // The boot classloader is always a known lookup.
  return true;
}

bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
    Thread* self,
    const char* descriptor,
    size_t hash,
    Handle<mirror::ClassLoader> class_loader,
    /*out*/ ObjPtr<mirror::Class>* result) {
  DCHECK(IsPathOrDexClassLoader(class_loader) ||
         IsInMemoryDexClassLoader(class_loader) ||
         IsDelegateLastClassLoader(class_loader))
      << "Unexpected class loader for descriptor " << descriptor;

  const DexFile* dex_file = nullptr;
  const dex::ClassDef* class_def = nullptr;
  ObjPtr<mirror::Class> ret;
  auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
    if (cp_class_def != nullptr) {
      dex_file = cp_dex_file;
      class_def = cp_class_def;
      return false;  // Found a class definition, stop visit.
    }
    return true;  // Continue with the next DexFile.
  };
  VisitClassLoaderDexFiles(self, class_loader, find_class_def);

  if (class_def != nullptr) {
    *result = DefineClass(self, descriptor, hash, class_loader, *dex_file, *class_def);
    if (UNLIKELY(*result == nullptr)) {
      CHECK(self->IsExceptionPending()) << descriptor;
      FilterDexFileCaughtExceptions(self, this);
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  // A BaseDexClassLoader is always a known lookup.
  return true;
}

ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
                                             const char* descriptor,
                                             Handle<mirror::ClassLoader> class_loader) {
  DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
  DCHECK(self != nullptr);
  self->AssertNoPendingException();
  self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
  if (descriptor[1] == '\0') {
    // Only the descriptors of primitive types should be 1 character long; this also avoids
    // class lookup for primitive classes that aren't backed by dex files.
    return FindPrimitiveClass(descriptor[0]);
  }
  const size_t hash = ComputeModifiedUtf8Hash(descriptor);
  // Find the class in the loaded classes table.
  ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
  if (klass != nullptr) {
    return EnsureResolved(self, descriptor, klass);
  }
  // Class is not yet loaded.
  if (descriptor[0] != '[' && class_loader == nullptr) {
    // Non-array class and the boot class loader, search the boot class path.
    ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
    if (pair.second != nullptr) {
      return DefineClass(self,
                         descriptor,
                         hash,
                         ScopedNullHandle<mirror::ClassLoader>(),
                         *pair.first,
                         *pair.second);
    } else {
      // The boot class loader is searched ahead of the application class loader, failures are
      // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
      // trigger the chaining with a proper stack trace.
      ObjPtr<mirror::Throwable> pre_allocated =
          Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
      self->SetException(pre_allocated);
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> result_ptr;
  bool descriptor_equals;
  if (descriptor[0] == '[') {
    result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
    DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
    DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
    descriptor_equals = true;
  } else {
    ScopedObjectAccessUnchecked soa(self);
    bool known_hierarchy =
        FindClassInBaseDexClassLoader(self, descriptor, hash, class_loader, &result_ptr);
    if (result_ptr != nullptr) {
      // The chain was understood and we found the class. We still need to add the class to
      // the class table to protect from racy programs that can try and redefine the path list
      // which would change the Class<?> returned for subsequent evaluation of const-class.
      DCHECK(known_hierarchy);
      DCHECK(result_ptr->DescriptorEquals(descriptor));
      descriptor_equals = true;
    } else if (!self->IsExceptionPending()) {
      // Either the chain wasn't understood or the class wasn't found.
      // If there is a pending exception we didn't clear, it is not a ClassNotFoundException and
      // we should return it instead of silently clearing and retrying.
      //
      // If the chain was understood but we did not find the class, let the Java-side
      // rediscover all this and throw the exception with the right stack trace. Note that
      // the Java-side could still succeed for racy programs if another thread is actively
      // modifying the class loader's path list.

      // The runtime is not allowed to call into java from a runtime-thread so just abort.
      if (self->IsRuntimeThread()) {
        // Oops, we can't call into java so we can't run actual class-loader code.
        // This is true e.g. for the compiler (JIT or AOT).
        ObjPtr<mirror::Throwable> pre_allocated =
            Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
        self->SetException(pre_allocated);
        return nullptr;
      }

      // Inlined DescriptorToDot(descriptor) with extra validation.
      //
      // Throw NoClassDefFoundError early rather than potentially load a class only to fail
      // the DescriptorEquals() check below and give a confusing error message. For example,
      // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
      // instead of "Ljava/lang/String;", the message below using the "dot" names would be
      // "class loader [...] returned class java.lang.String instead of java.lang.String".
      size_t descriptor_length = strlen(descriptor);
      if (UNLIKELY(descriptor[0] != 'L') ||
          UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
          UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
        ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
        return nullptr;
      }

      std::string class_name_string(descriptor + 1, descriptor_length - 2);
      std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
      if (known_hierarchy &&
          fast_class_not_found_exceptions_ &&
          !Runtime::Current()->IsJavaDebuggable()) {
        // For known hierarchy, we know that the class is going to throw an exception. If we aren't
        // debuggable, optimize this path by throwing directly here without going back to Java
        // language. This reduces how many ClassNotFoundExceptions happen.
        self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
                                 "%s",
                                 class_name_string.c_str());
      } else {
        StackHandleScope<1u> hs(self);
        Handle<mirror::String> class_name_object = hs.NewHandle(
            mirror::String::AllocFromModifiedUtf8(self, class_name_string.c_str()));
        if (class_name_object == nullptr) {
          DCHECK(self->IsExceptionPending());  // OOME.
          return nullptr;
        }
        DCHECK(class_loader != nullptr);
        result_ptr = ObjPtr<mirror::Class>::DownCast(
            WellKnownClasses::java_lang_ClassLoader_loadClass->InvokeVirtual<'L', 'L'>(
                self, class_loader.Get(), class_name_object.Get()));
        if (result_ptr == nullptr && !self->IsExceptionPending()) {
          // Broken loader - throw NPE to be compatible with Dalvik.
          ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
                                                 class_name_string.c_str()).c_str());
          return nullptr;
        }
        // Check the name of the returned class.
        descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
      }
    } else {
      DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
    }
  }

  if (self->IsExceptionPending()) {
    // If the ClassLoader threw or array class allocation failed, pass that exception up.
    // However, to comply with the RI behavior, first check if another thread succeeded.
    result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
    if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
      self->ClearException();
      return EnsureResolved(self, descriptor, result_ptr);
    }
    return nullptr;
  }

  // Try to insert the class to the class table, checking for mismatch.
  ObjPtr<mirror::Class> old;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
    old = class_table->Lookup(descriptor, hash);
    if (old == nullptr) {
      old = result_ptr;  // For the comparison below, after releasing the lock.
      if (descriptor_equals) {
        class_table->InsertWithHash(result_ptr, hash);
        WriteBarrier::ForEveryFieldWrite(class_loader.Get());
      }  // else throw below, after releasing the lock.
    }
  }
  if (UNLIKELY(old != result_ptr)) {
    // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
    // capable class loaders. (All class loaders are considered parallel capable on Android.)
    ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
    const char* loader_class_name =
        loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
    LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
                 << " is not well-behaved; it returned a different Class for racing loadClass(\""
                 << DescriptorToDot(descriptor) << "\").";
    return EnsureResolved(self, descriptor, old);
  }
  if (UNLIKELY(!descriptor_equals)) {
    std::string result_storage;
    const char* result_name = result_ptr->GetDescriptor(&result_storage);
    std::string loader_storage;
    const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
    ThrowNoClassDefFoundError(
        "Initiating class loader of type %s returned class %s instead of %s.",
        DescriptorToDot(loader_class_name).c_str(),
        DescriptorToDot(result_name).c_str(),
        DescriptorToDot(descriptor).c_str());
    return nullptr;
  }
  // Success.
  return result_ptr;
}

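// A minimal caller-side sketch (not from the original source): resolving a
// class by descriptor against the boot class loader. A null handle selects the
// boot class path branch at the top of FindClass.
//
//   ObjPtr<mirror::Class> klass = class_linker->FindClass(
//       self, "Ljava/lang/String;", ScopedNullHandle<mirror::ClassLoader>());
//   if (klass == nullptr) {
//     DCHECK(self->IsExceptionPending());  // Failure always leaves an exception set.
//   }
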
// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
// define-class and how many recursive DefineClasses we are at in order to allow for doing things
// like pausing class definition.
struct ScopedDefiningClass {
 public:
  explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
      : self_(self), returned_(false) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
    self_->IncrDefineClassCount();
  }
  ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    CHECK(returned_);
  }

  ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(!returned_);
    self_->DecrDefineClassCount();
    Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
    Thread::PoisonObjectPointersIfDebug();
    returned_ = true;
    return h_klass.Get();
  }

  ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(self_);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    return Finish(h_klass);
  }

  ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedNullHandle<mirror::Class> snh;
    return Finish(snh);
  }

 private:
  Thread* self_;
  bool returned_;
};

ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
                                               const char* descriptor,
                                               size_t hash,
                                               Handle<mirror::ClassLoader> class_loader,
                                               const DexFile& dex_file,
                                               const dex::ClassDef& dex_class_def) {
  ScopedDefiningClass sdc(self);
  StackHandleScope<3> hs(self);
  metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
  metrics::AutoTimer timeDelta{GetMetrics()->ClassLoadingTotalTimeDelta()};
  auto klass = hs.NewHandle<mirror::Class>(nullptr);

  // Load the class from the dex file.
  if (UNLIKELY(!init_done_)) {
    // Finish up the init of the hand-crafted class_roots_.
    if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
      klass.Assign(GetClassRoot<mirror::Object>(this));
    } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
      klass.Assign(GetClassRoot<mirror::Class>(this));
    } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass.Assign(GetClassRoot<mirror::String>(this));
    } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
      klass.Assign(GetClassRoot<mirror::Reference>(this));
    } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
      klass.Assign(GetClassRoot<mirror::DexCache>(this));
    } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
      klass.Assign(GetClassRoot<mirror::ClassExt>(this));
    }
  }

  // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
  // checks are configured (a non-null SdkChecker) and the descriptor is not in the provided
  // public class path then we prevent the definition of the class.
  //
  // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
  // classpath, is not checked.
  if (class_loader == nullptr &&
      Runtime::Current()->IsAotCompiler() &&
      DenyAccessBasedOnPublicSdk(descriptor)) {
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (e.g.) JIT threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  if (klass == nullptr) {
    // Allocate a class with the status of not ready.
    // Interface object should get the right size here. Regular class will
    // figure out the right size later and be replaced with one of the right
    // size when the class becomes resolved.
    if (CanAllocClass()) {
      klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
    } else {
      return sdc.Finish(nullptr);
    }
  }
  if (UNLIKELY(klass == nullptr)) {
    self->AssertPendingOOMException();
    return sdc.Finish(nullptr);
  }
  // Get the real dex file. This will return the input if there aren't any callbacks or they do
  // nothing.
  DexFile const* new_dex_file = nullptr;
  dex::ClassDef const* new_class_def = nullptr;
  // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
  // will only be called once.
  Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
                                                            klass,
                                                            class_loader,
                                                            dex_file,
                                                            dex_class_def,
                                                            &new_dex_file,
                                                            &new_class_def);
  // Check to see if an exception happened during runtime callbacks. Return if so.
  if (self->IsExceptionPending()) {
    return sdc.Finish(nullptr);
  }
  ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
  if (dex_cache == nullptr) {
    self->AssertPendingException();
    return sdc.Finish(nullptr);
  }
  klass->SetDexCache(dex_cache);
  SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());

  // Mark the string class by setting its access flag.
  if (UNLIKELY(!init_done_)) {
    if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass->SetStringClass();
    }
  }

  ObjectLock<mirror::Class> lock(self, klass);
  klass->SetClinitThreadId(self->GetTid());
  // Make sure we have a valid empty iftable even if there are errors.
  klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());

  // Add the newly loaded class to the loaded classes table.
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
  if (existing != nullptr) {
    // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
    // this thread to block.
    return sdc.Finish(EnsureResolved(self, descriptor, existing));
  }

  // Load the fields and other things after we are inserted in the table. This is so that we don't
  // end up allocating unfree-able linear alloc resources and then lose the race condition. The
  // other reason is that the field roots are only visited from the class table. So we need to be
  // inserted before we allocate / fill in these fields.
  LoadClass(self, *new_dex_file, *new_class_def, klass);
  if (self->IsExceptionPending()) {
    VLOG(class_linker) << self->GetException()->Dump();
    // An exception occurred during load, set status to erroneous while holding klass' lock in case
    // notification is necessary.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }

  // Finish loading (if necessary) by finding parents.
  CHECK(!klass->IsLoaded());
  if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
    // Loading failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  CHECK(klass->IsLoaded());

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);

  // Link the class (if necessary).
  CHECK(!klass->IsResolved());
  // TODO: Use fast jobjects?
  auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);

  MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
  if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
    // Linking failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  self->AssertNoPendingException();
  CHECK(h_new_class != nullptr) << descriptor;
  CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();

  // Instrumentation may have updated entrypoints for all methods of all
  // classes. However it could not update methods of this class while we
  // were loading it. Now the class is resolved, we can update entrypoints
  // as required by instrumentation.
  if (Runtime::Current()->GetInstrumentation()->EntryExitStubsInstalled()) {
    // We must be in the kRunnable state to prevent instrumentation from
    // suspending all threads to update entrypoints while we are doing it
    // for this class.
    DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
    Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
  }

  /*
   * We send CLASS_PREPARE events to the debugger from here. The
   * definition of "preparation" is creating the static fields for a
   * class and initializing them to the standard default values, but not
   * executing any code (that comes later, during "initialization").
   *
   * We did the static preparation in LinkClass.
   *
   * The class has been prepared and resolved but possibly not yet verified
   * at this point.
   */
  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);

  // Notify native debugger of the new class and its layout.
  jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());

  return sdc.Finish(h_new_class);
}

uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
                                                       const dex::ClassDef& dex_class_def) {
  size_t num_ref = 0;
  size_t num_8 = 0;
  size_t num_16 = 0;
  size_t num_32 = 0;
  size_t num_64 = 0;
  ClassAccessor accessor(dex_file, dex_class_def);
  // We allow duplicate definitions of the same field in a class_data_item
  // but ignore the repeated indexes here, b/21868015.
  uint32_t last_field_idx = dex::kDexNoIndex;
  for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
    uint32_t field_idx = field.GetIndex();
    // Ordering enforced by DexFileVerifier.
    DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
    if (UNLIKELY(field_idx == last_field_idx)) {
      continue;
    }
    last_field_idx = field_idx;
    const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
    const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
    char c = descriptor[0];
    switch (c) {
      case 'L':
      case '[':
        num_ref++;
        break;
      case 'J':
      case 'D':
        num_64++;
        break;
      case 'I':
      case 'F':
        num_32++;
        break;
      case 'S':
      case 'C':
        num_16++;
        break;
      case 'B':
      case 'Z':
        num_8++;
        break;
      default:
        LOG(FATAL) << "Unknown descriptor: " << c;
        UNREACHABLE();
    }
  }
  return mirror::Class::ComputeClassSize(false,
                                         0,
                                         num_8,
                                         num_16,
                                         num_32,
                                         num_64,
                                         num_ref,
                                         image_pointer_size_);
}

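// Worked example (illustrative only): a class whose static fields are
// { long a; int b; int c; Object d; } yields num_64 = 1, num_32 = 2, num_ref = 1
// and num_8 = num_16 = 0, so the computed size is
//
//   mirror::Class::ComputeClassSize(false,
//                                   0,
//                                   /*num_8=*/ 0,
//                                   /*num_16=*/ 0,
//                                   /*num_32=*/ 2,
//                                   /*num_64=*/ 1,
//                                   /*num_ref=*/ 1,
//                                   image_pointer_size_);
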
void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
  size_t num_direct_methods = klass->NumDirectMethods();
  if (num_direct_methods == 0) {
    return;  // No direct methods => no static methods.
  }
  if (UNLIKELY(klass->IsProxyClass())) {
    return;
  }
  PointerSize pointer_size = image_pointer_size_;
  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
                  klass->GetDirectMethods(pointer_size).end(),
                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
      lb->first->SetEntryPointFromJni(lb->second);
      lb = critical_native_code_with_clinit_check_.erase(lb);
    }
  }
  Runtime* runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // We should not update entrypoints when running the transactional
    // interpreter.
    return;
  }

  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
    if (method->NeedsClinitCheckBeforeCall()) {
      instrumentation->UpdateMethodsCode(method, instrumentation->GetCodeForInvoke(method));
    }
  }
  // Ignore virtual methods on the iterator.
}

// Does whatever is needed to make sure that the compiler will not generate a direct invoke to
// this method. Should only be called on non-invokable methods.
inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(method != nullptr);
  DCHECK(!method->IsInvokable());
  method->SetEntryPointFromQuickCompiledCodePtrSize(
      class_linker->GetQuickToInterpreterBridgeTrampoline(),
      class_linker->GetImagePointerSize());
}

LinkCode(ClassLinker * class_linker,ArtMethod * method,const OatFile::OatClass * oat_class,uint32_t class_def_method_index)3658 static void LinkCode(ClassLinker* class_linker,
3659 ArtMethod* method,
3660 const OatFile::OatClass* oat_class,
3661 uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
3662 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3663 Runtime* const runtime = Runtime::Current();
3664 if (runtime->IsAotCompiler()) {
3665 // The following code only applies to a non-compiler runtime.
3666 return;
3667 }
3668
3669 // Method shouldn't have already been linked.
3670 DCHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), nullptr);
3671 DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized()); // Actually ClassStatus::kIdx.
3672
3673 if (!method->IsInvokable()) {
3674 EnsureThrowsInvocationError(class_linker, method);
3675 return;
3676 }
3677
3678 const void* quick_code = nullptr;
3679 if (oat_class != nullptr) {
3680 // Every kind of method should at least get an invoke stub from the oat_method.
3681 // Non-abstract methods also get their code pointers.
3682 const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
3683 quick_code = oat_method.GetQuickCode();
3684 }
3685 runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code);
3686
3687 if (method->IsNative()) {
3688 // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
3689 // as the extra processing for @CriticalNative is not needed yet.
3690 method->SetEntryPointFromJni(
3691 method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
3692 }
3693 }
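// Informal summary of the entrypoint selection done by LinkCode():
//   - non-invokable method -> quick-to-interpreter bridge (which then throws),
//   - method with oat code -> the AOT-compiled quick code,
//   - otherwise            -> whatever InitializeMethodsCode() chooses
//                             (typically nterp, the interpreter bridge, or a
//                             JIT-related stub),
// and native methods additionally start with the JNI dlsym lookup stub until
// the actual native implementation is registered or resolved.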
3694
3695 void ClassLinker::SetupClass(const DexFile& dex_file,
3696 const dex::ClassDef& dex_class_def,
3697 Handle<mirror::Class> klass,
3698 ObjPtr<mirror::ClassLoader> class_loader) {
3699 CHECK(klass != nullptr);
3700 CHECK(klass->GetDexCache() != nullptr);
3701 CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
3702 const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
3703 CHECK(descriptor != nullptr);
3704
3705 klass->SetClass(GetClassRoot<mirror::Class>(this));
3706 uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
3707 CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
3708 klass->SetAccessFlagsDuringLinking(access_flags);
3709 klass->SetClassLoader(class_loader);
3710 DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
3711 mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
3712
3713 klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
3714 klass->SetDexTypeIndex(dex_class_def.class_idx_);
3715 }
3716
3717 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3718 LinearAlloc* allocator,
3719 size_t length) {
3720 if (length == 0) {
3721 return nullptr;
3722 }
3723 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3724 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3725 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3726 void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtFieldArray);
3727 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3728 CHECK(ret != nullptr);
3729 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3730 return ret;
3731 }
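// Memory layout sketch of the returned array (assuming the current
// LengthPrefixedArray definition): a 32-bit length followed by `length`
// consecutive ArtField slots, e.g. for length == 3:
//   [ size_ | ArtField #0 | ArtField #1 | ArtField #2 ]
// ComputeSize() accounts for the header and any alignment padding.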
3732
3733 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3734 LinearAlloc* allocator,
3735 size_t length) {
3736 if (length == 0) {
3737 return nullptr;
3738 }
3739 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3740 const size_t method_size = ArtMethod::Size(image_pointer_size_);
3741 const size_t storage_size =
3742 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3743 void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtMethodArray);
3744 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3745 CHECK(ret != nullptr);
3746 for (size_t i = 0; i < length; ++i) {
3747 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3748 }
3749 return ret;
3750 }
3751
3752 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3753 if (class_loader == nullptr) {
3754 return Runtime::Current()->GetLinearAlloc();
3755 }
3756 LinearAlloc* allocator = class_loader->GetAllocator();
3757 DCHECK(allocator != nullptr);
3758 return allocator;
3759 }
3760
3761 LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3762 if (class_loader == nullptr) {
3763 return Runtime::Current()->GetLinearAlloc();
3764 }
3765 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3766 LinearAlloc* allocator = class_loader->GetAllocator();
3767 if (allocator == nullptr) {
3768 RegisterClassLoader(class_loader);
3769 allocator = class_loader->GetAllocator();
3770 CHECK(allocator != nullptr);
3771 }
3772 return allocator;
3773 }
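// The function above is a check-then-create pattern: the allocator may be
// null for a freshly created class loader, so we re-check under the exclusive
// classlinker_classes_lock_ and let RegisterClassLoader() create the
// LinearAlloc exactly once.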
3774
3775 void ClassLinker::LoadClass(Thread* self,
3776 const DexFile& dex_file,
3777 const dex::ClassDef& dex_class_def,
3778 Handle<mirror::Class> klass) {
3779 ClassAccessor accessor(dex_file,
3780 dex_class_def,
3781 /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
3782 if (!accessor.HasClassData()) {
3783 return;
3784 }
3785 Runtime* const runtime = Runtime::Current();
3786 {
3787 // Note: We cannot have thread suspension until the field and method arrays are set up, or else
3788 // Class::VisitFieldRoots may miss some fields or methods.
3789 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
3790 // Load static fields.
3791 // We allow duplicate definitions of the same field in a class_data_item
3792 // but ignore the repeated indexes here, b/21868015.
3793 LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
3794 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3795 allocator,
3796 accessor.NumStaticFields());
3797 LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3798 allocator,
3799 accessor.NumInstanceFields());
3800 size_t num_sfields = 0u;
3801 size_t num_ifields = 0u;
3802 uint32_t last_static_field_idx = 0u;
3803 uint32_t last_instance_field_idx = 0u;
3804
3805 // Methods
3806 bool has_oat_class = false;
3807 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3808 ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3809 : OatFile::OatClass::Invalid();
3810 const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
3811 klass->SetMethodsPtr(
3812 AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3813 accessor.NumDirectMethods(),
3814 accessor.NumVirtualMethods());
3815 size_t class_def_method_index = 0;
3816 uint32_t last_dex_method_index = dex::kDexNoIndex;
3817 size_t last_class_def_method_index = 0;
3818
3819 uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
3820 // Use the visitor, since range-based loops are a bit slower due to seeking: seeking to the
3821 // methods requires decoding all of the fields first.
3822 accessor.VisitFieldsAndMethods([&](
3823 const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3824 uint32_t field_idx = field.GetIndex();
3825 DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier.
3826 if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
3827 LoadField(field, klass, &sfields->At(num_sfields));
3828 ++num_sfields;
3829 last_static_field_idx = field_idx;
3830 }
3831 }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3832 uint32_t field_idx = field.GetIndex();
3833 DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier.
3834 if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
3835 LoadField(field, klass, &ifields->At(num_ifields));
3836 ++num_ifields;
3837 last_instance_field_idx = field_idx;
3838 }
3839 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3840 ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
3841 image_pointer_size_);
3842 LoadMethod(dex_file, method, klass.Get(), art_method);
3843 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3844 uint32_t it_method_index = method.GetIndex();
3845 if (last_dex_method_index == it_method_index) {
3846 // Duplicate method index: reuse the index assigned to the first occurrence.
3847 art_method->SetMethodIndex(last_class_def_method_index);
3848 } else {
3849 art_method->SetMethodIndex(class_def_method_index);
3850 last_dex_method_index = it_method_index;
3851 last_class_def_method_index = class_def_method_index;
3852 }
3853 art_method->ResetCounter(hotness_threshold);
3854 ++class_def_method_index;
3855 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3856 ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
3857 class_def_method_index - accessor.NumDirectMethods(),
3858 image_pointer_size_);
3859 art_method->ResetCounter(hotness_threshold);
3860 LoadMethod(dex_file, method, klass.Get(), art_method);
3861 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3862 ++class_def_method_index;
3863 });
3864
3865 if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
3866 LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
3867 << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
3868 << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
3869 << ")";
3870 // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
3871 if (sfields != nullptr) {
3872 sfields->SetSize(num_sfields);
3873 }
3874 if (ifields != nullptr) {
3875 ifields->SetSize(num_ifields);
3876 }
3877 }
3878 // Set the field arrays.
3879 klass->SetSFieldsPtr(sfields);
3880 DCHECK_EQ(klass->NumStaticFields(), num_sfields);
3881 klass->SetIFieldsPtr(ifields);
3882 DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
3883 }
3884 // Ensure that the card is marked so that remembered sets pick up native roots.
3885 WriteBarrier::ForEveryFieldWrite(klass.Get());
3886 self->AllowThreadSuspension();
3887 }
3888
3889 void ClassLinker::LoadField(const ClassAccessor::Field& field,
3890 Handle<mirror::Class> klass,
3891 ArtField* dst) {
3892 const uint32_t field_idx = field.GetIndex();
3893 dst->SetDexFieldIndex(field_idx);
3894 dst->SetDeclaringClass(klass.Get());
3895
3896 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3897 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
3898 }
3899
3900 void ClassLinker::LoadMethod(const DexFile& dex_file,
3901 const ClassAccessor::Method& method,
3902 ObjPtr<mirror::Class> klass,
3903 ArtMethod* dst) {
3904 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3905
3906 const uint32_t dex_method_idx = method.GetIndex();
3907 const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
3908 uint32_t name_utf16_length;
3909 const char* method_name = dex_file.StringDataAndUtf16LengthByIdx(method_id.name_idx_,
3910 &name_utf16_length);
3911 std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_));
3912
3913 dst->SetDexMethodIndex(dex_method_idx);
3914 dst->SetDeclaringClass(klass);
3915
3916 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3917 uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
3918
3919 auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
3920 size_t length) ALWAYS_INLINE {
3921 DCHECK_EQ(strlen(ascii_name), length);
3922 return length == name_utf16_length &&
3923 method_name[length] == 0 && // Is `method_name` an ASCII string?
3924 memcmp(ascii_name, method_name, length) == 0;
3925 };
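// Why `method_name[length] == 0` implies an ASCII string: `name_utf16_length`
// is the UTF-16 length, which equals the UTF-8 byte length only when every
// character encodes as a single byte. E.g. for "finalize" both lengths are 8,
// so the NUL lands at index 8; a non-ASCII name would have a longer byte
// encoding and a non-zero byte at that index.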
3926 if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
3927 // Set finalizable flag on declaring class.
3928 if (shorty == "V") {
3929 // Void return type.
3930 if (klass->GetClassLoader() != nullptr) { // All non-boot finalizer methods are flagged.
3931 klass->SetFinalizable();
3932 } else {
3933 std::string_view klass_descriptor =
3934 dex_file.GetTypeDescriptorView(dex_file.GetTypeId(klass->GetDexTypeIndex()));
3935 // The Enum class declares a "final" finalize() method to prevent subclasses from
3936 // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
3937 // subclasses, so we exclude it here.
3938 // We also want to avoid setting the flag on Object, where we know that finalize() is
3939 // empty.
3940 if (klass_descriptor != "Ljava/lang/Object;" &&
3941 klass_descriptor != "Ljava/lang/Enum;") {
3942 klass->SetFinalizable();
3943 }
3944 }
3945 }
3946 } else if (method_name[0] == '<') {
3947 // Fix broken access flags for initializers. Bug 11157540.
3948 bool is_init = has_ascii_name("<init>", sizeof("<init>") - 1u);
3949 bool is_clinit = has_ascii_name("<clinit>", sizeof("<clinit>") - 1u);
3950 if (UNLIKELY(!is_init && !is_clinit)) {
3951 LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
3952 } else {
3953 if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
3954 LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
3955 << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
3956 access_flags |= kAccConstructor;
3957 }
3958 }
3959 }
3960
3961 // Check for nterp invoke fast-path based on shorty.
3962 bool all_parameters_are_reference = true;
3963 bool all_parameters_are_reference_or_int = true;
3964 for (size_t i = 1; i < shorty.length(); ++i) {
3965 if (shorty[i] != 'L') {
3966 all_parameters_are_reference = false;
3967 if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
3968 all_parameters_are_reference_or_int = false;
3969 break;
3970 }
3971 }
3972 }
3973 if (all_parameters_are_reference_or_int && shorty[0] != 'F' && shorty[0] != 'D') {
3974 access_flags |= kAccNterpInvokeFastPathFlag;
3975 }
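// Example of the shorty-based check above (illustrative only): for
// `void f(Object, int)` the shorty is "VLI"; all parameters are references or
// ints and the return type is not float/double, so the invoke fast-path flag
// is set. For `void g(Object, float)` ("VLF") it is not. The entry fast path
// (set further below) additionally requires reference-only parameters, e.g.
// a shorty of "VLL".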
3976
3977 if (UNLIKELY((access_flags & kAccNative) != 0u)) {
3978 // Check if the native method is annotated with @FastNative or @CriticalNative.
3979 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
3980 access_flags |=
3981 annotations::GetNativeMethodAnnotationAccessFlags(dex_file, class_def, dex_method_idx);
3982 dst->SetAccessFlags(access_flags);
3983 DCHECK(!dst->IsAbstract());
3984 DCHECK(!dst->HasCodeItem());
3985 DCHECK_EQ(method.GetCodeItemOffset(), 0u);
3986 dst->SetDataPtrSize(nullptr, image_pointer_size_); // JNI stub/trampoline not linked yet.
3987 } else if ((access_flags & kAccAbstract) != 0u) {
3988 dst->SetAccessFlags(access_flags);
3989 // Must be done after SetAccessFlags since IsAbstract depends on it.
3990 DCHECK(dst->IsAbstract());
3991 if (klass->IsInterface()) {
3992 dst->CalculateAndSetImtIndex();
3993 }
3994 DCHECK(!dst->HasCodeItem());
3995 DCHECK_EQ(method.GetCodeItemOffset(), 0u);
3996 dst->SetDataPtrSize(nullptr, image_pointer_size_); // Single implementation not set yet.
3997 } else {
3998 // Check for nterp entry fast-path based on shorty.
3999 if (all_parameters_are_reference) {
4000 access_flags |= kAccNterpEntryPointFastPathFlag;
4001 }
4002 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
4003 if (annotations::MethodIsNeverCompile(dex_file, class_def, dex_method_idx)) {
4004 access_flags |= kAccCompileDontBother;
4005 }
4006 dst->SetAccessFlags(access_flags);
4007 DCHECK(!dst->IsAbstract());
4008 DCHECK(dst->HasCodeItem());
4009 uint32_t code_item_offset = method.GetCodeItemOffset();
4010 DCHECK_NE(code_item_offset, 0u);
4011 if (Runtime::Current()->IsAotCompiler()) {
4012 dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_);
4013 } else {
4014 dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset), dex_file.IsCompactDexFile());
4015 }
4016 }
4017
4018 if (Runtime::Current()->IsZygote() &&
4019 !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
4020 dst->SetMemorySharedMethod();
4021 }
4022 }
4023
4024 void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
4025 ObjPtr<mirror::DexCache> dex_cache =
4026 AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
4027 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
4028 AppendToBootClassPath(dex_file, dex_cache);
4029 }
4030
4031 void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
4032 ObjPtr<mirror::DexCache> dex_cache) {
4033 CHECK(dex_file != nullptr);
4034 CHECK(dex_cache != nullptr) << dex_file->GetLocation();
4035 CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
4036 boot_class_path_.push_back(dex_file);
4037 WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
4038 RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
4039 }
4040
4041 void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
4042 ObjPtr<mirror::DexCache> dex_cache,
4043 ObjPtr<mirror::ClassLoader> class_loader) {
4044 Thread* const self = Thread::Current();
4045 Locks::dex_lock_->AssertExclusiveHeld(self);
4046 CHECK(dex_cache != nullptr) << dex_file.GetLocation();
4047 CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
4048 // For app images, the dex cache location may be a suffix of the dex file location since the
4049 // dex file location is an absolute path.
4050 const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
4051 const size_t dex_cache_length = dex_cache_location.length();
4052 CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
4053 std::string dex_file_location = dex_file.GetLocation();
4054 // The following path checks don't work on preopt when using boot dex files, where the dex
4055 // cache location is the one on device, and the dex_file's location is the one on host.
4056 Runtime* runtime = Runtime::Current();
4057 if (!(runtime->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
4058 CHECK_GE(dex_file_location.length(), dex_cache_length)
4059 << dex_cache_location << " " << dex_file.GetLocation();
4060 const std::string dex_file_suffix = dex_file_location.substr(
4061 dex_file_location.length() - dex_cache_length,
4062 dex_cache_length);
4063 // Example dex_cache location is SettingsProvider.apk and
4064 // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
4065 CHECK_EQ(dex_cache_location, dex_file_suffix);
4066 }
4067
4068 // Check if we need to initialize OatFile data (.data.bimg.rel.ro and .bss
4069 // sections) needed for code execution and register the oat code range.
4070 const OatFile* oat_file =
4071 (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
4072 bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
4073 if (initialize_oat_file_data) {
4074 for (const auto& entry : dex_caches_) {
4075 if (!self->IsJWeakCleared(entry.second.weak_root) &&
4076 entry.first->GetOatDexFile() != nullptr &&
4077 entry.first->GetOatDexFile()->GetOatFile() == oat_file) {
4078 initialize_oat_file_data = false; // Already initialized.
4079 break;
4080 }
4081 }
4082 }
4083 if (initialize_oat_file_data) {
4084 oat_file->InitializeRelocations();
4085 // Notify the fault handler about the new executable code range if needed.
4086 size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
4087 DCHECK_LE(exec_offset, oat_file->Size());
4088 size_t exec_size = oat_file->Size() - exec_offset;
4089 if (exec_size != 0u) {
4090 runtime->AddGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
4091 }
4092 }
4093
4094 // Let hiddenapi assign a domain to the newly registered dex file.
4095 hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
4096
4097 jweak dex_cache_jweak = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, dex_cache);
4098 DexCacheData data;
4099 data.weak_root = dex_cache_jweak;
4100 data.class_table = ClassTableForClassLoader(class_loader);
4101 AddNativeDebugInfoForDex(self, &dex_file);
4102 DCHECK(data.class_table != nullptr);
4103 // Make sure to hold the dex cache live in the class table. This case happens for the boot class
4104 // path dex caches without an image.
4105 data.class_table->InsertStrongRoot(dex_cache);
4106 // Make sure that the dex cache holds the classloader live.
4107 dex_cache->SetClassLoader(class_loader);
4108 if (class_loader != nullptr) {
4109 // Since we added a strong root to the class table, do the write barrier as required for
4110 // remembered sets and generational GCs.
4111 WriteBarrier::ForEveryFieldWrite(class_loader);
4112 }
4113 bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
4114 CHECK(inserted);
4115 }
4116
4117 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
4118 return data != nullptr
4119 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
4120 : nullptr;
4121 }
4122
4123 bool ClassLinker::IsSameClassLoader(
4124 ObjPtr<mirror::DexCache> dex_cache,
4125 const DexCacheData* data,
4126 ObjPtr<mirror::ClassLoader> class_loader) {
4127 CHECK(data != nullptr);
4128 DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
4129 return data->class_table == ClassTableForClassLoader(class_loader);
4130 }
4131
4132 void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
4133 ObjPtr<mirror::ClassLoader> class_loader) {
4134 SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
4135 Thread* self = Thread::Current();
4136 StackHandleScope<2> hs(self);
4137 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
4138 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
4139 const DexFile* dex_file = dex_cache->GetDexFile();
4140 DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
4141 if (kIsDebugBuild) {
4142 ReaderMutexLock mu(self, *Locks::dex_lock_);
4143 const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
4144 ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
4145 DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache that's already "
4146 << "been registered on dex file " << dex_file->GetLocation();
4147 }
4148 ClassTable* table;
4149 {
4150 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4151 table = InsertClassTableForClassLoader(h_class_loader.Get());
4152 }
4153 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4154 // a thread holding the dex lock and blocking on a condition variable regarding
4155 // weak references access, and a thread blocking on the dex lock.
4156 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
4157 WriterMutexLock mu(self, *Locks::dex_lock_);
4158 RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
4159 table->InsertStrongRoot(h_dex_cache.Get());
4160 if (h_class_loader.Get() != nullptr) {
4161 // Since we added a strong root to the class table, do the write barrier as required for
4162 // remembered sets and generational GCs.
4163 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
4164 }
4165 }
4166
4167 static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4168 REQUIRES_SHARED(Locks::mutator_lock_) {
4169 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
4170 "Attempt to register dex file %s with multiple class loaders",
4171 dex_file.GetLocation().c_str());
4172 }
4173
4174 ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
4175 ObjPtr<mirror::ClassLoader> class_loader) {
4176 Thread* self = Thread::Current();
4177 ObjPtr<mirror::DexCache> old_dex_cache;
4178 bool registered_with_another_class_loader = false;
4179 {
4180 ReaderMutexLock mu(self, *Locks::dex_lock_);
4181 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4182 old_dex_cache = DecodeDexCacheLocked(self, old_data);
4183 if (old_dex_cache != nullptr) {
4184 if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
4185 return old_dex_cache;
4186 } else {
4187 // TODO: This is not very clean. Consider adding a way to request that exceptions
4188 // be thrown when it's safe to do so, to simplify this.
4189 registered_with_another_class_loader = true;
4190 }
4191 }
4192 }
4193 // We need to have released the dex_lock_ to allocate safely.
4194 if (registered_with_another_class_loader) {
4195 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4196 return nullptr;
4197 }
4198 SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
4199 LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
4200 DCHECK(linear_alloc != nullptr);
4201 ClassTable* table;
4202 {
4203 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4204 table = InsertClassTableForClassLoader(class_loader);
4205 }
4206 // Don't alloc while holding the lock, since allocation may need to
4207 // suspend all threads and another thread may need the dex_lock_ to
4208 // get to a suspend point.
4209 StackHandleScope<3> hs(self);
4210 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
4211 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
4212 {
4213 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4214 // a thread holding the dex lock and blocking on a condition variable regarding
4215 // weak references access, and a thread blocking on the dex lock.
4216 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
4217 WriterMutexLock mu(self, *Locks::dex_lock_);
4218 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4219 old_dex_cache = DecodeDexCacheLocked(self, old_data);
4220 if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
4221 // Do Initialize while holding dex lock to make sure two threads don't call it
4222 // at the same time with the same dex cache. Since the .bss is shared, this can cause a
4223 // failing DCHECK that the arrays are null.
4224 h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
4225 RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
4226 }
4227 if (old_dex_cache != nullptr) {
4228 // Another thread managed to initialize the dex cache faster, so use that DexCache.
4229 // If this thread encountered OOME, ignore it.
4230 DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
4231 self->ClearException();
4232 // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
4233 // dex_lock_.
4234 if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
4235 return old_dex_cache;
4236 } else {
4237 registered_with_another_class_loader = true;
4238 }
4239 }
4240 }
4241 if (registered_with_another_class_loader) {
4242 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4243 return nullptr;
4244 }
4245 if (h_dex_cache == nullptr) {
4246 self->AssertPendingOOMException();
4247 return nullptr;
4248 }
4249 table->InsertStrongRoot(h_dex_cache.Get());
4250 if (h_class_loader.Get() != nullptr) {
4251 // Since we added a strong root to the class table, do the write barrier as required for
4252 // remembered sets and generational GCs.
4253 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
4254 }
4255 VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
4256 PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
4257 return h_dex_cache.Get();
4258 }
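// Informal sketch of the registration protocol above: look up the dex cache
// under the shared dex_lock_; if absent, allocate a new DexCache with no lock
// held (allocation may suspend threads), then re-check under the exclusive
// lock and either publish our cache or discard it in favor of a racing
// thread's winner. Losing the race to a different class loader is reported
// via ThrowDexFileAlreadyRegisteredError().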
4259
4260 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4261 ReaderMutexLock mu(self, *Locks::dex_lock_);
4262 return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4263 }
4264
4265 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4266 ReaderMutexLock mu(self, *Locks::dex_lock_);
4267 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4268 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4269 if (dex_cache != nullptr) {
4270 return dex_cache;
4271 }
4272 // Failure, dump diagnostic and abort.
4273 for (const auto& entry : dex_caches_) {
4274 const DexCacheData& data = entry.second;
4275 if (DecodeDexCacheLocked(self, &data) != nullptr) {
4276 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4277 }
4278 }
4279 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4280 << " " << &dex_file;
4281 UNREACHABLE();
4282 }
4283
4284 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
4285 ReaderMutexLock mu(self, *Locks::dex_lock_);
4286 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4287 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4288 if (dex_cache != nullptr) {
4289 return dex_cache;
4290 }
4291 // Failure, dump diagnostic and abort.
4292 for (const auto& entry : dex_caches_) {
4293 const DexCacheData& data = entry.second;
4294 if (DecodeDexCacheLocked(self, &data) != nullptr) {
4295 const OatDexFile* other_oat_dex_file = entry.first->GetOatDexFile();
4296 const OatFile* oat_file =
4297 (other_oat_dex_file == nullptr) ? nullptr : other_oat_dex_file->GetOatFile();
4298 LOG(FATAL_WITHOUT_ABORT)
4299 << "Registered dex file " << entry.first->GetLocation()
4300 << " oat_dex_file=" << other_oat_dex_file
4301 << " oat_file=" << oat_file
4302 << " oat_location=" << (oat_file == nullptr ? "null" : oat_file->GetLocation())
4303 << " dex_file=" << &entry.first;
4304 }
4305 }
4306 LOG(FATAL) << "Failed to find DexCache for OatDexFile "
4307 << oat_dex_file.GetDexFileLocation()
4308 << " oat_dex_file=" << &oat_dex_file
4309 << " oat_file=" << oat_dex_file.GetOatFile()
4310 << " oat_location=" << oat_dex_file.GetOatFile()->GetLocation();
4311 UNREACHABLE();
4312 }
4313
4314 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4315 const DexFile* dex_file = dex_cache->GetDexFile();
4316 DCHECK(dex_file != nullptr);
4317 ReaderMutexLock mu(self, *Locks::dex_lock_);
4318 auto it = dex_caches_.find(dex_file);
4319 if (it != dex_caches_.end()) {
4320 const DexCacheData& data = it->second;
4321 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4322 if (registered_dex_cache != nullptr) {
4323 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4324 return data.class_table;
4325 }
4326 }
4327 return nullptr;
4328 }
4329
4330 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
4331 const OatDexFile& oat_dex_file) {
4332 auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4333 return entry.first->GetOatDexFile() == &oat_dex_file;
4334 });
4335 return it != dex_caches_.end() ? &it->second : nullptr;
4336 }
4337
4338 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4339 auto it = dex_caches_.find(&dex_file);
4340 return it != dex_caches_.end() ? &it->second : nullptr;
4341 }
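// Lookup-cost note: the DexFile overload above is a direct map lookup keyed
// by the DexFile pointer, while the OatDexFile overload is a linear scan over
// `dex_caches_`, since the map is not keyed by OatDexFile*.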
4342
4343 void ClassLinker::CreatePrimitiveClass(Thread* self,
4344 Primitive::Type type,
4345 ClassRoot primitive_root) {
4346 ObjPtr<mirror::Class> primitive_class =
4347 AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
4348 CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4349 // Do not hold lock on the primitive class object, the initialization of
4350 // primitive classes is done while the process is still single threaded.
4351 primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
4352 primitive_class->SetPrimitiveType(type);
4353 primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4354 DCHECK_EQ(primitive_class->NumMethods(), 0u);
4355 // Primitive classes are initialized during single threaded startup, so visibly initialized.
4356 primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
4357 const char* descriptor = Primitive::Descriptor(type);
4358 ObjPtr<mirror::Class> existing = InsertClass(descriptor,
4359 primitive_class,
4360 ComputeModifiedUtf8Hash(descriptor));
4361 CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
4362 SetClassRoot(primitive_root, primitive_class);
4363 }
4364
4365 inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4366 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4367 }
4368
4369 // Create an array class (i.e. the class object for the array, not the
4370 // array itself). "descriptor" looks like "[C" or "[[[[B" or
4371 // "[Ljava/lang/String;".
4372 //
4373 // If "descriptor" refers to an array of primitives, look up the
4374 // primitive type's internally-generated class object.
4375 //
4376 // "class_loader" is the class loader of the class that's referring to
4377 // us. It's used to ensure that we're looking for the element type in
4378 // the right context. It does NOT become the class loader for the
4379 // array class; that always comes from the base element class.
4380 //
4381 // Returns null with an exception raised on failure.
4382 ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4383 const char* descriptor,
4384 size_t hash,
4385 Handle<mirror::ClassLoader> class_loader) {
4386 // Identify the underlying component type
4387 CHECK_EQ('[', descriptor[0]);
4388 StackHandleScope<2> hs(self);
4389
4390 // This check prevents calls to ClassLoad and ClassPrepare, which can cause Java (user-supplied)
4391 // code to be executed. We put it up here so we can avoid all the allocations associated with
4392 // creating the class. This can happen with (e.g.) JIT threads.
4393 if (!self->CanLoadClasses()) {
4394 // Make sure we don't try to load anything, potentially causing an infinite loop.
4395 ObjPtr<mirror::Throwable> pre_allocated =
4396 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4397 self->SetException(pre_allocated);
4398 return nullptr;
4399 }
4400
4401 MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4402 class_loader)));
4403 if (component_type == nullptr) {
4404 DCHECK(self->IsExceptionPending());
4405 // We need to accept erroneous classes as component types. Under AOT, we
4406 // don't accept them as we cannot encode the erroneous class in an image.
4407 const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4408 component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
4409 if (component_type == nullptr || Runtime::Current()->IsAotCompiler()) {
4410 DCHECK(self->IsExceptionPending());
4411 return nullptr;
4412 } else {
4413 self->ClearException();
4414 }
4415 }
4416 if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4417 ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4418 return nullptr;
4419 }
4420 // See if the component type is already loaded. Array classes are
4421 // always associated with the class loader of their underlying
4422 // element type -- an array of Strings goes with the loader for
4423 // java/lang/String -- so we need to look for it there. (The
4424 // caller should have checked for the existence of the class
4425 // before calling here, but they did so with *their* class loader,
4426 // not the component type's loader.)
4427 //
4428 // If we find it, the caller adds "loader" to the class' initiating
4429 // loader list, which should prevent us from going through this again.
4430 //
4431 // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
4432 // are the same, because our caller (FindClass) just did the
4433 // lookup. (Even if we get this wrong we still have correct behavior,
4434 // because we effectively do this lookup again when we add the new
4435 // class to the hash table --- necessary because of possible races with
4436 // other threads.)
4437 if (class_loader.Get() != component_type->GetClassLoader()) {
4438 ObjPtr<mirror::Class> new_class =
4439 LookupClass(self, descriptor, hash, component_type->GetClassLoader());
4440 if (new_class != nullptr) {
4441 return new_class;
4442 }
4443 }
4444 // Core array classes, i.e. Object[], Class[], String[] and primitive
4445 // arrays, have special initialization and they should be found above.
4446 DCHECK_IMPLIES(component_type->IsObjectClass(),
4447 // Guard from false positives for errors before setting superclass.
4448 component_type->IsErroneousUnresolved());
4449 DCHECK(!component_type->IsStringClass());
4450 DCHECK(!component_type->IsClassClass());
4451 DCHECK(!component_type->IsPrimitive());
4452
4453 // Fill out the fields in the Class.
4454 //
4455 // It is possible to execute some methods against arrays, because
4456 // all arrays are subclasses of java_lang_Object_, so we need to set
4457 // up a vtable. We can just point at the one in java_lang_Object_.
4458 //
4459 // Array classes are simple enough that we don't need to do a full
4460 // link step.
4461 size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4462 auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4463 size_t usable_size)
4464 REQUIRES_SHARED(Locks::mutator_lock_) {
4465 ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
4466 mirror::Class::InitializeClassVisitor init_class(array_class_size);
4467 init_class(obj, usable_size);
4468 ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4469 klass->SetComponentType(component_type.Get());
4470 // Do not hold lock for initialization, the fence issued after the visitor
4471 // returns ensures memory visibility together with the implicit consume
4472 // semantics (for all supported architectures) for any thread that loads
4473 // the array class reference from any memory locations afterwards.
4474 FinishArrayClassSetup(klass);
4475 };
4476 auto new_class = hs.NewHandle<mirror::Class>(
4477 AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
4478 if (new_class == nullptr) {
4479 self->AssertPendingOOMException();
4480 return nullptr;
4481 }
4482
4483 ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
4484 if (existing == nullptr) {
4485 // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4486 // duplicate events in case of races. Array classes don't really follow dedicated
4487 // load and prepare, anyways.
4488 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4489 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4490
4491 jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
4492 return new_class.Get();
4493 }
4494 // Another thread must have loaded the class after we
4495 // started but before we finished. Abandon what we've
4496 // done.
4497 //
4498 // (Yes, this happens.)
4499
4500 return existing;
4501 }
4502
4503 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4504 ClassRoot class_root;
4505 switch (type) {
4506 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4507 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4508 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4509 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4510 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4511 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4512 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4513 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4514 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4515 default:
4516 return nullptr;
4517 }
4518 return GetClassRoot(class_root, this);
4519 }
4520
4521 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4522 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4523 if (UNLIKELY(result == nullptr)) {
4524 std::string printable_type(PrintableChar(type));
4525 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4526 }
4527 return result;
4528 }
4529
4530 ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4531 ObjPtr<mirror::Class> klass,
4532 size_t hash) {
4533 DCHECK(Thread::Current()->CanLoadClasses());
4534 if (VLOG_IS_ON(class_linker)) {
4535 ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
4536 std::string source;
4537 if (dex_cache != nullptr) {
4538 source += " from ";
4539 source += dex_cache->GetLocation()->ToModifiedUtf8();
4540 }
4541 LOG(INFO) << "Loaded class " << descriptor << source;
4542 }
4543 {
4544 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4545 const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
4546 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
4547 ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
4548 if (existing != nullptr) {
4549 return existing;
4550 }
4551 VerifyObject(klass);
4552 class_table->InsertWithHash(klass, hash);
4553 if (class_loader != nullptr) {
4554 // This is necessary because we need to have the card dirtied for remembered sets.
4555 WriteBarrier::ForEveryFieldWrite(class_loader);
4556 }
4557 if (log_new_roots_) {
4558 new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
4559 }
4560 }
4561 if (kIsDebugBuild) {
4562 // Test that copied methods correctly can find their holder.
4563 for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4564 CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4565 }
4566 }
4567 return nullptr;
4568 }
4569
4570 void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
4571 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4572 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4573 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4574 new_bss_roots_boot_oat_files_.push_back(oat_file);
4575 }
4576 }
4577
4578 // TODO This should really be in mirror::Class.
4579 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
4580 LengthPrefixedArray<ArtMethod>* new_methods) {
4581 klass->SetMethodsPtrUnchecked(new_methods,
4582 klass->NumDirectMethods(),
4583 klass->NumDeclaredVirtualMethods());
4584 // Need to mark the card so that the remembered sets and mod union tables get updated.
4585 WriteBarrier::ForEveryFieldWrite(klass);
4586 }
4587
4588 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4589 const char* descriptor,
4590 ObjPtr<mirror::ClassLoader> class_loader) {
4591 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4592 }
4593
4594 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4595 const char* descriptor,
4596 size_t hash,
4597 ObjPtr<mirror::ClassLoader> class_loader) {
4598 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4599 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4600 if (class_table != nullptr) {
4601 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
4602 if (result != nullptr) {
4603 return result;
4604 }
4605 }
4606 return nullptr;
4607 }
4608
4609 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4610 public:
4611 MoveClassTableToPreZygoteVisitor() {}
4612
4613 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4614 REQUIRES(Locks::classlinker_classes_lock_)
4615 REQUIRES_SHARED(Locks::mutator_lock_) override {
4616 ClassTable* const class_table = class_loader->GetClassTable();
4617 if (class_table != nullptr) {
4618 class_table->FreezeSnapshot();
4619 }
4620 }
4621 };
4622
4623 void ClassLinker::MoveClassTableToPreZygote() {
4624 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4625 boot_class_table_->FreezeSnapshot();
4626 MoveClassTableToPreZygoteVisitor visitor;
4627 VisitClassLoaders(&visitor);
4628 }
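// A note on FreezeSnapshot() (a sketch of the intent): the classes loaded so
// far become a read-only snapshot that forked zygote children can share
// copy-on-write, while classes defined afterwards go into a new writable
// table.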
4629
4630 // Look up classes by hash and descriptor and put all matching ones in the result array.
4631 class LookupClassesVisitor : public ClassLoaderVisitor {
4632 public:
4633 LookupClassesVisitor(const char* descriptor,
4634 size_t hash,
4635 std::vector<ObjPtr<mirror::Class>>* result)
4636 : descriptor_(descriptor),
4637 hash_(hash),
4638 result_(result) {}
4639
4640 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4641 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
4642 ClassTable* const class_table = class_loader->GetClassTable();
4643 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
4644 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4645 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
4646 result_->push_back(klass);
4647 }
4648 }
4649
4650 private:
4651 const char* const descriptor_;
4652 const size_t hash_;
4653 std::vector<ObjPtr<mirror::Class>>* const result_;
4654 };
4655
4656 void ClassLinker::LookupClasses(const char* descriptor,
4657 std::vector<ObjPtr<mirror::Class>>& result) {
4658 result.clear();
4659 Thread* const self = Thread::Current();
4660 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4661 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4662 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
4663 if (klass != nullptr) {
4664 DCHECK(klass->GetClassLoader() == nullptr);
4665 result.push_back(klass);
4666 }
4667 LookupClassesVisitor visitor(descriptor, hash, &result);
4668 VisitClassLoaders(&visitor);
4669 }
4670
4671 bool ClassLinker::AttemptSupertypeVerification(Thread* self,
4672 verifier::VerifierDeps* verifier_deps,
4673 Handle<mirror::Class> klass,
4674 Handle<mirror::Class> supertype) {
4675 DCHECK(self != nullptr);
4676 DCHECK(klass != nullptr);
4677 DCHECK(supertype != nullptr);
4678
4679 if (!supertype->IsVerified() && !supertype->IsErroneous()) {
4680 VerifyClass(self, verifier_deps, supertype);
4681 }
4682
4683 if (supertype->IsVerified()
4684 || supertype->ShouldVerifyAtRuntime()
4685 || supertype->IsVerifiedNeedsAccessChecks()) {
4686 // The supertype is either verified, or we soft failed at AOT time.
4687 DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
4688 return true;
4689 }
4690 // If we got this far then we have a hard failure.
4691 std::string error_msg =
4692 StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
4693 klass->PrettyDescriptor().c_str(),
4694 supertype->PrettyDescriptor().c_str());
4695 LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4696 StackHandleScope<1> hs(self);
4697 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
4698 if (cause != nullptr) {
4699 // Set during VerifyClass call (if at all).
4700 self->ClearException();
4701 }
4702 // Change into a verify error.
4703 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4704 if (cause != nullptr) {
4705 self->GetException()->SetCause(cause.Get());
4706 }
4707 ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
4708 if (Runtime::Current()->IsAotCompiler()) {
4709 Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
4710 }
4711 // Need to grab the lock to change status.
4712 ObjectLock<mirror::Class> super_lock(self, klass);
4713 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4714 return false;
4715 }
4716
4717 verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
4718 verifier::VerifierDeps* verifier_deps,
4719 Handle<mirror::Class> klass,
4720 verifier::HardFailLogMode log_level) {
4721 {
4722 // TODO: assert that the monitor on the Class is held
4723 ObjectLock<mirror::Class> lock(self, klass);
4724
4725 // Is somebody verifying this now?
4726 ClassStatus old_status = klass->GetStatus();
4727 while (old_status == ClassStatus::kVerifying) {
4728 lock.WaitIgnoringInterrupts();
4729 // WaitIgnoringInterrupts can still receive an interrupt and return early; in that
4730 // case we may see the same status again. b/62912904. This is why the check is
4731 // greater or equal.
4732 CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
4733 << "Class '" << klass->PrettyClass()
4734 << "' performed an illegal verification state transition from " << old_status
4735 << " to " << klass->GetStatus();
4736 old_status = klass->GetStatus();
4737 }
4738
4739 // The class might already be erroneous, for example at compile time if we attempted to verify
4740 // this class as a parent to another.
4741 if (klass->IsErroneous()) {
4742 ThrowEarlierClassFailure(klass.Get());
4743 return verifier::FailureKind::kHardFailure;
4744 }
4745
4746 // Don't attempt to re-verify if already verified.
4747 if (klass->IsVerified()) {
4748 if (verifier_deps != nullptr &&
4749 verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
4750 !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
4751 !Runtime::Current()->IsAotCompiler()) {
4752 // If the klass is verified, but `verifier_deps` did not record it, this
4753 // means we are running background verification of a secondary dex file.
4754 // Re-run the verifier to populate `verifier_deps`.
4755 // No need to run the verification when running on the AOT Compiler, as
4756 // the driver handles those multithreaded cases already.
4757 std::string error_msg;
4758 verifier::FailureKind failure =
4759 PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4760 // We could have soft failures, so just check that we don't have a hard
4761 // failure.
4762 DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
4763 }
4764 return verifier::FailureKind::kNoFailure;
4765 }
4766
4767 if (klass->IsVerifiedNeedsAccessChecks()) {
4768 if (!Runtime::Current()->IsAotCompiler()) {
4769 // Mark the class as having a verification attempt to avoid re-running
4770 // the verifier.
4771 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4772 }
4773 return verifier::FailureKind::kAccessChecksFailure;
4774 }
4775
4776 // For AOT, don't attempt to re-verify if we have already found we should
4777 // verify at runtime.
4778 if (klass->ShouldVerifyAtRuntime()) {
4779 CHECK(Runtime::Current()->IsAotCompiler());
4780 return verifier::FailureKind::kSoftFailure;
4781 }
4782
4783 DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
4784 mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
4785
4786 // Skip verification if disabled.
4787 if (!Runtime::Current()->IsVerificationEnabled()) {
4788 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4789 UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
4790 return verifier::FailureKind::kNoFailure;
4791 }
4792 }
4793
4794 VLOG(class_linker) << "Beginning verification for class: "
4795 << klass->PrettyDescriptor()
4796 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4797
4798 // Verify super class.
4799 StackHandleScope<2> hs(self);
4800 MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4801 // If we have a superclass and we get a hard verification failure we can return immediately.
4802 if (supertype != nullptr &&
4803 !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
4804 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4805 return verifier::FailureKind::kHardFailure;
4806 }
4807
4808 // Verify all default super-interfaces.
4809 //
4810 // (1) Don't bother if the superclass has already had a soft verification failure.
4811 //
4812 // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4813 // recursive initialization by themselves. This is because when an interface is initialized
4814 // directly it must not initialize its superinterfaces. We are allowed to verify regardless
4815 // but choose not to for an optimization. If the interface is being verified due to a class
4816 // initialization (which would need all the default interfaces to be verified) the class code
4817 // will trigger the recursive verification anyway.
4818 if ((supertype == nullptr || supertype->IsVerified()) // See (1)
4819 && !klass->IsInterface()) { // See (2)
4820 int32_t iftable_count = klass->GetIfTableCount();
4821 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
4822 // Loop through all interfaces this class has defined. The order doesn't matter.
4823 for (int32_t i = 0; i < iftable_count; i++) {
4824 iface.Assign(klass->GetIfTable()->GetInterface(i));
4825 DCHECK(iface != nullptr);
4826 // We only care about interfaces that have default methods, and we can skip any already verified.
4827 if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
4828 continue;
4829 } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
4830 // We had a hard failure while verifying this interface. Just return immediately.
4831 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4832 return verifier::FailureKind::kHardFailure;
4833 } else if (UNLIKELY(!iface->IsVerified())) {
4834 // We softly failed to verify the iface. Stop checking and clean up.
4835 // Put the iface into the supertype handle so we know what caused us to fail.
4836 supertype.Assign(iface.Get());
4837 break;
4838 }
4839 }
4840 }
4841
4842 // At this point if verification failed, then supertype is the "first" supertype that failed
4843 // verification (without a specific order). If verification succeeded, then supertype is either
4844 // null or the original superclass of klass and is verified.
4845 DCHECK(supertype == nullptr ||
4846 supertype.Get() == klass->GetSuperClass() ||
4847 !supertype->IsVerified());
4848
4849 // Try to use verification information from the oat file, otherwise do runtime verification.
4850 const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
4851 ClassStatus oat_file_class_status(ClassStatus::kNotReady);
4852 bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);
4853
4854 VLOG(class_linker) << "Class preverified status for class "
4855 << klass->PrettyDescriptor()
4856 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4857 << ": "
4858 << preverified
4859 << "( " << oat_file_class_status << ")";
4860
4861 // If the oat file says the class had an error, re-run the verifier. That way we will either:
4862 // 1) Be successful at runtime, or
4863 // 2) Get a precise error message.
4864 DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);
4865
4866 std::string error_msg;
4867 verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
4868 if (!preverified) {
4869 verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4870 } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
4871 verifier_failure = verifier::FailureKind::kAccessChecksFailure;
4872 }
4873
4874 // Verification is done, grab the lock again.
4875 ObjectLock<mirror::Class> lock(self, klass);
4876 self->AssertNoPendingException();
4877
4878 if (verifier_failure == verifier::FailureKind::kHardFailure) {
4879 VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
4880 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4881 << " because: " << error_msg;
4882 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4883 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4884 return verifier_failure;
4885 }
4886
4887 // Make sure all classes referenced by catch blocks are resolved.
4888 ResolveClassExceptionHandlerTypes(klass);
4889
4890 if (Runtime::Current()->IsAotCompiler()) {
4891 if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
4892 // Regardless of our own verification result, we need to verify the class
4893 // at runtime if the super class is not verified. This is required in case
4894 // we generate an app/boot image.
4895 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4896 } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
4897 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4898 } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
4899 verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
4900 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4901 } else {
4902 mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
4903 }
4904 // Notify the compiler about the verification status, in case the class
4905 // was verified implicitly (e.g. as the super class of a compiled class). When the
4906 // compiler unloads the dex file after compilation, we still want to keep the
4907 // verification states.
4908 Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
4909 ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
4910 } else {
4911 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4912 }
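// Summary sketch (restating the AOT branches above, not new behavior):
//
//   kNoFailure                        -> ClassStatus::kVerified
//   kSoftFailure / kTypeChecksFailure -> ClassStatus::kRetryVerificationAtRuntime
//   kAccessChecksFailure              -> ClassStatus::kVerifiedNeedsAccessChecks
//   kHardFailure                      -> ClassStatus::kErrorResolved (+ VerifyError thrown earlier)
//   supertype needs runtime verify    -> ClassStatus::kRetryVerificationAtRuntime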
4913
4914 UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
4915 return verifier_failure;
4916 }
4917
4918 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
4919 verifier::VerifierDeps* verifier_deps,
4920 Handle<mirror::Class> klass,
4921 verifier::HardFailLogMode log_level,
4922 std::string* error_msg) {
4923 Runtime* const runtime = Runtime::Current();
4924 StackHandleScope<2> hs(self);
4925 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
4926 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
4927 return verifier::ClassVerifier::VerifyClass(self,
4928 verifier_deps,
4929 dex_cache->GetDexFile(),
4930 klass,
4931 dex_cache,
4932 class_loader,
4933 *klass->GetClassDef(),
4934 runtime->GetCompilerCallbacks(),
4935 log_level,
4936 Runtime::Current()->GetTargetSdkVersion(),
4937 error_msg);
4938 }
4939
4940 bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
4941 const DexFile& dex_file,
4942 Handle<mirror::Class> klass,
4943 ClassStatus& oat_file_class_status) {
4944 // If we're compiling, we can only verify the class using the oat file if
4945 // we are not compiling the image, or if the class we're verifying is not part of
4946 // the compilation unit (i.e. a dependency of the app). We let the compiler callback
4947 // tell us about the latter.
4948 if (Runtime::Current()->IsAotCompiler()) {
4949 CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
4950 // We are compiling an app (not the image).
4951 if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
4952 return false;
4953 }
4954 }
4955
4956 const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
4957 // In case we run without an image there won't be a backing oat file.
4958 if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
4959 return false;
4960 }
4961
4962 uint16_t class_def_index = klass->GetDexClassDefIndex();
4963 oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
4964 if (oat_file_class_status >= ClassStatus::kVerified) {
4965 return true;
4966 }
4967 if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4968 // We return that the class has already been verified, and the caller should
4969 // check the class status to ensure we run with access checks.
4970 return true;
4971 }
4972
4973 // Check the class status with the vdex file.
4974 const OatFile* oat_file = oat_dex_file->GetOatFile();
4975 if (oat_file != nullptr) {
4976 ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
4977 if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4978 VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
4979 oat_file_class_status = vdex_status;
4980 return true;
4981 }
4982 }
4983
4984 // If we only verified a subset of the classes at compile time, we can end up with classes that
4985 // were resolved by the verifier.
4986 if (oat_file_class_status == ClassStatus::kResolved) {
4987 return false;
4988 }
4989 // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
4990 CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
4991 << klass->PrettyClass() << " " << dex_file.GetLocation();
4992
4993 if (mirror::Class::IsErroneous(oat_file_class_status)) {
4994 // Compile time verification failed with a hard error. We'll re-run
4995 // verification, which might be successful at runtime.
4996 return false;
4997 }
4998 if (oat_file_class_status == ClassStatus::kNotReady) {
4999 // The status is uninitialized if we couldn't determine it at compile time, for example
5000 // because the class was not loaded.
5001 // TODO: When the verifier no longer relies on Class-es, failing to resolve/load the type
5002 // hierarchy won't be a problem and this case shouldn't occur.
5003 return false;
5004 }
5005 std::string temp;
5006 LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
5007 << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
5008 << klass->GetDescriptor(&temp);
5009 UNREACHABLE();
5010 }
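// Summary sketch of the decisions above (oat/vdex status -> can we trust it?):
//
//   status >= kVerified                       -> true
//   status == kVerifiedNeedsAccessChecks      -> true  (caller enables access checks)
//   vdex status >= kVerifiedNeedsAccessChecks -> true
//   status == kResolved                       -> false (only partially processed)
//   erroneous status                          -> false (re-run for a precise error)
//   status == kNotReady                       -> false (unknown at compile time)
//   anything else                             -> LOG(FATAL)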
5011
5012 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
5013 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
5014 ResolveMethodExceptionHandlerTypes(&method);
5015 }
5016 }
5017
5018 void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
5019 // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
5020 CodeItemDataAccessor accessor(method->DexInstructionData());
5021 if (!accessor.HasCodeItem()) {
5022 return; // native or abstract method
5023 }
5024 if (accessor.TriesSize() == 0) {
5025 return; // nothing to process
5026 }
5027 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
5028 CHECK(method->GetDexFile()->IsInDataSection(handlers_ptr))
5029 << method->PrettyMethod()
5030 << "@" << method->GetDexFile()->GetLocation()
5031 << "@" << reinterpret_cast<const void*>(handlers_ptr)
5032 << " is_compact_dex=" << method->GetDexFile()->IsCompactDexFile();
5033
5034 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
5035 for (uint32_t idx = 0; idx < handlers_size; idx++) {
5036 CatchHandlerIterator iterator(handlers_ptr);
5037 for (; iterator.HasNext(); iterator.Next()) {
5038 // Ensure exception types are resolved so that they don't need resolution to be delivered;
5039 // unresolved exception types will be ignored by exception delivery.
5040 if (iterator.GetHandlerTypeIndex().IsValid()) {
5041 ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
5042 if (exception_type == nullptr) {
5043 DCHECK(Thread::Current()->IsExceptionPending());
5044 Thread::Current()->ClearException();
5045 }
5046 }
5047 }
5048 handlers_ptr = iterator.EndDataPointer();
5049 }
5050 }
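// For orientation, the catch handler stream decoded above is the dex
// encoded_catch_handler_list item; roughly (a sketch, see the dex format
// specification for the authoritative layout):
//
//   uleb128 size;                    // handlers_size above.
//   encoded_catch_handler[size]:
//     sleb128 size;                  // > 0: no catch-all; <= 0: catch-all present.
//     { uleb128 type_idx; uleb128 addr; } x abs(size);
//     uleb128 catch_all_addr;        // only if size <= 0.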
5051
5052 ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
5053 jstring name,
5054 jobjectArray interfaces,
5055 jobject loader,
5056 jobjectArray methods,
5057 jobjectArray throws) {
5058 Thread* self = soa.Self();
5059
5060 // This is to prevent the calls to ClassLoad and ClassPrepare, which can cause Java/user-supplied
5061 // code to be executed. We put it up here so we can avoid all the allocations associated with
5062 // creating the class. This can happen with (e.g.) JIT threads.
5063 if (!self->CanLoadClasses()) {
5064 // Make sure we don't try to load anything, potentially causing an infinite loop.
5065 ObjPtr<mirror::Throwable> pre_allocated =
5066 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
5067 self->SetException(pre_allocated);
5068 return nullptr;
5069 }
5070
5071 StackHandleScope<12> hs(self);
5072 MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
5073 AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
5074 if (temp_klass == nullptr) {
5075 CHECK(self->IsExceptionPending()); // OOME.
5076 return nullptr;
5077 }
5078 DCHECK(temp_klass->GetClass() != nullptr);
5079 temp_klass->SetObjectSize(sizeof(mirror::Proxy));
5080 // Set the class access flags up front, so we do not try to set any flags on
5081 // the methods individually.
5082 temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
5083 temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
5084 DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
5085 temp_klass->SetName(soa.Decode<mirror::String>(name));
5086 temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
5087 // Object has an empty iftable; copy it for that reason.
5088 temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
5089 mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
5090 std::string storage;
5091 const char* descriptor = temp_klass->GetDescriptor(&storage);
5092 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
5093
5094 // Needs to be before we insert the class so that the allocator field is set.
5095 LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
5096
5097 // Insert the class before loading the fields as the field roots
5098 // (ArtField::declaring_class_) are only visited from the class
5099 // table. There can't be any suspend points between inserting the
5100 // class and setting the field arrays below.
5101 ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
5102 CHECK(existing == nullptr);
5103
5104 // Instance fields are inherited, but we add a couple of static fields...
5105 const size_t num_fields = 2;
5106 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
5107 temp_klass->SetSFieldsPtr(sfields);
5108
5109 // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
5110 // our proxy, so Class.getInterfaces doesn't return the flattened set.
5111 ArtField& interfaces_sfield = sfields->At(0);
5112 interfaces_sfield.SetDexFieldIndex(0);
5113 interfaces_sfield.SetDeclaringClass(temp_klass.Get());
5114 interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
5115
5116 // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
5117 ArtField& throws_sfield = sfields->At(1);
5118 throws_sfield.SetDexFieldIndex(1);
5119 throws_sfield.SetDeclaringClass(temp_klass.Get());
5120 throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
5121
5122 // Proxies have 1 direct method, the constructor
5123 const size_t num_direct_methods = 1;
5124
5125 // The array we get passed contains all methods, including private and static
5126 // ones that aren't proxied. We need to filter those out since only interface
5127 // methods (non-private & virtual) are actually proxied.
5128 Handle<mirror::ObjectArray<mirror::Method>> h_methods =
5129 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
5130 DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
5131 << mirror::Class::PrettyClass(h_methods->GetClass());
5132 // List of the actual virtual methods this class will have.
5133 std::vector<ArtMethod*> proxied_methods;
5134 std::vector<size_t> proxied_throws_idx;
5135 proxied_methods.reserve(h_methods->GetLength());
5136 proxied_throws_idx.reserve(h_methods->GetLength());
5137 // Filter out to only the non-private virtual methods.
5138 for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
5139 ArtMethod* m = mirror->GetArtMethod();
5140 if (!m->IsPrivate() && !m->IsStatic()) {
5141 proxied_methods.push_back(m);
5142 proxied_throws_idx.push_back(idx);
5143 }
5144 }
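// Illustrative example (hypothetical input): if 'methods' contains
//
//   [0] public boolean equals(Object)   // virtual -> proxied
//   [1] private void lambda$f$0()       // private -> skipped
//   [2] public int hashCode()           // virtual -> proxied
//   [3] static void helper()            // static  -> skipped
//
// then proxied_methods = {equals, hashCode} and proxied_throws_idx = {0, 2},
// which is used below to compact the 'throws' arrays to the same order.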
5145 const size_t num_virtual_methods = proxied_methods.size();
5146 // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
5147 // contains an array of all the classes each function is declared to throw.
5148 // This is used to wrap unexpected exceptions in a
5149 // UndeclaredThrowableException exception. This array is in the same order as
5150 // the methods array and like the methods array must be filtered to remove any
5151 // non-proxied methods.
5152 const bool has_filtered_methods =
5153 static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
5154 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
5155 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
5156 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
5157 hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
5158 (has_filtered_methods)
5159 ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
5160 self, original_proxied_throws->GetClass(), num_virtual_methods)
5161 : original_proxied_throws.Get()));
5162 if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
5163 self->AssertPendingOOMException();
5164 return nullptr;
5165 }
5166 if (has_filtered_methods) {
5167 for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
5168 DCHECK_LE(new_idx, orig_idx);
5169 proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
5170 }
5171 }
5172
5173 // Create the methods array.
5174 LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
5175 self, allocator, num_direct_methods + num_virtual_methods);
5176 // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
5177 // want to throw OOM in the future.
5178 if (UNLIKELY(proxy_class_methods == nullptr)) {
5179 self->AssertPendingOOMException();
5180 return nullptr;
5181 }
5182 temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
5183
5184 // Create the single direct method.
5185 CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
5186
5187 // Create virtual method using specified prototypes.
5188 // TODO These should really use the iterators.
5189 for (size_t i = 0; i < num_virtual_methods; ++i) {
5190 auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5191 auto* prototype = proxied_methods[i];
5192 CreateProxyMethod(temp_klass, prototype, virtual_method);
5193 DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5194 DCHECK(prototype->GetDeclaringClass() != nullptr);
5195 }
5196
5197 // The super class is java.lang.reflect.Proxy
5198 temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
5199 // Now effectively in the loaded state.
5200 mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
5201 self->AssertNoPendingException();
5202
5203 // At this point the class is loaded. Publish a ClassLoad event.
5204 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5205 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5206
5207 MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
5208 {
5209 // Must hold lock on object when resolved.
5210 ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
5211 // Link the fields and virtual methods, creating vtable and iftables.
5212 // The new class will replace the old one in the class table.
5213 Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
5214 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
5215 if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
5216 if (!temp_klass->IsErroneous()) {
5217 mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
5218 }
5219 return nullptr;
5220 }
5221 }
5222 CHECK(temp_klass->IsRetired());
5223 CHECK_NE(temp_klass.Get(), klass.Get());
5224
5225 CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
5226 interfaces_sfield.SetObject<false>(
5227 klass.Get(),
5228 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5229 CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5230 throws_sfield.SetObject<false>(
5231 klass.Get(),
5232 proxied_throws.Get());
5233
5234 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5235
5236 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5237 // See also ClassLinker::EnsureInitialized().
5238 if (kBitstringSubtypeCheckEnabled) {
5239 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5240 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5241 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5242 }
5243
5244 VisiblyInitializedCallback* callback = nullptr;
5245 {
5246 // Lock on klass is released. Lock new class object.
5247 ObjectLock<mirror::Class> initialization_lock(self, klass);
5248 // Conservatively go through the ClassStatus::kInitialized state.
5249 callback = MarkClassInitialized(self, klass);
5250 }
5251 if (callback != nullptr) {
5252 callback->MakeVisible(self);
5253 }
5254
5255 // Consistency checks.
5256 if (kIsDebugBuild) {
5257 CHECK(klass->GetIFieldsPtr() == nullptr);
5258 CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5259
5260 for (size_t i = 0; i < num_virtual_methods; ++i) {
5261 auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5262 CheckProxyMethod(virtual_method, proxied_methods[i]);
5263 }
5264
5265 StackHandleScope<1> hs2(self);
5266 Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
5267 std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
5268 decoded_name->ToModifiedUtf8().c_str()));
5269 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
5270
5271 std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
5272 decoded_name->ToModifiedUtf8().c_str()));
5273 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
5274
5275 CHECK_EQ(klass.Get()->GetProxyInterfaces(),
5276 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5277 CHECK_EQ(klass.Get()->GetProxyThrows(),
5278 proxied_throws.Get());
5279 }
5280 return klass.Get();
5281 }
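// Sketch of how this is typically reached (simplified from the native bridge
// in java_lang_reflect_Proxy.cc; shown for orientation only):
//
//   static jclass Proxy_generateProxy(JNIEnv* env, jclass, jstring name,
//                                     jobjectArray interfaces, jobject loader,
//                                     jobjectArray methods, jobjectArray throws) {
//     ScopedFastNativeObjectAccess soa(env);
//     ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
//     return soa.AddLocalReference<jclass>(class_linker->CreateProxyClass(
//         soa, name, interfaces, loader, methods, throws));
//   }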
5282
5283 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5284 // Create constructor for Proxy that must initialize the method.
5285 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5286 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5287
5288 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5289 // on which front-end compiler was used to build the libcore DEX files.
5290 ArtMethod* proxy_constructor = WellKnownClasses::java_lang_reflect_Proxy_init;
5291 DCHECK(proxy_constructor != nullptr)
5292 << "Could not find <init> method in java.lang.reflect.Proxy";
5293
5294 // Clone the existing constructor of Proxy (our constructor would just invoke it, so steal its
5295 // code_ too).
5296 DCHECK(out != nullptr);
5297 out->CopyFrom(proxy_constructor, image_pointer_size_);
5298 // Make this constructor public and fix the class to be our Proxy version.
5299 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5300 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5301 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5302 kAccPublic |
5303 kAccCompileDontBother);
5304 out->SetDeclaringClass(klass.Get());
5305
5306 // Set the original constructor method.
5307 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5308 }
5309
5310 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5311 CHECK(constructor->IsConstructor());
5312 auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5313 CHECK_STREQ(np->GetName(), "<init>");
5314 CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5315 DCHECK(constructor->IsPublic());
5316 }
5317
5318 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5319 ArtMethod* out) {
5320 // We steal everything from the prototype (such as DexCache, invoke stub, etc.), then specialize
5321 // as necessary.
5322 DCHECK(out != nullptr);
5323 out->CopyFrom(prototype, image_pointer_size_);
5324
5325 // Set class to be the concrete proxy class.
5326 out->SetDeclaringClass(klass.Get());
5327 // Clear the abstract and default flags to ensure that defaults aren't picked in
5328 // preference to the invocation handler.
5329 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
5330 // Make the method final.
5331 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5332 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5333 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5334
5335 // Set the original interface method.
5336 out->SetDataPtrSize(prototype, image_pointer_size_);
5337
5338 // At runtime the method looks like a reference and argument saving method, clone the code
5339 // related parameters from this method.
5340 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
5341 }
5342
5343 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5344 // Basic consistency checks.
5345 CHECK(!prototype->IsFinal());
5346 CHECK(method->IsFinal());
5347 CHECK(method->IsInvokable());
5348
5349 // The proxy method doesn't have its own dex cache or dex file, and so it steals those of its
5350 // interface prototype. The exceptions to this are constructors and the Class of the Proxy itself.
5351 CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5352 CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5353 }
5354
5355 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass,
5356 bool can_init_statics,
5357 bool can_init_parents) {
5358 if (can_init_statics && can_init_parents) {
5359 return true;
5360 }
5361 DCHECK(Runtime::Current()->IsAotCompiler());
5362
5363 // We currently don't support initializing classes that need access checks at
5364 // AOT time.
5365 if (klass->IsVerifiedNeedsAccessChecks()) {
5366 return false;
5367 }
5368 if (!can_init_statics) {
5369 // Check if there's a class initializer.
5370 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5371 if (clinit != nullptr) {
5372 return false;
5373 }
5374 // Check if there are encoded static values needing initialization.
5375 if (klass->NumStaticFields() != 0) {
5376 const dex::ClassDef* dex_class_def = klass->GetClassDef();
5377 DCHECK(dex_class_def != nullptr);
5378 if (dex_class_def->static_values_off_ != 0) {
5379 return false;
5380 }
5381 }
5382 }
5383 // If we are a class we need to initialize all interfaces with default methods when we are
5384 // initialized. Check all of them.
5385 if (!klass->IsInterface()) {
5386 size_t num_interfaces = klass->GetIfTableCount();
5387 for (size_t i = 0; i < num_interfaces; i++) {
5388 ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5389 if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5390 if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5391 return false;
5392 }
5393 }
5394 }
5395 }
5396 if (klass->IsInterface() || !klass->HasSuperClass()) {
5397 return true;
5398 }
5399 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5400 if (super_class->IsInitialized()) {
5401 return true;
5402 }
5403 return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5404 }
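// Summary sketch of the AOT policy implemented by CanWeInitializeClass() above:
//
//   can_init_statics && can_init_parents           -> true (nothing to restrict)
//   klass needs access checks                      -> false
//   !can_init_statics && klass has a <clinit>      -> false
//   !can_init_statics && encoded static values     -> false
//   uninitialized default-method interface or
//   uninitialized superclass, with !can_init_parents -> false
//   otherwise                                      -> recurse into the superclass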
5405
5406 bool ClassLinker::InitializeClass(Thread* self,
5407 Handle<mirror::Class> klass,
5408 bool can_init_statics,
5409 bool can_init_parents) {
5410 // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
5411
5412 // Are we already initialized and therefore done?
5413 // Note: we differ from the JLS here as we don't do this under the lock; this is benign as
5414 // an initialized class will never change its state.
5415 if (klass->IsInitialized()) {
5416 return true;
5417 }
5418
5419 // Fast fail if initialization requires a full runtime. Not part of the JLS.
5420 if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
5421 return false;
5422 }
5423
5424 self->AllowThreadSuspension();
5425 Runtime* const runtime = Runtime::Current();
5426 const bool stats_enabled = runtime->HasStatsEnabled();
5427 uint64_t t0;
5428 {
5429 ObjectLock<mirror::Class> lock(self, klass);
5430
5431 // Re-check under the lock in case another thread initialized ahead of us.
5432 if (klass->IsInitialized()) {
5433 return true;
5434 }
5435
5436 // Was the class already found to be erroneous? Done under the lock to match the JLS.
5437 if (klass->IsErroneous()) {
5438 ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
5439 VlogClassInitializationFailure(klass);
5440 return false;
5441 }
5442
5443 CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5444 << klass->PrettyClass() << ": state=" << klass->GetStatus();
5445
5446 if (!klass->IsVerified()) {
5447 VerifyClass(self, /*verifier_deps= */ nullptr, klass);
5448 if (!klass->IsVerified()) {
5449 // We failed to verify; expect either the klass to be erroneous or that verification failed
5450 // at compile time.
5451 if (klass->IsErroneous()) {
5452 // The class is erroneous. This may be a verifier error, or another thread attempted
5453 // verification and/or initialization and failed. We can distinguish those cases by
5454 // whether an exception is already pending.
5455 if (self->IsExceptionPending()) {
5456 // Check that it's a VerifyError.
5457 DCHECK(IsVerifyError(self->GetException()));
5458 } else {
5459 // Check that another thread attempted initialization.
5460 DCHECK_NE(0, klass->GetClinitThreadId());
5461 DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5462 // Need to rethrow the previous failure now.
5463 ThrowEarlierClassFailure(klass.Get(), true);
5464 }
5465 VlogClassInitializationFailure(klass);
5466 } else {
5467 CHECK(Runtime::Current()->IsAotCompiler());
5468 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
5469 self->AssertNoPendingException();
5470 self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
5471 }
5472 self->AssertPendingException();
5473 return false;
5474 } else {
5475 self->AssertNoPendingException();
5476 }
5477
5478 // A separate thread could have moved us all the way to initialized. A "simple" example
5479 // involves a subclass of the current class being initialized at the same time (which
5480 // will implicitly initialize the superclass, if scheduled that way). b/28254258
5481 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
5482 if (klass->IsInitialized()) {
5483 return true;
5484 }
5485 }
5486
5487 // If the class is ClassStatus::kInitializing, either this thread is
5488 // initializing higher up the stack or another thread has beat us
5489 // to initializing and we need to wait. Either way, this
5490 // invocation of InitializeClass will not be responsible for
5491 // running <clinit> and will return.
5492 if (klass->GetStatus() == ClassStatus::kInitializing) {
5493 // Could have got an exception during verification.
5494 if (self->IsExceptionPending()) {
5495 VlogClassInitializationFailure(klass);
5496 return false;
5497 }
5498 // We caught somebody else in the act; was it us?
5499 if (klass->GetClinitThreadId() == self->GetTid()) {
5500 // Yes. That's fine. Return so we can continue initializing.
5501 return true;
5502 }
5503 // No. That's fine. Wait for another thread to finish initializing.
5504 return WaitForInitializeClass(klass, self, lock);
5505 }
5506
5507 // Try to get the oat class's status for this class if the oat file is present. The compiler
5508 // tries to validate superclass descriptors, and writes the result into the oat file.
5509 // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5510 // is different at runtime than it was at compile time, the oat file is rejected. So if the
5511 // oat file is present, the classpaths must match, and the runtime check can be skipped.
5512 bool has_oat_class = false;
5513 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5514 ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5515 : OatFile::OatClass::Invalid();
5516 if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
5517 !ValidateSuperClassDescriptors(klass)) {
5518 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5519 return false;
5520 }
5521 self->AllowThreadSuspension();
5522
5523 CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
5524 << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
5525
5526 // From here on out, other threads may observe that we're initializing, so changes of state
5527 // require a notification.
5528 klass->SetClinitThreadId(self->GetTid());
5529 mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
5530
5531 t0 = stats_enabled ? NanoTime() : 0u;
5532 }
5533
5534 uint64_t t_sub = 0;
5535
5536 // Initialize super classes; this must be done while we are initializing, per the JLS.
5537 if (!klass->IsInterface() && klass->HasSuperClass()) {
5538 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5539 if (!super_class->IsInitialized()) {
5540 CHECK(!super_class->IsInterface());
5541 CHECK(can_init_parents);
5542 StackHandleScope<1> hs(self);
5543 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
5544 uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
5545 bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
5546 uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
5547 if (!super_initialized) {
5548 // The super class was verified ahead of entering initializing; we should only be here if
5549 // the super class became erroneous due to initialization.
5550 // For the AOT compiler, the super class might also be initializing, but we don't
5551 // want to process circular dependencies in pre-compile.
5552 CHECK(self->IsExceptionPending())
5553 << "Super class initialization failed for "
5554 << handle_scope_super->PrettyDescriptor()
5555 << " that has unexpected status " << handle_scope_super->GetStatus()
5556 << "\nPending exception:\n"
5557 << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
5558 ObjectLock<mirror::Class> lock(self, klass);
5559 // Initialization failed because the super-class is erroneous.
5560 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5561 return false;
5562 }
5563 t_sub = super_t1 - super_t0;
5564 }
5565 }
5566
5567 if (!klass->IsInterface()) {
5568 // Initialize interfaces with default methods for the JLS.
5569 size_t num_direct_interfaces = klass->NumDirectInterfaces();
5570 // Only set up the (expensive) handle scope if we actually need to.
5571 if (UNLIKELY(num_direct_interfaces > 0)) {
5572 StackHandleScope<1> hs_iface(self);
5573 MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5574 for (size_t i = 0; i < num_direct_interfaces; i++) {
5575 handle_scope_iface.Assign(klass->GetDirectInterface(i));
5576 CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
5577 CHECK(handle_scope_iface->IsInterface());
5578 if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5579 // We have already done this for this interface. Skip it.
5580 continue;
5581 }
5582 // We cannot just call initialize class directly because we need to ensure that ALL
5583 // interfaces with default methods are initialized. Non-default interface initialization
5584 // will not affect other non-default super-interfaces.
5585 // This timing is not very precise; it misses all the recursive walking.
5586 uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
5587 bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5588 handle_scope_iface,
5589 can_init_statics,
5590 can_init_parents);
5591 uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
5592 if (!iface_initialized) {
5593 ObjectLock<mirror::Class> lock(self, klass);
5594 // Initialization failed because one of our interfaces with default methods is erroneous.
5595 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5596 return false;
5597 }
5598 t_sub += inf_t1 - inf_t0;
5599 }
5600 }
5601 }
5602
5603 const size_t num_static_fields = klass->NumStaticFields();
5604 if (num_static_fields > 0) {
5605 const dex::ClassDef* dex_class_def = klass->GetClassDef();
5606 CHECK(dex_class_def != nullptr);
5607 StackHandleScope<3> hs(self);
5608 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5609 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5610
5611 // Eagerly fill in static fields so that we don't have to do as many expensive
5612 // Class::FindStaticField calls in ResolveField.
5613 for (size_t i = 0; i < num_static_fields; ++i) {
5614 ArtField* field = klass->GetStaticField(i);
5615 const uint32_t field_idx = field->GetDexFieldIndex();
5616 ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
5617 if (resolved_field == nullptr) {
5618 // Populating cache of a dex file which defines `klass` should always be allowed.
5619 DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5620 field,
5621 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5622 hiddenapi::AccessMethod::kNone));
5623 dex_cache->SetResolvedField(field_idx, field);
5624 } else {
5625 DCHECK_EQ(field, resolved_field);
5626 }
5627 }
5628
5629 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5630 class_loader,
5631 this,
5632 *dex_class_def);
5633 const DexFile& dex_file = *dex_cache->GetDexFile();
5634
5635 if (value_it.HasNext()) {
5636 ClassAccessor accessor(dex_file, *dex_class_def);
5637 CHECK(can_init_statics);
5638 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5639 if (!value_it.HasNext()) {
5640 break;
5641 }
5642 ArtField* art_field = ResolveField(field.GetIndex(),
5643 dex_cache,
5644 class_loader,
5645 /* is_static= */ true);
5646 if (Runtime::Current()->IsActiveTransaction()) {
5647 value_it.ReadValueToField<true>(art_field);
5648 } else {
5649 value_it.ReadValueToField<false>(art_field);
5650 }
5651 if (self->IsExceptionPending()) {
5652 break;
5653 }
5654 value_it.Next();
5655 }
5656 DCHECK(self->IsExceptionPending() || !value_it.HasNext());
5657 }
5658 }
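// Illustrative example (hypothetical Java source): for
//
//   static final int kAnswer = 42;
//   static final String kTag = "art";
//
// dex2oat stores an encoded_array at static_values_off, and the iterator above
// replays it, writing 42 and "art" straight into the resolved ArtFields before
// any <clinit> code runs.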
5659
5660
5661 if (!self->IsExceptionPending()) {
5662 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5663 if (clinit != nullptr) {
5664 CHECK(can_init_statics);
5665 JValue result;
5666 clinit->Invoke(self, nullptr, 0, &result, "V");
5667 }
5668 }
5669 self->AllowThreadSuspension();
5670 uint64_t t1 = stats_enabled ? NanoTime() : 0u;
5671
5672 VisiblyInitializedCallback* callback = nullptr;
5673 bool success = true;
5674 {
5675 ObjectLock<mirror::Class> lock(self, klass);
5676
5677 if (self->IsExceptionPending()) {
5678 WrapExceptionInInitializer(klass);
5679 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5680 success = false;
5681 } else if (Runtime::Current()->IsTransactionAborted()) {
5682 // The exception thrown when the transaction aborted has been caught and cleared
5683 // so we need to throw it again now.
5684 VLOG(compiler) << "Return from class initializer of "
5685 << mirror::Class::PrettyDescriptor(klass.Get())
5686 << " without exception while transaction was aborted: re-throw it now.";
5687 runtime->ThrowTransactionAbortError(self);
5688 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5689 success = false;
5690 } else {
5691 if (stats_enabled) {
5692 RuntimeStats* global_stats = runtime->GetStats();
5693 RuntimeStats* thread_stats = self->GetStats();
5694 ++global_stats->class_init_count;
5695 ++thread_stats->class_init_count;
5696 global_stats->class_init_time_ns += (t1 - t0 - t_sub);
5697 thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
5698 }
5699 // Set the class as initialized, unless we failed to initialize the static fields.
5700 callback = MarkClassInitialized(self, klass);
5701 if (VLOG_IS_ON(class_linker)) {
5702 std::string temp;
5703 LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
5704 klass->GetLocation();
5705 }
5706 }
5707 }
5708 if (callback != nullptr) {
5709 callback->MakeVisible(self);
5710 }
5711 return success;
5712 }
5713
5714 // We recursively run down the tree of interfaces. We need to do this in the order they are declared
5715 // and perform the initialization only on those interfaces that contain default methods.
5716 bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5717 Handle<mirror::Class> iface,
5718 bool can_init_statics,
5719 bool can_init_parents) {
5720 CHECK(iface->IsInterface());
5721 size_t num_direct_ifaces = iface->NumDirectInterfaces();
5722 // Only create the (expensive) handle scope if we need it.
5723 if (UNLIKELY(num_direct_ifaces > 0)) {
5724 StackHandleScope<1> hs(self);
5725 MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5726 // First we initialize all of iface's super-interfaces recursively.
5727 for (size_t i = 0; i < num_direct_ifaces; i++) {
5728 ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
5729 CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
5730 if (!super_iface->HasBeenRecursivelyInitialized()) {
5731 // Recursive step
5732 handle_super_iface.Assign(super_iface);
5733 if (!InitializeDefaultInterfaceRecursive(self,
5734 handle_super_iface,
5735 can_init_statics,
5736 can_init_parents)) {
5737 return false;
5738 }
5739 }
5740 }
5741 }
5742
5743 bool result = true;
5744 // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5745 // initialize if we don't have default methods.
5746 if (iface->HasDefaultMethods()) {
5747 result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5748 }
5749
5750 // Mark that this interface has undergone recursive default interface initialization so we know we
5751 // can skip it on any later class initializations. We do this even if we are not a default
5752 // interface since we can still avoid the traversal. This is purely a performance optimization.
5753 if (result) {
5754 // TODO This should be done in a better way
5755 // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5756 // interface. It is bad (Java) style, but not impossible. Marking the recursive
5757 // initialization is a performance optimization (to avoid another idempotent visit
5758 // for other implementing classes/interfaces), and can be revisited later.
5759 ObjectTryLock<mirror::Class> lock(self, iface);
5760 if (lock.Acquired()) {
5761 iface->SetRecursivelyInitialized();
5762 }
5763 }
5764 return result;
5765 }
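// Illustrative example (hypothetical hierarchy): with
//
//   interface A { default void f() {} }
//   interface B {}                       // no default methods anywhere
//   class D implements A, B {}
//
// initializing D recursively visits A and B; only A is passed to
// EnsureInitialized() (it has default methods), while B is merely marked as
// recursively initialized so later initializations can skip the traversal.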
5766
5767 bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5768 Thread* self,
5769 ObjectLock<mirror::Class>& lock)
5770 REQUIRES_SHARED(Locks::mutator_lock_) {
5771 while (true) {
5772 self->AssertNoPendingException();
5773 CHECK(!klass->IsInitialized());
5774 lock.WaitIgnoringInterrupts();
5775
5776 // When we wake up, repeat the test for init-in-progress. If
5777 // there's an exception pending (only possible if
5778 // we were not using WaitIgnoringInterrupts), bail out.
5779 if (self->IsExceptionPending()) {
5780 WrapExceptionInInitializer(klass);
5781 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5782 return false;
5783 }
5784 // Spurious wakeup? Go back to waiting.
5785 if (klass->GetStatus() == ClassStatus::kInitializing) {
5786 continue;
5787 }
5788 if (klass->GetStatus() == ClassStatus::kVerified &&
5789 Runtime::Current()->IsAotCompiler()) {
5790 // Compile time initialization failed.
5791 return false;
5792 }
5793 if (klass->IsErroneous()) {
5794 // The caller wants an exception, but it was thrown in a
5795 // different thread. Synthesize one here.
5796 ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
5797 klass->PrettyDescriptor().c_str());
5798 VlogClassInitializationFailure(klass);
5799 return false;
5800 }
5801 if (klass->IsInitialized()) {
5802 return true;
5803 }
5804 LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
5805 << klass->GetStatus();
5806 }
5807 UNREACHABLE();
5808 }
5809
5810 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5811 Handle<mirror::Class> super_klass,
5812 ArtMethod* method,
5813 ArtMethod* m)
5814 REQUIRES_SHARED(Locks::mutator_lock_) {
5815 DCHECK(Thread::Current()->IsExceptionPending());
5816 DCHECK(!m->IsProxyMethod());
5817 const DexFile* dex_file = m->GetDexFile();
5818 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5819 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
5820 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
5821 std::string return_type = dex_file->PrettyType(return_type_idx);
5822 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5823 ThrowWrappedLinkageError(klass.Get(),
5824 "While checking class %s method %s signature against %s %s: "
5825 "Failed to resolve return type %s with %s",
5826 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5827 ArtMethod::PrettyMethod(method).c_str(),
5828 super_klass->IsInterface() ? "interface" : "superclass",
5829 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5830 return_type.c_str(), class_loader.c_str());
5831 }
5832
5833 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5834 Handle<mirror::Class> super_klass,
5835 ArtMethod* method,
5836 ArtMethod* m,
5837 uint32_t index,
5838 dex::TypeIndex arg_type_idx)
5839 REQUIRES_SHARED(Locks::mutator_lock_) {
5840 DCHECK(Thread::Current()->IsExceptionPending());
5841 DCHECK(!m->IsProxyMethod());
5842 const DexFile* dex_file = m->GetDexFile();
5843 std::string arg_type = dex_file->PrettyType(arg_type_idx);
5844 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5845 ThrowWrappedLinkageError(klass.Get(),
5846 "While checking class %s method %s signature against %s %s: "
5847 "Failed to resolve arg %u type %s with %s",
5848 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5849 ArtMethod::PrettyMethod(method).c_str(),
5850 super_klass->IsInterface() ? "interface" : "superclass",
5851 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5852 index, arg_type.c_str(), class_loader.c_str());
5853 }
5854
5855 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5856 Handle<mirror::Class> super_klass,
5857 ArtMethod* method,
5858 const std::string& error_msg)
5859 REQUIRES_SHARED(Locks::mutator_lock_) {
5860 ThrowLinkageError(klass.Get(),
5861 "Class %s method %s resolves differently in %s %s: %s",
5862 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5863 ArtMethod::PrettyMethod(method).c_str(),
5864 super_klass->IsInterface() ? "interface" : "superclass",
5865 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5866 error_msg.c_str());
5867 }
5868
5869 static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
5870 Handle<mirror::Class> klass,
5871 Handle<mirror::Class> super_klass,
5872 ArtMethod* method1,
5873 ArtMethod* method2)
5874 REQUIRES_SHARED(Locks::mutator_lock_) {
5875 {
5876 StackHandleScope<1> hs(self);
5877 Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
5878 if (UNLIKELY(return_type == nullptr)) {
5879 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
5880 return false;
5881 }
5882 ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
5883 if (UNLIKELY(other_return_type == nullptr)) {
5884 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
5885 return false;
5886 }
5887 if (UNLIKELY(other_return_type != return_type.Get())) {
5888 ThrowSignatureMismatch(klass, super_klass, method1,
5889 StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
5890 return_type->PrettyClassAndClassLoader().c_str(),
5891 return_type.Get(),
5892 other_return_type->PrettyClassAndClassLoader().c_str(),
5893 other_return_type.Ptr()));
5894 return false;
5895 }
5896 }
5897 const dex::TypeList* types1 = method1->GetParameterTypeList();
5898 const dex::TypeList* types2 = method2->GetParameterTypeList();
5899 if (types1 == nullptr) {
5900 if (types2 != nullptr && types2->Size() != 0) {
5901 ThrowSignatureMismatch(klass, super_klass, method1,
5902 StringPrintf("Type list mismatch with %s",
5903 method2->PrettyMethod(true).c_str()));
5904 return false;
5905 }
5906 return true;
5907 } else if (UNLIKELY(types2 == nullptr)) {
5908 if (types1->Size() != 0) {
5909 ThrowSignatureMismatch(klass, super_klass, method1,
5910 StringPrintf("Type list mismatch with %s",
5911 method2->PrettyMethod(true).c_str()));
5912 return false;
5913 }
5914 return true;
5915 }
5916 uint32_t num_types = types1->Size();
5917 if (UNLIKELY(num_types != types2->Size())) {
5918 ThrowSignatureMismatch(klass, super_klass, method1,
5919 StringPrintf("Type list mismatch with %s",
5920 method2->PrettyMethod(true).c_str()));
5921 return false;
5922 }
5923 for (uint32_t i = 0; i < num_types; ++i) {
5924 StackHandleScope<1> hs(self);
5925 dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
5926 Handle<mirror::Class> param_type(hs.NewHandle(
5927 method1->ResolveClassFromTypeIndex(param_type_idx)));
5928 if (UNLIKELY(param_type == nullptr)) {
5929 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5930 method1, i, param_type_idx);
5931 return false;
5932 }
5933 dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
5934 ObjPtr<mirror::Class> other_param_type =
5935 method2->ResolveClassFromTypeIndex(other_param_type_idx);
5936 if (UNLIKELY(other_param_type == nullptr)) {
5937 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5938 method2, i, other_param_type_idx);
5939 return false;
5940 }
5941 if (UNLIKELY(param_type.Get() != other_param_type)) {
5942 ThrowSignatureMismatch(klass, super_klass, method1,
5943 StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
5944 i,
5945 param_type->PrettyClassAndClassLoader().c_str(),
5946 param_type.Get(),
5947 other_param_type->PrettyClassAndClassLoader().c_str(),
5948 other_param_type.Ptr()));
5949 return false;
5950 }
5951 }
5952 return true;
5953 }
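// Illustrative failure (hypothetical classes): if loader L1 defines class D
// overriding 'Foo m()' from a superclass that loader L2 defined, and L1 and L2
// resolve the descriptor "LFoo;" to different Class objects, the checks above
// throw a LinkageError along the lines of:
//
//   Class D method Foo D.m() resolves differently in superclass S:
//   Return types mismatch: Foo(0x12c08000) vs Foo(0x12d4a000)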
5954
5955
5956 bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
5957 if (klass->IsInterface()) {
5958 return true;
5959 }
5960 // Begin with the methods local to the superclass.
5961 Thread* self = Thread::Current();
5962 StackHandleScope<1> hs(self);
5963 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
5964 if (klass->HasSuperClass() &&
5965 klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
5966 super_klass.Assign(klass->GetSuperClass());
5967 for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
5968 auto* m = klass->GetVTableEntry(i, image_pointer_size_);
5969 auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
5970 if (m != super_m) {
5971 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5972 klass,
5973 super_klass,
5974 m,
5975 super_m))) {
5976 self->AssertPendingException();
5977 return false;
5978 }
5979 }
5980 }
5981 }
5982 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
5983 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
5984 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
5985 uint32_t num_methods = super_klass->NumVirtualMethods();
5986 for (uint32_t j = 0; j < num_methods; ++j) {
5987 auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
5988 j, image_pointer_size_);
5989 auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
5990 if (m != super_m) {
5991 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5992 klass,
5993 super_klass,
5994 m,
5995 super_m))) {
5996 self->AssertPendingException();
5997 return false;
5998 }
5999 }
6000 }
6001 }
6002 }
6003 return true;
6004 }
6005
6006 bool ClassLinker::EnsureInitialized(Thread* self,
6007 Handle<mirror::Class> c,
6008 bool can_init_fields,
6009 bool can_init_parents) {
6010 DCHECK(c != nullptr);
6011
6012 if (c->IsInitialized()) {
6013 // If we've seen an initialized but not visibly initialized class
6014 // many times, request visible initialization.
6015 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
6016 // Thanks to the x86 memory model, classes skip the initialized status.
6017 DCHECK(c->IsVisiblyInitialized());
6018 } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
6019 if (self->IncrementMakeVisiblyInitializedCounter()) {
6020 MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
6021 }
6022 }
6023 return true;
6024 }
6025 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
6026 //
6027 // Ensure the bitstring is initialized before any of the class initialization
6028 // logic occurs. Once a class initializer starts running, objects can
6029 // escape into the heap and use the subtype checking code.
6030 //
6031 // Note: A class whose SubtypeCheckInfo is at least Initialized means it
6032 // can be used as a source for the IsSubClass check, and that all ancestors
6033 // of the class are Assigned (can be used as a target for IsSubClass check)
6034 // or Overflowed (can be used as a source for IsSubClass check).
6035 if (kBitstringSubtypeCheckEnabled) {
6036 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
6037 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
6038 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
6039 }
6040 const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
6041 if (!success) {
6042 if (can_init_fields && can_init_parents) {
6043 CHECK(self->IsExceptionPending()) << c->PrettyClass();
6044 } else {
6045 // There may or may not be an exception pending. If there is, clear it.
6046 // We propagate the exception only if we can initialize fields and parents.
6047 self->ClearException();
6048 }
6049 } else {
6050 self->AssertNoPendingException();
6051 }
6052 return success;
6053 }
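// Usage sketch (hypothetical caller; mirrors typical runtime call sites):
//
//   StackHandleScope<1> hs(self);
//   Handle<mirror::Class> h_klass(hs.NewHandle(klass));
//   if (!class_linker->EnsureInitialized(self, h_klass,
//                                        /*can_init_fields=*/ true,
//                                        /*can_init_parents=*/ true)) {
//     DCHECK(self->IsExceptionPending());
//     return nullptr;  // Initialization failed; the exception is pending.
//   }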
6054
6055 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
6056 ObjPtr<mirror::Class> new_class) {
6057 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
6058 for (ArtField& field : new_class->GetIFields()) {
6059 if (field.GetDeclaringClass() == temp_class) {
6060 field.SetDeclaringClass(new_class);
6061 }
6062 }
6063
6064 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
6065 for (ArtField& field : new_class->GetSFields()) {
6066 if (field.GetDeclaringClass() == temp_class) {
6067 field.SetDeclaringClass(new_class);
6068 }
6069 }
6070
6071 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
6072 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
6073 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
6074 if (method.GetDeclaringClass() == temp_class) {
6075 method.SetDeclaringClass(new_class);
6076 }
6077 }
6078
6079 // Make sure the remembered set and mod-union tables know that we updated some of the native
6080 // roots.
6081 WriteBarrier::ForEveryFieldWrite(new_class);
6082 }
6083
6084 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6085 CHECK(class_loader->GetAllocator() == nullptr);
6086 CHECK(class_loader->GetClassTable() == nullptr);
6087 Thread* const self = Thread::Current();
6088 ClassLoaderData data;
6089 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
6090 // Create and set the class table.
6091 data.class_table = new ClassTable;
6092 class_loader->SetClassTable(data.class_table);
6093 // Create and set the linear allocator.
6094 data.allocator = Runtime::Current()->CreateLinearAlloc();
6095 class_loader->SetAllocator(data.allocator);
6096 // Add to the list so that we know to free the data later.
6097 class_loaders_.push_back(data);
6098 }
6099
InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6100 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6101 if (class_loader == nullptr) {
6102 return boot_class_table_.get();
6103 }
6104 ClassTable* class_table = class_loader->GetClassTable();
6105 if (class_table == nullptr) {
6106 RegisterClassLoader(class_loader);
6107 class_table = class_loader->GetClassTable();
6108 DCHECK(class_table != nullptr);
6109 }
6110 return class_table;
6111 }
6112
ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6113 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6114 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
6115 }
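
// Note: a null class loader denotes the boot class loader throughout the
// class linker, so e.g. ClassTableForClassLoader(nullptr) yields the boot
// class table rather than failing.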

bool ClassLinker::LinkClass(Thread* self,
                            const char* descriptor,
                            Handle<mirror::Class> klass,
                            Handle<mirror::ObjectArray<mirror::Class>> interfaces,
                            MutableHandle<mirror::Class>* h_new_class_out) {
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  if (!LinkSuperClass(klass)) {
    return false;
  }
  ArtMethod* imt_data[ImTable::kSize];
  // Whether there are any new conflicts compared to the super class.
  bool new_conflict = false;
  std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
  if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
    return false;
  }
  if (!LinkInstanceFields(self, klass)) {
    return false;
  }
  size_t class_size;
  if (!LinkStaticFields(self, klass, &class_size)) {
    return false;
  }
  CreateReferenceInstanceOffsets(klass);
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  ImTable* imt = nullptr;
  if (klass->ShouldHaveImt()) {
    // If there are any new conflicts compared to the super class, we cannot make a copy. There
    // can be cases where both will have a conflict method at the same slot without having the same
    // set of conflicts. In this case, we cannot share the IMT since the conflict table slow path
    // will possibly create a table that is incorrect for either of the classes.
    // Same IMT with new_conflict does not happen very often.
    if (!new_conflict) {
      ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
      if (super_imt != nullptr) {
        bool imt_equals = true;
        for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
          imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
        }
        if (imt_equals) {
          imt = super_imt;
        }
      }
    }
    if (imt == nullptr) {
      LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
      imt = reinterpret_cast<ImTable*>(
          allocator->Alloc(self,
                           ImTable::SizeInBytes(image_pointer_size_),
                           LinearAllocKind::kNoGCRoots));
      if (imt == nullptr) {
        return false;
      }
      imt->Populate(imt_data, image_pointer_size_);
    }
  }

  if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
    // We don't need to retire this class as it has no embedded tables or it was created the
    // correct size during class linker initialization.
    CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();

    if (klass->ShouldHaveEmbeddedVTable()) {
      klass->PopulateEmbeddedVTable(image_pointer_size_);
    }
    if (klass->ShouldHaveImt()) {
      klass->SetImt(imt, image_pointer_size_);
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(klass);
    }

    // This will notify waiters on klass that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
    h_new_class_out->Assign(klass.Get());
  } else {
    CHECK(!klass->IsResolved());
    // Retire the temporary class and create the correctly sized resolved class.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_new_class =
        hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
    // Set arrays to null since we don't want to have multiple classes with the same ArtField or
    // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
    // may not see any references to the target space and clean the card for a class if another
    // class had the same array pointer.
    klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
    klass->SetSFieldsPtrUnchecked(nullptr);
    klass->SetIFieldsPtrUnchecked(nullptr);
    if (UNLIKELY(h_new_class == nullptr)) {
      self->AssertPendingOOMException();
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
      return false;
    }

    CHECK_EQ(h_new_class->GetClassSize(), class_size);
    ObjectLock<mirror::Class> lock(self, h_new_class);
    FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());

    if (LIKELY(descriptor != nullptr)) {
      WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
      const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
      ClassTable* const table = InsertClassTableForClassLoader(class_loader);
      const ObjPtr<mirror::Class> existing =
          table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
      if (class_loader != nullptr) {
        // We updated the class in the class table, perform the write barrier so that the GC knows
        // about the change.
        WriteBarrier::ForEveryFieldWrite(class_loader);
      }
      CHECK_EQ(existing, klass.Get());
      if (log_new_roots_) {
        new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
      }
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(h_new_class);
    }

    // This will notify waiters on temp class that saw the not yet resolved class in the
    // class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);

    CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
    // This will notify waiters on new_class that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
    // Return the new class.
    h_new_class_out->Assign(h_new_class.Get());
  }
  return true;
}
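
// In short (summary of the flow above): a class needing embedded tables is
// first linked as a temporary, CopyOf() then produces a correctly sized copy,
// FixupTemporaryDeclaringClass patches the declaring-class back-references of
// its fields and methods, and the temporary is retired.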

bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
  CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
  const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
  dex::TypeIndex super_class_idx = class_def.superclass_idx_;
  if (super_class_idx.IsValid()) {
    // Check that a class does not inherit from itself directly.
    //
    // TODO: This is a cheap check to detect the straightforward case
    // of a class extending itself (b/28685551), but we should do a
    // proper cycle detection on loaded classes, to detect all cases
    // of class circularity errors (b/28830038).
    if (super_class_idx == class_def.class_idx_) {
      ThrowClassCircularityError(klass.Get(),
                                 "Class %s extends itself",
                                 klass->PrettyDescriptor().c_str());
      return false;
    }

    ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
    if (super_class == nullptr) {
      DCHECK(Thread::Current()->IsExceptionPending());
      return false;
    }
    // Verify
    if (!klass->CanAccess(super_class)) {
      ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
                              super_class->PrettyDescriptor().c_str(),
                              klass->PrettyDescriptor().c_str());
      return false;
    }
    CHECK(super_class->IsResolved());
    klass->SetSuperClass(super_class);
  }
  const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
  if (interfaces != nullptr) {
    for (size_t i = 0; i < interfaces->Size(); i++) {
      dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
      ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
      if (interface == nullptr) {
        DCHECK(Thread::Current()->IsExceptionPending());
        return false;
      }
      // Verify
      if (!klass->CanAccess(interface)) {
        // TODO: the RI seemed to ignore this in my testing.
        ThrowIllegalAccessError(klass.Get(),
                                "Interface %s implemented by class %s is inaccessible",
                                interface->PrettyDescriptor().c_str(),
                                klass->PrettyDescriptor().c_str());
        return false;
      }
    }
  }
  // Mark the class as loaded.
  mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
  return true;
}

bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
  CHECK(!klass->IsPrimitive());
  ObjPtr<mirror::Class> super = klass->GetSuperClass();
  ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
  if (klass.Get() == object_class) {
    if (super != nullptr) {
      ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
      return false;
    }
    return true;
  }
  if (super == nullptr) {
    ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
                      klass->PrettyDescriptor().c_str());
    return false;
  }
  // Verify
  if (klass->IsInterface() && super != object_class) {
    ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
    return false;
  }
  if (super->IsFinal()) {
    ThrowVerifyError(klass.Get(),
                     "Superclass %s of %s is declared final",
                     super->PrettyDescriptor().c_str(),
                     klass->PrettyDescriptor().c_str());
    return false;
  }
  if (super->IsInterface()) {
    ThrowIncompatibleClassChangeError(klass.Get(),
                                      "Superclass %s of %s is an interface",
                                      super->PrettyDescriptor().c_str(),
                                      klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!klass->CanAccess(super)) {
    ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
                            super->PrettyDescriptor().c_str(),
                            klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!VerifyRecordClass(klass, super)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return false;
  }

  // Inherit kAccClassIsFinalizable from the superclass in case this
  // class doesn't override finalize.
  if (super->IsFinalizable()) {
    klass->SetFinalizable();
  }

  // Inherit the class loader flag from the super class.
  if (super->IsClassLoaderClass()) {
    klass->SetClassLoaderClass();
  }

  // Inherit reference flags (if any) from the superclass.
  uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
  if (reference_flags != 0) {
    CHECK_EQ(klass->GetClassFlags(), 0u);
    klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
  }
  // Disallow custom direct subclasses of java.lang.ref.Reference.
  if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
    ThrowLinkageError(klass.Get(),
                      "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
                      klass->PrettyDescriptor().c_str());
    return false;
  }

  if (kIsDebugBuild) {
    // Ensure super classes are fully resolved prior to resolving fields.
    while (super != nullptr) {
      CHECK(super->IsResolved());
      super = super->GetSuperClass();
    }
  }
  return true;
}

// Comparator for the name and signature of a method, used when finding overriding methods.
// The implementation avoids the use of handles; if it didn't, then rather than comparing dex
// files we could compare dex caches in the implementation below.
class MethodNameAndSignatureComparator final : public ValueObject {
 public:
  explicit MethodNameAndSignatureComparator(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : dex_file_(method->GetDexFile()),
        mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
        name_view_() {
    DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  }

  ALWAYS_INLINE std::string_view GetNameView() {
    if (name_view_.empty()) {
      name_view_ = dex_file_->StringViewByIdx(mid_->name_idx_);
    }
    return name_view_;
  }

  bool HasSameNameAndSignature(ArtMethod* other)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
    const DexFile* other_dex_file = other->GetDexFile();
    const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
    if (dex_file_ == other_dex_file) {
      return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
    }
    return GetNameView() == other_dex_file->StringViewByIdx(other_mid.name_idx_) &&
           dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
  }

 private:
  // Dex file for the method to compare against.
  const DexFile* const dex_file_;
  // MethodId for the method to compare against.
  const dex::MethodId* const mid_;
  // Lazily computed name from the dex file's strings.
  std::string_view name_view_;
};
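
// Illustrative usage (hypothetical, not code from this file): comparing a
// candidate override against a superclass vtable entry without handles.
//   MethodNameAndSignatureComparator cmp(super_method);
//   if (cmp.HasSameNameAndSignature(local_method)) {
//     // local_method overrides super_method.
//   }
// Note the fast path: methods from the same dex file compare name and proto
// indices only; a cross-dex comparison falls back to the actual name string
// and signature contents.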

static ObjPtr<mirror::Class> GetImtOwner(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ImTable* imt = klass->GetImt(kRuntimePointerSize);
  DCHECK(imt != nullptr);
  while (klass->HasSuperClass()) {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (super_class->ShouldHaveImt() && imt != super_class->GetImt(kRuntimePointerSize)) {
      // IMT not shared with the super class, return the current class.
      return klass;
    }
    klass = super_class;
  }
  return nullptr;
}

ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
                                                 ArtMethod* conflict_method,
                                                 ArtMethod* interface_method,
                                                 ArtMethod* method) {
  ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
  Runtime* const runtime = Runtime::Current();

  // The IMT may be shared with a super class, in which case we need to use that
  // super class's `LinearAlloc`. The conflict itself should be limited to
  // methods at or higher up the chain of the IMT owner, otherwise the class
  // linker would have created a different IMT.
  ObjPtr<mirror::Class> imt_owner = GetImtOwner(klass);
  DCHECK(imt_owner != nullptr);

  LinearAlloc* linear_alloc = GetAllocatorForClassLoader(imt_owner->GetClassLoader());

  // Create a new entry if the existing one is the shared conflict method.
  ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
      ? runtime->CreateImtConflictMethod(linear_alloc)
      : conflict_method;

  // Allocate a new table. Note that we will leak this table at the next conflict,
  // but that's a tradeoff compared to making the table fixed size.
  void* data = linear_alloc->Alloc(
      Thread::Current(),
      ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table, image_pointer_size_),
      LinearAllocKind::kNoGCRoots);
  if (data == nullptr) {
    LOG(ERROR) << "Failed to allocate conflict table";
    return conflict_method;
  }
  ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
                                                            interface_method,
                                                            method,
                                                            image_pointer_size_);

  // Do a fence to ensure threads see the data in the table before it is assigned
  // to the conflict method.
  // Note that there is a race in the presence of multiple threads and we may leak
  // memory from the LinearAlloc, but that's a tradeoff compared to using
  // atomic operations.
  std::atomic_thread_fence(std::memory_order_release);
  new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
  return new_conflict_method;
}

void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
                            ArtMethod* imt_conflict_method,
                            ArtMethod* current_method,
                            /*out*/bool* new_conflict,
                            /*out*/ArtMethod** imt_ref) {
  // Place the method in the IMT if the entry is empty, place a conflict otherwise.
  if (*imt_ref == unimplemented_method) {
    *imt_ref = current_method;
  } else if (!(*imt_ref)->IsRuntimeMethod()) {
    // If we are not a conflict and we have the same signature and name as the imt
    // entry, it must be that we overwrote a superclass vtable entry.
    // Note that we have checked IsRuntimeMethod, as there may be multiple different
    // conflict methods.
    MethodNameAndSignatureComparator imt_comparator(
        (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
    if (imt_comparator.HasSameNameAndSignature(
            current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
      *imt_ref = current_method;
    } else {
      *imt_ref = imt_conflict_method;
      *new_conflict = true;
    }
  } else {
    // Place the default conflict method. Note that there may be an existing conflict
    // method in the IMT, but it could be one tailored to the super class, with a
    // specific ImtConflictTable.
    *imt_ref = imt_conflict_method;
    *new_conflict = true;
  }
}
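
// Summary of the transitions above (informal):
//   *imt_ref == unimplemented                      -> current_method
//   real method, same name and signature           -> current_method (vtable override)
//   real method, different name or signature       -> conflict method, *new_conflict = true
//   already a runtime (conflict) method            -> conflict method, *new_conflict = true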

void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
  DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
  DCHECK(!klass->IsTemp()) << klass->PrettyClass();
  ArtMethod* imt_data[ImTable::kSize];
  Runtime* const runtime = Runtime::Current();
  ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
  ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
  std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
  if (klass->GetIfTable() != nullptr) {
    bool new_conflict = false;
    FillIMTFromIfTable(klass->GetIfTable(),
                       unimplemented_method,
                       conflict_method,
                       klass,
                       /*create_conflict_tables=*/true,
                       /*ignore_copied_methods=*/false,
                       &new_conflict,
                       &imt_data[0]);
  }
  // Compare the IMT with the super class, including the conflict methods. If they are equivalent,
  // we can just use the same pointer.
  ImTable* imt = nullptr;
  ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
  if (super_imt != nullptr) {
    bool same = true;
    for (size_t i = 0; same && i < ImTable::kSize; ++i) {
      ArtMethod* method = imt_data[i];
      ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
      if (method != super_method) {
        bool is_conflict_table = method->IsRuntimeMethod() &&
                                 method != unimplemented_method &&
                                 method != conflict_method;
        // Verify conflict contents.
        bool super_conflict_table = super_method->IsRuntimeMethod() &&
                                    super_method != unimplemented_method &&
                                    super_method != conflict_method;
        if (!is_conflict_table || !super_conflict_table) {
          same = false;
        } else {
          ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
          ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
          same = same && table1->Equals(table2, image_pointer_size_);
        }
      }
    }
    if (same) {
      imt = super_imt;
    }
  }
  if (imt == nullptr) {
    imt = klass->GetImt(image_pointer_size_);
    DCHECK(imt != nullptr);
    DCHECK_NE(imt, super_imt);
    imt->Populate(imt_data, image_pointer_size_);
  } else {
    klass->SetImt(imt, image_pointer_size_);
  }
}

ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
                                                      LinearAlloc* linear_alloc,
                                                      PointerSize image_pointer_size) {
  void* data = linear_alloc->Alloc(Thread::Current(),
                                   ImtConflictTable::ComputeSize(count, image_pointer_size),
                                   LinearAllocKind::kNoGCRoots);
  return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
}

ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
  return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
}

void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
                                     ArtMethod* unimplemented_method,
                                     ArtMethod* imt_conflict_method,
                                     ObjPtr<mirror::Class> klass,
                                     bool create_conflict_tables,
                                     bool ignore_copied_methods,
                                     /*out*/bool* new_conflict,
                                     /*out*/ArtMethod** imt) {
  uint32_t conflict_counts[ImTable::kSize] = {};
  for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
    ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
    const size_t num_virtuals = interface->NumVirtualMethods();
    const size_t method_array_count = if_table->GetMethodArrayCount(i);
    // The number of virtual methods can be larger than the if table methods if there are
    // default methods.
    DCHECK_GE(num_virtuals, method_array_count);
    if (kIsDebugBuild) {
      if (klass->IsInterface()) {
        DCHECK_EQ(method_array_count, 0u);
      } else {
        DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
      }
    }
    if (method_array_count == 0) {
      continue;
    }
    ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
    for (size_t j = 0; j < method_array_count; ++j) {
      ArtMethod* implementation_method =
          method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
      if (ignore_copied_methods && implementation_method->IsCopied()) {
        continue;
      }
      DCHECK(implementation_method != nullptr);
      // Miranda methods cannot be used to implement an interface method, but they are safe to put
      // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
      // or interface methods in the IMT here they will not create extra conflicts since we compare
      // names and signatures in SetIMTRef.
      ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
      const uint32_t imt_index = interface_method->GetImtIndex();

      // There is a conflict only if the interface methods mapped to an IMT slot do not all have
      // the same implementation method; keep track of the counts to avoid creating a conflict
      // table when there is no conflict.

      // Conflict table size for each IMT slot.
      ++conflict_counts[imt_index];

      SetIMTRef(unimplemented_method,
                imt_conflict_method,
                implementation_method,
                /*out*/new_conflict,
                /*out*/&imt[imt_index]);
    }
  }

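  // Second pass below: for each slot that ended up holding the shared conflict
  // method, allocate a conflict table sized by conflict_counts[i] and then
  // populate it with every (interface method, implementation) pair that maps
  // to that slot.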
  if (create_conflict_tables) {
    // Create the conflict tables.
    LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
    for (size_t i = 0; i < ImTable::kSize; ++i) {
      size_t conflicts = conflict_counts[i];
      if (imt[i] == imt_conflict_method) {
        ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
        if (new_table != nullptr) {
          ArtMethod* new_conflict_method =
              Runtime::Current()->CreateImtConflictMethod(linear_alloc);
          new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
          imt[i] = new_conflict_method;
        } else {
          LOG(ERROR) << "Failed to allocate conflict table";
          imt[i] = imt_conflict_method;
        }
      } else {
        DCHECK_NE(imt[i], imt_conflict_method);
      }
    }

    for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
      ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
      const size_t method_array_count = if_table->GetMethodArrayCount(i);
      // The number of virtual methods can be larger than the if table methods if there are
      // default methods.
      if (method_array_count == 0) {
        continue;
      }
      ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
      for (size_t j = 0; j < method_array_count; ++j) {
        ArtMethod* implementation_method =
            method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
        if (ignore_copied_methods && implementation_method->IsCopied()) {
          continue;
        }
        DCHECK(implementation_method != nullptr);
        ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
        const uint32_t imt_index = interface_method->GetImtIndex();
        if (!imt[imt_index]->IsRuntimeMethod() ||
            imt[imt_index] == unimplemented_method ||
            imt[imt_index] == imt_conflict_method) {
          continue;
        }
        ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
        const size_t num_entries = table->NumEntries(image_pointer_size_);
        table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
        table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
      }
    }
  }
}

namespace {

// Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
// set.
static bool NotSubinterfaceOfAny(
    const ScopedArenaHashSet<mirror::Class*>& classes,
    ObjPtr<mirror::Class> val)
    REQUIRES(Roles::uninterruptible_)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(val != nullptr);
  for (ObjPtr<mirror::Class> c : classes) {
    if (val->IsAssignableFrom(c)) {
      return false;
    }
  }
  return true;
}

// We record new interfaces by the index of the direct interface and the index in the
// direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
struct NewInterfaceReference {
  uint32_t direct_interface_index;
  uint32_t direct_interface_iftable_index;
};
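
// For example (illustrative): {1, dex::kDexNoIndex} denotes the second direct
// interface itself, while {1, 0} denotes the interface at index 0 of that
// direct interface's own IfTable, i.e. one of its inherited super-interfaces.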

class ProxyInterfacesAccessor {
 public:
  explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : interfaces_(interfaces) {}

  size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
    return interfaces_->GetLength();
  }

  ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(index, GetLength());
    return interfaces_->GetWithoutChecks(index);
  }

 private:
  Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
};

class NonProxyInterfacesAccessor {
 public:
  NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : interfaces_(klass->GetInterfaceTypeList()),
        class_linker_(class_linker),
        klass_(klass) {
    DCHECK(!klass->IsProxyClass());
  }

  size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
    return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
  }

  ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(index, GetLength());
    dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
    return class_linker_->LookupResolvedType(type_index, klass_.Get());
  }

 private:
  const dex::TypeList* interfaces_;
  ClassLinker* class_linker_;
  Handle<mirror::Class> klass_;
};

// Finds new interfaces to add to the interface table in addition to superclass interfaces.
//
// Interfaces in the interface table must satisfy the following constraint:
//     all I, J: Interface | I <: J implies J precedes I
// (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
// to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
//
// This function returns a list of references for all interfaces in the transitive
// closure of the direct interfaces that are not in the superclass interfaces.
// The entries in the list are ordered to satisfy the interface table ordering
// constraint and therefore the interface table formed by appending them to the
// superclass interface table shall also satisfy that constraint.
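//
// Worked example (illustrative): if interface Child extends Parent, then
// Parent must precede Child in the table, e.g. [..., Parent, Child]. A
// subclass that later adds GrandChild (extending Child) only appends it:
// [..., Parent, Child, GrandChild], leaving the inherited prefix untouched.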
template <typename InterfaceAccessor>
ALWAYS_INLINE
static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
    ObjPtr<mirror::IfTable> super_iftable,
    size_t super_ifcount,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces,
    ArrayRef<NewInterfaceReference> initial_storage,
    /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension nts(__FUNCTION__);

  // This is the set of all classes already in the iftable. Used to make checking
  // whether a class has already been added quicker.
  constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
  mirror::Class* buffer[kBufferSize];
  ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
  // The first super_ifcount elements are from the superclass. We note that they are already added.
  for (size_t i = 0; i < super_ifcount; i++) {
    ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
    DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
    classes_in_iftable.Put(iface.Ptr());
  }

  ArrayRef<NewInterfaceReference> current_storage = initial_storage;
  DCHECK_NE(current_storage.size(), 0u);
  size_t num_new_interfaces = 0u;
  auto insert_reference = [&](uint32_t direct_interface_index,
                              uint32_t direct_interface_iface_index) {
    if (UNLIKELY(num_new_interfaces == current_storage.size())) {
      bool copy = current_storage.data() != supplemental_storage->data();
      supplemental_storage->resize(2u * num_new_interfaces);
      if (copy) {
        std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
      }
      current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
    }
    current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
    ++num_new_interfaces;
  };

  for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);

    // Let us call the first filled_ifcount elements of iftable the current-iface-list.
    // At this point in the loop current-iface-list has the invariant that:
    //    for every pair of interfaces I,J within it:
    //      if index_of(I) < index_of(J) then I is not a subtype of J

    // If we have already seen this element then all of its super-interfaces must already be in the
    // current-iface-list so we can skip adding it.
    if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
      // We haven't seen this interface so add all of its super-interfaces onto the
      // current-iface-list, skipping those already on it.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
          DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
          classes_in_iftable.Put(super_interface.Ptr());
          insert_reference(i, j);
        }
      }
      // Add this interface reference after all of its super-interfaces.
      DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering.";
      classes_in_iftable.Put(interface.Ptr());
      insert_reference(i, dex::kDexNoIndex);
    } else if (kIsDebugBuild) {
      // Check all super-interfaces are already in the list.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
            << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
            << ", a superinterface of " << interface->PrettyClass();
      }
    }
  }
  return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
}

template <typename InterfaceAccessor>
static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
    Thread* self,
    Handle<mirror::Class> klass,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass->HasSuperClass());
  ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
  DCHECK(super_iftable != nullptr);
  const size_t num_interfaces = interfaces.GetLength();

  // If there are no new interfaces, return the interface table from the superclass.
  // If any implementation methods are overridden, we shall copy the table and
  // the method arrays that contain any differences (copy-on-write).
  if (num_interfaces == 0) {
    return super_iftable;
  }

  // Check that every class being implemented is an interface.
  for (size_t i = 0; i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
    DCHECK(interface != nullptr);
    if (UNLIKELY(!interface->IsInterface())) {
      ThrowIncompatibleClassChangeError(klass.Get(),
                                        "Class %s implements non-interface class %s",
                                        klass->PrettyDescriptor().c_str(),
                                        interface->PrettyDescriptor().c_str());
      return nullptr;
    }
  }

  static constexpr size_t kMaxStackReferences = 16;
  NewInterfaceReference initial_storage[kMaxStackReferences];
  ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
  const size_t super_ifcount = super_iftable->Count();
  ArrayRef<const NewInterfaceReference> new_interface_references =
      FindNewIfTableInterfaces(
          super_iftable,
          super_ifcount,
          allocator,
          interfaces,
          ArrayRef<NewInterfaceReference>(initial_storage),
          &supplemental_storage);

  // If all declared interfaces were already present in the superclass interface table,
  // return the interface table from the superclass. See above.
  if (UNLIKELY(new_interface_references.empty())) {
    return super_iftable;
  }

  // Create the interface table.
  size_t ifcount = super_ifcount + new_interface_references.size();
  ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
  if (UNLIKELY(iftable == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Fill in the table with the superclass's iftable.
  if (super_ifcount != 0) {
    // Reload `super_iftable` as it may have been clobbered by the allocation.
    super_iftable = klass->GetSuperClass()->GetIfTable();
    for (size_t i = 0; i != super_ifcount; i++) {
      ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
      DCHECK(super_interface != nullptr);
      iftable->SetInterface(i, super_interface);
      ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
      if (method_array != nullptr) {
        iftable->SetMethodArray(i, method_array);
      }
    }
  }
  // Fill in the table with the additional interfaces.
  size_t current_index = super_ifcount;
  for (NewInterfaceReference ref : new_interface_references) {
    ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
    ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
        ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
        : direct_interface;
    iftable->SetInterface(current_index, new_interface);
    ++current_index;
  }
  DCHECK_EQ(current_index, ifcount);

  if (kIsDebugBuild) {
    // Check that the iftable is ordered correctly.
    for (size_t i = 0; i < ifcount; i++) {
      ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
      for (size_t j = i + 1; j < ifcount; j++) {
        ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
        // !(if_a <: if_b)
        CHECK(!if_b->IsAssignableFrom(if_a))
            << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
            << ") extends "
            << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
            << "interface list.";
      }
    }
  }

  return iftable;
}

// Check that all vtable entries are present in this class's virtuals or are the same as a
// superclass's vtable entry.
void CheckClassOwnsVTableEntries(Thread* self,
                                 Handle<mirror::Class> klass,
                                 PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<2> hs(self);
  Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
  ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
  Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
  int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
  for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
    ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
    CHECK(m != nullptr);

    if (m->GetMethodIndexDuringLinking() != i) {
      LOG(WARNING) << m->PrettyMethod()
                   << " has an unexpected method index for its spot in the vtable for class "
                   << klass->PrettyClass();
    }
    ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
    auto is_same_method = [m] (const ArtMethod& meth) {
      return &meth == m;
    };
    if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
          std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
      LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
                   << klass->PrettyClass() << " or any of its superclasses!";
    }
  }
}

// Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
// method is overridden in a subclass.
template <PointerSize kPointerSize>
void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
  int32_t num_entries = vtable->GetLength();

  // Observations:
  // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
  // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
  //   for many classes outside of libcore a cross-dexfile check has to be run anyway.
  // * In the cross-dexfile case, even with the O(n^2) algorithm, in the best case O(n) cross
  //   checks would have to be done. It is thus OK for a single-pass algorithm to read all
  //   data anyway.
  // * The single-pass algorithm will trade memory for speed, but that is OK.

  CHECK_GT(num_entries, 0);

  auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
    ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
    // Log each method with its own address (the original printed m2's address twice).
    LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
                 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
                 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m1) << ") and "
                 << m2->PrettyMethod() << " (0x" << std::hex
                 << reinterpret_cast<uintptr_t>(m2) << ")";
  };
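  // Boost-style hash combiner; 0x9e3779b9 is the usual 32-bit golden-ratio
  // constant, used to decorrelate the combined hash values.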
7036 struct BaseHashType {
7037 static size_t HashCombine(size_t seed, size_t val) {
7038 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7039 }
7040 };
7041
7042 // Check assuming all entries come from the same dex file.
7043 {
7044 // Find the first interesting method and its dex file.
7045 int32_t start = 0;
7046 for (; start < num_entries; ++start) {
7047 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7048 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7049 // maybe).
7050 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7051 vtable_entry->GetAccessFlags())) {
7052 continue;
7053 }
7054 break;
7055 }
7056 if (start == num_entries) {
7057 return;
7058 }
7059 const DexFile* dex_file =
7060 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7061 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7062
7063 // Helper function to avoid logging if we have to run the cross-file checks.
7064 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7065 // Use a map to store seen entries, as the storage space is too large for a bitvector.
7066 using PairType = std::pair<uint32_t, uint16_t>;
7067 struct PairHash : BaseHashType {
7068 size_t operator()(const PairType& key) const {
7069 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7070 }
7071 };
7072 HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
7073 seen.reserve(2 * num_entries);
7074 bool need_slow_path = false;
7075 bool found_dup = false;
7076 for (int i = start; i < num_entries; ++i) {
7077 // Can use Unchecked here as the start loop already ensured that the arrays are correct
7078 // wrt/ kPointerSize.
7079 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7080 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7081 vtable_entry->GetAccessFlags())) {
7082 continue;
7083 }
7084 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7085 if (dex_file != m->GetDexFile()) {
7086 need_slow_path = true;
7087 break;
7088 }
7089 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7090 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7091 auto it = seen.find(pair);
7092 if (it != seen.end()) {
7093 found_dup = true;
7094 if (log_warn) {
7095 log_fn(it->second, i);
7096 }
7097 } else {
7098 seen.insert(std::make_pair(pair, i));
7099 }
7100 }
7101 return std::make_pair(need_slow_path, found_dup);
7102 };
7103 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7104 if (!result.first) {
7105 if (result.second) {
7106 check_fn(/* log_warn= */ true);
7107 }
7108 return;
7109 }
7110 }
7111
7112 // Need to check across dex files.
7113 struct Entry {
7114 size_t cached_hash = 0;
7115 uint32_t name_len = 0;
7116 const char* name = nullptr;
7117 Signature signature = Signature::NoSignature();
7118
7119 Entry() = default;
7120 Entry(const Entry& other) = default;
7121 Entry& operator=(const Entry& other) = default;
7122
7123 Entry(const DexFile* dex_file, const dex::MethodId& mid)
7124 : name_len(0), // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
7125 // This call writes `name_len` and it is therefore necessary that the
7126 // initializer for `name_len` comes before it, otherwise the value
7127 // from the call would be overwritten by that initializer.
7128 name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
7129 signature(dex_file->GetMethodSignature(mid)) {
7130 // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
7131 if (name[name_len] != 0) {
7132 name_len += strlen(name + name_len);
7133 }
7134 }
7135
7136 bool operator==(const Entry& other) const {
7137 return name_len == other.name_len &&
7138 memcmp(name, other.name, name_len) == 0 &&
7139 signature == other.signature;
7140 }
7141 };
7142 struct EntryHash {
7143 size_t operator()(const Entry& key) const {
7144 return key.cached_hash;
7145 }
7146 };
7147 HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
7148 for (int32_t i = 0; i < num_entries; ++i) {
7149 // Can use Unchecked here as the first loop already ensured that the arrays are correct
7150 // wrt/ kPointerSize.
7151 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7152 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7153 // maybe).
7154 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7155 vtable_entry->GetAccessFlags())) {
7156 continue;
7157 }
7158 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7159 const DexFile* dex_file = m->GetDexFile();
7160 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7161
7162 Entry e(dex_file, mid);
7163
7164 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7165 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7166 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7167 sig_hash);
7168
7169 auto it = map.find(e);
7170 if (it != map.end()) {
7171 log_fn(it->second, i);
7172 } else {
7173 map.insert(std::make_pair(e, i));
7174 }
7175 }
7176 }
7177
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)7178 void CheckVTableHasNoDuplicates(Thread* self,
7179 Handle<mirror::Class> klass,
7180 PointerSize pointer_size)
7181 REQUIRES_SHARED(Locks::mutator_lock_) {
7182 switch (pointer_size) {
7183 case PointerSize::k64:
7184 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7185 break;
7186 case PointerSize::k32:
7187 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7188 break;
7189 }
7190 }
7191
CheckVTable(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)7192 static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
7193 REQUIRES_SHARED(Locks::mutator_lock_) {
7194 CheckClassOwnsVTableEntries(self, klass, pointer_size);
7195 CheckVTableHasNoDuplicates(self, klass, pointer_size);
7196 }
7197
7198 } // namespace
7199
7200 template <PointerSize kPointerSize>
7201 class ClassLinker::LinkMethodsHelper {
7202 public:
LinkMethodsHelper(ClassLinker * class_linker,Handle<mirror::Class> klass,Thread * self,Runtime * runtime)7203 LinkMethodsHelper(ClassLinker* class_linker,
7204 Handle<mirror::Class> klass,
7205 Thread* self,
7206 Runtime* runtime)
7207 : class_linker_(class_linker),
7208 klass_(klass),
7209 self_(self),
7210 runtime_(runtime),
7211 stack_(runtime->GetArenaPool()),
7212 allocator_(&stack_),
7213 copied_method_records_(copied_method_records_initial_buffer_,
7214 kCopiedMethodRecordInitialBufferSize,
7215 allocator_.Adapter()),
7216 num_new_copied_methods_(0u) {
7217 }
7218
7219 // Links the virtual and interface methods for the given class.
7220 //
7221 // Arguments:
7222 // * self - The current thread.
7223 // * klass - class, whose vtable will be filled in.
7224 // * interfaces - implemented interfaces for a proxy class, otherwise null.
7225 // * out_new_conflict - whether there is a new conflict compared to the superclass.
7226 // * out_imt - interface method table to fill.
7227 bool LinkMethods(
7228 Thread* self,
7229 Handle<mirror::Class> klass,
7230 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
7231 bool* out_new_conflict,
7232 ArtMethod** out_imt)
7233 REQUIRES_SHARED(Locks::mutator_lock_);
7234
7235 private:
7236 // Allocate a pointer array.
7237 static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
7238 REQUIRES_SHARED(Locks::mutator_lock_);
7239
7240 // Allocate method arrays for interfaces.
7241 bool AllocateIfTableMethodArrays(Thread* self,
7242 Handle<mirror::Class> klass,
7243 Handle<mirror::IfTable> iftable)
7244 REQUIRES_SHARED(Locks::mutator_lock_);
7245
7246 // Assign vtable indexes to declared virtual methods for a non-interface class other
7247 // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
7248 // This function also assigns vtable indexes for interface methods in new interfaces
7249 // and records data for copied methods which shall be referenced by the vtable.
7250 size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
7251 ObjPtr<mirror::Class> super_class,
7252 bool is_super_abstract,
7253 size_t num_virtual_methods,
7254 ObjPtr<mirror::IfTable> iftable)
7255 REQUIRES_SHARED(Locks::mutator_lock_);
7256
7257 bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
7258 size_t num_virtual_methods,
7259 ObjPtr<mirror::IfTable> iftable)
7260 REQUIRES_SHARED(Locks::mutator_lock_);
7261
7262 bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
7263 REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
7264
7265 void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
7266 bool FinalizeIfTable(Handle<mirror::Class> klass,
7267 MutableHandle<mirror::IfTable> iftable,
7268 Handle<mirror::PointerArray> vtable,
7269 bool is_klass_abstract,
7270 bool is_super_abstract,
7271 bool* out_new_conflict,
7272 ArtMethod** out_imt)
7273 REQUIRES_SHARED(Locks::mutator_lock_);
7274
ClobberOldMethods(LengthPrefixedArray<ArtMethod> * old_methods,LengthPrefixedArray<ArtMethod> * methods)7275 void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
7276 LengthPrefixedArray<ArtMethod>* methods) {
7277 if (kIsDebugBuild && old_methods != nullptr) {
7278 CHECK(methods != nullptr);
7279 // Put some random garbage in old methods to help find stale pointers.
7280 if (methods != old_methods) {
7281 // Need to make sure the GC is not running since it could be scanning the methods we are
7282 // about to overwrite.
7283 ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
7284 gc::ScopedGCCriticalSection gcs(self_,
7285 gc::kGcCauseClassLinker,
7286 gc::kCollectorTypeClassLinker);
7287 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
7288 kMethodSize,
7289 kMethodAlignment);
7290 memset(old_methods, 0xFEu, old_size);
7291 // Set size to 0 to avoid visiting declaring classes.
7292 if (gUseUserfaultfd) {
7293 old_methods->SetSize(0);
7294 }
7295 }
7296 }
7297 }
7298
7299 NO_INLINE
LogNewVirtuals(LengthPrefixedArray<ArtMethod> * methods) const7300 void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
7301 REQUIRES_SHARED(Locks::mutator_lock_) {
7302 ObjPtr<mirror::Class> klass = klass_.Get();
7303 size_t num_new_copied_methods = num_new_copied_methods_;
7304 size_t old_method_count = methods->size() - num_new_copied_methods;
7305 size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7306 size_t num_miranda_methods = 0u;
7307 size_t num_overriding_default_methods = 0u;
7308 size_t num_default_methods = 0u;
7309 size_t num_overriding_default_conflict_methods = 0u;
7310 size_t num_default_conflict_methods = 0u;
7311 for (size_t i = 0; i != num_new_copied_methods; ++i) {
7312 ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7313 if (m.IsDefault()) {
7314 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7315 ++num_overriding_default_methods;
7316 } else {
7317 ++num_default_methods;
7318 }
7319 } else if (m.IsDefaultConflicting()) {
7320 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7321 ++num_overriding_default_conflict_methods;
7322 } else {
7323 ++num_default_conflict_methods;
7324 }
7325 } else {
7326 DCHECK(m.IsMiranda());
7327 ++num_miranda_methods;
7328 }
7329 }
7330 VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7331 << " default_methods=" << num_default_methods
7332 << " overriding_default_methods=" << num_overriding_default_methods
7333 << " default_conflict_methods=" << num_default_conflict_methods
7334 << " overriding_default_conflict_methods="
7335 << num_overriding_default_conflict_methods;
7336 }
7337
7338 class MethodIndexEmptyFn {
7339 public:
MakeEmpty(uint32_t & item) const7340 void MakeEmpty(uint32_t& item) const {
7341 item = dex::kDexNoIndex;
7342 }
IsEmpty(const uint32_t & item) const7343 bool IsEmpty(const uint32_t& item) const {
7344 return item == dex::kDexNoIndex;
7345 }
7346 };
7347
7348 class VTableIndexCheckerDebug {
7349 protected:
VTableIndexCheckerDebug(size_t vtable_length)7350 explicit VTableIndexCheckerDebug(size_t vtable_length)
7351 : vtable_length_(vtable_length) {}
7352
CheckIndex(uint32_t index) const7353 void CheckIndex(uint32_t index) const {
7354 CHECK_LT(index, vtable_length_);
7355 }
7356
7357 private:
7358 uint32_t vtable_length_;
7359 };
7360
7361 class VTableIndexCheckerRelease {
7362 protected:
VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED)7363 explicit VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED) {}
CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const7364 void CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const {}
7365 };
7366
7367 using VTableIndexChecker =
7368 std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7369
7370 class VTableAccessor : private VTableIndexChecker {
7371 public:
VTableAccessor(uint8_t * raw_vtable,size_t vtable_length)7372 VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
7373 REQUIRES_SHARED(Locks::mutator_lock_)
7374 : VTableIndexChecker(vtable_length),
7375 raw_vtable_(raw_vtable) {}
7376
GetVTableEntry(uint32_t index) const7377 ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
7378 this->CheckIndex(index);
7379 uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
7380 if (kPointerSize == PointerSize::k64) {
7381 return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
7382 } else {
7383 return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
7384 }
7385 }
7386
7387 private:
7388 uint8_t* raw_vtable_;
7389 };
7390
7391 class VTableSignatureHash {
7392 public:
7393 explicit VTableSignatureHash(VTableAccessor accessor)
7394 REQUIRES_SHARED(Locks::mutator_lock_)
7395 : accessor_(accessor) {}
7396
7397 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7398 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7399 return ComputeMethodHash(method);
7400 }
7401
7402 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7403 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7404 return ComputeMethodHash(accessor_.GetVTableEntry(index));
7405 }
7406
7407 private:
7408 VTableAccessor accessor_;
7409 };
7410
7411 class VTableSignatureEqual {
7412 public:
7413 explicit VTableSignatureEqual(VTableAccessor accessor)
7414 REQUIRES_SHARED(Locks::mutator_lock_)
7415 : accessor_(accessor) {}
7416
7417 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7418 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7419 return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
7420 }
7421
7422 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7423 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7424 return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
7425 }
7426
7427 private:
7428 VTableAccessor accessor_;
7429 };
7430
7431 using VTableSignatureSet =
7432 ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
7433
7434 class DeclaredVirtualSignatureHash {
7435 public:
7436 explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
7437 REQUIRES_SHARED(Locks::mutator_lock_)
7438 : klass_(klass) {}
7439
7440 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7441 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7442 return ComputeMethodHash(method);
7443 }
7444
7445 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7446 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7447 DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
7448 ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
7449 return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
7450 }
7451
7452 private:
7453 ObjPtr<mirror::Class> klass_;
7454 };
7455
7456 class DeclaredVirtualSignatureEqual {
7457 public:
7458 explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
7459 REQUIRES_SHARED(Locks::mutator_lock_)
7460 : klass_(klass) {}
7461
7462 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7463 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7464 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7465 ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
7466 return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
7467 }
7468
7469 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7470 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7471 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7472 DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
7473 return lhs_index == rhs_index;
7474 }
7475
7476 private:
7477 ObjPtr<mirror::Class> klass_;
7478 };
7479
7480 using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
7481 MethodIndexEmptyFn,
7482 DeclaredVirtualSignatureHash,
7483 DeclaredVirtualSignatureEqual>;
7484
7485 // Helper class to keep records for determining the correct copied method to create.
7486 class CopiedMethodRecord {
7487 public:
7488 enum class State : uint32_t {
7489 // Note: The `*Single` values are used when we know that there is only one interface
7490 // method with the given signature that's not masked; that method is the main method.
7491 // We use this knowledge for a faster masking check, otherwise we need to search for
7492 // a masking method through the methods of all interfaces that could potentially mask it.
7493 kAbstractSingle,
7494 kDefaultSingle,
7495 kAbstract,
7496 kDefault,
7497 kDefaultConflict,
7498 kUseSuperMethod,
7499 };
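
// A minimal sketch (hypothetical Java types, not from this codebase) of how these
// states arise:
//
//   interface A { default void m() {} }
//   interface B extends A { default void m() {} }  // B.m() masks A.m().
//   interface C { default void m() {} }
//
// For `class X implements B`, only B.m() is unmasked, so the record for m() can stay
// in the fast `kDefaultSingle` state. For `class Y implements B, C`, both B.m() and
// C.m() are unmasked, so the record becomes `kDefaultConflict` and invoking m() on `Y`
// throws IncompatibleClassChangeError.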
7500
7501 CopiedMethodRecord()
7502 : main_method_(nullptr),
7503 method_index_(0u),
7504 state_(State::kAbstractSingle) {}
7505
7506 CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
7507 : main_method_(main_method),
7508 method_index_(vtable_index),
7509 state_(State::kAbstractSingle) {}
7510
7511 // Set the main method. The new main method must be a more specific implementation.
7512 void SetMainMethod(ArtMethod* main_method) {
7513 DCHECK(main_method_ != nullptr);
7514 main_method_ = main_method;
7515 }
7516
7517 // The main method is the first encountered default method if any,
7518 // otherwise the first encountered abstract method.
7519 ArtMethod* GetMainMethod() const {
7520 return main_method_;
7521 }
7522
7523 void SetMethodIndex(size_t method_index) {
7524 DCHECK_NE(method_index, dex::kDexNoIndex);
7525 method_index_ = method_index;
7526 }
7527
7528 size_t GetMethodIndex() const {
7529 DCHECK_NE(method_index_, dex::kDexNoIndex);
7530 return method_index_;
7531 }
7532
7533 void SetState(State state) {
7534 state_ = state;
7535 }
7536
7537 State GetState() const {
7538 return state_;
7539 }
7540
7541 ALWAYS_INLINE
7542 void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
7543 ArtMethod* interface_method,
7544 ObjPtr<mirror::IfTable> iftable,
7545 size_t ifcount,
7546 size_t index)
7547 REQUIRES_SHARED(Locks::mutator_lock_) {
7548 DCHECK_EQ(ifcount, iftable->Count());
7549 DCHECK_LT(index, ifcount);
7550 DCHECK(iface == interface_method->GetDeclaringClass());
7551 DCHECK(iface == iftable->GetInterface(index));
7552 DCHECK(interface_method->IsDefault());
7553 if (GetState() != State::kDefaultConflict) {
7554 DCHECK(GetState() == State::kDefault);
7555 // We do not record all overriding methods, so we need to walk over all
7556 // interfaces that could mask the `interface_method`.
7557 if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
7558 return; // Found an overriding method that masks `interface_method`.
7559 }
7560 // We have a new default method that's not masked by any other method.
7561 SetState(State::kDefaultConflict);
7562 }
7563 }
7564
7565 ALWAYS_INLINE
7566 void UpdateState(ObjPtr<mirror::Class> iface,
7567 ArtMethod* interface_method,
7568 size_t vtable_index,
7569 ObjPtr<mirror::IfTable> iftable,
7570 size_t ifcount,
7571 size_t index)
7572 REQUIRES_SHARED(Locks::mutator_lock_) {
7573 DCHECK_EQ(ifcount, iftable->Count());
7574 DCHECK_LT(index, ifcount);
7575 if (kIsDebugBuild) {
7576 if (interface_method->IsCopied()) {
7577 // Called from `FinalizeState()` for a default method from superclass.
7578 // The `index` points to the last interface inherited from the superclass
7579 // as we need to search only the new interfaces for masking methods.
7580 DCHECK(interface_method->IsDefault());
7581 } else {
7582 DCHECK(iface == interface_method->GetDeclaringClass());
7583 DCHECK(iface == iftable->GetInterface(index));
7584 }
7585 }
7586 DCHECK_EQ(vtable_index, method_index_);
7587 auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
7588 return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
7589 };
7590 UpdateStateImpl(iface, interface_method, slow_is_masked);
7591 }
7592
7593 ALWAYS_INLINE
7594 void FinalizeState(ArtMethod* super_method,
7595 size_t vtable_index,
7596 ObjPtr<mirror::IfTable> iftable,
7597 size_t ifcount,
7598 ObjPtr<mirror::IfTable> super_iftable,
7599 size_t super_ifcount)
7600 REQUIRES_SHARED(Locks::mutator_lock_) {
7601 DCHECK(super_method->IsCopied());
7602 DCHECK_EQ(vtable_index, method_index_);
7603 DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
7604 DCHECK_NE(super_ifcount, 0u);
7605 if (super_method->IsDefault()) {
7606 if (UNLIKELY(super_method->IsDefaultConflicting())) {
7607 // Some of the default methods that contributed to the conflict in the superclass
7608 // may be masked by new interfaces. Walk over all the interfaces and update state
7609 // as long as the current state is not `kDefaultConflict`.
7610 size_t i = super_ifcount;
7611 while (GetState() != State::kDefaultConflict && i != 0u) {
7612 --i;
7613 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
7614 DCHECK(iface == super_iftable->GetInterface(i));
7615 auto [found, index] =
7616 MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
7617 if (found) {
7618 ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
7619 auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
7620 // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
7621 // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
7622 // use the `super_iftable` filled with implementation methods for that range.
7623 return ContainsImplementingMethod(
7624 super_iftable, i + 1u, super_ifcount, iface, super_method) ||
7625 ContainsImplementingMethod(
7626 iftable, super_ifcount, ifcount, iface, vtable_index);
7627 };
7628 UpdateStateImpl(iface, interface_method, slow_is_masked);
7629 }
7630 }
7631 if (GetState() == State::kDefaultConflict) {
7632 SetState(State::kUseSuperMethod);
7633 }
7634 } else {
7635 // There was exactly one default method in superclass interfaces that was
7636 // not masked by subinterfaces. Use `UpdateState()` to process it and pass
7637 // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
7638 ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
7639 UpdateState(
7640 iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
7641 if (GetMainMethod() == super_method) {
7642 DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
7643 SetState(State::kUseSuperMethod);
7644 }
7645 }
7646 } else {
7647 DCHECK(super_method->IsMiranda());
7648 // Any default methods with this signature in superclass interfaces have been
7649 // masked by subinterfaces. Check if we can reuse the miranda method.
7650 if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
7651 SetState(State::kUseSuperMethod);
7652 }
7653 }
7654 }
7655
7656 private:
7657 template <typename Predicate>
7658 ALWAYS_INLINE
7659 void UpdateStateImpl(ObjPtr<mirror::Class> iface,
7660 ArtMethod* interface_method,
7661 Predicate&& slow_is_masked)
7662 REQUIRES_SHARED(Locks::mutator_lock_) {
7663 bool have_default = false;
7664 switch (GetState()) {
7665 case State::kDefaultSingle:
7666 have_default = true;
7667 FALLTHROUGH_INTENDED;
7668 case State::kAbstractSingle:
7669 if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
7670 return; // The main method masks the `interface_method`.
7671 }
7672 if (!interface_method->IsDefault()) {
7673 SetState(have_default ? State::kDefault : State::kAbstract);
7674 return;
7675 }
7676 break;
7677 case State::kDefault:
7678 have_default = true;
7679 FALLTHROUGH_INTENDED;
7680 case State::kAbstract:
7681 if (!interface_method->IsDefault()) {
7682 return; // Keep the same state. We do not need to check for masking.
7683 }
7684 // We do not record all overriding methods, so we need to walk over all
7685 // interfaces that could mask the `interface_method`. The provided
7686 // predicate `slow_is_masked()` does that.
7687 if (slow_is_masked()) {
7688 return; // Found an overriding method that masks `interface_method`.
7689 }
7690 break;
7691 case State::kDefaultConflict:
7692 return; // The state cannot change anymore.
7693 default:
7694 LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
7695 UNREACHABLE();
7696 }
7697 // We have a new default method that's not masked by any other method.
7698 DCHECK(interface_method->IsDefault());
7699 if (have_default) {
7700 SetState(State::kDefaultConflict);
7701 } else {
7702 SetMainMethod(interface_method);
7703 SetState(State::kDefault);
7704 }
7705 }
7706
7707 // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7708 // that declares a method with the same name and signature as `interface_method`.
7709 //
7710 // Arguments
7711 // - iftable: The iftable we are searching for an overriding method.
7712 // - begin: The start of the range to search.
7713 // - end: The end of the range to search.
7714 // - iface: The interface we are checking to see if anything overrides.
7715 // - interface_method:
7716 // The interface method providing a name and signature we're searching for.
7717 //
7718 // Returns whether an overriding method was found in any subinterface of `iface`.
7719 static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
7720 size_t begin,
7721 size_t end,
7722 ObjPtr<mirror::Class> iface,
7723 ArtMethod* interface_method)
7724 REQUIRES_SHARED(Locks::mutator_lock_) {
7725 for (size_t i = begin; i != end; ++i) {
7726 ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
7727 for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
7728 if (MethodSignatureEquals(&current_method, interface_method)) {
7729 // Check if the i'th interface is a subtype of this one.
7730 if (current_iface->Implements(iface)) {
7731 return true;
7732 }
7733 break;
7734 }
7735 }
7736 }
7737 return false;
7738 }
7739
7740 // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7741 // that declares a method implemented by `target`. This is an optimized version of
7742 // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
7743 // of comparing signatures for declared interface methods.
7744 //
7745 // Arguments
7746 // - iftable: The iftable we are searching for an overriding method.
7747 // - begin: The start of the range to search.
7748 // - end: The end of the range to search.
7749 // - iface: The interface we are checking to see if anything overrides.
7750 // - target: The implementation method we're searching for.
7751 // Note that the new `iftable` is filled with vtable indexes for new interfaces,
7752 // so this needs to be the vtable index if we're searching that range.
7753 //
7754 // Returns whether the `target` was found in a method array for any subinterface of `iface`.
7755 template <typename TargetType>
7756 static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
7757 size_t begin,
7758 size_t end,
7759 ObjPtr<mirror::Class> iface,
7760 TargetType target)
7761 REQUIRES_SHARED(Locks::mutator_lock_) {
7762 for (size_t i = begin; i != end; ++i) {
7763 if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
7764 iftable->GetInterface(i)->Implements(iface)) {
7765 return true;
7766 }
7767 }
7768 return false;
7769 }
7770
7771 template <typename TargetType>
7772 static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
7773 TargetType target)
7774 REQUIRES_SHARED(Locks::mutator_lock_) {
7775 size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
7776 for (size_t j = 0; j != num_methods; ++j) {
7777 if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
7778 return {true, j};
7779 }
7780 }
7781 return {false, 0};
7782 }
7783
7784 ArtMethod* main_method_;
7785 uint32_t method_index_;
7786 State state_;
7787 };
7788
7789 class CopiedMethodRecordEmptyFn {
7790 public:
7791 void MakeEmpty(CopiedMethodRecord& item) const {
7792 item = CopiedMethodRecord();
7793 }
7794 bool IsEmpty(const CopiedMethodRecord& item) const {
7795 return item.GetMainMethod() == nullptr;
7796 }
7797 };
7798
7799 class CopiedMethodRecordHash {
7800 public:
7801 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7802 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7803 DCHECK(method != nullptr);
7804 return ComputeMethodHash(method);
7805 }
7806
7807 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7808 size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
7809 return (*this)(record.GetMainMethod());
7810 }
7811 };
7812
7813 class CopiedMethodRecordEqual {
7814 public:
7815 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7816 bool operator()(const CopiedMethodRecord& lhs_record,
7817 ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7818 ArtMethod* lhs = lhs_record.GetMainMethod();
7819 DCHECK(lhs != nullptr);
7820 DCHECK(rhs != nullptr);
7821 return MethodSignatureEquals(lhs, rhs);
7822 }
7823
7824 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7825 bool operator()(const CopiedMethodRecord& lhs_record,
7826 const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
7827 return (*this)(lhs_record, rhs_record.GetMainMethod());
7828 }
7829 };
7830
7831 using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
7832 CopiedMethodRecordEmptyFn,
7833 CopiedMethodRecordHash,
7834 CopiedMethodRecordEqual>;
7835
7836 static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
7837 static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);
7838
7839 ClassLinker* class_linker_;
7840 Handle<mirror::Class> klass_;
7841 Thread* const self_;
7842 Runtime* const runtime_;
7843
7844 // These are allocated on the heap to begin with; we then transfer them to linear alloc
7845 // when we re-create the virtual methods array.
7846 // We need to use low 4GB arenas for the compiler, or else the pointers won't fit in the
7847 // 32-bit method array during cross compilation.
7848 // Use the linear alloc pool since this one is in the low 4GB for the compiler.
7849 ArenaStack stack_;
7850 ScopedArenaAllocator allocator_;
7851
7852 // If there are multiple methods with the same signature in the superclass vtable
7853 // (which can happen with a new virtual method having the same signature as an
7854 // inaccessible package-private method from another package in the superclass),
7855 // we keep singly-linked lists in this single array that maps vtable index to the
7856 // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
7857 ArrayRef<uint32_t> same_signature_vtable_lists_;
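
// Worked example (hypothetical indexes): if super vtable entries 3 and 7 share a
// signature overridden by one new virtual method, the method ends up with index 7
// (the last match), `same_signature_vtable_lists_[7]` is set to 3, and entry 3 still
// holds `dex::kDexNoIndex`, terminating the list.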
7858
7859 // Avoid large allocation for a few copied method records.
7860 // Keep the initial buffer on the stack to avoid arena allocations
7861 // if there are no special cases (the first arena allocation is costly).
7862 static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
7863 CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
7864 CopiedMethodRecordSet copied_method_records_;
7865 size_t num_new_copied_methods_;
7866 };
7867
7868 template <PointerSize kPointerSize>
7869 NO_INLINE
7870 void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
7871 // There should be no thread suspension in this function,
7872 // native allocations do not cause thread suspension.
7873 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
7874
7875 size_t num_new_copied_methods = num_new_copied_methods_;
7876 DCHECK_NE(num_new_copied_methods, 0u);
7877 const size_t old_method_count = klass->NumMethods();
7878 const size_t new_method_count = old_method_count + num_new_copied_methods;
7879
7880 // Attempt to realloc to save RAM if possible.
7881 LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
7882 // The Realloced virtual methods aren't visible from the class roots, so there is no issue
7883 // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
7884 // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
7885 // CopyFrom has internal read barriers.
7886 //
7887 // TODO We should maybe move some of this into mirror::Class or at least into another method.
7888 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
7889 kMethodSize,
7890 kMethodAlignment);
7891 const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
7892 kMethodSize,
7893 kMethodAlignment);
7894 const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
7895 auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
7896 class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader())->Realloc(
7897 self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
7898 CHECK(methods != nullptr); // Native allocation failure aborts.
7899
7900 if (methods != old_methods) {
7901 if (gUseReadBarrier) {
7902 StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
7903 // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
7904 // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
7905 for (auto& m : klass->GetMethods(kPointerSize)) {
7906 out->CopyFrom(&m, kPointerSize);
7907 ++out;
7908 }
7909 } else if (gUseUserfaultfd) {
7910 // Clear the declaring class of the old dangling method array so that GC doesn't
7911 // try to update them, which could cause crashes in userfaultfd GC due to
7912 // checks in post-compact address computation.
7913 for (auto& m : klass->GetMethods(kPointerSize)) {
7914 m.SetDeclaringClass(nullptr);
7915 }
7916 }
7917 }
7918
7919 // Collect and sort copied method records by the vtable index. This places overriding
7920 // copied methods first, sorted by the vtable index already assigned in the superclass,
7921 // followed by copied methods with new signatures in the order in which we encountered
7922 // them when going over virtual methods of new interfaces.
7923 // This order is deterministic but implementation-defined.
7924 //
7925 // Avoid arena allocation for a few records (the first arena allocation is costly).
7926 constexpr size_t kSortedRecordsBufferSize = 16;
7927 CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
7928 CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
7929 ? sorted_records_buffer
7930 : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
7931 size_t filled_sorted_records = 0u;
7932 for (CopiedMethodRecord& record : copied_method_records_) {
7933 if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
7934 DCHECK_LT(filled_sorted_records, num_new_copied_methods);
7935 sorted_records[filled_sorted_records] = &record;
7936 ++filled_sorted_records;
7937 }
7938 }
7939 DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
7940 std::sort(sorted_records,
7941 sorted_records + num_new_copied_methods,
7942 [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
7943 return lhs->GetMethodIndex() < rhs->GetMethodIndex();
7944 });
7945
7946 if (klass->IsInterface()) {
7947 // Some records may have been pruned. Update method indexes in collected records.
7948 size_t interface_method_index = klass->NumDeclaredVirtualMethods();
7949 for (size_t i = 0; i != num_new_copied_methods; ++i) {
7950 CopiedMethodRecord* record = sorted_records[i];
7951 DCHECK_LE(interface_method_index, record->GetMethodIndex());
7952 record->SetMethodIndex(interface_method_index);
7953 ++interface_method_index;
7954 }
7955 }
7956
7957 // Add copied methods.
7958 methods->SetSize(new_method_count);
7959 for (size_t i = 0; i != num_new_copied_methods; ++i) {
7960 const CopiedMethodRecord* record = sorted_records[i];
7961 ArtMethod* interface_method = record->GetMainMethod();
7962 DCHECK(!interface_method->IsCopied());
7963 ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7964 new_method.CopyFrom(interface_method, kPointerSize);
7965 new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
7966 switch (record->GetState()) {
7967 case CopiedMethodRecord::State::kAbstractSingle:
7968 case CopiedMethodRecord::State::kAbstract: {
7969 DCHECK(!klass->IsInterface()); // We do not create miranda methods for interfaces.
7970 uint32_t access_flags = new_method.GetAccessFlags();
7971 DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
7972 << "Miranda method should be abstract but not intrinsic or default!";
7973 new_method.SetAccessFlags(access_flags | kAccCopied);
7974 break;
7975 }
7976 case CopiedMethodRecord::State::kDefaultSingle:
7977 case CopiedMethodRecord::State::kDefault: {
7978 DCHECK(!klass->IsInterface()); // We do not copy default methods for interfaces.
7979 // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
7980 // verified yet, it shouldn't have methods that are skipping access checks.
7981 // TODO This is rather arbitrary. We should maybe support classes where only some of
7982 // their methods are skip_access_checks.
7983 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
7984 constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
7985 constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
7986 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
7987 break;
7988 }
7989 case CopiedMethodRecord::State::kDefaultConflict: {
7990 // This is a type of default method (there are default method impls, just a conflict),
7991 // so mark this as a default. We use the `kAccAbstract` flag to distinguish it from an
7992 // invokable copied default method without using a separate access flag, but the default
7993 // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
7994 // return false. Also clear the kAccSkipAccessChecks bit since this class hasn't been
7995 // verified yet, so it shouldn't have methods that are skipping access checks. Also clear
7996 // the potential kAccSingleImplementation to avoid CHA trying to inline the default method.
7997 uint32_t access_flags = new_method.GetAccessFlags();
7998 DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
7999 constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
8000 constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
8001 new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
8002 new_method.SetDataPtrSize(nullptr, kPointerSize);
8003 DCHECK(new_method.IsDefaultConflicting());
8004 DCHECK(!new_method.IsAbstract());
8005 // The actual method might or might not be marked abstract since we just copied it from
8006 // a (possibly default) interface method. We need to set its entry point to be the bridge
8007 // so that the compiler will not invoke the implementation of whatever method we copied
8008 // from.
8009 EnsureThrowsInvocationError(class_linker_, &new_method);
8010 break;
8011 }
8012 default:
8013 LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
8014 UNREACHABLE();
8015 }
8016 }
8017
8018 if (VLOG_IS_ON(class_linker)) {
8019 LogNewVirtuals(methods);
8020 }
8021
8022 class_linker_->UpdateClassMethods(klass, methods);
8023 }
8024
8025 template <PointerSize kPointerSize>
8026 bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
8027 Handle<mirror::Class> klass,
8028 MutableHandle<mirror::IfTable> iftable,
8029 Handle<mirror::PointerArray> vtable,
8030 bool is_klass_abstract,
8031 bool is_super_abstract,
8032 bool* out_new_conflict,
8033 ArtMethod** out_imt) {
8034 size_t ifcount = iftable->Count();
8035 // We do not need a read barrier here as the length is constant, both from-space and
8036 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8037 size_t super_ifcount =
8038 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8039
8040 ClassLinker* class_linker = nullptr;
8041 ArtMethod* unimplemented_method = nullptr;
8042 ArtMethod* imt_conflict_method = nullptr;
8043 uintptr_t imt_methods_begin = 0u;
8044 size_t imt_methods_size = 0u;
8045 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8046 DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
8047 if (!is_klass_abstract) {
8048 class_linker = class_linker_;
8049 unimplemented_method = runtime_->GetImtUnimplementedMethod();
8050 imt_conflict_method = runtime_->GetImtConflictMethod();
8051 if (is_super_abstract) {
8052 // There was no IMT in superclass to copy to `out_imt[]`, so we need
8053 // to fill it with all implementation methods from superclass.
8054 DCHECK_EQ(imt_methods_begin, 0u);
8055 imt_methods_size = std::numeric_limits<size_t>::max(); // No method at the last byte.
8056 } else {
8057 // If the superclass has IMT, we have already copied it to `out_imt[]` and
8058 // we do not need to call `SetIMTRef()` for interfaces from superclass when
8059 // the implementation method is already in the superclass, only for new methods.
8060 // For simplicity, use the entire method array including direct methods.
8061 LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
8062 if (new_methods != nullptr) {
8063 DCHECK_NE(new_methods->size(), 0u);
8064 imt_methods_begin =
8065 reinterpret_cast<uintptr_t>(&new_methods->At(0, kMethodSize, kMethodAlignment));
8066 imt_methods_size = new_methods->size() * kMethodSize;
8067 }
8068 }
8069 }
8070
8071 auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
8072 REQUIRES_SHARED(Locks::mutator_lock_) {
8073 // Place method in imt if entry is empty, place conflict otherwise.
8074 ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
8075 class_linker->SetIMTRef(unimplemented_method,
8076 imt_conflict_method,
8077 implementation,
8078 /*out*/out_new_conflict,
8079 /*out*/imt_ptr);
8080 };
8081
8082 // For interfaces inherited from superclass, the new method arrays are empty,
8083 // so use vtable indexes from implementation methods from the superclass method array.
8084 for (size_t i = 0; i != super_ifcount; ++i) {
8085 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8086 DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
8087 if (method_array == nullptr) {
8088 continue;
8089 }
8090 size_t num_methods = method_array->GetLength();
8091 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8092 size_t j = 0;
8093 // First loop has method array shared with the super class.
8094 for (; j != num_methods; ++j) {
8095 ArtMethod* super_implementation =
8096 method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
8097 size_t vtable_index = super_implementation->GetMethodIndex();
8098 ArtMethod* implementation =
8099 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
8100 // Check if we need to update IMT with this method, see above.
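// Note: this single unsigned comparison is equivalent to checking
// `imt_methods_begin <= implementation < imt_methods_begin + imt_methods_size`;
// when `implementation` is below `imt_methods_begin`, the subtraction wraps
// around to a value that is never less than `imt_methods_size`.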
8101 if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
8102 update_imt(iface, j, implementation);
8103 }
8104 if (implementation != super_implementation) {
8105 // Copy-on-write and move to the next loop.
8106 Thread* self = self_;
8107 StackHandleScope<2u> hs(self);
8108 Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
8109 HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
8110 if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
8111 ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
8112 mirror::ObjectArray<mirror::Object>::CopyOf(
8113 iftable, self, ifcount * mirror::IfTable::kMax));
8114 if (new_iftable == nullptr) {
8115 return false;
8116 }
8117 iftable.Assign(new_iftable);
8118 }
8119 method_array = ObjPtr<mirror::PointerArray>::DownCast(
8120 mirror::Array::CopyOf(old_method_array, self, num_methods));
8121 if (method_array == nullptr) {
8122 return false;
8123 }
8124 iftable->SetMethodArray(i, method_array);
8125 method_array->SetElementPtrSize(j, implementation, kPointerSize);
8126 ++j;
8127 break;
8128 }
8129 }
8130 // Second loop (if non-empty) has method array different from the superclass.
8131 for (; j != num_methods; ++j) {
8132 ArtMethod* super_implementation =
8133 method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
8134 size_t vtable_index = super_implementation->GetMethodIndex();
8135 ArtMethod* implementation =
8136 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
8137 method_array->SetElementPtrSize(j, implementation, kPointerSize);
8138 // Check if we need to update IMT with this method, see above.
8139 if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
8140 update_imt(iface, j, implementation);
8141 }
8142 }
8143 }
8144
8145 // New interface method arrays contain vtable indexes. Translate them to methods.
8146 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8147 for (size_t i = super_ifcount; i != ifcount; ++i) {
8148 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8149 if (method_array == nullptr) {
8150 continue;
8151 }
8152 size_t num_methods = method_array->GetLength();
8153 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8154 for (size_t j = 0; j != num_methods; ++j) {
8155 size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
8156 ArtMethod* implementation =
8157 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
8158 method_array->SetElementPtrSize(j, implementation, kPointerSize);
8159 if (!is_klass_abstract) {
8160 update_imt(iface, j, implementation);
8161 }
8162 }
8163 }
8164
8165 return true;
8166 }
8167
8168 template <PointerSize kPointerSize>
8169 ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
8170 Thread* self, size_t length) {
8171 using PointerArrayType = std::conditional_t<
8172 kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
8173 ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
8174 return ObjPtr<mirror::PointerArray>::DownCast(array);
8175 }
8176
8177 template <PointerSize kPointerSize>
8178 bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
8179 Thread* self,
8180 Handle<mirror::Class> klass,
8181 Handle<mirror::IfTable> iftable) {
8182 DCHECK(!klass->IsInterface());
8183 DCHECK(klass_->HasSuperClass());
8184 const size_t ifcount = iftable->Count();
8185 // We do not need a read barrier here as the length is constant, both from-space and
8186 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8187 size_t super_ifcount =
8188 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8189 if (ifcount == super_ifcount) {
8190 DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
8191 return true;
8192 }
8193
8194 if (kIsDebugBuild) {
8195 // The method array references for superclass interfaces have been copied.
8196 // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
8197 ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
8198 for (size_t i = 0; i != super_ifcount; ++i) {
8199 CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
8200 CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
8201 }
8202 }
8203
8204 for (size_t i = super_ifcount; i < ifcount; ++i) {
8205 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
8206 if (num_methods > 0) {
8207 ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
8208 if (UNLIKELY(method_array == nullptr)) {
8209 self->AssertPendingOOMException();
8210 return false;
8211 }
8212 iftable->SetMethodArray(i, method_array);
8213 }
8214 }
8215 return true;
8216 }
8217
8218 template <PointerSize kPointerSize>
8219 size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
8220 ObjPtr<mirror::Class> klass,
8221 ObjPtr<mirror::Class> super_class,
8222 bool is_super_abstract,
8223 size_t num_virtual_methods,
8224 ObjPtr<mirror::IfTable> iftable) {
8225 DCHECK(!klass->IsInterface());
8226 DCHECK(klass->HasSuperClass());
8227 DCHECK(klass->GetSuperClass() == super_class);
8228
8229 // There should be no thread suspension unless we want to throw an exception.
8230 // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
8231 std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8232
8233 // Prepare a hash table with virtual methods from the superclass.
8234 // For the unlikely cases that there are multiple methods with the same signature
8235 // but different vtable indexes, keep an array with indexes of the previous
8236 // methods with the same signature (walked as singly-linked lists).
8237 uint8_t* raw_super_vtable;
8238 size_t super_vtable_length;
8239 if (is_super_abstract) {
8240 DCHECK(!super_class->ShouldHaveEmbeddedVTable());
8241 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
8242 DCHECK(super_vtable != nullptr);
8243 raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
8244 mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
8245 super_vtable_length = super_vtable->GetLength();
8246 } else {
8247 DCHECK(super_class->ShouldHaveEmbeddedVTable());
8248 raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
8249 mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
8250 super_vtable_length = super_class->GetEmbeddedVTableLength();
8251 }
8252 VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
8253 static constexpr double kMinLoadFactor = 0.3;
8254 static constexpr double kMaxLoadFactor = 0.5;
8255 static constexpr size_t kMaxStackBufferSize = 256;
8256 const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8257 const size_t super_vtable_buffer_size = super_vtable_length * 3;
8258 const size_t bit_vector_size = BitVector::BitsToWords(num_virtual_methods);
8259 const size_t total_size =
8260 declared_virtuals_buffer_size + super_vtable_buffer_size + bit_vector_size;
8261
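// All three buffer sizes above are counted in uint32_t words, so with
// `kMaxStackBufferSize` = 256 the `alloca()` below is capped at 1 KiB of stack;
// anything larger falls back to the arena allocator.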
8262 uint32_t* declared_virtuals_buffer_ptr = (total_size <= kMaxStackBufferSize)
8263 ? reinterpret_cast<uint32_t*>(alloca(total_size * sizeof(uint32_t)))
8264 : allocator_.AllocArray<uint32_t>(total_size);
8265 uint32_t* bit_vector_buffer_ptr = declared_virtuals_buffer_ptr + declared_virtuals_buffer_size;
8266
8267 DeclaredVirtualSignatureSet declared_virtual_signatures(
8268 kMinLoadFactor,
8269 kMaxLoadFactor,
8270 DeclaredVirtualSignatureHash(klass),
8271 DeclaredVirtualSignatureEqual(klass),
8272 declared_virtuals_buffer_ptr,
8273 declared_virtuals_buffer_size,
8274 allocator_.Adapter());
8275
8276 ArrayRef<uint32_t> same_signature_vtable_lists;
8277 const bool is_proxy_class = klass->IsProxyClass();
8278 size_t vtable_length = super_vtable_length;
8279
8280 // Record which declared methods are overriding a super method.
8281 BitVector initialized_methods(/* expandable= */ false,
8282 Allocator::GetNoopAllocator(),
8283 bit_vector_size,
8284 bit_vector_buffer_ptr);
8285
8286 // Note: our sets hash on the method name, and therefore we pay a high
8287 // performance price when a class has many overloads.
8288 //
8289 // We populate a set of declared signatures instead of signatures from the
8290 // super vtable (which is only lazy populated in case of interface overriding,
8291 // see below). This makes sure that we pay the performance price only on that
8292 // class, and not on its subclasses (except in the case of interface overriding, see below).
8293 for (size_t i = 0; i != num_virtual_methods; ++i) {
8294 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8295 DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8296 ArtMethod* signature_method = UNLIKELY(is_proxy_class)
8297 ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
8298 : virtual_method;
8299 size_t hash = ComputeMethodHash(signature_method);
8300 declared_virtual_signatures.PutWithHash(i, hash);
8301 }
8302
8303 // Loop through each super vtable method and see if it is overridden by a method we added to
8304 // the hash table.
8305 for (size_t j = 0; j < super_vtable_length; ++j) {
8306 // Search the hash table to see if we are overridden by any method.
8307 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(j);
8308 if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
8309 super_method->GetAccessFlags())) {
8310 // Continue on to the next method since this one is package private and cannot be overridden.
8311 // Before Android 4.1, the package-private method super_method might have been incorrectly
8312 // overridden.
8313 continue;
8314 }
8315 size_t hash = (j < mirror::Object::kVTableLength)
8316 ? class_linker_->object_virtual_method_hashes_[j]
8317 : ComputeMethodHash(super_method);
8318 auto it = declared_virtual_signatures.FindWithHash(super_method, hash);
8319 if (it == declared_virtual_signatures.end()) {
8320 continue;
8321 }
8322 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it, kPointerSize);
8323 if (super_method->IsFinal()) {
8324 sants.reset();
8325 ThrowLinkageError(klass, "Method %s overrides final method in class %s",
8326 virtual_method->PrettyMethod().c_str(),
8327 super_method->GetDeclaringClassDescriptor());
8328 return 0u;
8329 }
8330 if (initialized_methods.IsBitSet(*it)) {
8331 // The method is overriding more than one method.
8332 // We record that information in a linked list to later set the method in the vtable
8333 // locations that are not the method index.
8334 if (same_signature_vtable_lists.empty()) {
8335 same_signature_vtable_lists = ArrayRef<uint32_t>(
8336 allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
8337 std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
8338 same_signature_vtable_lists_ = same_signature_vtable_lists;
8339 }
8340 same_signature_vtable_lists[j] = virtual_method->GetMethodIndexDuringLinking();
8341 } else {
8342 initialized_methods.SetBit(*it);
8343 }
8344
8345 // We arbitrarily set the method index to the largest matching vtable index. This is
8346 // also what the iteration over the `same_signature_vtable_lists_` expects.
8347 virtual_method->SetMethodIndex(j);
8348 }
8349
8350 // Add the non-overridden methods at the end.
8351 for (size_t i = 0; i < num_virtual_methods; ++i) {
8352 if (!initialized_methods.IsBitSet(i)) {
8353 ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8354 local_method->SetMethodIndex(vtable_length);
8355 vtable_length++;
8356 }
8357 }
8358
8359 // A lazily constructed super vtable set, which we only populate in the less
8360 // common situation of a superclass implementing a method declared in an
8361 // interface this class inherits.
8362 // We still try to allocate the set on the stack as using the arena will have
8363 // a larger cost.
8364 uint32_t* super_vtable_buffer_ptr = bit_vector_buffer_ptr + bit_vector_size;
8365 VTableSignatureSet super_vtable_signatures(
8366 kMinLoadFactor,
8367 kMaxLoadFactor,
8368 VTableSignatureHash(super_vtable_accessor),
8369 VTableSignatureEqual(super_vtable_accessor),
8370 super_vtable_buffer_ptr,
8371 super_vtable_buffer_size,
8372 allocator_.Adapter());
8373
8374 // Assign vtable indexes for interface methods in new interfaces and store them
8375 // in implementation method arrays. These shall be replaced by actual method
8376 // pointers later. We do not need to do this for superclass interfaces as we can
8377 // get these vtable indexes from implementation methods in superclass iftable.
8378 // Record data for copied methods which shall be referenced by the vtable.
8379 const size_t ifcount = iftable->Count();
8380 ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
8381 const size_t super_ifcount = super_iftable->Count();
8382 for (size_t i = ifcount; i != super_ifcount; ) {
8383 --i;
8384 DCHECK_LT(i, ifcount);
8385 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8386 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8387 size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
8388 for (size_t j = 0; j != num_methods; ++j) {
8389 ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8390 size_t hash = ComputeMethodHash(interface_method);
8391 ArtMethod* vtable_method = nullptr;
8392 auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8393 if (it1 != declared_virtual_signatures.end()) {
8394 ArtMethod* found_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8395 // For interface overriding, we only look at public methods.
8396 if (found_method->IsPublic()) {
8397 vtable_method = found_method;
8398 }
8399 } else {
8400 // This situation should be rare (a superclass implements a method
8401 // declared in an interface this class is inheriting). Only in this case
8402 // do we lazily populate the super_vtable_signatures.
8403 if (super_vtable_signatures.empty()) {
8404 for (size_t k = 0; k < super_vtable_length; ++k) {
8405 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(k);
8406 if (!super_method->IsPublic()) {
8407 // For interface overriding, we only look at public methods.
8408 continue;
8409 }
8410 size_t super_hash = (k < mirror::Object::kVTableLength)
8411 ? class_linker_->object_virtual_method_hashes_[k]
8412 : ComputeMethodHash(super_method);
8413 auto [it, inserted] = super_vtable_signatures.InsertWithHash(k, super_hash);
8414 DCHECK(inserted || super_vtable_accessor.GetVTableEntry(*it) == super_method);
8415 }
8416 }
8417 auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
8418 if (it2 != super_vtable_signatures.end()) {
8419 vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
8420 }
8421 }
8422
8423 uint32_t vtable_index = vtable_length;
8424 if (vtable_method != nullptr) {
8425 vtable_index = vtable_method->GetMethodIndexDuringLinking();
8426 if (!vtable_method->IsOverridableByDefaultMethod()) {
8427 method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
8428 continue;
8429 }
8430 }
8431
8432 auto [it, inserted] = copied_method_records_.InsertWithHash(
8433 CopiedMethodRecord(interface_method, vtable_index), hash);
8434 if (vtable_method != nullptr) {
8435 DCHECK_EQ(vtable_index, it->GetMethodIndex());
8436 } else if (inserted) {
8437 DCHECK_EQ(vtable_index, it->GetMethodIndex());
8438 DCHECK_EQ(vtable_index, vtable_length);
8439 ++vtable_length;
8440 } else {
8441 vtable_index = it->GetMethodIndex();
8442 }
8443 method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
8444 if (inserted) {
8445 it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
8446 : CopiedMethodRecord::State::kDefaultSingle);
8447 } else {
8448 it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
8449 }
8450 }
8451 }
8452 // Finalize copied method records and check if we can reuse some methods from superclass vtable.
8453 size_t num_new_copied_methods = copied_method_records_.size();
8454 for (CopiedMethodRecord& record : copied_method_records_) {
8455 uint32_t vtable_index = record.GetMethodIndex();
8456 if (vtable_index < super_vtable_length) {
8457 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
8458 DCHECK(super_method->IsOverridableByDefaultMethod());
8459 record.FinalizeState(
8460 super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
8461 if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
8462 --num_new_copied_methods;
8463 }
8464 }
8465 }
8466 num_new_copied_methods_ = num_new_copied_methods;
8467
8468 if (UNLIKELY(!IsUint<16>(vtable_length))) {
8469 sants.reset();
8470 ThrowClassFormatError(klass, "Too many methods defined on class: %zu", vtable_length);
8471 return 0u;
8472 }
8473
8474 return vtable_length;
8475 }
8476
8477 template <PointerSize kPointerSize>
8478 bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
8479 ObjPtr<mirror::Class> klass,
8480 size_t num_virtual_methods,
8481 ObjPtr<mirror::IfTable> iftable) {
8482 DCHECK(klass->IsInterface());
8483 DCHECK(klass->HasSuperClass());
8484 DCHECK(klass->GetSuperClass()->IsObjectClass());
8485 DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);
8486
8487 // There should be no thread suspension unless we want to throw an exception.
8488 // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
8489 std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8490
8491 // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
8492 // from superinterfaces, so we can filter out matching superinterface methods.
8493 static constexpr double kMinLoadFactor = 0.3;
8494 static constexpr double kMaxLoadFactor = 0.5;
8495 static constexpr size_t kMaxStackBufferSize = 256;
8496 const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8497 uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBufferSize)
8498 ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
8499 : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
8500 DeclaredVirtualSignatureSet declared_virtual_signatures(
8501 kMinLoadFactor,
8502 kMaxLoadFactor,
8503 DeclaredVirtualSignatureHash(klass),
8504 DeclaredVirtualSignatureEqual(klass),
8505 declared_virtuals_buffer_ptr,
8506 declared_virtuals_buffer_size,
8507 allocator_.Adapter());
8508 for (size_t i = 0; i != num_virtual_methods; ++i) {
8509 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8510 DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8511 size_t hash = ComputeMethodHash(virtual_method);
8512 declared_virtual_signatures.PutWithHash(i, hash);
8513 }
8514
8515 // We do not create miranda methods for interface classes, so we do not need to track
8516 // non-default (abstract) interface methods. The downside is that we cannot use the
8517 // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
8518 // we do not fill method arrays for interfaces, the method search actually has to
8519 // compare signatures instead of searching for the implementing method.
8520 const size_t ifcount = iftable->Count();
8521 size_t new_method_index = num_virtual_methods;
8522 for (size_t i = ifcount; i != 0u; ) {
8523 --i;
8524 DCHECK_LT(i, ifcount);
8525 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8526 if (!iface->HasDefaultMethods()) {
8527 continue; // No default methods to process.
8528 }
8529 size_t num_methods = iface->NumDeclaredVirtualMethods();
8530 for (size_t j = 0; j != num_methods; ++j) {
8531 ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8532 if (!interface_method->IsDefault()) {
8533 continue; // Do not process this non-default method.
8534 }
8535 size_t hash = ComputeMethodHash(interface_method);
8536 auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8537 if (it1 != declared_virtual_signatures.end()) {
8538 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8539 if (!virtual_method->IsAbstract() && !virtual_method->IsPublic()) {
8540 sants.reset();
8541 ThrowIllegalAccessErrorForImplementingMethod(klass, virtual_method, interface_method);
8542 return false;
8543 }
8544 continue; // This default method is masked by a method declared in this interface.
8545 }
8546
8547 CopiedMethodRecord new_record(interface_method, new_method_index);
8548 auto it = copied_method_records_.FindWithHash(new_record, hash);
8549 if (it == copied_method_records_.end()) {
8550 // Pretend that there is another default method and try to update the state.
8551 // If the `interface_method` is not masked, the state shall change to
8552 // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
8553 new_record.SetState(CopiedMethodRecord::State::kDefault);
8554 new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8555 if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
8556 // Insert the new record with the state `kDefault`.
8557 new_record.SetState(CopiedMethodRecord::State::kDefault);
8558 copied_method_records_.PutWithHash(new_record, hash);
8559 DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
8560 ++new_method_index;
8561 }
8562 } else {
8563 it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8564 }
8565 }
8566 }
8567
8568 // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
8569 // We do not copy normal default methods to subinterfaces, instead we find the
8570 // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
8571 size_t num_new_copied_methods = copied_method_records_.size();
8572 for (CopiedMethodRecord& record : copied_method_records_) {
8573 if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
8574 DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
8575 record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
8576 --num_new_copied_methods;
8577 }
8578 }
8579 num_new_copied_methods_ = num_new_copied_methods;
8580
8581 return true;
8582 }
8583
8584
8585 template <PointerSize kPointerSize>
8586 FLATTEN
8587 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
8588 Thread* self,
8589 Handle<mirror::Class> klass,
8590 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8591 bool* out_new_conflict,
8592 ArtMethod** out_imt) {
8593 const size_t num_virtual_methods = klass->NumVirtualMethods();
8594 if (klass->IsInterface()) {
8595 // No vtable.
8596 if (!IsUint<16>(num_virtual_methods)) {
8597 ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
8598 return false;
8599 }
8600 // Assign each method an interface table index and set the default flag.
8601 bool has_defaults = false;
8602 for (size_t i = 0; i < num_virtual_methods; ++i) {
8603 ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8604 m->SetMethodIndex(i);
8605 uint32_t access_flags = m->GetAccessFlags();
8606 DCHECK(!ArtMethod::IsDefault(access_flags));
8607 DCHECK_EQ(!ArtMethod::IsAbstract(access_flags), ArtMethod::IsInvokable(access_flags));
8608 if (ArtMethod::IsInvokable(access_flags)) {
8609 // If the dex file does not support default methods, throw ClassFormatError.
8610 // This check is necessary to protect from odd cases, such as native default
8611 // methods, that the dex file verifier permits for old dex file versions. b/157170505
8612 // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
8613 // currently running CTS tests for default methods with dex file version 035 which
8614 // does not support default methods. So, we limit this to native methods. b/157718952
8615 if (ArtMethod::IsNative(access_flags)) {
8616 DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
8617 ThrowClassFormatError(klass.Get(),
8618 "Dex file does not support default method '%s'",
8619 m->PrettyMethod().c_str());
8620 return false;
8621 }
8622 if (!ArtMethod::IsPublic(access_flags)) {
8623 // The verifier should have caught the non-public method for dex version 37.
8624 // Just warn and skip it, since this predates default methods, so we don't
8625 // really need to care that it has code.
8626 LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
8627 << "This will be a fatal error in subsequent versions of Android. "
8628 << "Continuing anyway.";
8629 }
8630 m->SetAccessFlags(access_flags | kAccDefault);
8631 has_defaults = true;
8632 }
8633 }
8634 // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
8635 // during initialization. This is a performance optimization. We could simply traverse the
8636 // virtual_methods_ array again during initialization.
8637 if (has_defaults) {
8638 klass->SetHasDefaultMethods();
8639 }
8640 ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
8641 self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
8642 if (UNLIKELY(iftable == nullptr)) {
8643 self->AssertPendingException();
8644 return false;
8645 }
8646 size_t ifcount = iftable->Count();
8647 bool have_super_with_defaults = false;
8648 for (size_t i = 0; i != ifcount; ++i) {
8649 if (iftable->GetInterface(i)->HasDefaultMethods()) {
8650 have_super_with_defaults = true;
8651 break;
8652 }
8653 }
8654 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8655 if (have_super_with_defaults) {
8656 if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
8657 self->AssertPendingException();
8658 return false;
8659 }
8660 if (num_new_copied_methods_ != 0u) {
8661 // Re-check the number of methods.
8662 size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
8663 if (!IsUint<16>(final_num_virtual_methods)) {
8664 ThrowClassFormatError(
8665 klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
8666 return false;
8667 }
8668 ReallocMethods(klass.Get());
8669 }
8670 }
8671 klass->SetIfTable(iftable);
8672 if (kIsDebugBuild) {
8673 // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
8674 ClobberOldMethods(old_methods, klass->GetMethodsPtr());
8675 }
8676 return true;
8677 } else if (LIKELY(klass->HasSuperClass())) {
8678 // We set up the interface lookup table now because we need it to determine if we need
8679 // to update any vtable entries with new default method implementations.
8680 StackHandleScope<3> hs(self);
8681 MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
8682 ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
8683 : SetupInterfaceLookupTable(
8684 self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
8685 if (UNLIKELY(iftable == nullptr)) {
8686 self->AssertPendingException();
8687 return false;
8688 }
8689
8690 // Copy the IMT from superclass if present and needed. Update with new methods later.
8691 Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
8692 bool is_klass_abstract = klass->IsAbstract();
8693 bool is_super_abstract = super_class->IsAbstract();
8694 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8695 DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
8696 if (!is_klass_abstract && !is_super_abstract) {
8697 ImTable* super_imt = super_class->GetImt(kPointerSize);
8698 for (size_t i = 0; i < ImTable::kSize; ++i) {
8699 out_imt[i] = super_imt->Get(i, kPointerSize);
8700 }
8701 }
8702
8703 // If there are no new virtual methods and no new interfaces, we can simply reuse
8704 // the vtable from superclass. We may need to make a copy if it's embedded.
8705 const size_t super_vtable_length = super_class->GetVTableLength();
8706 if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
8707 DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
8708 if (is_super_abstract) {
8709 DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
8710 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
8711 CHECK(super_vtable != nullptr) << super_class->PrettyClass();
8712 klass->SetVTable(super_vtable);
8713 // No IMT in the super class, we need to reconstruct it from the iftable.
8714 if (!is_klass_abstract && iftable->Count() != 0) {
8715 class_linker_->FillIMTFromIfTable(iftable.Get(),
8716 runtime_->GetImtUnimplementedMethod(),
8717 runtime_->GetImtConflictMethod(),
8718 klass.Get(),
8719 /*create_conflict_tables=*/false,
8720 /*ignore_copied_methods=*/false,
8721 out_new_conflict,
8722 out_imt);
8723 }
8724 } else {
8725 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
8726 if (UNLIKELY(vtable == nullptr)) {
8727 self->AssertPendingOOMException();
8728 return false;
8729 }
8730 for (size_t i = 0; i < super_vtable_length; i++) {
8731 vtable->SetElementPtrSize(
8732 i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
8733 }
8734 klass->SetVTable(vtable);
8735 // The IMT was already copied from superclass if `klass` is not abstract.
8736 }
8737 klass->SetIfTable(iftable.Get());
8738 return true;
8739 }
8740
8741 // Allocate method arrays, so that we can link interface methods without thread suspension,
8742 // otherwise GC could miss visiting newly allocated copied methods.
8743 // TODO: Do not allocate copied methods during linking, store only records about what
8744 // we need to allocate and allocate it at the end. Start with superclass iftable and
8745 // perform copy-on-write when needed to facilitate maximum memory sharing.
8746 if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
8747 self->AssertPendingOOMException();
8748 return false;
8749 }
8750
8751 size_t final_vtable_size = AssignVTableIndexes(
8752 klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
8753 if (final_vtable_size == 0u) {
8754 self->AssertPendingException();
8755 return false;
8756 }
8757 DCHECK(IsUint<16>(final_vtable_size));
8758
8759 // Allocate the new vtable.
8760 Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
8761 if (UNLIKELY(vtable == nullptr)) {
8762 self->AssertPendingOOMException();
8763 return false;
8764 }
8765
8766 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8767 if (num_new_copied_methods_ != 0u) {
8768 ReallocMethods(klass.Get());
8769 }
8770
8771 // Store new virtual methods in the new vtable.
8772 ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
8773 for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
8774 uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
8775 vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
8776 if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
8777 // We may override more than one method according to JLS, see b/211854716.
8778 while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
8779 DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
8780 vtable_index = same_signature_vtable_lists[vtable_index];
8781 vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
8782 if (kIsDebugBuild) {
8783 ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
8784 DCHECK(klass->CanAccessMember(current_method->GetDeclaringClass(),
8785 current_method->GetAccessFlags()));
8786 DCHECK(!current_method->IsFinal());
8787 }
8788 }
8789 }
8790 }
8791
8792 // For non-overridden vtable slots, copy a method from `super_class`.
8793 for (size_t j = 0; j != super_vtable_length; ++j) {
8794 if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
8795 ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
8796 vtable->SetElementPtrSize(j, super_method, kPointerSize);
8797 }
8798 }
8799
8800 // Update the `iftable` (and IMT) with finalized virtual methods.
8801 if (!FinalizeIfTable(klass,
8802 iftable,
8803 vtable,
8804 is_klass_abstract,
8805 is_super_abstract,
8806 out_new_conflict,
8807 out_imt)) {
8808 self->AssertPendingOOMException();
8809 return false;
8810 }
8811
8812 klass->SetVTable(vtable.Get());
8813 klass->SetIfTable(iftable.Get());
8814 if (kIsDebugBuild) {
8815 CheckVTable(self, klass, kPointerSize);
8816 ClobberOldMethods(old_methods, klass->GetMethodsPtr());
8817 }
8818 return true;
8819 } else {
8820 return LinkJavaLangObjectMethods(self, klass);
8821 }
8822 }
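
// Illustrative sketch, not from the original sources: how the superclass branch
// above fills a vtable for a simple hierarchy (hypothetical names; the
// java.lang.Object entries are omitted for brevity).
//
//   class Base { void f() {} void g() {} }      // vtable: [f@0, g@1]
//   class Derived extends Base { void g() {} }  // vtable: [Base.f@0, Derived.g@1]
//
// `AssignVTableIndexes()` assigns Derived.g the index of the overridden Base.g;
// slot 0 stays null in the new vtable and is then filled by the
// "non-overridden vtable slots" loop that copies from the superclass. The
// `same_signature_vtable_lists` chains only matter when several superclass
// entries share a signature, in which case one declared method may override
// all of them (see b/211854716).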
8823
8824 template <PointerSize kPointerSize>
8825 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
8826 Thread* self,
8827 Handle<mirror::Class> klass) {
8828 DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
8829 DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
8830 static_assert(IsUint<16>(mirror::Object::kVTableLength));
8831 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
8832 if (UNLIKELY(vtable == nullptr)) {
8833 self->AssertPendingOOMException();
8834 return false;
8835 }
8836 for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
8837 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8838 vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
8839 virtual_method->SetMethodIndex(i);
8840 }
8841 klass->SetVTable(vtable);
8842 InitializeObjectVirtualMethodHashes(
8843 klass.Get(),
8844 kPointerSize,
8845 ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
8846 // The interface table is already allocated but there are no interface methods to link.
8847 DCHECK(klass->GetIfTable() != nullptr);
8848 DCHECK_EQ(klass->GetIfTableCount(), 0);
8849 return true;
8850 }
8851
8852 // Populate the class vtable and itable. Compute return type indices.
8853 bool ClassLinker::LinkMethods(Thread* self,
8854 Handle<mirror::Class> klass,
8855 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8856 bool* out_new_conflict,
8857 ArtMethod** out_imt) {
8858 self->AllowThreadSuspension();
8859 // Link virtual methods then interface methods.
8860 Runtime* const runtime = Runtime::Current();
8861 if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
8862 LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
8863 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
8864 } else {
8865 constexpr PointerSize kOtherPointerSize =
8866 (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
8867 LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
8868 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
8869 }
8870 }
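
// The dispatch above turns the runtime's image pointer size into a
// compile-time constant. A minimal stand-alone sketch of the same pattern
// (hypothetical names):
//
//   template <PointerSize kPointerSize>
//   bool LinkImpl();  // kPointerSize is usable in templates and constexpr code.
//
//   bool Link() {
//     return (GetImagePointerSize() == PointerSize::k64) ? LinkImpl<PointerSize::k64>()
//                                                        : LinkImpl<PointerSize::k32>();
//   }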
8871
8872 class ClassLinker::LinkFieldsHelper {
8873 public:
8874 static bool LinkFields(ClassLinker* class_linker,
8875 Thread* self,
8876 Handle<mirror::Class> klass,
8877 bool is_static,
8878 size_t* class_size)
8879 REQUIRES_SHARED(Locks::mutator_lock_);
8880
8881 private:
8882 enum class FieldTypeOrder : uint16_t;
8883 class FieldGaps;
8884
8885 struct FieldTypeOrderAndIndex {
8886 FieldTypeOrder field_type_order;
8887 uint16_t field_index;
8888 };
8889
8890 static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);
8891
8892 template <size_t kSize>
8893 static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
8894 REQUIRES_SHARED(Locks::mutator_lock_);
8895 };
8896
8897 // We use the following order of field types for assigning offsets.
8898 // Some fields can be shuffled forward to fill gaps, see
8899 // `ClassLinker::LinkFieldsHelper::LinkFields()`.
8900 enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
8901 kReference = 0u,
8902 kLong,
8903 kDouble,
8904 kInt,
8905 kFloat,
8906 kChar,
8907 kShort,
8908 kBoolean,
8909 kByte,
8910
8911 kLast64BitType = kDouble,
8912 kLast32BitType = kFloat,
8913 kLast16BitType = kShort,
8914 };
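
// For example (hypothetical fields), this ordering sorts
//   { Object a; byte b; int c; char d; Object e; long f; }
// into: the references a and e first, then f (kLong), c (kInt), d (kChar)
// and finally b (kByte).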
8915
8916 ALWAYS_INLINE
8917 ClassLinker::LinkFieldsHelper::FieldTypeOrder
8918 ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
8919 switch (first_char) {
8920 case 'J':
8921 return FieldTypeOrder::kLong;
8922 case 'D':
8923 return FieldTypeOrder::kDouble;
8924 case 'I':
8925 return FieldTypeOrder::kInt;
8926 case 'F':
8927 return FieldTypeOrder::kFloat;
8928 case 'C':
8929 return FieldTypeOrder::kChar;
8930 case 'S':
8931 return FieldTypeOrder::kShort;
8932 case 'Z':
8933 return FieldTypeOrder::kBoolean;
8934 case 'B':
8935 return FieldTypeOrder::kByte;
8936 default:
8937 DCHECK(first_char == 'L' || first_char == '[') << first_char;
8938 return FieldTypeOrder::kReference;
8939 }
8940 }
8941
8942 // Gaps where we can insert fields in object layout.
8943 class ClassLinker::LinkFieldsHelper::FieldGaps {
8944 public:
8945 template <uint32_t kSize>
8946 ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
8947 static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
8948 if (!IsAligned<kSize>(field_offset.Uint32Value())) {
8949 uint32_t gap_start = field_offset.Uint32Value();
8950 field_offset = MemberOffset(RoundUp(gap_start, kSize));
8951 AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
8952 }
8953 return field_offset;
8954 }
8955
8956 template <uint32_t kSize>
8957 bool HasGap() const {
8958 static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
8959 return (kSize == 1u && gap1_offset_ != kNoOffset) ||
8960 (kSize <= 2u && gap2_offset_ != kNoOffset) ||
8961 gap4_offset_ != kNoOffset;
8962 }
8963
8964 template <uint32_t kSize>
8965 MemberOffset ReleaseGap() {
8966 static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
8967 uint32_t result;
8968 if (kSize == 1u && gap1_offset_ != kNoOffset) {
8969 DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
8970 DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
8971 result = gap1_offset_;
8972 gap1_offset_ = kNoOffset;
8973 } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
8974 DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
8975 result = gap2_offset_;
8976 gap2_offset_ = kNoOffset;
8977 if (kSize < 2u) {
8978 AddGaps<1u>(result + kSize, result + 2u);
8979 }
8980 } else {
8981 DCHECK_NE(gap4_offset_, kNoOffset);
8982 result = gap4_offset_;
8983 gap4_offset_ = kNoOffset;
8984 if (kSize < 4u) {
8985 AddGaps<kSize | 2u>(result + kSize, result + 4u);
8986 }
8987 }
8988 return MemberOffset(result);
8989 }
8990
8991 private:
8992 template <uint32_t kGapsToCheck>
8993 void AddGaps(uint32_t gap_start, uint32_t gap_end) {
8994 if ((kGapsToCheck & 1u) != 0u) {
8995 DCHECK_LT(gap_start, gap_end);
8996 DCHECK_ALIGNED(gap_end, 2u);
8997 if ((gap_start & 1u) != 0u) {
8998 DCHECK_EQ(gap1_offset_, kNoOffset);
8999 gap1_offset_ = gap_start;
9000 gap_start += 1u;
9001 if (kGapsToCheck == 1u || gap_start == gap_end) {
9002 DCHECK_EQ(gap_start, gap_end);
9003 return;
9004 }
9005 }
9006 }
9007
9008 if ((kGapsToCheck & 2u) != 0u) {
9009 DCHECK_LT(gap_start, gap_end);
9010 DCHECK_ALIGNED(gap_start, 2u);
9011 DCHECK_ALIGNED(gap_end, 4u);
9012 if ((gap_start & 2u) != 0u) {
9013 DCHECK_EQ(gap2_offset_, kNoOffset);
9014 gap2_offset_ = gap_start;
9015 gap_start += 2u;
9016 if (kGapsToCheck <= 3u || gap_start == gap_end) {
9017 DCHECK_EQ(gap_start, gap_end);
9018 return;
9019 }
9020 }
9021 }
9022
9023 if ((kGapsToCheck & 4u) != 0u) {
9024 DCHECK_LT(gap_start, gap_end);
9025 DCHECK_ALIGNED(gap_start, 4u);
9026 DCHECK_ALIGNED(gap_end, 8u);
9027 DCHECK_EQ(gap_start + 4u, gap_end);
9028 DCHECK_EQ(gap4_offset_, kNoOffset);
9029 gap4_offset_ = gap_start;
9030 return;
9031 }
9032
9033 DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
9034 << " after checking " << kGapsToCheck;
9035 }
9036
9037 static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);
9038
9039 uint32_t gap4_offset_ = kNoOffset;
9040 uint32_t gap2_offset_ = kNoOffset;
9041 uint32_t gap1_offset_ = kNoOffset;
9042 };
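
// Illustrative walkthrough, not from the original sources: suppose the next
// free offset is 10 and the next field to place is a long.
// `AlignFieldOffset<8u>(MemberOffset(10))` rounds up to 16 and records the
// skipped bytes via AddGaps<7u>(10, 16): a 2-byte gap at offset 10
// (gap2_offset_) and a 4-byte gap at offset 12 (gap4_offset_). A later int
// then takes offset 12 through ReleaseGap<4u>() and a later short takes
// offset 10 through ReleaseGap<2u>(), so no padding is wasted.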
9043
9044 template <size_t kSize>
9045 ALWAYS_INLINE
9046 MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
9047 MemberOffset field_offset) {
9048 DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
9049 DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
9050 field->SetOffset(field_offset);
9051 return MemberOffset(field_offset.Uint32Value() + kSize);
9052 }
9053
9054 bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
9055 Thread* self,
9056 Handle<mirror::Class> klass,
9057 bool is_static,
9058 size_t* class_size) {
9059 self->AllowThreadSuspension();
9060 const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
9061 LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
9062 klass->GetIFieldsPtr();
9063
9064 // Initialize field_offset
9065 MemberOffset field_offset(0);
9066 if (is_static) {
9067 field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
9068 class_linker->GetImagePointerSize());
9069 } else {
9070 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9071 if (super_class != nullptr) {
9072 CHECK(super_class->IsResolved())
9073 << klass->PrettyClass() << " " << super_class->PrettyClass();
9074 field_offset = MemberOffset(super_class->GetObjectSize());
9075 }
9076 }
9077
9078 CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();
9079
9080 // We want a relatively stable order so that adding new fields
9081 // minimizes disruption of the C++ mirror counterparts such as Class and Method.
9082 //
9083 // The overall sort order is:
9084 // 1) All object reference fields, sorted alphabetically.
9085 // 2) All java long (64-bit) integer fields, sorted alphabetically.
9086 // 3) All java double (64-bit) floating point fields, sorted alphabetically.
9087 // 4) All java int (32-bit) integer fields, sorted alphabetically.
9088 // 5) All java float (32-bit) floating point fields, sorted alphabetically.
9089 // 6) All java char (16-bit) integer fields, sorted alphabetically.
9090 // 7) All java short (16-bit) integer fields, sorted alphabetically.
9091 // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
9092 // 9) All java byte (8-bit) integer fields, sorted alphabetically.
9093 //
9094 // (References are first to increase the chance of reference visiting
9095 // being able to take a fast path using a bitmap of references at the
9096 // start of the object, see `Class::reference_instance_offsets_`.)
9097 //
9098 // Once the fields are sorted in this order we will attempt to fill any gaps
9099 // that might be present in the memory layout of the structure.
9100 // Note that we shall not fill gaps between the superclass fields.
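//
// Worked example (hypothetical class, 4-byte heap references, instance
// fields starting at offset 8 behind the object header):
//
//   class X { Object r; long j; short s; byte b; }
//
// gives r@8; aligning to 8 for j leaves a 4-byte gap at 12 and places j@16;
// the gap is then consumed by the smaller fields, s@12 and b@14, for an
// object size of 24 with a single spare byte at offset 15.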
9101
9102 // Collect fields and their "type order index" (see numbered points above).
9103 const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
9104 "Using plain ArtField references");
9105 constexpr size_t kStackBufferEntries = 64; // Avoid allocations for a small number of fields.
9106 FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
9107 std::vector<FieldTypeOrderAndIndex> heap_buffer;
9108 ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
9109 if (num_fields <= kStackBufferEntries) {
9110 sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
9111 } else {
9112 heap_buffer.resize(num_fields);
9113 sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
9114 }
9115 size_t num_reference_fields = 0;
9116 size_t primitive_fields_start = num_fields;
9117 DCHECK_LE(num_fields, 1u << 16);
9118 for (size_t i = 0; i != num_fields; ++i) {
9119 ArtField* field = &fields->At(i);
9120 const char* descriptor = field->GetTypeDescriptor();
9121 FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
9122 uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
9123 // Insert references to the start, other fields to the end.
9124 DCHECK_LT(num_reference_fields, primitive_fields_start);
9125 if (field_type_order == FieldTypeOrder::kReference) {
9126 sorted_fields[num_reference_fields] = { field_type_order, field_index };
9127 ++num_reference_fields;
9128 } else {
9129 --primitive_fields_start;
9130 sorted_fields[primitive_fields_start] = { field_type_order, field_index };
9131 }
9132 }
9133 DCHECK_EQ(num_reference_fields, primitive_fields_start);
9134
9135 // Reference fields are already sorted by field index (and dex field index).
9136 DCHECK(std::is_sorted(
9137 sorted_fields.begin(),
9138 sorted_fields.begin() + num_reference_fields,
9139 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
9140 ArtField* lhs_field = &fields->At(lhs.field_index);
9141 ArtField* rhs_field = &fields->At(rhs.field_index);
9142 CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9143 CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9144 CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
9145 lhs.field_index < rhs.field_index);
9146 return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
9147 }));
9148 // Primitive fields were stored in reverse order of their field index (and dex field index).
9149 DCHECK(std::is_sorted(
9150 sorted_fields.begin() + primitive_fields_start,
9151 sorted_fields.end(),
9152 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
9153 ArtField* lhs_field = &fields->At(lhs.field_index);
9154 ArtField* rhs_field = &fields->At(rhs.field_index);
9155 CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9156 CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9157 CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
9158 lhs.field_index > rhs.field_index);
9159 return lhs.field_index > rhs.field_index;
9160 }));
9161 // Sort the primitive fields by the field type order, then field index.
9162 std::sort(sorted_fields.begin() + primitive_fields_start,
9163 sorted_fields.end(),
9164 [](const auto& lhs, const auto& rhs) {
9165 if (lhs.field_type_order != rhs.field_type_order) {
9166 return lhs.field_type_order < rhs.field_type_order;
9167 } else {
9168 return lhs.field_index < rhs.field_index;
9169 }
9170 });
9171 // Primitive fields are now sorted by field size (descending), then type, then field index.
9172 DCHECK(std::is_sorted(
9173 sorted_fields.begin() + primitive_fields_start,
9174 sorted_fields.end(),
9175 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
9176 ArtField* lhs_field = &fields->At(lhs.field_index);
9177 ArtField* rhs_field = &fields->At(rhs.field_index);
9178 Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
9179 CHECK_NE(lhs_type, Primitive::kPrimNot);
9180 Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
9181 CHECK_NE(rhs_type, Primitive::kPrimNot);
9182 if (lhs_type != rhs_type) {
9183 size_t lhs_size = Primitive::ComponentSize(lhs_type);
9184 size_t rhs_size = Primitive::ComponentSize(rhs_type);
9185 return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
9186 } else {
9187 return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
9188 }
9189 }));
9190
9191 // Process reference fields.
9192 FieldGaps field_gaps;
9193 size_t index = 0u;
9194 if (num_reference_fields != 0u) {
9195 constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
9196 field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
9197 for (; index != num_reference_fields; ++index) {
9198 ArtField* field = &fields->At(sorted_fields[index].field_index);
9199 field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
9200 }
9201 }
9202 // Process 64-bit fields.
9203 if (index != num_fields &&
9204 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
9205 field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
9206 while (index != num_fields &&
9207 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
9208 ArtField* field = &fields->At(sorted_fields[index].field_index);
9209 field_offset = AssignFieldOffset<8u>(field, field_offset);
9210 ++index;
9211 }
9212 }
9213 // Process 32-bit fields.
9214 if (index != num_fields &&
9215 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
9216 field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
9217 if (field_gaps.HasGap<4u>()) {
9218 ArtField* field = &fields->At(sorted_fields[index].field_index);
9219 AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>()); // Ignore return value.
9220 ++index;
9221 DCHECK(!field_gaps.HasGap<4u>()); // There can be only one gap for a 32-bit field.
9222 }
9223 while (index != num_fields &&
9224 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
9225 ArtField* field = &fields->At(sorted_fields[index].field_index);
9226 field_offset = AssignFieldOffset<4u>(field, field_offset);
9227 ++index;
9228 }
9229 }
9230 // Process 16-bit fields.
9231 if (index != num_fields &&
9232 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
9233 field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
9234 while (index != num_fields &&
9235 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
9236 field_gaps.HasGap<2u>()) {
9237 ArtField* field = &fields->At(sorted_fields[index].field_index);
9238 AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>()); // Ignore return value.
9239 ++index;
9240 }
9241 while (index != num_fields &&
9242 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
9243 ArtField* field = &fields->At(sorted_fields[index].field_index);
9244 field_offset = AssignFieldOffset<2u>(field, field_offset);
9245 ++index;
9246 }
9247 }
9248 // Process 8-bit fields.
9249 for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
9250 ArtField* field = &fields->At(sorted_fields[index].field_index);
9251 AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>()); // Ignore return value.
9252 }
9253 for (; index != num_fields; ++index) {
9254 ArtField* field = &fields->At(sorted_fields[index].field_index);
9255 field_offset = AssignFieldOffset<1u>(field, field_offset);
9256 }
9257
9258 self->EndAssertNoThreadSuspension(old_no_suspend_cause);
9259
9260 // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
9261 DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
9262 if (!is_static &&
9263 UNLIKELY(!class_linker->init_done_) &&
9264 klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
9265 // We know there are no non-reference fields in the Reference classes, and we know
9266 // that 'referent' is alphabetically last, so this is easy...
9267 CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
9268 CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
9269 << klass->PrettyClass();
9270 --num_reference_fields;
9271 }
9272
9273 size_t size = field_offset.Uint32Value();
9274 // Update klass
9275 if (is_static) {
9276 klass->SetNumReferenceStaticFields(num_reference_fields);
9277 *class_size = size;
9278 } else {
9279 klass->SetNumReferenceInstanceFields(num_reference_fields);
9280 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9281 if (num_reference_fields == 0 || super_class == nullptr) {
9282 // java.lang.Object has one reference field, klass, but we ignore it since we always visit the class.
9283 // super_class is null iff the class is java.lang.Object.
9284 if (super_class == nullptr ||
9285 (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
9286 klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
9287 }
9288 }
9289 if (kIsDebugBuild) {
9290 DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
9291 size_t total_reference_instance_fields = 0;
9292 ObjPtr<mirror::Class> cur_super = klass.Get();
9293 while (cur_super != nullptr) {
9294 total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
9295 cur_super = cur_super->GetSuperClass();
9296 }
9297 if (super_class == nullptr) {
9298 CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
9299 } else {
9300 // Check that there are at least num_reference_fields reference fields in addition to Object's.
9301 CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
9302 << klass->PrettyClass();
9303 }
9304 }
9305 if (!klass->IsVariableSize()) {
9306 std::string temp;
9307 DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
9308 size_t previous_size = klass->GetObjectSize();
9309 if (previous_size != 0) {
9310 // Make sure that we didn't originally have an incorrect size.
9311 CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
9312 }
9313 klass->SetObjectSize(size);
9314 }
9315 }
9316
9317 if (kIsDebugBuild) {
9318 // Make sure that the fields array is ordered by name but all reference
9319 // offsets are at the beginning as far as alignment allows.
9320 MemberOffset start_ref_offset = is_static
9321 ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
9322 : klass->GetFirstReferenceInstanceFieldOffset();
9323 MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
9324 num_reference_fields *
9325 sizeof(mirror::HeapReference<mirror::Object>));
9326 MemberOffset current_ref_offset = start_ref_offset;
9327 for (size_t i = 0; i < num_fields; i++) {
9328 ArtField* field = &fields->At(i);
9329 VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
9330 << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
9331 << " offset=" << field->GetOffsetDuringLinking();
9332 if (i != 0) {
9333 ArtField* const prev_field = &fields->At(i - 1);
9334 // NOTE: The field names can be the same. This is not possible in the Java language
9335 // but it's valid dex bytecode; for example, ProGuard can generate such bytecode.
9336 DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
9337 }
9338 Primitive::Type type = field->GetTypeAsPrimitiveType();
9339 bool is_primitive = type != Primitive::kPrimNot;
9340 if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
9341 strcmp("referent", field->GetName()) == 0) {
9342 is_primitive = true; // We lied above, so we have to expect a lie here.
9343 }
9344 MemberOffset offset = field->GetOffsetDuringLinking();
9345 if (is_primitive) {
9346 if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
9347 // Shuffled before references.
9348 size_t type_size = Primitive::ComponentSize(type);
9349 CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
9350 CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
9351 CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
9352 CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
9353 }
9354 } else {
9355 CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
9356 current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
9357 sizeof(mirror::HeapReference<mirror::Object>));
9358 }
9359 }
9360 CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
9361 }
9362 return true;
9363 }
9364
9365 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9366 CHECK(klass != nullptr);
9367 return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9368 }
9369
9370 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9371 CHECK(klass != nullptr);
9372 return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9373 }
9374
9375 enum class RecordElementType : uint8_t {
9376 kNames = 0,
9377 kTypes = 1,
9378 kSignatures = 2,
9379 kAnnotationVisibilities = 3,
9380 kAnnotations = 4
9381 };
9382
9383 static const char* kRecordElementNames[] = {"componentNames",
9384 "componentTypes",
9385 "componentSignatures",
9386 "componentAnnotationVisibilities",
9387 "componentAnnotations"};
9388
9389 class RecordAnnotationVisitor final : public annotations::AnnotationVisitor {
9390 public:
9391 RecordAnnotationVisitor() {}
9392
9393 bool ValidateCounts() {
9394 if (is_error_) {
9395 return false;
9396 }
9397
9398 // Verify the counts.
9399 bool annotation_element_exists =
9400 (signatures_count_ != UINT32_MAX) || (annotations_count_ != UINT32_MAX);
9401 if (count_ >= 2) {
9402 SetErrorMsg("Record class can't have more than one @Record Annotation");
9403 } else if (names_count_ == UINT32_MAX) {
9404 SetErrorMsg("componentNames element is required");
9405 } else if (types_count_ == UINT32_MAX) {
9406 SetErrorMsg("componentTypes element is required");
9407 } else if (names_count_ != types_count_) { // Every component must have a name and a type.
9408 SetErrorMsg(StringPrintf(
9409 "componentTypes is expected to have %i types, but has %i", names_count_, types_count_));
9410 // The other 3 elements are optional, but are expected to have the same count if they exist.
9411 } else if (signatures_count_ != UINT32_MAX && signatures_count_ != names_count_) {
9412 SetErrorMsg(StringPrintf("componentSignatures size is %i, but is expected to be %i",
9413 signatures_count_,
9414 names_count_));
9415 } else if (annotation_element_exists && visibilities_count_ != names_count_) {
9416 SetErrorMsg(
9417 StringPrintf("componentAnnotationVisibilities size is %i, but is expected to be %i",
9418 visibilities_count_,
9419 names_count_));
9420 } else if (annotation_element_exists && annotations_count_ != names_count_) {
9421 SetErrorMsg(StringPrintf("componentAnnotations size is %i, but is expected to be %i",
9422 annotations_count_,
9423 names_count_));
9424 }
9425
9426 return !is_error_;
9427 }
9428
9429 const std::string& GetErrorMsg() { return error_msg_; }
9430
9431 bool IsRecordAnnotationFound() { return count_ != 0; }
9432
9433 annotations::VisitorStatus VisitAnnotation(const char* descriptor, uint8_t visibility) override {
9434 if (is_error_) {
9435 return annotations::VisitorStatus::kVisitBreak;
9436 }
9437
9438 if (visibility != DexFile::kDexVisibilitySystem) {
9439 return annotations::VisitorStatus::kVisitNext;
9440 }
9441
9442 if (strcmp(descriptor, "Ldalvik/annotation/Record;") != 0) {
9443 return annotations::VisitorStatus::kVisitNext;
9444 }
9445
9446 count_ += 1;
9447 if (count_ >= 2) {
9448 return annotations::VisitorStatus::kVisitBreak;
9449 }
9450 return annotations::VisitorStatus::kVisitInner;
9451 }
9452
9453 annotations::VisitorStatus VisitAnnotationElement(const char* element_name,
9454 uint8_t type,
9455 [[maybe_unused]] const JValue& value) override {
9456 if (is_error_) {
9457 return annotations::VisitorStatus::kVisitBreak;
9458 }
9459
9460 RecordElementType visiting_type;
9461 uint32_t* element_count;
9462 if (strcmp(element_name, "componentNames") == 0) {
9463 visiting_type = RecordElementType::kNames;
9464 element_count = &names_count_;
9465 } else if (strcmp(element_name, "componentTypes") == 0) {
9466 visiting_type = RecordElementType::kTypes;
9467 element_count = &types_count_;
9468 } else if (strcmp(element_name, "componentSignatures") == 0) {
9469 visiting_type = RecordElementType::kSignatures;
9470 element_count = &signatures_count_;
9471 } else if (strcmp(element_name, "componentAnnotationVisibilities") == 0) {
9472 visiting_type = RecordElementType::kAnnotationVisibilities;
9473 element_count = &visibilities_count_;
9474 } else if (strcmp(element_name, "componentAnnotations") == 0) {
9475 visiting_type = RecordElementType::kAnnotations;
9476 element_count = &annotations_count_;
9477 } else {
9478 // Ignore this element; it may be one introduced by a future version of ART.
9479 return annotations::VisitorStatus::kVisitNext;
9480 }
9481
9482 if ((*element_count) != UINT32_MAX) {
9483 SetErrorMsg(StringPrintf("Two %s annotation elements are found but only one is expected",
9484 kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9485 return annotations::VisitorStatus::kVisitBreak;
9486 }
9487
9488 if (type != DexFile::kDexAnnotationArray) {
9489 SetErrorMsg(StringPrintf("%s must be array type", element_name));
9490 return annotations::VisitorStatus::kVisitBreak;
9491 }
9492
9493 *element_count = 0;
9494 visiting_type_ = visiting_type;
9495 return annotations::VisitorStatus::kVisitInner;
9496 }
9497
9498 annotations::VisitorStatus VisitArrayElement(uint8_t depth,
9499 uint32_t index,
9500 uint8_t type,
9501 [[maybe_unused]] const JValue& value) override {
9502 if (is_error_) {
9503 return annotations::VisitorStatus::kVisitBreak;
9504 }
9505 switch (visiting_type_) {
9506 case RecordElementType::kNames: {
9507 if (depth == 0) {
9508 if (!ExpectedTypeOrError(
9509 type, DexFile::kDexAnnotationString, visiting_type_, index, depth)) {
9510 return annotations::VisitorStatus::kVisitBreak;
9511 }
9512 names_count_++;
9513 return annotations::VisitorStatus::kVisitNext;
9514 }
9515 break;
9516 }
9517 case RecordElementType::kTypes: {
9518 if (depth == 0) {
9519 if (!ExpectedTypeOrError(
9520 type, DexFile::kDexAnnotationType, visiting_type_, index, depth)) {
9521 return annotations::VisitorStatus::kVisitBreak;
9522 }
9523 types_count_++;
9524 return annotations::VisitorStatus::kVisitNext;
9525 }
9526 break;
9527 }
9528 case RecordElementType::kSignatures: {
9529 if (depth == 0) {
9530 // kDexAnnotationNull implies no generic signature for the component.
9531 if (type != DexFile::kDexAnnotationNull &&
9532 !ExpectedTypeOrError(
9533 type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9534 return annotations::VisitorStatus::kVisitBreak;
9535 }
9536 signatures_count_++;
9537 return annotations::VisitorStatus::kVisitNext;
9538 }
9539 break;
9540 }
9541 case RecordElementType::kAnnotationVisibilities: {
9542 if (depth == 0) {
9543 if (!ExpectedTypeOrError(
9544 type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9545 return annotations::VisitorStatus::kVisitBreak;
9546 }
9547 visibilities_count_++;
9548 return annotations::VisitorStatus::kVisitInner;
9549 } else if (depth == 1) {
9550 if (!ExpectedTypeOrError(
9551 type, DexFile::kDexAnnotationByte, visiting_type_, index, depth)) {
9552 return annotations::VisitorStatus::kVisitBreak;
9553 }
9554 return annotations::VisitorStatus::kVisitNext;
9555 }
9556 break;
9557 }
9558 case RecordElementType::kAnnotations: {
9559 if (depth == 0) {
9560 if (!ExpectedTypeOrError(
9561 type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9562 return annotations::VisitorStatus::kVisitBreak;
9563 }
9564 annotations_count_++;
9565 return annotations::VisitorStatus::kVisitInner;
9566 } else if (depth == 1) {
9567 if (!ExpectedTypeOrError(
9568 type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9569 return annotations::VisitorStatus::kVisitBreak;
9570 }
9571 return annotations::VisitorStatus::kVisitNext;
9572 }
9573 break;
9574 }
9575 }
9576
9577 // This should never happen as long as each deeper level is handled above whenever
9578 // kVisitInner is returned.
9579 DCHECK(false) << StringPrintf("Unexpected depth %i for element %s",
9580 depth,
9581 kRecordElementNames[static_cast<uint8_t>(visiting_type_)]);
9582 return annotations::VisitorStatus::kVisitBreak;
9583 }
9584
9585 private:
9586 bool is_error_ = false;
9587 uint32_t count_ = 0;
9588 uint32_t names_count_ = UINT32_MAX;
9589 uint32_t types_count_ = UINT32_MAX;
9590 uint32_t signatures_count_ = UINT32_MAX;
9591 uint32_t visibilities_count_ = UINT32_MAX;
9592 uint32_t annotations_count_ = UINT32_MAX;
9593 std::string error_msg_;
9594 RecordElementType visiting_type_;
9595
9596 inline bool ExpectedTypeOrError(uint8_t type,
9597 uint8_t expected,
9598 RecordElementType visiting_type,
9599 uint32_t index,
9600 uint8_t depth) {
9601 if (type == expected) {
9602 return true;
9603 }
9604
9605 SetErrorMsg(StringPrintf(
9606 "Expect 0x%02x type but got 0x%02x at the index %i and depth %i for the element %s",
9607 expected,
9608 type,
9609 index,
9610 depth,
9611 kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9612 return false;
9613 }
9614
9615 void SetErrorMsg(const std::string& msg) {
9616 is_error_ = true;
9617 error_msg_ = msg;
9618 }
9619
9620 DISALLOW_COPY_AND_ASSIGN(RecordAnnotationVisitor);
9621 };
9622
9623 /**
9624 * Verify that klass is a well-formed record class and, if so, set kClassFlagRecord.
9625 * If the verification fails, a Java exception is left pending.
9626 *
9627 * @return false if verification fails. If klass isn't a record class,
9628 * this always returns true.
9629 */
9630 bool ClassLinker::VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::Class> super) {
9631 CHECK(klass != nullptr);
9632 // First, we check the conditions specified in java.lang.Class#isRecord().
9633 // If any of the conditions isn't fulfilled, it's not a record class and
9634 // ART should treat it as a normal class even if it's inherited from java.lang.Record.
9635 if (!klass->IsFinal()) {
9636 return true;
9637 }
9638
9639 if (super == nullptr) {
9640 return true;
9641 }
9642
9643 // Compare the descriptor directly while this ClassLinker is initializing,
9644 // before WellKnownClasses has been initialized.
9645 if (WellKnownClasses::java_lang_Record == nullptr) {
9646 if (!super->DescriptorEquals("Ljava/lang/Record;")) {
9647 return true;
9648 }
9649 } else {
9650 ObjPtr<mirror::Class> java_lang_Record =
9651 WellKnownClasses::ToClass(WellKnownClasses::java_lang_Record);
9652 if (super.Ptr() != java_lang_Record.Ptr()) {
9653 return true;
9654 }
9655 }
9656
9657 // Verify @dalvik.annotation.Record
9658 // The annotation has mandatory elements componentNames[] and componentTypes[] of the same size.
9659 // componentSignatures[], componentAnnotationVisibilities[][] and componentAnnotations[][] are
9660 // optional, but must have the same size if present.
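//
// Illustrative shape of what is being validated (hypothetical record):
//
//   record Point(int x, int y) {}  // javac/d8 emit, roughly:
//   @dalvik.annotation.Record(componentNames = {"x", "y"},
//                             componentTypes = {int.class, int.class})
//
// Here names_count_ == types_count_ == 2 and the optional elements stay at
// UINT32_MAX, so ValidateCounts() succeeds.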
9661 RecordAnnotationVisitor visitor;
9662 annotations::VisitClassAnnotations(klass, &visitor);
9663 if (!visitor.IsRecordAnnotationFound()) {
9664 return true;
9665 }
9666
9667 if (!visitor.ValidateCounts()) {
9668 ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
9669 return false;
9670 }
9671
9672 // Set kClassFlagRecord.
9673 klass->SetRecordClass();
9674 return true;
9675 }
9676
9677 // Set the bitmap of reference instance field offsets.
9678 void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
9679 uint32_t reference_offsets = 0;
9680 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9681 // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
9682 if (super_class != nullptr) {
9683 reference_offsets = super_class->GetReferenceInstanceOffsets();
9684 // Compute reference offsets unless our superclass overflowed.
9685 if (reference_offsets != mirror::Class::kClassWalkSuper) {
9686 size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
9687 if (num_reference_fields != 0u) {
9688 // All of the fields that contain object references are guaranteed be grouped in memory
9689 // starting at an appropriately aligned address after super class object data.
9690 uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
9691 sizeof(mirror::HeapReference<mirror::Object>));
9692 uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
9693 sizeof(mirror::HeapReference<mirror::Object>);
9694 if (start_bit + num_reference_fields > 32) {
9695 reference_offsets = mirror::Class::kClassWalkSuper;
9696 } else {
9697 reference_offsets |= (0xffffffffu << start_bit) &
9698 (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
9699 }
9700 }
9701 }
9702 }
9703 klass->SetReferenceInstanceOffsets(reference_offsets);
9704 }
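
// Worked example with illustrative numbers: with an 8-byte object header, a
// superclass object size of 16 and 3 new reference fields (4-byte heap
// references), start_offset = 16, start_bit = (16 - 8) / 4 = 2, and the mask
// expression sets bits 2..4: reference_offsets |= 0b11100. If start_bit plus
// the field count would exceed 32 bits, the bitmap is abandoned and the GC
// falls back to walking the superclass chain (kClassWalkSuper).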
9705
9706 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9707 ObjPtr<mirror::DexCache> dex_cache) {
9708 StackHandleScope<1> hs(Thread::Current());
9709 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
9710 return DoResolveString(string_idx, h_dex_cache);
9711 }
9712
9713 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9714 Handle<mirror::DexCache> dex_cache) {
9715 const DexFile& dex_file = *dex_cache->GetDexFile();
9716 uint32_t utf16_length;
9717 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
9718 ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
9719 if (string != nullptr) {
9720 dex_cache->SetResolvedString(string_idx, string);
9721 }
9722 return string;
9723 }
9724
9725 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
9726 ObjPtr<mirror::DexCache> dex_cache) {
9727 DCHECK(dex_cache != nullptr);
9728 const DexFile& dex_file = *dex_cache->GetDexFile();
9729 uint32_t utf16_length;
9730 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
9731 ObjPtr<mirror::String> string =
9732 intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
9733 if (string != nullptr) {
9734 dex_cache->SetResolvedString(string_idx, string);
9735 }
9736 return string;
9737 }
9738
9739 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9740 ObjPtr<mirror::Class> referrer) {
9741 return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
9742 }
9743
9744 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9745 ObjPtr<mirror::DexCache> dex_cache,
9746 ObjPtr<mirror::ClassLoader> class_loader) {
9747 DCHECK(dex_cache->GetClassLoader() == class_loader);
9748 const DexFile& dex_file = *dex_cache->GetDexFile();
9749 const char* descriptor = dex_file.StringByTypeIdx(type_idx);
9750 ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
9751 if (type != nullptr) {
9752 DCHECK(type->IsResolved());
9753 dex_cache->SetResolvedType(type_idx, type);
9754 }
9755 return type;
9756 }
9757
9758 ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
9759 ObjPtr<mirror::ClassLoader> class_loader) {
9760 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
9761 ObjPtr<mirror::Class> type = nullptr;
9762 if (descriptor[1] == '\0') {
9763 // Only the descriptors of primitive types should be 1 character long; this also avoids
9764 // class lookup for primitive classes that aren't backed by dex files.
9765 type = LookupPrimitiveClass(descriptor[0]);
9766 } else {
9767 Thread* const self = Thread::Current();
9768 DCHECK(self != nullptr);
9769 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
9770 // Find the class in the loaded classes table.
9771 type = LookupClass(self, descriptor, hash, class_loader);
9772 }
9773 return (type != nullptr && type->IsResolved()) ? type : nullptr;
9774 }
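
// For example: the descriptor "I" is length 1 and resolves through
// LookupPrimitiveClass() to int, while "Ljava/lang/String;" and "[I" go
// through the hash-based class table lookup.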
9775
9776 template <typename RefType>
9777 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
9778 StackHandleScope<2> hs(Thread::Current());
9779 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9780 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9781 return DoResolveType(type_idx, dex_cache, class_loader);
9782 }
9783
9784 // Instantiate the above.
9785 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9786 ArtField* referrer);
9787 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9788 ArtMethod* referrer);
9789 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9790 ObjPtr<mirror::Class> referrer);
9791
9792 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9793 Handle<mirror::DexCache> dex_cache,
9794 Handle<mirror::ClassLoader> class_loader) {
9795 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
9796 Thread* self = Thread::Current();
9797 const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
9798 ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
9799 if (resolved != nullptr) {
9800 // TODO: we used to throw here if resolved's class loader was not the
9801 // boot class loader. This was to permit different classes with the
9802 // same name to be loaded simultaneously by different loaders.
9803 dex_cache->SetResolvedType(type_idx, resolved);
9804 } else {
9805 CHECK(self->IsExceptionPending())
9806 << "Expected pending exception for failed resolution of: " << descriptor;
9807 // Convert a ClassNotFoundException to a NoClassDefFoundError.
9808 StackHandleScope<1> hs(self);
9809 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
9810 if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
9811 DCHECK(resolved == nullptr); // No Handle needed to preserve resolved.
9812 self->ClearException();
9813 ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
9814 self->GetException()->SetCause(cause.Get());
9815 }
9816 }
9817 DCHECK((resolved == nullptr) || resolved->IsResolved())
9818 << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
9819 return resolved;
9820 }
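
// Illustrative result of the conversion above (hypothetical class name):
//
//   java.lang.NoClassDefFoundError: Failed resolution of: LFoo;
//       Caused by: java.lang.ClassNotFoundException: Foo
//
// so bytecode-triggered resolution surfaces the JLS-mandated error while the
// original ClassNotFoundException is preserved as the cause.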
9821
9822 ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
9823 ObjPtr<mirror::DexCache> dex_cache,
9824 ObjPtr<mirror::ClassLoader> class_loader,
9825 uint32_t method_idx) {
9826 DCHECK(dex_cache->GetClassLoader() == class_loader);
9827 // Search for the method using dex_cache and method_idx. The Class::Find*Method()
9828 // functions can optimize the search if the dex_cache is the same as the DexCache
9829 // of the class, with fall-back to name and signature search otherwise.
9830 ArtMethod* resolved = nullptr;
9831 if (klass->IsInterface()) {
9832 resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
9833 } else {
9834 resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9835 }
9836 DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
9837 if (resolved != nullptr &&
9838 // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
9839 // access, as we'll be looking if the method can be accessed through an
9840 // interface.
9841 hiddenapi::ShouldDenyAccessToMember(resolved,
9842 hiddenapi::AccessContext(class_loader, dex_cache),
9843 hiddenapi::AccessMethod::kNone)) {
9844 // The resolved method that we have found cannot be accessed due to
9845 // hiddenapi (typically it is declared up the hierarchy and is not an SDK
9846 // method). Try to find an interface method from the implemented interfaces which is
9847 // part of the SDK.
9848 ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
9849 if (itf_method == nullptr) {
9850 // No interface method. Call ShouldDenyAccessToMember again but this time
9851 // with AccessMethod::kLinking to ensure that an appropriate warning is
9852 // logged.
9853 hiddenapi::ShouldDenyAccessToMember(resolved,
9854 hiddenapi::AccessContext(class_loader, dex_cache),
9855 hiddenapi::AccessMethod::kLinking);
9856 resolved = nullptr;
9857 } else {
9858 // We found an interface method that is accessible, continue with the resolved method.
9859 }
9860 }
9861 if (resolved != nullptr) {
9862 // In the case of JVMTI, the dex file gets verified before being registered, so first
9863 // check if it's registered before checking class tables.
9864 const DexFile& dex_file = *dex_cache->GetDexFile();
9865 DCHECK_IMPLIES(
9866 IsDexFileRegistered(Thread::Current(), dex_file),
9867 FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
9868 << "DexFile referrer: " << dex_file.GetLocation()
9869 << " ClassLoader: " << DescribeLoaders(class_loader, "");
9870 // Be a good citizen and update the dex cache to speed subsequent calls.
9871 dex_cache->SetResolvedMethod(method_idx, resolved);
9872 // Disable the following invariant check as the verifier breaks it. b/73760543
9873 // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
9874 // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
9875 // << "Method: " << resolved->PrettyMethod() << ", "
9876 // << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
9877 // << "DexFile referrer: " << dex_file.GetLocation();
9878 }
9879 return resolved;
9880 }
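
// Illustrative sketch of the hiddenapi fall-back above (hypothetical names):
//
//   class Impl extends HiddenBase implements PublicItf { ... }
//   // HiddenBase.foo() is a non-SDK method; PublicItf.foo() is in the SDK.
//
// Resolving Impl.foo() first finds HiddenBase.foo(), which hiddenapi would
// deny; FindAccessibleInterfaceMethod() then locates PublicItf.foo(), so the
// resolution proceeds with the originally found method instead of failing.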
9881
9882 // Returns true if `method` is either null or hidden.
9883 // Does not print any warnings if it is hidden.
9884 static bool CheckNoSuchMethod(ArtMethod* method,
9885 ObjPtr<mirror::DexCache> dex_cache,
9886 ObjPtr<mirror::ClassLoader> class_loader)
9887 REQUIRES_SHARED(Locks::mutator_lock_) {
9888 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
9889 return method == nullptr ||
9890 hiddenapi::ShouldDenyAccessToMember(method,
9891 hiddenapi::AccessContext(class_loader, dex_cache),
9892 hiddenapi::AccessMethod::kNone); // no warnings
9893 }
9894
9895 ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
9896 ObjPtr<mirror::DexCache> dex_cache,
9897 ObjPtr<mirror::ClassLoader> class_loader,
9898 uint32_t method_idx) {
9899 DCHECK(dex_cache->GetClassLoader() == class_loader);
9900 if (klass->IsInterface()) {
9901 ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9902 return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
9903 } else {
9904 // If there was an interface method with the same signature, we would have
9905 // found it in the "copied" methods. Only DCHECK that the interface method
9906 // really does not exist.
9907 if (kIsDebugBuild) {
9908 ArtMethod* method =
9909 klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
9910 CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
9911 (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
9912 }
9913 return nullptr;
9914 }
9915 }
9916
9917 ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
9918 Handle<mirror::DexCache> dex_cache,
9919 Handle<mirror::ClassLoader> class_loader) {
9920 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
9921 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
9922 Thread::PoisonObjectPointersIfDebug();
9923 if (resolved != nullptr) {
9924 DCHECK(!resolved->IsRuntimeMethod());
9925 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9926 return resolved;
9927 }
9928 // Not found in the dex cache; resolve the declaring class first.
9929 const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
9930 ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
9931 if (klass == nullptr) {
9932 Thread::Current()->AssertPendingException();
9933 return nullptr;
9934 }
9935 return FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
9936 }
9937
9938 ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
9939 ObjPtr<mirror::DexCache> dex_cache,
9940 ObjPtr<mirror::ClassLoader> class_loader,
9941 bool is_static) {
9942 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
9943 const DexFile& dex_file = *dex_cache->GetDexFile();
9944 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9945 ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
9946 if (klass == nullptr) {
9947 klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
9948 }
9949 if (klass == nullptr) {
9950 // The class has not been resolved yet, so the field is also unresolved.
9951 return nullptr;
9952 }
9953 DCHECK(klass->IsResolved());
9954
9955 return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
9956 }
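// A note on the convention used by these entry points: Lookup* functions
// return null when the referenced type or member has not been resolved yet
// and never raise exceptions, whereas Resolve* functions perform the
// resolution and leave an appropriate pending exception (e.g. the
// NoSuchFieldError below) on failure.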
9957
9958 ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
9959 Handle<mirror::DexCache> dex_cache,
9960 Handle<mirror::ClassLoader> class_loader) {
9961 DCHECK(dex_cache != nullptr);
9962 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
9963 ArtField* resolved = dex_cache->GetResolvedField(field_idx);
9964 Thread::PoisonObjectPointersIfDebug();
9965 if (resolved != nullptr) {
9966 return resolved;
9967 }
9968 const DexFile& dex_file = *dex_cache->GetDexFile();
9969 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9970 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
9971 if (klass == nullptr) {
9972 DCHECK(Thread::Current()->IsExceptionPending());
9973 return nullptr;
9974 }
9975
9976 resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
9977 if (resolved == nullptr) {
9978 const char* name = dex_file.GetFieldName(field_id);
9979 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9980 ThrowNoSuchFieldError("", klass, type, name);
9981 }
9982 return resolved;
9983 }
9984
9985 ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9986 ObjPtr<mirror::DexCache> dex_cache,
9987 ObjPtr<mirror::ClassLoader> class_loader,
9988 uint32_t field_idx,
9989 bool is_static) {
9990 DCHECK(dex_cache->GetClassLoader() == class_loader);
9991 ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
9992 : klass->FindInstanceField(dex_cache, field_idx);
9993 if (resolved != nullptr &&
9994 hiddenapi::ShouldDenyAccessToMember(resolved,
9995 hiddenapi::AccessContext(class_loader, dex_cache),
9996 hiddenapi::AccessMethod::kLinking)) {
9997 resolved = nullptr;
9998 }
9999
10000 if (resolved != nullptr) {
10001 dex_cache->SetResolvedField(field_idx, resolved);
10002 }
10003
10004 return resolved;
10005 }
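// Unlike FindResolvedMethod() above, a hiddenapi denial for a field has no
// SDK-interface fallback: the check is performed directly with
// AccessMethod::kLinking (which logs the warning) and the field is simply
// treated as unresolved.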
10006
10007 ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
10008 ObjPtr<mirror::DexCache> dex_cache,
10009 ObjPtr<mirror::ClassLoader> class_loader,
10010 uint32_t field_idx) {
10011 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10012 ArtField* resolved = klass->FindField(dex_cache, field_idx);
10013
10014 if (resolved != nullptr &&
10015 hiddenapi::ShouldDenyAccessToMember(resolved,
10016 hiddenapi::AccessContext(class_loader, dex_cache),
10017 hiddenapi::AccessMethod::kLinking)) {
10018 resolved = nullptr;
10019 }
10020
10021 if (resolved != nullptr) {
10022 dex_cache->SetResolvedField(field_idx, resolved);
10023 }
10024
10025 return resolved;
10026 }
10027
10028 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
10029 Thread* self,
10030 dex::ProtoIndex proto_idx,
10031 Handle<mirror::DexCache> dex_cache,
10032 Handle<mirror::ClassLoader> class_loader) {
10033 DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
10034 DCHECK(dex_cache != nullptr);
10035 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10036
10037 ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
10038 if (resolved != nullptr) {
10039 return resolved;
10040 }
10041
10042 StackHandleScope<4> hs(self);
10043
10044 // First resolve the return type.
10045 const DexFile& dex_file = *dex_cache->GetDexFile();
10046 const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
10047 Handle<mirror::Class> return_type(hs.NewHandle(
10048 ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
10049 if (return_type == nullptr) {
10050 DCHECK(self->IsExceptionPending());
10051 return nullptr;
10052 }
10053
10054 // Then resolve the argument types.
10055 //
10056 // TODO: Is there a better way to figure out the number of method arguments
10057 // other than by looking at the shorty?
10058 const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;
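// For example (illustrative), a proto with shorty "VIL" describes a method
// taking (int, reference) and returning void: the first shorty character is
// the return type, so num_method_args == strlen("VIL") - 1 == 2.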
10059
10060 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
10061 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
10062 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
10063 if (method_params == nullptr) {
10064 DCHECK(self->IsExceptionPending());
10065 return nullptr;
10066 }
10067
10068 DexFileParameterIterator it(dex_file, proto_id);
10069 int32_t i = 0;
10070 MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
10071 for (; it.HasNext(); it.Next()) {
10072 const dex::TypeIndex type_idx = it.GetTypeIdx();
10073 param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
10074 if (param_class == nullptr) {
10075 DCHECK(self->IsExceptionPending());
10076 return nullptr;
10077 }
10078
10079 method_params->Set(i++, param_class.Get());
10080 }
10081
10082 DCHECK(!it.HasNext());
10083
10084 Handle<mirror::MethodType> type = hs.NewHandle(
10085 mirror::MethodType::Create(self, return_type, method_params));
10086 if (type != nullptr) {
10087 // Ensure all stores for the newly created MethodType are visible, before we attempt to place
10088 // it in the DexCache (b/224733324).
10089 std::atomic_thread_fence(std::memory_order_release);
10090 dex_cache->SetResolvedMethodType(proto_idx, type.Get());
10091 }
10092
10093 return type.Get();
10094 }
10095
10096 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
10097 dex::ProtoIndex proto_idx,
10098 ArtMethod* referrer) {
10099 StackHandleScope<2> hs(self);
10100 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
10101 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
10102 return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
10103 }
10104
10105 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
10106 Thread* self,
10107 const dex::MethodHandleItem& method_handle,
10108 ArtMethod* referrer) {
10109 DexFile::MethodHandleType handle_type =
10110 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
10111 mirror::MethodHandle::Kind kind;
10112 bool is_put;
10113 bool is_static;
10114 int32_t num_params;
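// The accessor kind determines the shape of the MethodType built further
// below; for a field of type T declared in class C (a sketch of the cases
// handled here):
//   static put:   (T)V    -> num_params = 1
//   static get:   ()T     -> num_params = 0
//   instance put: (C, T)V -> num_params = 2
//   instance get: (C)T    -> num_params = 1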
10115 switch (handle_type) {
10116 case DexFile::MethodHandleType::kStaticPut: {
10117 kind = mirror::MethodHandle::Kind::kStaticPut;
10118 is_put = true;
10119 is_static = true;
10120 num_params = 1;
10121 break;
10122 }
10123 case DexFile::MethodHandleType::kStaticGet: {
10124 kind = mirror::MethodHandle::Kind::kStaticGet;
10125 is_put = false;
10126 is_static = true;
10127 num_params = 0;
10128 break;
10129 }
10130 case DexFile::MethodHandleType::kInstancePut: {
10131 kind = mirror::MethodHandle::Kind::kInstancePut;
10132 is_put = true;
10133 is_static = false;
10134 num_params = 2;
10135 break;
10136 }
10137 case DexFile::MethodHandleType::kInstanceGet: {
10138 kind = mirror::MethodHandle::Kind::kInstanceGet;
10139 is_put = false;
10140 is_static = false;
10141 num_params = 1;
10142 break;
10143 }
10144 case DexFile::MethodHandleType::kInvokeStatic:
10145 case DexFile::MethodHandleType::kInvokeInstance:
10146 case DexFile::MethodHandleType::kInvokeConstructor:
10147 case DexFile::MethodHandleType::kInvokeDirect:
10148 case DexFile::MethodHandleType::kInvokeInterface:
10149 UNREACHABLE();
10150 }
10151
10152 ArtField* target_field =
10153 ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
10154 if (LIKELY(target_field != nullptr)) {
10155 ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
10156 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
10157 if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
10158 ThrowIllegalAccessErrorField(referring_class, target_field);
10159 return nullptr;
10160 }
10161 if (UNLIKELY(is_put && target_field->IsFinal())) {
10162 ThrowIllegalAccessErrorField(referring_class, target_field);
10163 return nullptr;
10164 }
10165 } else {
10166 DCHECK(Thread::Current()->IsExceptionPending());
10167 return nullptr;
10168 }
10169
10170 StackHandleScope<4> hs(self);
10171 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
10172 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
10173 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
10174 if (UNLIKELY(method_params == nullptr)) {
10175 DCHECK(self->IsExceptionPending());
10176 return nullptr;
10177 }
10178
10179 Handle<mirror::Class> constructor_class;
10180 Handle<mirror::Class> return_type;
10181 switch (handle_type) {
10182 case DexFile::MethodHandleType::kStaticPut: {
10183 method_params->Set(0, target_field->ResolveType());
10184 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
10185 break;
10186 }
10187 case DexFile::MethodHandleType::kStaticGet: {
10188 return_type = hs.NewHandle(target_field->ResolveType());
10189 break;
10190 }
10191 case DexFile::MethodHandleType::kInstancePut: {
10192 method_params->Set(0, target_field->GetDeclaringClass());
10193 method_params->Set(1, target_field->ResolveType());
10194 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
10195 break;
10196 }
10197 case DexFile::MethodHandleType::kInstanceGet: {
10198 method_params->Set(0, target_field->GetDeclaringClass());
10199 return_type = hs.NewHandle(target_field->ResolveType());
10200 break;
10201 }
10202 case DexFile::MethodHandleType::kInvokeStatic:
10203 case DexFile::MethodHandleType::kInvokeInstance:
10204 case DexFile::MethodHandleType::kInvokeConstructor:
10205 case DexFile::MethodHandleType::kInvokeDirect:
10206 case DexFile::MethodHandleType::kInvokeInterface:
10207 UNREACHABLE();
10208 }
10209
10210 for (int32_t i = 0; i < num_params; ++i) {
10211 if (UNLIKELY(method_params->Get(i) == nullptr)) {
10212 DCHECK(self->IsExceptionPending());
10213 return nullptr;
10214 }
10215 }
10216
10217 if (UNLIKELY(return_type.IsNull())) {
10218 DCHECK(self->IsExceptionPending());
10219 return nullptr;
10220 }
10221
10222 Handle<mirror::MethodType>
10223 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
10224 if (UNLIKELY(method_type.IsNull())) {
10225 DCHECK(self->IsExceptionPending());
10226 return nullptr;
10227 }
10228
10229 uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
10230 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
10231 }
10232
10233 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
10234 Thread* self,
10235 const dex::MethodHandleItem& method_handle,
10236 ArtMethod* referrer) {
10237 DexFile::MethodHandleType handle_type =
10238 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
10239 mirror::MethodHandle::Kind kind;
10240 uint32_t receiver_count = 0;
10241 ArtMethod* target_method = nullptr;
10242 switch (handle_type) {
10243 case DexFile::MethodHandleType::kStaticPut:
10244 case DexFile::MethodHandleType::kStaticGet:
10245 case DexFile::MethodHandleType::kInstancePut:
10246 case DexFile::MethodHandleType::kInstanceGet:
10247 UNREACHABLE();
10248 case DexFile::MethodHandleType::kInvokeStatic: {
10249 kind = mirror::MethodHandle::Kind::kInvokeStatic;
10250 receiver_count = 0;
10251 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
10252 method_handle.field_or_method_idx_,
10253 referrer,
10254 InvokeType::kStatic);
10255 break;
10256 }
10257 case DexFile::MethodHandleType::kInvokeInstance: {
10258 kind = mirror::MethodHandle::Kind::kInvokeVirtual;
10259 receiver_count = 1;
10260 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
10261 method_handle.field_or_method_idx_,
10262 referrer,
10263 InvokeType::kVirtual);
10264 break;
10265 }
10266 case DexFile::MethodHandleType::kInvokeConstructor: {
10267 // Constructors are currently implemented as a transform. They
10268 // are special cased later in this method.
10269 kind = mirror::MethodHandle::Kind::kInvokeTransform;
10270 receiver_count = 0;
10271 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
10272 method_handle.field_or_method_idx_,
10273 referrer,
10274 InvokeType::kDirect);
10275 break;
10276 }
10277 case DexFile::MethodHandleType::kInvokeDirect: {
10278 kind = mirror::MethodHandle::Kind::kInvokeDirect;
10279 receiver_count = 1;
10280 StackHandleScope<2> hs(self);
10281 // A constant method handle with type kInvokeDirect can refer to
10282 // a method that is private or to a method in a super class. To
10283 // disambiguate the two options, we resolve the method ignoring
10284 // the invocation type to determine if the method is private. We
10285 // then resolve again specifying the intended invocation type to
10286 // force the appropriate checks.
10287 target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
10288 hs.NewHandle(referrer->GetDexCache()),
10289 hs.NewHandle(referrer->GetClassLoader()));
10290 if (UNLIKELY(target_method == nullptr)) {
10291 break;
10292 }
10293
10294 if (target_method->IsPrivate()) {
10295 kind = mirror::MethodHandle::Kind::kInvokeDirect;
10296 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
10297 method_handle.field_or_method_idx_,
10298 referrer,
10299 InvokeType::kDirect);
10300 } else {
10301 kind = mirror::MethodHandle::Kind::kInvokeSuper;
10302 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
10303 method_handle.field_or_method_idx_,
10304 referrer,
10305 InvokeType::kSuper);
10306 if (UNLIKELY(target_method == nullptr)) {
10307 break;
10308 }
10309 // Find the method specified in the parent in referring class
10310 // so invoke-super invokes the method in the parent of the
10311 // referrer.
10312 target_method =
10313 referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
10314 kRuntimePointerSize);
10315 }
10316 break;
10317 }
10318 case DexFile::MethodHandleType::kInvokeInterface: {
10319 kind = mirror::MethodHandle::Kind::kInvokeInterface;
10320 receiver_count = 1;
10321 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
10322 method_handle.field_or_method_idx_,
10323 referrer,
10324 InvokeType::kInterface);
10325 break;
10326 }
10327 }
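// Illustrative example of the kInvokeDirect disambiguation above (class and
// method names hypothetical): a constant method handle naming a private
// Foo.secret()V resolves to kind kInvokeDirect, while one naming the
// inherited, non-private Base.close()V resolves to kind kInvokeSuper and is
// re-targeted to the implementation that Foo actually inherits.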
10328
10329 if (UNLIKELY(target_method == nullptr)) {
10330 DCHECK(Thread::Current()->IsExceptionPending());
10331 return nullptr;
10332 }
10333
10334 ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
10335 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
10336 uint32_t access_flags = target_method->GetAccessFlags();
10337 if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
10338 ThrowIllegalAccessErrorMethod(referring_class, target_method);
10339 return nullptr;
10340 }
10341
10342 // Calculate the number of parameters from the method shorty. We add the
10343 // receiver count (0 or 1) and deduct one for the return value.
10344 uint32_t shorty_length;
10345 target_method->GetShorty(&shorty_length);
10346 int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
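// For example (illustrative), a virtual method with shorty "ZL" (boolean
// return, one reference argument) and receiver_count == 1 yields
// num_params == 2: the receiver class followed by the argument type.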
10347
10348 StackHandleScope<5> hs(self);
10349 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
10350 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
10351 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
10352 if (method_params.Get() == nullptr) {
10353 DCHECK(self->IsExceptionPending());
10354 return nullptr;
10355 }
10356
10357 const DexFile* dex_file = referrer->GetDexFile();
10358 const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
10359 int32_t index = 0;
10360 if (receiver_count != 0) {
10361 // Insert receiver. Use the class identified in the method handle rather than the declaring
10362 // class of the resolved method which may be super class or default interface method
10363 // (b/115964401).
10364 ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
10365 // receiver_class should have been resolved when resolving the target method.
10366 DCHECK(receiver_class != nullptr);
10367 method_params->Set(index++, receiver_class);
10368 }
10369
10370 const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
10371 DexFileParameterIterator it(*dex_file, proto_id);
10372 while (it.HasNext()) {
10373 DCHECK_LT(index, num_params);
10374 const dex::TypeIndex type_idx = it.GetTypeIdx();
10375 ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
10376 if (klass == nullptr) {
10377 DCHECK(self->IsExceptionPending());
10378 return nullptr;
10379 }
10380 method_params->Set(index++, klass);
10381 it.Next();
10382 }
10383
10384 Handle<mirror::Class> return_type =
10385 hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
10386 if (UNLIKELY(return_type.IsNull())) {
10387 DCHECK(self->IsExceptionPending());
10388 return nullptr;
10389 }
10390
10391 Handle<mirror::MethodType>
10392 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
10393 if (UNLIKELY(method_type.IsNull())) {
10394 DCHECK(self->IsExceptionPending());
10395 return nullptr;
10396 }
10397
10398 if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
10399 Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
10400 Handle<mirror::MethodHandlesLookup> lookup =
10401 hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
10402 return lookup->FindConstructor(self, constructor_class, method_type);
10403 }
10404
10405 uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
10406 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
10407 }
10408
10409 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
10410 uint32_t method_handle_idx,
10411 ArtMethod* referrer)
10412 REQUIRES_SHARED(Locks::mutator_lock_) {
10413 const DexFile* const dex_file = referrer->GetDexFile();
10414 const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
10415 switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
10416 case DexFile::MethodHandleType::kStaticPut:
10417 case DexFile::MethodHandleType::kStaticGet:
10418 case DexFile::MethodHandleType::kInstancePut:
10419 case DexFile::MethodHandleType::kInstanceGet:
10420 return ResolveMethodHandleForField(self, method_handle, referrer);
10421 case DexFile::MethodHandleType::kInvokeStatic:
10422 case DexFile::MethodHandleType::kInvokeInstance:
10423 case DexFile::MethodHandleType::kInvokeConstructor:
10424 case DexFile::MethodHandleType::kInvokeDirect:
10425 case DexFile::MethodHandleType::kInvokeInterface:
10426 return ResolveMethodHandleForMethod(self, method_handle, referrer);
10427 }
10428 }
10429
10430 bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
10431 return (entry_point == GetQuickResolutionStub()) ||
10432 (quick_resolution_trampoline_ == entry_point);
10433 }
10434
10435 bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
10436 return (entry_point == GetQuickToInterpreterBridge()) ||
10437 (quick_to_interpreter_bridge_trampoline_ == entry_point);
10438 }
10439
10440 bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
10441 return (entry_point == GetQuickGenericJniStub()) ||
10442 (quick_generic_jni_trampoline_ == entry_point);
10443 }
10444
10445 bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
10446 return entry_point == GetJniDlsymLookupStub() ||
10447 (jni_dlsym_lookup_trampoline_ == entry_point);
10448 }
10449
10450 bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
10451 return entry_point == GetJniDlsymLookupCriticalStub() ||
10452 (jni_dlsym_lookup_critical_trampoline_ == entry_point);
10453 }
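// Note on the predicates above: each accepts both the stub linked into the
// runtime itself (GetQuick*Stub() / GetJniDlsymLookup*Stub()) and the
// corresponding trampoline recorded from the boot image oat header, since a
// method's entrypoint may hold either address.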
10454
10455 const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
10456 return GetQuickGenericJniStub();
10457 }
10458
10459 void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
10460 DCHECK(method->IsObsolete());
10461 // We cannot mess with the entrypoints of native methods because they are used to determine how
10462 // large the method's quick stack frame is. Without this information we cannot walk the stacks.
10463 if (!method->IsNative()) {
10464 method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
10465 }
10466 }
10467
10468 void ClassLinker::DumpForSigQuit(std::ostream& os) {
10469 ScopedObjectAccess soa(Thread::Current());
10470 ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
10471 os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
10472 << NumNonZygoteClasses() << "\n";
10473 ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
10474 os << "Dumping registered class loaders\n";
10475 size_t class_loader_index = 0;
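// Each live class loader is printed on one line; a hypothetical example of
// the format produced by the loop below:
//   #0 dalvik.system.PathClassLoader: [/data/app/base.apk], parent #1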
10476 for (const ClassLoaderData& class_loader : class_loaders_) {
10477 ObjPtr<mirror::ClassLoader> loader =
10478 ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
10479 if (loader != nullptr) {
10480 os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
10481 bool saw_one_dex_file = false;
10482 for (const auto& entry : dex_caches_) {
10483 const DexCacheData& dex_cache = entry.second;
10484 if (dex_cache.class_table == class_loader.class_table) {
10485 if (saw_one_dex_file) {
10486 os << ":";
10487 }
10488 saw_one_dex_file = true;
10489 os << entry.first->GetLocation();
10490 }
10491 }
10492 os << "]";
10493 bool found_parent = false;
10494 if (loader->GetParent() != nullptr) {
10495 size_t parent_index = 0;
10496 for (const ClassLoaderData& class_loader2 : class_loaders_) {
10497 ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
10498 soa.Self()->DecodeJObject(class_loader2.weak_root));
10499 if (loader2 == loader->GetParent()) {
10500 os << ", parent #" << parent_index;
10501 found_parent = true;
10502 break;
10503 }
10504 parent_index++;
10505 }
10506 if (!found_parent) {
10507 os << ", unregistered parent of type "
10508 << loader->GetParent()->GetClass()->PrettyDescriptor();
10509 }
10510 } else {
10511 os << ", no parent";
10512 }
10513 os << "\n";
10514 }
10515 }
10516 os << "Done dumping class loaders\n";
10517 Runtime* runtime = Runtime::Current();
10518 os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
10519 << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
10520 }
10521
10522 class CountClassesVisitor : public ClassLoaderVisitor {
10523 public:
10524 CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
10525
10526 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
10527 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
10528 ClassTable* const class_table = class_loader->GetClassTable();
10529 if (class_table != nullptr) {
10530 num_zygote_classes += class_table->NumZygoteClasses(class_loader);
10531 num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
10532 }
10533 }
10534
10535 size_t num_zygote_classes;
10536 size_t num_non_zygote_classes;
10537 };
10538
10539 size_t ClassLinker::NumZygoteClasses() const {
10540 CountClassesVisitor visitor;
10541 VisitClassLoaders(&visitor);
10542 return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
10543 }
10544
10545 size_t ClassLinker::NumNonZygoteClasses() const {
10546 CountClassesVisitor visitor;
10547 VisitClassLoaders(&visitor);
10548 return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
10549 }
10550
10551 size_t ClassLinker::NumLoadedClasses() {
10552 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
10553 // Only return non-zygote classes since these are the ones the apps care about.
10554 return NumNonZygoteClasses();
10555 }
10556
10557 pid_t ClassLinker::GetClassesLockOwner() {
10558 return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
10559 }
10560
10561 pid_t ClassLinker::GetDexLockOwner() {
10562 return Locks::dex_lock_->GetExclusiveOwnerTid();
10563 }
10564
10565 void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
10566 DCHECK(!init_done_);
10567
10568 DCHECK(klass != nullptr);
10569 DCHECK(klass->GetClassLoader() == nullptr);
10570
10571 mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
10572 DCHECK(class_roots != nullptr);
10573 DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
10574 int32_t index = static_cast<int32_t>(class_root);
10575 DCHECK(class_roots->Get(index) == nullptr);
10576 class_roots->Set<false>(index, klass);
10577 }
10578
10579 ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
10580 Thread* self,
10581 const std::vector<const DexFile*>& dex_files,
10582 Handle<mirror::Class> loader_class,
10583 Handle<mirror::ClassLoader> parent_loader,
10584 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
10585 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
10586 CHECK(loader_class.Get() == WellKnownClasses::dalvik_system_PathClassLoader ||
10587 loader_class.Get() == WellKnownClasses::dalvik_system_DelegateLastClassLoader ||
10588 loader_class.Get() == WellKnownClasses::dalvik_system_InMemoryDexClassLoader);
10589
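// A rough sketch of the object graph assembled below (mirroring what the
// Java-side BaseDexClassLoader constructors would normally build):
//   loader.pathList          -> DexPathList
//   pathList.dexElements[i]  -> DexPathList$Element
//   element.dexFile          -> dalvik.system.DexFile
//   dexFile cookie (long[])  -> { oat file (left unset here), DexFile* }
//   dexFile fileName         -> the dex location string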
10590 StackHandleScope<5> hs(self);
10591
10592 ArtField* dex_elements_field = WellKnownClasses::dalvik_system_DexPathList_dexElements;
10593
10594 Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
10595 DCHECK(dex_elements_class != nullptr);
10596 DCHECK(dex_elements_class->IsArrayClass());
10597 Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
10598 mirror::ObjectArray<mirror::Object>::Alloc(self,
10599 dex_elements_class.Get(),
10600 dex_files.size())));
10601 Handle<mirror::Class> h_dex_element_class =
10602 hs.NewHandle(dex_elements_class->GetComponentType());
10603
10604 ArtField* element_file_field = WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
10605 DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
10606
10607 ArtField* cookie_field = WellKnownClasses::dalvik_system_DexFile_cookie;
10608 DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
10609
10610 ArtField* file_name_field = WellKnownClasses::dalvik_system_DexFile_fileName;
10611 DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
10612
10613 // Fill the elements array.
10614 int32_t index = 0;
10615 for (const DexFile* dex_file : dex_files) {
10616 StackHandleScope<4> hs2(self);
10617
10618 // CreateWellKnownClassLoader is only used by gtests and the compiler.
10619 // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
10620 Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
10621 self,
10622 kDexFileIndexStart + 1));
10623 DCHECK(h_long_array != nullptr);
10624 h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
10625
10626 // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
10627 // FinalizerReference which will never get cleaned up without a started runtime.
10628 Handle<mirror::Object> h_dex_file = hs2.NewHandle(
10629 cookie_field->GetDeclaringClass()->AllocObject(self));
10630 DCHECK(h_dex_file != nullptr);
10631 cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
10632
10633 Handle<mirror::String> h_file_name = hs2.NewHandle(
10634 mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
10635 DCHECK(h_file_name != nullptr);
10636 file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
10637
10638 Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
10639 DCHECK(h_element != nullptr);
10640 element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
10641
10642 h_dex_elements->Set(index, h_element.Get());
10643 index++;
10644 }
10645 DCHECK_EQ(index, h_dex_elements->GetLength());
10646
10647 // Create DexPathList.
10648 Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
10649 dex_elements_field->GetDeclaringClass()->AllocObject(self));
10650 DCHECK(h_dex_path_list != nullptr);
10651 // Set elements.
10652 dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
10653 // Create an empty List for the "nativeLibraryDirectories" field, required for native tests.
10654 // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
10655 // elements.
10656 {
10657 ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
10658 FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
10659 DCHECK(native_lib_dirs != nullptr);
10660 ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
10661 DCHECK(list_class != nullptr);
10662 {
10663 StackHandleScope<1> h_list_scope(self);
10664 Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
10665 bool list_init = EnsureInitialized(self, h_list_class, true, true);
10666 DCHECK(list_init);
10667 list_class = h_list_class.Get();
10668 }
10669 ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
10670 // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
10671 // is fine for testing. While it violates a Java-code invariant (the elementData field is
10672 // normally never null), as long as one does not try to add elements, this will still
10673 // work.
10674 native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
10675 }
10676
10677 // Create the class loader.
10678 Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
10679 ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
10680 DCHECK(h_class_loader != nullptr);
10681 // Set DexPathList.
10682 ArtField* path_list_field = WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList;
10683 DCHECK(path_list_field != nullptr);
10684 path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
10685
10686 // Make a pretend boot-classpath.
10687 // TODO: Should we scan the image?
10688 ArtField* const parent_field = WellKnownClasses::java_lang_ClassLoader_parent;
10689 DCHECK(parent_field != nullptr);
10690 if (parent_loader.Get() == nullptr) {
10691 ObjPtr<mirror::Object> boot_loader(
10692 WellKnownClasses::java_lang_BootClassLoader->AllocObject(self));
10693 parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
10694 } else {
10695 parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
10696 }
10697
10698 ArtField* shared_libraries_field =
10699 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
10700 DCHECK(shared_libraries_field != nullptr);
10701 shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
10702
10703 ArtField* shared_libraries_after_field =
10704 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
10705 DCHECK(shared_libraries_after_field != nullptr);
10706 shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
10707 shared_libraries_after.Get());
10708 return h_class_loader.Get();
10709 }
10710
10711 jobject ClassLinker::CreatePathClassLoader(Thread* self,
10712 const std::vector<const DexFile*>& dex_files) {
10713 StackHandleScope<3u> hs(self);
10714 Handle<mirror::Class> d_s_pcl =
10715 hs.NewHandle(WellKnownClasses::dalvik_system_PathClassLoader.Get());
10716 auto null_parent = hs.NewHandle<mirror::ClassLoader>(nullptr);
10717 auto null_libs = hs.NewHandle<mirror::ObjectArray<mirror::ClassLoader>>(nullptr);
10718 ObjPtr<mirror::ClassLoader> class_loader =
10719 CreateWellKnownClassLoader(self, dex_files, d_s_pcl, null_parent, null_libs, null_libs);
10720 return Runtime::Current()->GetJavaVM()->AddGlobalRef(self, class_loader);
10721 }
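// A minimal usage sketch (hypothetical caller, e.g. from a gtest; the
// dex-file-opening helper is assumed, not part of this API):
//   std::vector<const DexFile*> dex_files = OpenTestDexFiles();
//   jobject loader = class_linker->CreatePathClassLoader(self, dex_files);
//   // ... use `loader`, then release the global reference when done.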
10722
10723 void ClassLinker::DropFindArrayClassCache() {
10724 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
10725 find_array_class_cache_next_victim_ = 0;
10726 }
10727
10728 void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
10729 Thread* const self = Thread::Current();
10730 for (const ClassLoaderData& data : class_loaders_) {
10731 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10732 ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
10733 self->DecodeJObject(data.weak_root));
10734 if (class_loader != nullptr) {
10735 visitor->Visit(class_loader);
10736 }
10737 }
10738 }
10739
10740 void ClassLinker::VisitDexCaches(DexCacheVisitor* visitor) const {
10741 Thread* const self = Thread::Current();
10742 for (const auto& it : dex_caches_) {
10743 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10744 ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
10745 self->DecodeJObject(it.second.weak_root));
10746 if (dex_cache != nullptr) {
10747 visitor->Visit(dex_cache);
10748 }
10749 }
10750 }
10751
10752 void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
10753 for (const ClassLoaderData& data : class_loaders_) {
10754 LinearAlloc* alloc = data.allocator;
10755 if (alloc != nullptr && !visitor->Visit(alloc)) {
10756 break;
10757 }
10758 }
10759 }
10760
10761 void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
10762 ObjPtr<mirror::ClassLoader> class_loader) {
10763 DCHECK(dex_file != nullptr);
10764 Thread* const self = Thread::Current();
10765 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
10766 ClassTable* const table = ClassTableForClassLoader(class_loader);
10767 DCHECK(table != nullptr);
10768 if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
10769 // The root was not already present; perform the write barrier to let the GC know the
10770 // class loader's class table was modified.
10771 WriteBarrier::ForEveryFieldWrite(class_loader);
10772 }
10773 }
10774
10775 void ClassLinker::CleanupClassLoaders() {
10776 Thread* const self = Thread::Current();
10777 std::list<ClassLoaderData> to_delete;
10778 // Do the deletion outside the lock to avoid a lock violation in the JIT code cache.
10779 {
10780 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
10781 for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
10782 auto this_it = it;
10783 ++it;
10784 const ClassLoaderData& data = *this_it;
10785 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10786 ObjPtr<mirror::ClassLoader> class_loader =
10787 ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
10788 if (class_loader == nullptr) {
10789 VLOG(class_linker) << "Freeing class loader";
10790 to_delete.splice(to_delete.end(), class_loaders_, this_it);
10791 }
10792 }
10793 }
10794 if (to_delete.empty()) {
10795 return;
10796 }
10797 std::set<const OatFile*> unregistered_oat_files;
10798 JavaVMExt* vm = self->GetJniEnv()->GetVm();
10799 {
10800 WriterMutexLock mu(self, *Locks::dex_lock_);
10801 for (auto it = dex_caches_.begin(), end = dex_caches_.end(); it != end; ) {
10802 const DexFile* dex_file = it->first;
10803 const DexCacheData& data = it->second;
10804 if (self->DecodeJObject(data.weak_root) == nullptr) {
10805 DCHECK(to_delete.end() != std::find_if(
10806 to_delete.begin(),
10807 to_delete.end(),
10808 [&](const ClassLoaderData& cld) { return cld.class_table == data.class_table; }));
10809 if (dex_file->GetOatDexFile() != nullptr &&
10810 dex_file->GetOatDexFile()->GetOatFile() != nullptr &&
10811 dex_file->GetOatDexFile()->GetOatFile()->IsExecutable()) {
10812 unregistered_oat_files.insert(dex_file->GetOatDexFile()->GetOatFile());
10813 }
10814 vm->DeleteWeakGlobalRef(self, data.weak_root);
10815 it = dex_caches_.erase(it);
10816 } else {
10817 ++it;
10818 }
10819 }
10820 }
10821 {
10822 ScopedDebugDisallowReadBarriers sddrb(self);
10823 for (ClassLoaderData& data : to_delete) {
10824 // CHA unloading analysis and SingleImplementation cleanups are required.
10825 DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
10826 }
10827 }
10828 Runtime* runtime = Runtime::Current();
10829 if (!unregistered_oat_files.empty()) {
10830 for (const OatFile* oat_file : unregistered_oat_files) {
10831 // Notify the fault handler about removal of the executable code range if needed.
10832 DCHECK(oat_file->IsExecutable());
10833 size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
10834 DCHECK_LE(exec_offset, oat_file->Size());
10835 size_t exec_size = oat_file->Size() - exec_offset;
10836 if (exec_size != 0u) {
10837 runtime->RemoveGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
10838 }
10839 }
10840 }
10841
10842 if (runtime->GetStartupLinearAlloc() != nullptr) {
10843 // Because the startup linear alloc can contain dex cache arrays associated
10844 // with class loaders that got unloaded, we need to delete these
10845 // arrays.
10846 StartupCompletedTask::DeleteStartupDexCaches(self, /* called_by_gc= */ true);
10847 DCHECK_EQ(runtime->GetStartupLinearAlloc(), nullptr);
10848 }
10849 }
10850
10851 class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
10852 public:
10853 FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
10854 : method_(method),
10855 pointer_size_(pointer_size) {}
10856
10857 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
10858 if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
10859 holder_ = klass;
10860 }
10861 // Returning false stops the search once holder_ has been found.
10862 return holder_ == nullptr;
10863 }
10864
10865 ObjPtr<mirror::Class> holder_ = nullptr;
10866 const ArtMethod* const method_;
10867 const PointerSize pointer_size_;
10868 };
10869
10870 ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
10871 ScopedTrace trace(__FUNCTION__); // Since this function is slow, have a trace to notify people.
10872 CHECK(method->IsCopied());
10873 FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
10874 VisitClasses(&visitor);
10875 DCHECK(visitor.holder_ != nullptr);
10876 return visitor.holder_;
10877 }
10878
10879 ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thread* self,
10880 ArtMethod* method) {
10881 // Note: `GetHoldingClassOfCopiedMethod(method)` is a lot more expensive than finding
10882 // the class loader, so we're using it only to verify the result in debug mode.
10883 CHECK(method->IsCopied());
10884 gc::Heap* heap = Runtime::Current()->GetHeap();
10885 // Check if the copied method is in the boot class path.
10886 if (heap->IsBootImageAddress(method) || GetAllocatorForClassLoader(nullptr)->Contains(method)) {
10887 DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == nullptr);
10888 return nullptr;
10889 }
10890 // Check if the copied method is in an app image.
10891 // Note: Continuous spaces contain boot image spaces and app image spaces.
10892 // However, they are sorted by address, so boot images are not trivial to skip.
10893 ArrayRef<gc::space::ContinuousSpace* const> spaces(heap->GetContinuousSpaces());
10894 DCHECK_GE(spaces.size(), heap->GetBootImageSpaces().size());
10895 for (gc::space::ContinuousSpace* space : spaces) {
10896 if (space->IsImageSpace()) {
10897 gc::space::ImageSpace* image_space = space->AsImageSpace();
10898 size_t offset = reinterpret_cast<const uint8_t*>(method) - image_space->Begin();
10899 const ImageSection& methods_section = image_space->GetImageHeader().GetMethodsSection();
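// Note: the single unsigned comparison below implements the range check
// methods_section.Offset() <= offset < methods_section.Offset() + Size();
// if `offset` is less than the section offset, the subtraction wraps around
// to a huge value and the comparison fails.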
10900 if (offset - methods_section.Offset() < methods_section.Size()) {
10901 // Grab the class loader from the first non-BCP class in the app image class table.
10902 // Note: If we allow classes from arbitrary parent or library class loaders in app
10903 // images, this will need to be updated to actually search for the exact class.
10904 const ImageSection& class_table_section =
10905 image_space->GetImageHeader().GetClassTableSection();
10906 CHECK_NE(class_table_section.Size(), 0u);
10907 const uint8_t* ptr = image_space->Begin() + class_table_section.Offset();
10908 size_t read_count = 0;
10909 ClassTable::ClassSet class_set(ptr, /*make_copy_of_data=*/ false, &read_count);
10910 CHECK(!class_set.empty());
10911 auto it = class_set.begin();
10912 // No read barrier needed for references to non-movable image classes.
10913 while ((*it).Read<kWithoutReadBarrier>()->IsBootStrapClassLoaded()) {
10914 ++it;
10915 CHECK(it != class_set.end());
10916 }
10917 ObjPtr<mirror::ClassLoader> class_loader =
10918 (*it).Read<kWithoutReadBarrier>()->GetClassLoader();
10919 DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == class_loader);
10920 return class_loader;
10921 }
10922 }
10923 }
10924 // Otherwise, the method must be in one of the `LinearAlloc` memory areas.
10925 jweak result = nullptr;
10926 {
10927 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
10928 for (const ClassLoaderData& data : class_loaders_) {
10929 if (data.allocator->Contains(method)) {
10930 result = data.weak_root;
10931 break;
10932 }
10933 }
10934 }
10935 CHECK(result != nullptr) << "Did not find allocator holding the copied method: " << method
10936 << " " << method->PrettyMethod();
10937 // The `method` is alive, so the class loader must also be alive.
10938 return ObjPtr<mirror::ClassLoader>::DownCast(
10939 Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
10940 }
10941
10942 bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
10943 REQUIRES_SHARED(Locks::mutator_lock_) {
10944 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10945 LOG(FATAL) << "UNREACHABLE";
10946 UNREACHABLE();
10947 }
10948
10949 bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
10950 REQUIRES_SHARED(Locks::mutator_lock_) {
10951 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10952 LOG(FATAL) << "UNREACHABLE";
10953 UNREACHABLE();
10954 }
10955
10956 bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
10957 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10958 LOG(FATAL) << "UNREACHABLE";
10959 UNREACHABLE();
10960 }
10961
10962 void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
10963 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10964 LOG(FATAL) << "UNREACHABLE";
10965 UNREACHABLE();
10966 }
10967
10968 void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
10969 ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
10970
10971 auto it = dex_caches_.find(&dex_file);
10972 if (it != dex_caches_.end()) {
10973 dex_caches_.erase(it);
10974 }
10975 }
10976
10977 // Instantiate ClassLinker::AllocClass.
10978 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
10979 Thread* self,
10980 ObjPtr<mirror::Class> java_lang_Class,
10981 uint32_t class_size);
10982 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
10983 Thread* self,
10984 ObjPtr<mirror::Class> java_lang_Class,
10985 uint32_t class_size);
10986
10987 } // namespace art
10988