1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "class_linker.h"
18
19 #include <unistd.h>
20
21 #include <algorithm>
22 #include <deque>
23 #include <iostream>
24 #include <map>
25 #include <memory>
26 #include <queue>
27 #include <string>
28 #include <string_view>
29 #include <tuple>
30 #include <unordered_map>
31 #include <utility>
32 #include <vector>
33
34 #include "android-base/stringprintf.h"
35
36 #include "art_field-inl.h"
37 #include "art_method-inl.h"
38 #include "base/arena_allocator.h"
39 #include "base/casts.h"
40 #include "base/leb128.h"
41 #include "base/logging.h"
42 #include "base/os.h"
43 #include "base/quasi_atomic.h"
44 #include "base/scoped_arena_containers.h"
45 #include "base/scoped_flock.h"
46 #include "base/stl_util.h"
47 #include "base/string_view_cpp20.h"
48 #include "base/systrace.h"
49 #include "base/time_utils.h"
50 #include "base/unix_file/fd_file.h"
51 #include "base/utils.h"
52 #include "base/value_object.h"
53 #include "cha.h"
54 #include "class_linker-inl.h"
55 #include "class_loader_utils.h"
56 #include "class_root.h"
57 #include "class_table-inl.h"
58 #include "compiler_callbacks.h"
59 #include "debug_print.h"
60 #include "debugger.h"
61 #include "dex/class_accessor-inl.h"
62 #include "dex/descriptors_names.h"
63 #include "dex/dex_file-inl.h"
64 #include "dex/dex_file_exception_helpers.h"
65 #include "dex/dex_file_loader.h"
66 #include "dex/signature-inl.h"
67 #include "dex/utf.h"
68 #include "entrypoints/entrypoint_utils.h"
69 #include "entrypoints/runtime_asm_entrypoints.h"
70 #include "experimental_flags.h"
71 #include "gc/accounting/card_table-inl.h"
72 #include "gc/accounting/heap_bitmap-inl.h"
73 #include "gc/accounting/space_bitmap-inl.h"
74 #include "gc/heap-visit-objects-inl.h"
75 #include "gc/heap.h"
76 #include "gc/scoped_gc_critical_section.h"
77 #include "gc/space/image_space.h"
78 #include "gc/space/space-inl.h"
79 #include "gc_root-inl.h"
80 #include "handle_scope-inl.h"
81 #include "hidden_api.h"
82 #include "image-inl.h"
83 #include "imt_conflict_table.h"
84 #include "imtable-inl.h"
85 #include "intern_table-inl.h"
86 #include "interpreter/interpreter.h"
87 #include "jit/debugger_interface.h"
88 #include "jit/jit.h"
89 #include "jit/jit_code_cache.h"
90 #include "jni/java_vm_ext.h"
91 #include "jni/jni_internal.h"
92 #include "linear_alloc.h"
93 #include "mirror/array-alloc-inl.h"
94 #include "mirror/array-inl.h"
95 #include "mirror/call_site.h"
96 #include "mirror/class-alloc-inl.h"
97 #include "mirror/class-inl.h"
98 #include "mirror/class.h"
99 #include "mirror/class_ext.h"
100 #include "mirror/class_loader.h"
101 #include "mirror/dex_cache-inl.h"
102 #include "mirror/dex_cache.h"
103 #include "mirror/emulated_stack_frame.h"
104 #include "mirror/field.h"
105 #include "mirror/iftable-inl.h"
106 #include "mirror/method.h"
107 #include "mirror/method_handle_impl.h"
108 #include "mirror/method_handles_lookup.h"
109 #include "mirror/method_type.h"
110 #include "mirror/object-inl.h"
111 #include "mirror/object-refvisitor-inl.h"
112 #include "mirror/object_array-alloc-inl.h"
113 #include "mirror/object_array-inl.h"
114 #include "mirror/object_reference.h"
115 #include "mirror/object_reference-inl.h"
116 #include "mirror/proxy.h"
117 #include "mirror/reference-inl.h"
118 #include "mirror/stack_trace_element.h"
119 #include "mirror/string-inl.h"
120 #include "mirror/throwable.h"
121 #include "mirror/var_handle.h"
122 #include "native/dalvik_system_DexFile.h"
123 #include "nativehelper/scoped_local_ref.h"
124 #include "oat.h"
125 #include "oat_file-inl.h"
126 #include "oat_file.h"
127 #include "oat_file_assistant.h"
128 #include "oat_file_manager.h"
129 #include "object_lock.h"
130 #include "profile/profile_compilation_info.h"
131 #include "runtime.h"
132 #include "runtime_callbacks.h"
133 #include "scoped_thread_state_change-inl.h"
134 #include "thread-inl.h"
135 #include "thread_list.h"
136 #include "trace.h"
137 #include "utils/dex_cache_arrays_layout-inl.h"
138 #include "verifier/class_verifier.h"
139 #include "well_known_classes.h"
140
141 namespace art {
142
143 using android::base::StringPrintf;
144
145 static constexpr bool kSanityCheckObjects = kIsDebugBuild;
146 static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
147
148 static void ThrowNoClassDefFoundError(const char* fmt, ...)
149 __attribute__((__format__(__printf__, 1, 2)))
150 REQUIRES_SHARED(Locks::mutator_lock_);
151 static void ThrowNoClassDefFoundError(const char* fmt, ...) {
152 va_list args;
153 va_start(args, fmt);
154 Thread* self = Thread::Current();
155 self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
156 va_end(args);
157 }
158
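// Returns true if the exception class named by `descriptor` can be loaded and declares a
// <init>(String) constructor, i.e. it can be rethrown with a detail message.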
159 static bool HasInitWithString(Thread* self, ClassLinker* class_linker, const char* descriptor)
160 REQUIRES_SHARED(Locks::mutator_lock_) {
161 ArtMethod* method = self->GetCurrentMethod(nullptr);
162 StackHandleScope<1> hs(self);
163 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(method != nullptr ?
164 method->GetDeclaringClass()->GetClassLoader() : nullptr));
165 ObjPtr<mirror::Class> exception_class = class_linker->FindClass(self, descriptor, class_loader);
166
167 if (exception_class == nullptr) {
168 // No exc class ~ no <init>-with-string.
169 CHECK(self->IsExceptionPending());
170 self->ClearException();
171 return false;
172 }
173
174 ArtMethod* exception_init_method = exception_class->FindConstructor(
175 "(Ljava/lang/String;)V", class_linker->GetImagePointerSize());
176 return exception_init_method != nullptr;
177 }
178
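// Returns the verify error stored in the class's ClassExt, or null if there is no ClassExt or
// no recorded error.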
179 static ObjPtr<mirror::Object> GetVerifyError(ObjPtr<mirror::Class> c)
180 REQUIRES_SHARED(Locks::mutator_lock_) {
181 ObjPtr<mirror::ClassExt> ext(c->GetExtData());
182 if (ext == nullptr) {
183 return nullptr;
184 } else {
185 return ext->GetVerifyError();
186 }
187 }
188
189 // Helper for ThrowEarlierClassFailure. Throws the stored error.
190 static void HandleEarlierVerifyError(Thread* self,
191 ClassLinker* class_linker,
192 ObjPtr<mirror::Class> c)
193 REQUIRES_SHARED(Locks::mutator_lock_) {
194 ObjPtr<mirror::Object> obj = GetVerifyError(c);
195 DCHECK(obj != nullptr);
196 self->AssertNoPendingException();
197 if (obj->IsClass()) {
198 // Previous error has been stored as class. Create a new exception of that type.
199
200 // It's possible the exception doesn't have a <init>(String).
201 std::string temp;
202 const char* descriptor = obj->AsClass()->GetDescriptor(&temp);
203
204 if (HasInitWithString(self, class_linker, descriptor)) {
205 self->ThrowNewException(descriptor, c->PrettyDescriptor().c_str());
206 } else {
207 self->ThrowNewException(descriptor, nullptr);
208 }
209 } else {
210 // Previous error has been stored as an instance. Just rethrow.
211 ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
212 ObjPtr<mirror::Class> error_class = obj->GetClass();
213 CHECK(throwable_class->IsAssignableFrom(error_class));
214 self->SetException(obj->AsThrowable());
215 }
216 self->AssertPendingException();
217 }
218
219 // Ensures that methods have the kAccSkipAccessChecks bit set. We use the
220 // kAccVerificationAttempted bit on the class access flags to determine whether this has been done
221 // before.
222 template <bool kNeedsVerified = false>
223 static void EnsureSkipAccessChecksMethods(Handle<mirror::Class> klass, PointerSize pointer_size)
224 REQUIRES_SHARED(Locks::mutator_lock_) {
225 if (kNeedsVerified) {
226 // To not fail access-flags access checks, push a minimal state.
227 mirror::Class::SetStatus(klass, ClassStatus::kVerified, Thread::Current());
228 }
229 if (!klass->WasVerificationAttempted()) {
230 klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
231 klass->SetVerificationAttempted();
232 }
233 }
234
235 void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
236 bool wrap_in_no_class_def,
237 bool log) {
238 // The class failed to initialize on a previous attempt, so we want to throw
239 // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
240 // failed in verification, in which case v2 5.4.1 says we need to re-throw
241 // the previous error.
242 Runtime* const runtime = Runtime::Current();
243 if (!runtime->IsAotCompiler()) { // Give info if this occurs at runtime.
244 std::string extra;
245 ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
246 if (verify_error != nullptr) {
247 if (verify_error->IsClass()) {
248 extra = mirror::Class::PrettyDescriptor(verify_error->AsClass());
249 } else {
250 extra = verify_error->AsThrowable()->Dump();
251 }
252 }
253 if (log) {
254 LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
255 << ": " << extra;
256 }
257 }
258
259 CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
260 Thread* self = Thread::Current();
261 if (runtime->IsAotCompiler()) {
262 // At compile time, accurate errors and NCDFE are disabled to speed compilation.
263 ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
264 self->SetException(pre_allocated);
265 } else {
266 ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
267 if (verify_error != nullptr) {
268 // Rethrow stored error.
269 HandleEarlierVerifyError(self, this, c);
270 }
271 // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
272 // might have meant to go down the earlier if statement with the original error but it got
273 // swallowed by the OOM so we end up here.
274 if (verify_error == nullptr || wrap_in_no_class_def) {
275 // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
276 // the top-level exception must be a NoClassDefFoundError. The potentially already pending
277 // exception will be a cause.
278 self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
279 c->PrettyDescriptor().c_str());
280 }
281 }
282 }
283
284 static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
285 REQUIRES_SHARED(Locks::mutator_lock_) {
286 if (VLOG_IS_ON(class_linker)) {
287 std::string temp;
288 LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
289 << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
290 }
291 }
292
293 static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
294 REQUIRES_SHARED(Locks::mutator_lock_) {
295 Thread* self = Thread::Current();
296 JNIEnv* env = self->GetJniEnv();
297
298 ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
299 CHECK(cause.get() != nullptr);
300
301 // Boot classpath classes should not fail initialization. This is a sanity debug check. This
302 // cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
303 if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
304 std::string tmp;
305 // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
306 // make sure to only do it if we don't have AsyncExceptions being thrown around since those
307 // could have caused the error.
308 bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
309 LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
310 << " failed initialization: "
311 << self->GetException()->Dump();
312 }
313
314 env->ExceptionClear();
315 bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
316 env->Throw(cause.get());
317
318 // We only wrap non-Error exceptions; an Error can just be used as-is.
319 if (!is_error) {
320 self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
321 }
322 VlogClassInitializationFailure(klass);
323 }
324
325 // Gap between two fields in object layout.
326 struct FieldGap {
327 uint32_t start_offset; // The offset from the start of the object.
328 uint32_t size; // The gap size of 1, 2, or 4 bytes.
329 };
330 struct FieldGapsComparator {
331 FieldGapsComparator() {
332 }
333 bool operator() (const FieldGap& lhs, const FieldGap& rhs)
334 NO_THREAD_SAFETY_ANALYSIS {
335 // Sort by gap size, largest first. Secondary sort by starting offset.
336 // Note that the priority queue returns the largest element, so operator()
337 // should return true if lhs is less than rhs.
338 return lhs.size < rhs.size || (lhs.size == rhs.size && lhs.start_offset > rhs.start_offset);
339 }
340 };
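// Priority queue of field gaps: the largest gap is always on top; among gaps of equal size, the
// one with the lowest start offset comes first.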
341 using FieldGaps = std::priority_queue<FieldGap, std::vector<FieldGap>, FieldGapsComparator>;
342
343 // Adds largest aligned gaps to queue of gaps.
344 static void AddFieldGap(uint32_t gap_start, uint32_t gap_end, FieldGaps* gaps) {
345 DCHECK(gaps != nullptr);
346
347 uint32_t current_offset = gap_start;
348 while (current_offset != gap_end) {
349 size_t remaining = gap_end - current_offset;
350 if (remaining >= sizeof(uint32_t) && IsAligned<4>(current_offset)) {
351 gaps->push(FieldGap {current_offset, sizeof(uint32_t)});
352 current_offset += sizeof(uint32_t);
353 } else if (remaining >= sizeof(uint16_t) && IsAligned<2>(current_offset)) {
354 gaps->push(FieldGap {current_offset, sizeof(uint16_t)});
355 current_offset += sizeof(uint16_t);
356 } else {
357 gaps->push(FieldGap {current_offset, sizeof(uint8_t)});
358 current_offset += sizeof(uint8_t);
359 }
360 DCHECK_LE(current_offset, gap_end) << "Overran gap";
361 }
362 }
363 // Shuffle fields forward, making use of gaps whenever possible.
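// The template parameter n is the field size in bytes (a power of two). Fields at the front of
// `grouped_and_sorted_fields` whose type is at least n bytes wide are placed either into a
// suitably sized gap or at the current (n-aligned) field offset.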
364 template<int n>
365 static void ShuffleForward(size_t* current_field_idx,
366 MemberOffset* field_offset,
367 std::deque<ArtField*>* grouped_and_sorted_fields,
368 FieldGaps* gaps)
369 REQUIRES_SHARED(Locks::mutator_lock_) {
370 DCHECK(current_field_idx != nullptr);
371 DCHECK(grouped_and_sorted_fields != nullptr);
372 DCHECK(gaps != nullptr);
373 DCHECK(field_offset != nullptr);
374
375 DCHECK(IsPowerOfTwo(n));
376 while (!grouped_and_sorted_fields->empty()) {
377 ArtField* field = grouped_and_sorted_fields->front();
378 Primitive::Type type = field->GetTypeAsPrimitiveType();
379 if (Primitive::ComponentSize(type) < n) {
380 break;
381 }
382 if (!IsAligned<n>(field_offset->Uint32Value())) {
383 MemberOffset old_offset = *field_offset;
384 *field_offset = MemberOffset(RoundUp(field_offset->Uint32Value(), n));
385 AddFieldGap(old_offset.Uint32Value(), field_offset->Uint32Value(), gaps);
386 }
387 CHECK(type != Primitive::kPrimNot) << field->PrettyField(); // should be primitive types
388 grouped_and_sorted_fields->pop_front();
389 if (!gaps->empty() && gaps->top().size >= n) {
390 FieldGap gap = gaps->top();
391 gaps->pop();
392 DCHECK_ALIGNED(gap.start_offset, n);
393 field->SetOffset(MemberOffset(gap.start_offset));
394 if (gap.size > n) {
395 AddFieldGap(gap.start_offset + n, gap.start_offset + gap.size, gaps);
396 }
397 } else {
398 DCHECK_ALIGNED(field_offset->Uint32Value(), n);
399 field->SetOffset(*field_offset);
400 *field_offset = MemberOffset(field_offset->Uint32Value() + n);
401 }
402 ++(*current_field_idx);
403 }
404 }
405
406 ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
407 : boot_class_table_(new ClassTable()),
408 failed_dex_cache_class_lookups_(0),
409 class_roots_(nullptr),
410 find_array_class_cache_next_victim_(0),
411 init_done_(false),
412 log_new_roots_(false),
413 intern_table_(intern_table),
414 fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
415 quick_resolution_trampoline_(nullptr),
416 quick_imt_conflict_trampoline_(nullptr),
417 quick_generic_jni_trampoline_(nullptr),
418 quick_to_interpreter_bridge_trampoline_(nullptr),
419 image_pointer_size_(kRuntimePointerSize),
420 cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
421 // For CHA disabled during Aot, see b/34193647.
422
423 CHECK(intern_table_ != nullptr);
424 static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
425 "Array cache size wrong.");
426 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
427 }
428
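// Checks that the class created during early startup for `descriptor` matches the class found
// via FindSystemClass; aborts with a detailed class dump if they differ.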
429 void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
430 ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
431 if (c2 == nullptr) {
432 LOG(FATAL) << "Could not find class " << descriptor;
433 UNREACHABLE();
434 }
435 if (c1.Get() != c2) {
436 std::ostringstream os1, os2;
437 c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
438 c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
439 LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
440 << ". This is most likely the result of a broken build. Make sure that "
441 << "libcore and art projects match.\n\n"
442 << os1.str() << "\n\n" << os2.str();
443 UNREACHABLE();
444 }
445 }
446
447 bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
448 std::string* error_msg) {
449 VLOG(startup) << "ClassLinker::Init";
450
451 Thread* const self = Thread::Current();
452 Runtime* const runtime = Runtime::Current();
453 gc::Heap* const heap = runtime->GetHeap();
454
455 CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
456 CHECK(!init_done_);
457
458 // Use the pointer size from the runtime since we are probably creating the image.
459 image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
460
461 // java_lang_Class comes first, it's needed for AllocClass
462 // The GC can't handle an object with a null class since we can't get the size of this object.
463 heap->IncrementDisableMovingGC(self);
464 StackHandleScope<64> hs(self); // 64 is picked arbitrarily.
465 auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
466 // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
467 // the incorrect result when comparing to-space vs from-space.
468 Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
469 heap->AllocNonMovableObject<true>(self, nullptr, class_class_size, VoidFunctor()))));
470 CHECK(java_lang_Class != nullptr);
471 java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
472 java_lang_Class->SetClass(java_lang_Class.Get());
473 if (kUseBakerReadBarrier) {
474 java_lang_Class->AssertReadBarrierState();
475 }
476 java_lang_Class->SetClassSize(class_class_size);
477 java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
478 heap->DecrementDisableMovingGC(self);
479 // AllocClass(ObjPtr<mirror::Class>) can now be used
480
481 // Class[] is used for reflection support.
482 auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
483 Handle<mirror::Class> class_array_class(hs.NewHandle(
484 AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
485 class_array_class->SetComponentType(java_lang_Class.Get());
486
487 // java_lang_Object comes next so that object_array_class can be created.
488 Handle<mirror::Class> java_lang_Object(hs.NewHandle(
489 AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
490 CHECK(java_lang_Object != nullptr);
491 // backfill Object as the super class of Class.
492 java_lang_Class->SetSuperClass(java_lang_Object.Get());
493 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);
494
495 java_lang_Object->SetObjectSize(sizeof(mirror::Object));
496 // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
497 // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
498 runtime->SetSentinel(heap->AllocNonMovableObject<true>(self,
499 java_lang_Object.Get(),
500 java_lang_Object->GetObjectSize(),
501 VoidFunctor()));
502
503 // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
504 if (kBitstringSubtypeCheckEnabled) {
505 // It might seem the lock here is unnecessary, however all the SubtypeCheck
506 // functions are annotated to require locks all the way down.
507 //
508 // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
509 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
510 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
511 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
512 }
513
514 // Object[] next to hold class roots.
515 Handle<mirror::Class> object_array_class(hs.NewHandle(
516 AllocClass(self, java_lang_Class.Get(),
517 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
518 object_array_class->SetComponentType(java_lang_Object.Get());
519
520 // Setup java.lang.String.
521 //
522 // We make this class non-movable for the unlikely case where it were to be
523 // moved by a sticky-bit (minor) collection when using the Generational
524 // Concurrent Copying (CC) collector, potentially creating a stale reference
525 // in the `klass_` field of one of its instances allocated in the Large-Object
526 // Space (LOS) -- see the comment about the dirty card scanning logic in
527 // art::gc::collector::ConcurrentCopying::MarkingPhase.
528 Handle<mirror::Class> java_lang_String(hs.NewHandle(
529 AllocClass</* kMovable= */ false>(
530 self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
531 java_lang_String->SetStringClass();
532 mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
533
534 // Setup java.lang.ref.Reference.
535 Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
536 AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
537 java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
538 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
539
540 // Create storage for root classes, save away our work so far (requires descriptors).
541 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
542 mirror::ObjectArray<mirror::Class>::Alloc(self,
543 object_array_class.Get(),
544 static_cast<int32_t>(ClassRoot::kMax)));
545 CHECK(!class_roots_.IsNull());
546 SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
547 SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
548 SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
549 SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
550 SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
551 SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
552
553 // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
554 java_lang_Object->SetIfTable(AllocIfTable(self, 0));
555
556 // Create array interface entries to populate once we can load system classes.
557 object_array_class->SetIfTable(AllocIfTable(self, 2));
558 DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
559
560 // Setup the primitive type classes.
561 SetClassRoot(ClassRoot::kPrimitiveBoolean, CreatePrimitiveClass(self, Primitive::kPrimBoolean));
562 SetClassRoot(ClassRoot::kPrimitiveByte, CreatePrimitiveClass(self, Primitive::kPrimByte));
563 SetClassRoot(ClassRoot::kPrimitiveChar, CreatePrimitiveClass(self, Primitive::kPrimChar));
564 SetClassRoot(ClassRoot::kPrimitiveShort, CreatePrimitiveClass(self, Primitive::kPrimShort));
565 SetClassRoot(ClassRoot::kPrimitiveInt, CreatePrimitiveClass(self, Primitive::kPrimInt));
566 SetClassRoot(ClassRoot::kPrimitiveLong, CreatePrimitiveClass(self, Primitive::kPrimLong));
567 SetClassRoot(ClassRoot::kPrimitiveFloat, CreatePrimitiveClass(self, Primitive::kPrimFloat));
568 SetClassRoot(ClassRoot::kPrimitiveDouble, CreatePrimitiveClass(self, Primitive::kPrimDouble));
569 SetClassRoot(ClassRoot::kPrimitiveVoid, CreatePrimitiveClass(self, Primitive::kPrimVoid));
570
571 // Create int array type for native pointer arrays (for example vtables) on 32-bit archs.
572 Handle<mirror::Class> int_array_class(hs.NewHandle(
573 AllocPrimitiveArrayClass(self, java_lang_Class.Get())));
574 int_array_class->SetComponentType(GetClassRoot(ClassRoot::kPrimitiveInt, this));
575 SetClassRoot(ClassRoot::kIntArrayClass, int_array_class.Get());
576
577 // Create long array type for native pointer arrays (for example vtables) on 64-bit archs.
578 Handle<mirror::Class> long_array_class(hs.NewHandle(
579 AllocPrimitiveArrayClass(self, java_lang_Class.Get())));
580 long_array_class->SetComponentType(GetClassRoot(ClassRoot::kPrimitiveLong, this));
581 SetClassRoot(ClassRoot::kLongArrayClass, long_array_class.Get());
582
583 // now that these are registered, we can use AllocClass() and AllocObjectArray
584
585 // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
586 Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
587 AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
588 SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
589 java_lang_DexCache->SetDexCacheClass();
590 java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
591 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);
592
593
594 // Setup dalvik.system.ClassExt
595 Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
596 AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
597 SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
598 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);
599
600 // Set up array classes for string, field, method
601 Handle<mirror::Class> object_array_string(hs.NewHandle(
602 AllocClass(self, java_lang_Class.Get(),
603 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
604 object_array_string->SetComponentType(java_lang_String.Get());
605 SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());
606
607 LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
608 // Create runtime resolution and imt conflict methods.
609 runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
610 runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
611 runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));
612
613 // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
614 // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
615 // these roots.
616 if (boot_class_path.empty()) {
617 *error_msg = "Boot classpath is empty.";
618 return false;
619 }
620 for (auto& dex_file : boot_class_path) {
621 if (dex_file.get() == nullptr) {
622 *error_msg = "Null dex file.";
623 return false;
624 }
625 AppendToBootClassPath(self, *dex_file);
626 boot_dex_files_.push_back(std::move(dex_file));
627 }
628
629 // now we can use FindSystemClass
630
631 // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
632 // we do not need friend classes or a publicly exposed setter.
633 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
634 if (!runtime->IsAotCompiler()) {
635 // We need to set up the generic trampolines since we don't have an image.
636 quick_resolution_trampoline_ = GetQuickResolutionStub();
637 quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
638 quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
639 }
640
641 // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
642 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
643 CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
644 CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
645 mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
646 CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
647 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
648 CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
649 CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
650 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
651 CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
652 CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
653
654 // Setup the primitive array type classes - can't be done until Object has a vtable.
655 AllocAndSetPrimitiveArrayClassRoot(self,
656 java_lang_Class.Get(),
657 ClassRoot::kBooleanArrayClass,
658 ClassRoot::kPrimitiveBoolean,
659 "[Z");
660
661 AllocAndSetPrimitiveArrayClassRoot(
662 self, java_lang_Class.Get(), ClassRoot::kByteArrayClass, ClassRoot::kPrimitiveByte, "[B");
663
664 AllocAndSetPrimitiveArrayClassRoot(
665 self, java_lang_Class.Get(), ClassRoot::kCharArrayClass, ClassRoot::kPrimitiveChar, "[C");
666
667 AllocAndSetPrimitiveArrayClassRoot(
668 self, java_lang_Class.Get(), ClassRoot::kShortArrayClass, ClassRoot::kPrimitiveShort, "[S");
669
670 CheckSystemClass(self, int_array_class, "[I");
671 CheckSystemClass(self, long_array_class, "[J");
672
673 AllocAndSetPrimitiveArrayClassRoot(
674 self, java_lang_Class.Get(), ClassRoot::kFloatArrayClass, ClassRoot::kPrimitiveFloat, "[F");
675
676 AllocAndSetPrimitiveArrayClassRoot(
677 self, java_lang_Class.Get(), ClassRoot::kDoubleArrayClass, ClassRoot::kPrimitiveDouble, "[D");
678
679 // Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
680 // in class_table_.
681 CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
682
683 CheckSystemClass(self, class_array_class, "[Ljava/lang/Class;");
684 CheckSystemClass(self, object_array_class, "[Ljava/lang/Object;");
685
686 // Setup the single, global copy of "iftable".
687 auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
688 CHECK(java_lang_Cloneable != nullptr);
689 auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
690 CHECK(java_io_Serializable != nullptr);
691 // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
692 // crawl up and explicitly list all of the supers as well.
693 object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
694 object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
695
696 // Sanity check Class[] and Object[]'s interfaces. GetDirectInterface may cause thread
697 // suspension.
698 CHECK_EQ(java_lang_Cloneable.Get(),
699 mirror::Class::GetDirectInterface(self, class_array_class.Get(), 0));
700 CHECK_EQ(java_io_Serializable.Get(),
701 mirror::Class::GetDirectInterface(self, class_array_class.Get(), 1));
702 CHECK_EQ(java_lang_Cloneable.Get(),
703 mirror::Class::GetDirectInterface(self, object_array_class.Get(), 0));
704 CHECK_EQ(java_io_Serializable.Get(),
705 mirror::Class::GetDirectInterface(self, object_array_class.Get(), 1));
706
707 CHECK_EQ(object_array_string.Get(),
708 FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
709
710 // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.
711
712 // Create java.lang.reflect.Proxy root.
713 SetClassRoot(ClassRoot::kJavaLangReflectProxy,
714 FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
715
716 // Create java.lang.reflect.Field.class root.
717 ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
718 CHECK(class_root != nullptr);
719 SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
720
721 // Create java.lang.reflect.Field array root.
722 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
723 CHECK(class_root != nullptr);
724 SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
725
726 // Create java.lang.reflect.Constructor.class root and array root.
727 class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
728 CHECK(class_root != nullptr);
729 SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
730 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
731 CHECK(class_root != nullptr);
732 SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
733
734 // Create java.lang.reflect.Method.class root and array root.
735 class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
736 CHECK(class_root != nullptr);
737 SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
738 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
739 CHECK(class_root != nullptr);
740 SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
741
742 // Create java.lang.invoke.CallSite.class root
743 class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
744 CHECK(class_root != nullptr);
745 SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
746
747 // Create java.lang.invoke.MethodType.class root
748 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
749 CHECK(class_root != nullptr);
750 SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
751
752 // Create java.lang.invoke.MethodHandleImpl.class root
753 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
754 CHECK(class_root != nullptr);
755 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
756 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
757
758 // Create java.lang.invoke.MethodHandles.Lookup.class root
759 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
760 CHECK(class_root != nullptr);
761 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
762
763 // Create java.lang.invoke.VarHandle.class root
764 class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
765 CHECK(class_root != nullptr);
766 SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
767
768 // Create java.lang.invoke.FieldVarHandle.class root
769 class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
770 CHECK(class_root != nullptr);
771 SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
772
773 // Create java.lang.invoke.ArrayElementVarHandle.class root
774 class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
775 CHECK(class_root != nullptr);
776 SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
777
778 // Create java.lang.invoke.ByteArrayViewVarHandle.class root
779 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
780 CHECK(class_root != nullptr);
781 SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
782
783 // Create java.lang.invoke.ByteBufferViewVarHandle.class root
784 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
785 CHECK(class_root != nullptr);
786 SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
787
788 class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
789 CHECK(class_root != nullptr);
790 SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
791
792 // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
793 // finish initializing Reference class
794 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
795 CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
796 CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
797 CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
798 mirror::Reference::ClassSize(image_pointer_size_));
799 class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
800 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
801 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
802 class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
803 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
804 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
805 class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
806 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
807 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
808 class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
809 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
810 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
811
812 // Setup the ClassLoader, verifying the object_size_.
813 class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
814 class_root->SetClassLoaderClass();
815 CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
816 SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
817
818 // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
819 // java.lang.StackTraceElement as a convenience.
820 SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
821 SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
822 FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
823 SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
824 FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
825 SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
826 FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
827 SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
828 FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
829
830 // Create conflict tables that depend on the class linker.
831 runtime->FixupConflictTables();
832
833 FinishInit(self);
834
835 VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
836
837 return true;
838 }
839
840 static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
841 REQUIRES_SHARED(Locks::mutator_lock_) {
842 // Find String.<init> -> StringFactory bindings.
843 ObjPtr<mirror::Class> string_factory_class =
844 class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
845 CHECK(string_factory_class != nullptr);
846 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
847 WellKnownClasses::InitStringInit(string_class, string_factory_class);
848 // Update the primordial thread.
849 self->InitStringEntryPoints();
850 }
851
852 void ClassLinker::FinishInit(Thread* self) {
853 VLOG(startup) << "ClassLinker::FinishInit entering";
854
855 CreateStringInitBindings(self, this);
856
857 // Let the heap know some key offsets into java.lang.ref instances
858 // Note: we hard code the field indexes here rather than using FindInstanceField
859 // as the types of the fields can't be resolved prior to the runtime being
860 // fully initialized
861 StackHandleScope<3> hs(self);
862 Handle<mirror::Class> java_lang_ref_Reference =
863 hs.NewHandle(GetClassRoot<mirror::Reference>(this));
864 Handle<mirror::Class> java_lang_ref_FinalizerReference =
865 hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
866
867 ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
868 CHECK_STREQ(pendingNext->GetName(), "pendingNext");
869 CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
870
871 ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
872 CHECK_STREQ(queue->GetName(), "queue");
873 CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
874
875 ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
876 CHECK_STREQ(queueNext->GetName(), "queueNext");
877 CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
878
879 ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
880 CHECK_STREQ(referent->GetName(), "referent");
881 CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
882
883 ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
884 CHECK_STREQ(zombie->GetName(), "zombie");
885 CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
886
887 // ensure all class_roots_ are initialized
888 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
889 ClassRoot class_root = static_cast<ClassRoot>(i);
890 ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
891 CHECK(klass != nullptr);
892 DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
893 // note SetClassRoot does additional validation.
894 // if possible add new checks there to catch errors early
895 }
896
897 CHECK(GetArrayIfTable() != nullptr);
898
899 // disable the slow paths in FindClass and CreatePrimitiveClass now
900 // that Object, Class, and Object[] are set up
901 init_done_ = true;
902
903 // Under sanitization, the small carve-out to handle stack overflow might not be enough to
904 // initialize the StackOverflowError class (as it might require running the verifier). Instead,
905 // ensure that the class will be initialized.
906 if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
907 verifier::ClassVerifier::Init(); // Need to prepare the verifier.
908
909 ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
910 if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
911 // Strange, but don't crash.
912 LOG(WARNING) << "Could not prepare StackOverflowError.";
913 self->ClearException();
914 }
915 }
916
917 VLOG(startup) << "ClassLinker::FinishInit exiting";
918 }
919
920 void ClassLinker::RunRootClinits(Thread* self) {
921 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
922 ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
923 if (!c->IsArrayClass() && !c->IsPrimitive()) {
924 StackHandleScope<1> hs(self);
925 Handle<mirror::Class> h_class(hs.NewHandle(c));
926 EnsureInitialized(self, h_class, true, true);
927 self->AssertNoPendingException();
928 } else {
929 DCHECK(c->IsInitialized());
930 }
931 }
932 }
933
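// Data for the debug-build check in InitFromBootImage: records a secondary image's trampolines
// and flags any ArtMethod whose entrypoint still refers to them.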
934 struct TrampolineCheckData {
935 const void* quick_resolution_trampoline;
936 const void* quick_imt_conflict_trampoline;
937 const void* quick_generic_jni_trampoline;
938 const void* quick_to_interpreter_bridge_trampoline;
939 PointerSize pointer_size;
940 ArtMethod* m;
941 bool error;
942 };
943
944 bool ClassLinker::InitFromBootImage(std::string* error_msg) {
945 VLOG(startup) << __FUNCTION__ << " entering";
946 CHECK(!init_done_);
947
948 Runtime* const runtime = Runtime::Current();
949 Thread* const self = Thread::Current();
950 gc::Heap* const heap = runtime->GetHeap();
951 std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
952 CHECK(!spaces.empty());
953 uint32_t pointer_size_unchecked = spaces[0]->GetImageHeader().GetPointerSizeUnchecked();
954 if (!ValidPointerSize(pointer_size_unchecked)) {
955 *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
956 return false;
957 }
958 const ImageHeader& image_header = spaces[0]->GetImageHeader();
959 image_pointer_size_ = image_header.GetPointerSize();
960 if (!runtime->IsAotCompiler()) {
961 // Only the Aot compiler supports having an image with a different pointer size than the
962 // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
963 // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
964 if (image_pointer_size_ != kRuntimePointerSize) {
965 *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
966 static_cast<size_t>(image_pointer_size_),
967 sizeof(void*));
968 return false;
969 }
970 }
971 DCHECK(!runtime->HasResolutionMethod());
972 runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
973 runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
974 runtime->SetImtUnimplementedMethod(
975 image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
976 runtime->SetCalleeSaveMethod(
977 image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
978 CalleeSaveType::kSaveAllCalleeSaves);
979 runtime->SetCalleeSaveMethod(
980 image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
981 CalleeSaveType::kSaveRefsOnly);
982 runtime->SetCalleeSaveMethod(
983 image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
984 CalleeSaveType::kSaveRefsAndArgs);
985 runtime->SetCalleeSaveMethod(
986 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
987 CalleeSaveType::kSaveEverything);
988 runtime->SetCalleeSaveMethod(
989 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
990 CalleeSaveType::kSaveEverythingForClinit);
991 runtime->SetCalleeSaveMethod(
992 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
993 CalleeSaveType::kSaveEverythingForSuspendCheck);
994
995 std::vector<const OatFile*> oat_files =
996 runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
997 DCHECK(!oat_files.empty());
998 const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
999 quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1000 quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1001 quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1002 quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
1003 if (kIsDebugBuild) {
1004 // Check that the other images use the same trampoline.
1005 for (size_t i = 1; i < oat_files.size(); ++i) {
1006 const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
1007 const void* ith_quick_resolution_trampoline =
1008 ith_oat_header.GetQuickResolutionTrampoline();
1009 const void* ith_quick_imt_conflict_trampoline =
1010 ith_oat_header.GetQuickImtConflictTrampoline();
1011 const void* ith_quick_generic_jni_trampoline =
1012 ith_oat_header.GetQuickGenericJniTrampoline();
1013 const void* ith_quick_to_interpreter_bridge_trampoline =
1014 ith_oat_header.GetQuickToInterpreterBridge();
1015 if (ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
1016 ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1017 ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
1018 ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_) {
1019 // Make sure that all methods in this image do not contain those trampolines as
1020 // entrypoints. Otherwise the class-linker won't be able to work with a single set.
1021 TrampolineCheckData data;
1022 data.error = false;
1023 data.pointer_size = GetImagePointerSize();
1024 data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1025 data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1026 data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1027 data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
1028 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
1029 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1030 if (obj->IsClass()) {
1031 ObjPtr<mirror::Class> klass = obj->AsClass();
1032 for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1033 const void* entrypoint =
1034 m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1035 if (entrypoint == data.quick_resolution_trampoline ||
1036 entrypoint == data.quick_imt_conflict_trampoline ||
1037 entrypoint == data.quick_generic_jni_trampoline ||
1038 entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1039 data.m = &m;
1040 data.error = true;
1041 return;
1042 }
1043 }
1044 }
1045 };
1046 spaces[i]->GetLiveBitmap()->Walk(visitor);
1047 if (data.error) {
1048 ArtMethod* m = data.m;
1049 LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
1050 *error_msg = "Found an ArtMethod with a bad entrypoint";
1051 return false;
1052 }
1053 }
1054 }
1055 }
1056
1057 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
1058 ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
1059 spaces[0]->GetImageHeader().GetImageRoot(ImageHeader::kClassRoots)));
1060 DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
1061
1062 ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
1063 java_lang_Object->SetObjectSize(sizeof(mirror::Object));
1064 // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
1065 // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
1066 runtime->SetSentinel(heap->AllocNonMovableObject<true>(
1067 self, java_lang_Object, java_lang_Object->GetObjectSize(), VoidFunctor()));
1068
1069 const std::vector<std::string>& boot_class_path_locations = runtime->GetBootClassPathLocations();
1070 CHECK_LE(spaces.size(), boot_class_path_locations.size());
1071 for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
1072 // Boot class loader, use a null handle.
1073 std::vector<std::unique_ptr<const DexFile>> dex_files;
1074 if (!AddImageSpace(spaces[i],
1075 ScopedNullHandle<mirror::ClassLoader>(),
1076 /*dex_elements=*/ nullptr,
1077 /*dex_location=*/ boot_class_path_locations[i].c_str(),
1078 /*out*/&dex_files,
1079 error_msg)) {
1080 return false;
1081 }
1082 // Assert that if absolute boot classpath locations were provided, they were
1083 // assigned to the loaded dex files.
1084 if (kIsDebugBuild && IsAbsoluteLocation(boot_class_path_locations[i])) {
1085 for (const auto& dex_file : dex_files) {
1086 DCHECK_EQ(DexFileLoader::GetBaseLocation(dex_file->GetLocation()),
1087 boot_class_path_locations[i]);
1088 }
1089 }
1090 // Append opened dex files at the end.
1091 boot_dex_files_.insert(boot_dex_files_.end(),
1092 std::make_move_iterator(dex_files.begin()),
1093 std::make_move_iterator(dex_files.end()));
1094 }
1095 for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
1096 OatDexFile::MadviseDexFile(*dex_file, MadviseState::kMadviseStateAtLoad);
1097 }
1098 FinishInit(self);
1099
1100 VLOG(startup) << __FUNCTION__ << " exiting";
1101 return true;
1102 }
1103
1104 void ClassLinker::AddExtraBootDexFiles(
1105 Thread* self,
1106 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1107 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1108 AppendToBootClassPath(self, *dex_file);
1109 boot_dex_files_.push_back(std::move(dex_file));
1110 }
1111 }
1112
1113 bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
1114 ObjPtr<mirror::ClassLoader> class_loader) {
1115 return class_loader == nullptr ||
1116 soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
1117 class_loader->GetClass();
1118 }
1119
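// Extracts the dex file name from a dalvik.system.DexPathList$Element. Returns true on success
// (with *out_name set to null if the element has no dex file) and false if the name is missing.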
1120 static bool GetDexPathListElementName(ObjPtr<mirror::Object> element,
1121 ObjPtr<mirror::String>* out_name)
1122 REQUIRES_SHARED(Locks::mutator_lock_) {
1123 ArtField* const dex_file_field =
1124 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
1125 ArtField* const dex_file_name_field =
1126 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
1127 DCHECK(dex_file_field != nullptr);
1128 DCHECK(dex_file_name_field != nullptr);
1129 DCHECK(element != nullptr);
1130 CHECK_EQ(dex_file_field->GetDeclaringClass(), element->GetClass()) << element->PrettyTypeOf();
1131 ObjPtr<mirror::Object> dex_file = dex_file_field->GetObject(element);
1132 if (dex_file == nullptr) {
1133 // Null dex file means it was probably a jar with no dex files, return a null string.
1134 *out_name = nullptr;
1135 return true;
1136 }
1137 ObjPtr<mirror::Object> name_object = dex_file_name_field->GetObject(dex_file);
1138 if (name_object != nullptr) {
1139 *out_name = name_object->AsString();
1140 return true;
1141 }
1142 return false;
1143 }
1144
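// Collects the dex file names of a class loader's DexPathList elements into `dex_files`.
// Returns false and sets `error_msg` if any element is null or malformed.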
1145 static bool GetDexFileNames(ScopedObjectAccessUnchecked& soa,
1146 ObjPtr<mirror::ClassLoader> class_loader,
1147 /*out*/std::list<ObjPtr<mirror::String>>* dex_files,
1148 /*out*/std::string* error_msg)
1149 REQUIRES_SHARED(Locks::mutator_lock_) {
1150 StackHandleScope<1> hs(soa.Self());
1151 Handle<mirror::ClassLoader> handle(hs.NewHandle(class_loader));
1152 // Get element names. Sets error to true on failure.
1153 auto add_element_names = [&](ObjPtr<mirror::Object> element, bool* error)
1154 REQUIRES_SHARED(Locks::mutator_lock_) {
1155 if (element == nullptr) {
1156 *error_msg = "Null dex element";
1157 *error = true; // Null element is a critical error.
1158 return false; // Had an error, stop the visit.
1159 }
1160 ObjPtr<mirror::String> name;
1161 if (!GetDexPathListElementName(element, &name)) {
1162 *error_msg = "Invalid dex path list element";
1163 *error = true; // Invalid element, make it a critical error.
1164 return false; // Stop the visit.
1165 }
1166 if (name != nullptr) {
1167 dex_files->push_front(name);
1168 }
1169 return true; // Continue with the next Element.
1170 };
1171 bool error = VisitClassLoaderDexElements(soa,
1172 handle,
1173 add_element_names,
1174 /*defaultReturn=*/ false);
1175 return !error;
1176 }
1177
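// Checks that `class_loader` and `image_class_loader` are of matching, supported types
// (boot class loader or PathClassLoader).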
1178 static bool CompareClassLoaderTypes(ScopedObjectAccessUnchecked& soa,
1179 ObjPtr<mirror::ClassLoader> image_class_loader,
1180 ObjPtr<mirror::ClassLoader> class_loader,
1181 std::string* error_msg)
1182 REQUIRES_SHARED(Locks::mutator_lock_) {
1183 if (ClassLinker::IsBootClassLoader(soa, class_loader)) {
1184 if (!ClassLinker::IsBootClassLoader(soa, image_class_loader)) {
1185 *error_msg = "Hierarchies don't match";
1186 return false;
1187 }
1188 } else if (ClassLinker::IsBootClassLoader(soa, image_class_loader)) {
1189 *error_msg = "Hierarchies don't match";
1190 return false;
1191 } else if (class_loader->GetClass() != image_class_loader->GetClass()) {
1192 *error_msg = StringPrintf("Class loader types don't match %s and %s",
1193 image_class_loader->PrettyTypeOf().c_str(),
1194 class_loader->PrettyTypeOf().c_str());
1195 return false;
1196 } else if (soa.Decode<mirror::Class>(WellKnownClasses::dalvik_system_PathClassLoader) !=
1197 class_loader->GetClass()) {
1198 *error_msg = StringPrintf("Unknown class loader type %s",
1199 class_loader->PrettyTypeOf().c_str());
1200 // Unsupported class loader.
1201 return false;
1202 }
1203 return true;
1204 }
1205
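// Compares two lists of dex file names element by element. On mismatch, logs both lists and sets
// `error_msg`.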
1206 static bool CompareDexFiles(const std::list<ObjPtr<mirror::String>>& image_dex_files,
1207 const std::list<ObjPtr<mirror::String>>& loader_dex_files,
1208 std::string* error_msg)
1209 REQUIRES_SHARED(Locks::mutator_lock_) {
1210 bool equal = (image_dex_files.size() == loader_dex_files.size()) &&
1211 std::equal(image_dex_files.begin(),
1212 image_dex_files.end(),
1213 loader_dex_files.begin(),
1214 [](ObjPtr<mirror::String> lhs, ObjPtr<mirror::String> rhs)
1215 REQUIRES_SHARED(Locks::mutator_lock_) {
1216 return lhs->Equals(rhs);
1217 });
1218 if (!equal) {
1219 VLOG(image) << "Image dex files " << image_dex_files.size();
1220 for (ObjPtr<mirror::String> name : image_dex_files) {
1221 VLOG(image) << name->ToModifiedUtf8();
1222 }
1223 VLOG(image) << "Loader dex files " << loader_dex_files.size();
1224 for (ObjPtr<mirror::String> name : loader_dex_files) {
1225 VLOG(image) << name->ToModifiedUtf8();
1226 }
1227 *error_msg = "Mismatch in dex files";
1228 }
1229 return equal;
1230 }
1231
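// Recursively compares two class loader hierarchies: their types, optionally their dex file
// names, their shared library loaders and their parents.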
1232 static bool CompareClassLoaders(ScopedObjectAccessUnchecked& soa,
1233 ObjPtr<mirror::ClassLoader> image_class_loader,
1234 ObjPtr<mirror::ClassLoader> class_loader,
1235 bool check_dex_file_names,
1236 std::string* error_msg)
1237 REQUIRES_SHARED(Locks::mutator_lock_) {
1238 if (!CompareClassLoaderTypes(soa, image_class_loader, class_loader, error_msg)) {
1239 return false;
1240 }
1241
1242 if (ClassLinker::IsBootClassLoader(soa, class_loader)) {
1243 // No need to check further.
1244 return true;
1245 }
1246
1247 if (check_dex_file_names) {
1248 std::list<ObjPtr<mirror::String>> image_dex_files;
1249 if (!GetDexFileNames(soa, image_class_loader, &image_dex_files, error_msg)) {
1250 return false;
1251 }
1252
1253 std::list<ObjPtr<mirror::String>> loader_dex_files;
1254 if (!GetDexFileNames(soa, class_loader, &loader_dex_files, error_msg)) {
1255 return false;
1256 }
1257
1258 if (!CompareDexFiles(image_dex_files, loader_dex_files, error_msg)) {
1259 return false;
1260 }
1261 }
1262
1263 ArtField* field =
1264 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
1265 ObjPtr<mirror::Object> shared_libraries_image_loader = field->GetObject(image_class_loader.Ptr());
1266 ObjPtr<mirror::Object> shared_libraries_loader = field->GetObject(class_loader.Ptr());
1267 if (shared_libraries_image_loader == nullptr) {
1268 if (shared_libraries_loader != nullptr) {
1269 *error_msg = "Mismatch in shared libraries";
1270 return false;
1271 }
1272 } else if (shared_libraries_loader == nullptr) {
1273 *error_msg = "Mismatch in shared libraries";
1274 return false;
1275 } else {
1276 ObjPtr<mirror::ObjectArray<mirror::ClassLoader>> array1 =
1277 shared_libraries_image_loader->AsObjectArray<mirror::ClassLoader>();
1278 ObjPtr<mirror::ObjectArray<mirror::ClassLoader>> array2 =
1279 shared_libraries_loader->AsObjectArray<mirror::ClassLoader>();
1280 if (array1->GetLength() != array2->GetLength()) {
1281 *error_msg = "Mismatch in number of shared libraries";
1282 return false;
1283 }
1284
1285 for (int32_t i = 0; i < array1->GetLength(); ++i) {
1286 // Do a full comparison of the class loaders, including comparing their dex files.
1287 if (!CompareClassLoaders(soa,
1288 array1->Get(i),
1289 array2->Get(i),
1290 /*check_dex_file_names=*/ true,
1291 error_msg)) {
1292 return false;
1293 }
1294 }
1295 }
1296
1297 // Do a full comparison of the class loaders, including comparing their dex files.
1298 if (!CompareClassLoaders(soa,
1299 image_class_loader->GetParent(),
1300 class_loader->GetParent(),
1301 /*check_dex_file_names=*/ true,
1302 error_msg)) {
1303 return false;
1304 }
1305 return true;
1306 }
1307
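// Visitor that informs CHA about classes that are about to be unloaded so that the
// single-implementation information recorded in their hierarchies can be reset.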
1308 class CHAOnDeleteUpdateClassVisitor {
1309 public:
1310   explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1311 : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1312 pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1313 self_(Thread::Current()) {}
1314
1315   bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1316 // This class is going to be unloaded. Tell CHA about it.
1317 cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1318 return true;
1319 }
1320 private:
1321 const LinearAlloc* allocator_;
1322 const ClassHierarchyAnalysis* cha_;
1323 const PointerSize pointer_size_;
1324 const Thread* self_;
1325 };
1326
1327 /*
1328 * A class used to ensure that all strings in an AppImage have been properly
1329  * interned. It is only ever run in debug mode.
1330 */
1331 class VerifyStringInterningVisitor {
1332 public:
1333   explicit VerifyStringInterningVisitor(const gc::space::ImageSpace& space) :
1334 space_(space),
1335 intern_table_(*Runtime::Current()->GetInternTable()) {}
1336
1337   void TestObject(ObjPtr<mirror::Object> referred_obj) const
1338 REQUIRES_SHARED(Locks::mutator_lock_) {
1339 if (referred_obj != nullptr &&
1340 space_.HasAddress(referred_obj.Ptr()) &&
1341 referred_obj->IsString()) {
1342 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
1343
1344 if (kIsDebugBuild) {
1345 // Saved to temporary variables to aid in debugging.
1346 ObjPtr<mirror::String> strong_lookup_result =
1347 intern_table_.LookupStrong(Thread::Current(), referred_str);
1348 ObjPtr<mirror::String> weak_lookup_result =
1349 intern_table_.LookupWeak(Thread::Current(), referred_str);
1350
1351 DCHECK((strong_lookup_result == referred_str) || (weak_lookup_result == referred_str));
1352 }
1353 }
1354 }
1355
1356   void VisitRootIfNonNull(
1357 mirror::CompressedReference<mirror::Object>* root) const
1358 REQUIRES_SHARED(Locks::mutator_lock_) {
1359 if (!root->IsNull()) {
1360 VisitRoot(root);
1361 }
1362 }
1363
1364   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1365 REQUIRES_SHARED(Locks::mutator_lock_) {
1366 TestObject(root->AsMirrorPtr());
1367 }
1368
1369 // Visit Class Fields
1370   void operator()(ObjPtr<mirror::Object> obj,
1371 MemberOffset offset,
1372 bool is_static ATTRIBUTE_UNUSED) const
1373 REQUIRES_SHARED(Locks::mutator_lock_) {
1374     // There could be overlap between ranges; we must avoid visiting the same reference twice.
1375 // Avoid the class field since we already fixed it up in FixupClassVisitor.
1376 if (offset.Uint32Value() != mirror::Object::ClassOffset().Uint32Value()) {
1377 // Updating images, don't do a read barrier.
1378 ObjPtr<mirror::Object> referred_obj =
1379 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1380
1381 TestObject(referred_obj);
1382 }
1383 }
1384
1385   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1386 ObjPtr<mirror::Reference> ref) const
1387 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
1388 operator()(ref, mirror::Reference::ReferentOffset(), false);
1389 }
1390
1391 const gc::space::ImageSpace& space_;
1392 InternTable& intern_table_;
1393 };
1394
1395 /*
1396 * This function verifies that string references in the AppImage have been
1397 * properly interned. To be considered properly interned a reference must
1398 * point to the same version of the string that the intern table does.
1399 */
1400 void VerifyStringInterning(gc::space::ImageSpace& space) REQUIRES_SHARED(Locks::mutator_lock_) {
1401 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1402 const ImageHeader& image_header = space.GetImageHeader();
1403 const uint8_t* target_base = space.GetMemMap()->Begin();
1404 const ImageSection& objects_section = image_header.GetObjectsSection();
1405
1406 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1407 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
1408
1409 VerifyStringInterningVisitor visitor(space);
1410 bitmap->VisitMarkedRange(objects_begin,
1411 objects_end,
1412 [&space, &visitor](mirror::Object* obj)
1413 REQUIRES_SHARED(Locks::mutator_lock_) {
1414 if (space.HasAddress(obj)) {
1415 if (obj->IsDexCache()) {
1416 obj->VisitReferences</* kVisitNativeRoots= */ true,
1417 kVerifyNone,
1418 kWithoutReadBarrier>(visitor, visitor);
1419 } else {
1420 // Don't visit native roots for non-dex-cache as they can't contain
1421 // native references to strings. This is verified during compilation
1422 // by ImageWriter::VerifyNativeGCRootInvariants.
1423 obj->VisitReferences</* kVisitNativeRoots= */ false,
1424 kVerifyNone,
1425 kWithoutReadBarrier>(visitor, visitor);
1426 }
1427 }
1428 });
1429 }
1430
1431 // new_class_set is the set of classes that were read from the class table section in the image.
1432 // If there was no class table section, it is null.
1433 // Note: using a class here to avoid having to make ClassLinker internals public.
1434 class AppImageLoadingHelper {
1435 public:
1436 static void Update(
1437 ClassLinker* class_linker,
1438 gc::space::ImageSpace* space,
1439 Handle<mirror::ClassLoader> class_loader,
1440 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
1441 ClassTable::ClassSet* new_class_set)
1442 REQUIRES(!Locks::dex_lock_)
1443 REQUIRES_SHARED(Locks::mutator_lock_);
1444
1445 static void HandleAppImageStrings(gc::space::ImageSpace* space)
1446 REQUIRES_SHARED(Locks::mutator_lock_);
1447
1448 static void UpdateInternStrings(
1449 gc::space::ImageSpace* space,
1450 bool use_preresolved_strings,
1451 const SafeMap<mirror::String*, mirror::String*>& intern_remap)
1452 REQUIRES_SHARED(Locks::mutator_lock_);
1453 };
1454
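// Registers the app image's dex caches with the given class loader, debug-checks the classes and
// methods contained in the image, and fixes up interned strings.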
1455 void AppImageLoadingHelper::Update(
1456 ClassLinker* class_linker,
1457 gc::space::ImageSpace* space,
1458 Handle<mirror::ClassLoader> class_loader,
1459 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
1460 ClassTable::ClassSet* new_class_set)
1461 REQUIRES(!Locks::dex_lock_)
1462 REQUIRES_SHARED(Locks::mutator_lock_) {
1463 ScopedTrace app_image_timing("AppImage:Updating");
1464
1465 Thread* const self = Thread::Current();
1466 Runtime* const runtime = Runtime::Current();
1467 gc::Heap* const heap = runtime->GetHeap();
1468 const ImageHeader& header = space->GetImageHeader();
1469 bool load_app_image_startup_cache = runtime->LoadAppImageStartupCache();
1470 {
1471 // Register dex caches with the class loader.
1472 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
1473 const size_t num_dex_caches = dex_caches->GetLength();
1474 for (size_t i = 0; i < num_dex_caches; i++) {
1475 ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
1476 const DexFile* const dex_file = dex_cache->GetDexFile();
1477 {
1478 WriterMutexLock mu2(self, *Locks::dex_lock_);
1479 CHECK(!class_linker->FindDexCacheDataLocked(*dex_file).IsValid());
1480 class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
1481 }
1482
1483 if (!load_app_image_startup_cache) {
1484 dex_cache->ClearPreResolvedStrings();
1485 }
1486
1487 if (kIsDebugBuild) {
1488 CHECK(new_class_set != nullptr);
1489 mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
1490 const size_t num_types = dex_cache->NumResolvedTypes();
1491 for (size_t j = 0; j != num_types; ++j) {
1492 // The image space is not yet added to the heap, avoid read barriers.
1493 ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
1494
1495 if (space->HasAddress(klass.Ptr())) {
1496 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
1497 auto it = new_class_set->find(ClassTable::TableSlot(klass));
1498 DCHECK(it != new_class_set->end());
1499 DCHECK_EQ(it->Read(), klass);
1500 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
1501
1502 if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) {
1503 auto it2 = new_class_set->find(ClassTable::TableSlot(super_class));
1504 DCHECK(it2 != new_class_set->end());
1505 DCHECK_EQ(it2->Read(), super_class);
1506 }
1507
1508 for (ArtMethod& m : klass->GetDirectMethods(kRuntimePointerSize)) {
1509 const void* code = m.GetEntryPointFromQuickCompiledCode();
1510 const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
1511 if (!class_linker->IsQuickResolutionStub(code) &&
1512 !class_linker->IsQuickGenericJniStub(code) &&
1513 !class_linker->IsQuickToInterpreterBridge(code) &&
1514 !m.IsNative()) {
1515 DCHECK_EQ(code, oat_code) << m.PrettyMethod();
1516 }
1517 }
1518
1519 for (ArtMethod& m : klass->GetVirtualMethods(kRuntimePointerSize)) {
1520 const void* code = m.GetEntryPointFromQuickCompiledCode();
1521 const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
1522 if (!class_linker->IsQuickResolutionStub(code) &&
1523 !class_linker->IsQuickGenericJniStub(code) &&
1524 !class_linker->IsQuickToInterpreterBridge(code) &&
1525 !m.IsNative()) {
1526 DCHECK_EQ(code, oat_code) << m.PrettyMethod();
1527 }
1528 }
1529 }
1530 }
1531 }
1532 }
1533 }
1534
1535 if (ClassLinker::kAppImageMayContainStrings) {
1536 HandleAppImageStrings(space);
1537
1538 if (kIsDebugBuild) {
1539 VerifyStringInterning(*space);
1540 }
1541 }
1542
1543 if (kVerifyArtMethodDeclaringClasses) {
1544 ScopedTrace timing("AppImage:VerifyDeclaringClasses");
1545 ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
1546 gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1547 header.VisitPackedArtMethods([&](ArtMethod& method)
1548 REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1549 ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1550 if (klass != nullptr) {
1551 CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1552 }
1553 }, space->Begin(), kRuntimePointerSize);
1554 }
1555 }
1556
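// Walks the string reference offsets recorded in the image and rewrites references to strings
// that conflicted with the runtime's intern table; when the startup cache is not used, the
// remaining referenced strings are interned here instead.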
1557 void AppImageLoadingHelper::UpdateInternStrings(
1558 gc::space::ImageSpace* space,
1559 bool use_preresolved_strings,
1560 const SafeMap<mirror::String*, mirror::String*>& intern_remap) {
1561 const uint8_t* target_base = space->Begin();
1562 const ImageSection& sro_section =
1563 space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1564 const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1565 InternTable* const intern_table = Runtime::Current()->GetInternTable();
1566
1567 VLOG(image)
1568 << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1569 << num_string_offsets;
1570
1571 const auto* sro_base =
1572 reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1573
1574 for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1575 uint32_t base_offset = sro_base[offset_index].first;
1576
1577 if (HasDexCacheStringNativeRefTag(base_offset)) {
1578 base_offset = ClearDexCacheNativeRefTags(base_offset);
1579 DCHECK_ALIGNED(base_offset, 2);
1580
1581 ObjPtr<mirror::DexCache> dex_cache =
1582 reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
1583 uint32_t string_index = sro_base[offset_index].second;
1584
1585 mirror::StringDexCachePair source = dex_cache->GetStrings()[string_index].load();
1586 ObjPtr<mirror::String> referred_string = source.object.Read();
1587 DCHECK(referred_string != nullptr);
1588
1589 auto it = intern_remap.find(referred_string.Ptr());
1590 if (it != intern_remap.end()) {
1591 // This doesn't use SetResolvedString to maintain consistency with how
1592 // we load the string. The index from the source string must be
1593 // re-used due to the circular nature of the cache. Because we are not
1594 // using a helper function we need to mark the GC card manually.
1595 WriteBarrier::ForEveryFieldWrite(dex_cache);
1596 dex_cache->GetStrings()[string_index].store(
1597 mirror::StringDexCachePair(it->second, source.index));
1598 } else if (!use_preresolved_strings) {
1599 dex_cache->GetStrings()[string_index].store(
1600 mirror::StringDexCachePair(intern_table->InternStrong(referred_string), source.index));
1601 }
1602 } else if (HasDexCachePreResolvedStringNativeRefTag(base_offset)) {
1603 if (use_preresolved_strings) {
1604 base_offset = ClearDexCacheNativeRefTags(base_offset);
1605 DCHECK_ALIGNED(base_offset, 2);
1606
1607 ObjPtr<mirror::DexCache> dex_cache =
1608 reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
1609 uint32_t string_index = sro_base[offset_index].second;
1610
1611 GcRoot<mirror::String>* preresolved_strings =
1612 dex_cache->GetPreResolvedStrings();
1613 // Handle calls to ClearPreResolvedStrings that might occur concurrently by the profile
1614 // saver that runs shortly after startup. In case the strings are cleared, there is nothing
1615 // to fix up.
1616 if (preresolved_strings == nullptr) {
1617 continue;
1618 }
1619 ObjPtr<mirror::String> referred_string =
1620 preresolved_strings[string_index].Read();
1621 if (referred_string == nullptr) {
1622 continue;
1623 }
1624 auto it = intern_remap.find(referred_string.Ptr());
1625 if (it != intern_remap.end()) {
1626 // Because we are not using a helper function we need to mark the GC card manually.
1627 WriteBarrier::ForEveryFieldWrite(dex_cache);
1628 dex_cache->GetPreResolvedStrings()[string_index] = GcRoot<mirror::String>(it->second);
1629 }
1630 }
1631 } else {
1632 uint32_t raw_member_offset = sro_base[offset_index].second;
1633 DCHECK_ALIGNED(base_offset, 2);
1634 DCHECK_ALIGNED(raw_member_offset, 2);
1635
1636 ObjPtr<mirror::Object> obj_ptr =
1637 reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1638 MemberOffset member_offset(raw_member_offset);
1639 ObjPtr<mirror::String> referred_string =
1640 obj_ptr->GetFieldObject<mirror::String,
1641 kVerifyNone,
1642 kWithoutReadBarrier,
1643 /* kIsVolatile= */ false>(member_offset);
1644 DCHECK(referred_string != nullptr);
1645
1646 auto it = intern_remap.find(referred_string.Ptr());
1647 if (it != intern_remap.end()) {
1648 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1649 /* kCheckTransaction= */ false,
1650 kVerifyNone,
1651 /* kIsVolatile= */ false>(member_offset, it->second);
1652 } else if (!use_preresolved_strings) {
1653 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1654 /* kCheckTransaction= */ false,
1655 kVerifyNone,
1656 /* kIsVolatile= */ false>(
1657 member_offset,
1658 intern_table->InternStrong(referred_string));
1659 }
1660 }
1661 }
1662 }
1663
1664 void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
1665 // Iterate over the string reference offsets stored in the image and intern
1666 // the strings they point to.
1667 ScopedTrace timing("AppImage:InternString");
1668
1669 Runtime* const runtime = Runtime::Current();
1670 InternTable* const intern_table = runtime->GetInternTable();
1671
1672 const bool load_startup_cache = runtime->LoadAppImageStartupCache();
1673
1674 // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
1675 // for faster lookup.
1676 // TODO: Optimize with a bitmap or bloom filter
1677 SafeMap<mirror::String*, mirror::String*> intern_remap;
1678 auto func = [&](InternTable::UnorderedSet& interns)
1679 REQUIRES_SHARED(Locks::mutator_lock_)
1680 REQUIRES(Locks::intern_table_lock_) {
1681 const size_t non_boot_image_strings = intern_table->CountInterns(
1682 /*visit_boot_images=*/false,
1683 /*visit_non_boot_images=*/true);
1684 VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
1685 VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1686 // Visit the smaller of the two sets to compute the intersection.
1687 if (interns.size() < non_boot_image_strings) {
1688 for (auto it = interns.begin(); it != interns.end(); ) {
1689 ObjPtr<mirror::String> string = it->Read();
1690 ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1691 if (existing == nullptr) {
1692 existing = intern_table->LookupStrongLocked(string);
1693 }
1694 if (existing != nullptr) {
1695 intern_remap.Put(string.Ptr(), existing.Ptr());
1696 it = interns.erase(it);
1697 } else {
1698 ++it;
1699 }
1700 }
1701 } else {
1702 intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1703 REQUIRES_SHARED(Locks::mutator_lock_)
1704 REQUIRES(Locks::intern_table_lock_) {
1705 auto it = interns.find(root);
1706 if (it != interns.end()) {
1707 ObjPtr<mirror::String> existing = root.Read();
1708 intern_remap.Put(it->Read(), existing.Ptr());
1709 it = interns.erase(it);
1710 }
1711 }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1712 }
1713 // Sanity check to ensure correctness.
1714 if (kIsDebugBuild) {
1715 for (GcRoot<mirror::String>& root : interns) {
1716 ObjPtr<mirror::String> string = root.Read();
1717 CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1718 CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
1719 }
1720 }
1721 };
1722
1723 bool update_intern_strings;
1724 if (load_startup_cache) {
1725 VLOG(image) << "AppImage:load_startup_cache";
1726 // Only add the intern table if we are using the startup cache. Otherwise,
1727 // UpdateInternStrings adds the strings to the intern table.
1728 intern_table->AddImageStringsToTable(space, func);
1729 update_intern_strings = kIsDebugBuild || !intern_remap.empty();
1730 VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
1731 } else {
1732 update_intern_strings = true;
1733 }
1734
1735 // For debug builds, always run the code below to get coverage.
1736 if (update_intern_strings) {
1737 // Slow path case is when there are conflicting intern strings to fix up.
1738 UpdateInternStrings(space, /*use_preresolved_strings=*/ load_startup_cache, intern_remap);
1739 }
1740 }
1741
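// Opens the dex file at `location` from the given oat file and verifies that its checksum matches
// the one recorded in the oat file. Returns null and sets `error_msg` on failure.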
1742 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1743 const char* location,
1744 std::string* error_msg)
1745 REQUIRES_SHARED(Locks::mutator_lock_) {
1746 DCHECK(error_msg != nullptr);
1747 std::unique_ptr<const DexFile> dex_file;
1748 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
1749 if (oat_dex_file == nullptr) {
1750 return std::unique_ptr<const DexFile>();
1751 }
1752 std::string inner_error_msg;
1753 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1754 if (dex_file == nullptr) {
1755 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1756 location,
1757 oat_file->GetLocation().c_str(),
1758 inner_error_msg.c_str());
1759 return std::unique_ptr<const DexFile>();
1760 }
1761
1762 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1763 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1764 location,
1765 dex_file->GetLocationChecksum(),
1766 oat_dex_file->GetDexFileLocationChecksum());
1767 return std::unique_ptr<const DexFile>();
1768 }
1769 return dex_file;
1770 }
1771
1772 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1773 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1774 std::string* error_msg) {
1775 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1776 const ImageHeader& header = space->GetImageHeader();
1777 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1778 DCHECK(dex_caches_object != nullptr);
1779 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1780 dex_caches_object->AsObjectArray<mirror::DexCache>();
1781 const OatFile* oat_file = space->GetOatFile();
1782 for (int32_t i = 0, length = dex_caches->GetLength(); i != length; ++i) {
1783 ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
1784 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1785 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1786 dex_file_location.c_str(),
1787 error_msg);
1788 if (dex_file == nullptr) {
1789 return false;
1790 }
1791 dex_cache->SetDexFile(dex_file.get());
1792 out_dex_files->push_back(std::move(dex_file));
1793 }
1794 return true;
1795 }
1796
1797 // Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1798 // together and caches some intermediate results.
1799 class ImageSanityChecks final {
1800 public:
1801   static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
1802 REQUIRES_SHARED(Locks::mutator_lock_) {
1803 ImageSanityChecks isc(heap, class_linker);
1804 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1805 DCHECK(obj != nullptr);
1806 CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
1807 CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
1808 if (obj->IsClass()) {
1809 auto klass = obj->AsClass();
1810 for (ArtField& field : klass->GetIFields()) {
1811 CHECK_EQ(field.GetDeclaringClass(), klass);
1812 }
1813 for (ArtField& field : klass->GetSFields()) {
1814 CHECK_EQ(field.GetDeclaringClass(), klass);
1815 }
1816 const PointerSize pointer_size = isc.pointer_size_;
1817 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
1818 isc.SanityCheckArtMethod(&m, klass);
1819 }
1820 ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
1821 if (vtable != nullptr) {
1822 isc.SanityCheckArtMethodPointerArray(vtable, nullptr);
1823 }
1824 if (klass->ShouldHaveImt()) {
1825 ImTable* imt = klass->GetImt(pointer_size);
1826 for (size_t i = 0; i < ImTable::kSize; ++i) {
1827 isc.SanityCheckArtMethod(imt->Get(i, pointer_size), nullptr);
1828 }
1829 }
1830 if (klass->ShouldHaveEmbeddedVTable()) {
1831 for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
1832 isc.SanityCheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
1833 }
1834 }
1835 ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
1836 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
1837 if (iftable->GetMethodArrayCount(i) > 0) {
1838 isc.SanityCheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
1839 }
1840 }
1841 }
1842 };
1843 heap->VisitObjects(visitor);
1844 }
1845
1846   static void CheckArtMethodDexCacheArray(gc::Heap* heap,
1847 ClassLinker* class_linker,
1848 mirror::MethodDexCacheType* arr,
1849 size_t size)
1850 REQUIRES_SHARED(Locks::mutator_lock_) {
1851 ImageSanityChecks isc(heap, class_linker);
1852 isc.SanityCheckArtMethodDexCacheArray(arr, size);
1853 }
1854
1855 private:
1856   ImageSanityChecks(gc::Heap* heap, ClassLinker* class_linker)
1857 : spaces_(heap->GetBootImageSpaces()),
1858 pointer_size_(class_linker->GetImagePointerSize()) {
1859 space_begin_.reserve(spaces_.size());
1860 method_sections_.reserve(spaces_.size());
1861 runtime_method_sections_.reserve(spaces_.size());
1862 for (gc::space::ImageSpace* space : spaces_) {
1863 space_begin_.push_back(space->Begin());
1864 auto& header = space->GetImageHeader();
1865 method_sections_.push_back(&header.GetMethodsSection());
1866 runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
1867 }
1868 }
1869
1870   void SanityCheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
1871 REQUIRES_SHARED(Locks::mutator_lock_) {
1872 if (m->IsRuntimeMethod()) {
1873 ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
1874 CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
1875 } else if (m->IsCopied()) {
1876 CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
1877 } else if (expected_class != nullptr) {
1878 CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
1879 }
1880 if (!spaces_.empty()) {
1881 bool contains = false;
1882 for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
1883 const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
1884 contains = method_sections_[i]->Contains(offset) ||
1885 runtime_method_sections_[i]->Contains(offset);
1886 }
1887 CHECK(contains) << m << " not found";
1888 }
1889 }
1890
1891   void SanityCheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
1892 ObjPtr<mirror::Class> expected_class)
1893 REQUIRES_SHARED(Locks::mutator_lock_) {
1894 CHECK(arr != nullptr);
1895 for (int32_t j = 0; j < arr->GetLength(); ++j) {
1896 auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
1897 // expected_class == null means we are a dex cache.
1898 if (expected_class != nullptr) {
1899 CHECK(method != nullptr);
1900 }
1901 if (method != nullptr) {
1902 SanityCheckArtMethod(method, expected_class);
1903 }
1904 }
1905 }
1906
1907   void SanityCheckArtMethodDexCacheArray(mirror::MethodDexCacheType* arr, size_t size)
1908 REQUIRES_SHARED(Locks::mutator_lock_) {
1909 CHECK_EQ(arr != nullptr, size != 0u);
1910 if (arr != nullptr) {
1911 bool contains = false;
1912 for (auto space : spaces_) {
1913 auto offset = reinterpret_cast<uint8_t*>(arr) - space->Begin();
1914 if (space->GetImageHeader().GetDexCacheArraysSection().Contains(offset)) {
1915 contains = true;
1916 break;
1917 }
1918 }
1919 CHECK(contains);
1920 }
1921 for (size_t j = 0; j < size; ++j) {
1922 auto pair = mirror::DexCache::GetNativePairPtrSize(arr, j, pointer_size_);
1923 ArtMethod* method = pair.object;
1924 // expected_class == null means we are a dex cache.
1925 if (method != nullptr) {
1926 SanityCheckArtMethod(method, nullptr);
1927 }
1928 }
1929 }
1930
1931 const std::vector<gc::space::ImageSpace*>& spaces_;
1932 const PointerSize pointer_size_;
1933
1934 // Cached sections from the spaces.
1935 std::vector<const uint8_t*> space_begin_;
1936 std::vector<const ImageSection*> method_sections_;
1937 std::vector<const ImageSection*> runtime_method_sections_;
1938 };
1939
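// Debug-only verification that the classes referenced by the app image's methods and dex caches
// are present in the class table and that their direct interfaces are resolved.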
1940 static void VerifyAppImage(const ImageHeader& header,
1941 const Handle<mirror::ClassLoader>& class_loader,
1942 const Handle<mirror::ObjectArray<mirror::DexCache> >& dex_caches,
1943 ClassTable* class_table, gc::space::ImageSpace* space)
1944 REQUIRES_SHARED(Locks::mutator_lock_) {
1945 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1946 ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
1947 if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
1948 CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
1949 << mirror::Class::PrettyClass(klass);
1950 }
1951 }, space->Begin(), kRuntimePointerSize);
1952 {
1953 // Verify that all direct interfaces of classes in the class table are also resolved.
1954 std::vector<ObjPtr<mirror::Class>> classes;
1955 auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
1956 REQUIRES_SHARED(Locks::mutator_lock_) {
1957 if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
1958 classes.push_back(klass);
1959 }
1960 return true;
1961 };
1962 class_table->Visit(verify_direct_interfaces_in_table);
1963 Thread* self = Thread::Current();
1964 for (ObjPtr<mirror::Class> klass : classes) {
1965 for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
1966 CHECK(klass->GetDirectInterface(self, klass, i) != nullptr)
1967 << klass->PrettyDescriptor() << " iface #" << i;
1968 }
1969 }
1970 }
1971 // Check that all non-primitive classes in dex caches are also in the class table.
1972 for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
1973 ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
1974 mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
1975 for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
1976 ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
1977 if (klass != nullptr && !klass->IsPrimitive()) {
1978 CHECK(class_table->Contains(klass))
1979 << klass->PrettyDescriptor() << " " << dex_cache->GetDexFile()->GetLocation();
1980 }
1981 }
1982 }
1983 }
1984
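// Adds an image space (boot or app image) to the class linker: validates the image against the
// runtime and, for app images, against the given class loader hierarchy, opens and registers the
// image's dex files, and merges the image's class table.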
1985 bool ClassLinker::AddImageSpace(
1986 gc::space::ImageSpace* space,
1987 Handle<mirror::ClassLoader> class_loader,
1988 jobjectArray dex_elements,
1989 const char* dex_location,
1990 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1991 std::string* error_msg) {
1992 DCHECK(out_dex_files != nullptr);
1993 DCHECK(error_msg != nullptr);
1994 const uint64_t start_time = NanoTime();
1995 const bool app_image = class_loader != nullptr;
1996 const ImageHeader& header = space->GetImageHeader();
1997 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1998 DCHECK(dex_caches_object != nullptr);
1999 Runtime* const runtime = Runtime::Current();
2000 gc::Heap* const heap = runtime->GetHeap();
2001 Thread* const self = Thread::Current();
2002 // Check that the image is what we are expecting.
2003 if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
2004 *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
2005 static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
2006 image_pointer_size_);
2007 return false;
2008 }
2009 size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
2010 if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
2011 *error_msg = StringPrintf("Expected %zu image roots but got %d",
2012 expected_image_roots,
2013 header.GetImageRoots()->GetLength());
2014 return false;
2015 }
2016 StackHandleScope<3> hs(self);
2017 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
2018 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
2019 Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
2020 header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
2021 MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
2022 app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
2023 : nullptr));
2024 DCHECK(class_roots != nullptr);
2025 if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
2026     *error_msg = StringPrintf("Expected %d class roots but got %d",
2027                               static_cast<int32_t>(ClassRoot::kMax),
2028                               class_roots->GetLength());
2029 return false;
2030 }
2031 // Check against existing class roots to make sure they match the ones in the boot image.
2032 ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
2033 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
2034 if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
2035 *error_msg = "App image class roots must have pointer equality with runtime ones.";
2036 return false;
2037 }
2038 }
2039 const OatFile* oat_file = space->GetOatFile();
2040 if (oat_file->GetOatHeader().GetDexFileCount() !=
2041 static_cast<uint32_t>(dex_caches->GetLength())) {
2042 *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
2043 "image";
2044 return false;
2045 }
2046
2047 for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
2048 ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
2049 std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
2050 if (class_loader == nullptr) {
2051 // For app images, we'll see the relative location. b/130666977.
2052 DCHECK_EQ(dex_location, DexFileLoader::GetBaseLocation(dex_file_location));
2053 }
2054 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
2055 dex_file_location.c_str(),
2056 error_msg);
2057 if (dex_file == nullptr) {
2058 return false;
2059 }
2060
2061 if (app_image) {
2062 // The current dex file field is bogus, overwrite it so that we can get the dex file in the
2063 // loop below.
2064 dex_cache->SetDexFile(dex_file.get());
2065 mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
2066 for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
2067 ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
2068 if (klass != nullptr) {
2069 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
2070 }
2071 }
2072 } else {
2073 if (kSanityCheckObjects) {
2074 ImageSanityChecks::CheckArtMethodDexCacheArray(heap,
2075 this,
2076 dex_cache->GetResolvedMethods(),
2077 dex_cache->NumResolvedMethods());
2078 }
2079 // Register dex files, keep track of existing ones that are conflicts.
2080 AppendToBootClassPath(*dex_file.get(), dex_cache);
2081 }
2082 out_dex_files->push_back(std::move(dex_file));
2083 }
2084
2085 if (app_image) {
2086 ScopedObjectAccessUnchecked soa(Thread::Current());
2087 ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
2088 // Check that the class loader resolves the same way as the ones in the image.
2089 // Image class loader [A][B][C][image dex files]
2090 // Class loader = [???][dex_elements][image dex files]
2091 // Need to ensure that [???][dex_elements] == [A][B][C].
2092     // A PathClassLoader checks its parent first and then loops through its own array of dex
2093     // files in order. To ensure the image and the new loader resolve classes the same way,
2094     // simply flatten each hierarchy in the way the resolution order would be,
2095 // and check that the dex file names are the same.
2096 if (IsBootClassLoader(soa, image_class_loader.Get())) {
2097 *error_msg = "Unexpected BootClassLoader in app image";
2098 return false;
2099 }
2100 // The dex files of `class_loader` are not setup yet, so we cannot do a full comparison
2101 // of `class_loader` and `image_class_loader` in `CompareClassLoaders`. Therefore, we
2102 // special case the comparison of dex files of the two class loaders, but then do full
2103 // comparisons for their shared libraries and parent.
2104 auto elements = soa.Decode<mirror::ObjectArray<mirror::Object>>(dex_elements);
2105 std::list<ObjPtr<mirror::String>> loader_dex_file_names;
2106 for (size_t i = 0, num_elems = elements->GetLength(); i < num_elems; ++i) {
2107 ObjPtr<mirror::Object> element = elements->GetWithoutChecks(i);
2108 if (element != nullptr) {
2109 // If we are somewhere in the middle of the array, there may be nulls at the end.
2110 ObjPtr<mirror::String> name;
2111 if (GetDexPathListElementName(element, &name) && name != nullptr) {
2112 loader_dex_file_names.push_back(name);
2113 }
2114 }
2115 }
2116 std::string temp_error_msg;
2117 std::list<ObjPtr<mirror::String>> image_dex_file_names;
2118 bool success = GetDexFileNames(
2119 soa, image_class_loader.Get(), &image_dex_file_names, &temp_error_msg);
2120 if (success) {
2121       // Ignore the number of image dex files since we are adding those to the class loader anyway.
2122 CHECK_GE(static_cast<size_t>(image_dex_file_names.size()),
2123 static_cast<size_t>(dex_caches->GetLength()));
2124 size_t image_count = image_dex_file_names.size() - dex_caches->GetLength();
2125 image_dex_file_names.resize(image_count);
2126 success = success && CompareDexFiles(image_dex_file_names,
2127 loader_dex_file_names,
2128 &temp_error_msg);
2129 success = success && CompareClassLoaders(soa,
2130 image_class_loader.Get(),
2131 class_loader.Get(),
2132 /*check_dex_file_names=*/ false,
2133 &temp_error_msg);
2134 }
2135 if (!success) {
2136 *error_msg = StringPrintf("Rejecting application image due to class loader mismatch: '%s'",
2137 temp_error_msg.c_str());
2138 return false;
2139 }
2140 }
2141
2142 if (kSanityCheckObjects) {
2143 for (int32_t i = 0; i < dex_caches->GetLength(); i++) {
2144 ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
2145 for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) {
2146 auto* field = dex_cache->GetResolvedField(j, image_pointer_size_);
2147 if (field != nullptr) {
2148 CHECK(field->GetDeclaringClass()->GetClass() != nullptr);
2149 }
2150 }
2151 }
2152 if (!app_image) {
2153 ImageSanityChecks::CheckObjects(heap, this);
2154 }
2155 }
2156
2157 // Set entry point to interpreter if in InterpretOnly mode.
2158 if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
2159 // Set image methods' entry point to interpreter.
2160 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2161 if (!method.IsRuntimeMethod()) {
2162 DCHECK(method.GetDeclaringClass() != nullptr);
2163 if (!method.IsNative() && !method.IsResolutionMethod()) {
2164 method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
2165 image_pointer_size_);
2166 }
2167 }
2168 }, space->Begin(), image_pointer_size_);
2169 }
2170
2171 ClassTable* class_table = nullptr;
2172 {
2173 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2174 class_table = InsertClassTableForClassLoader(class_loader.Get());
2175 }
2176 // If we have a class table section, read it and use it for verification in
2177 // UpdateAppImageClassLoadersAndDexCaches.
2178 ClassTable::ClassSet temp_set;
2179 const ImageSection& class_table_section = header.GetClassTableSection();
2180 const bool added_class_table = class_table_section.Size() > 0u;
2181 if (added_class_table) {
2182 const uint64_t start_time2 = NanoTime();
2183 size_t read_count = 0;
2184 temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2185 /*make copy*/false,
2186 &read_count);
2187 VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
2188 }
2189 if (app_image) {
2190 AppImageLoadingHelper::Update(this, space, class_loader, dex_caches, &temp_set);
2191
2192 {
2193 ScopedTrace trace("AppImage:UpdateClassLoaders");
2194 // Update class loader and resolved strings. If added_class_table is false, the resolved
2195       // strings were forwarded by UpdateAppImageClassLoadersAndDexCaches.
2196 ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
2197 for (const ClassTable::TableSlot& root : temp_set) {
2198 // Note: We probably don't need the read barrier unless we copy the app image objects into
2199 // the region space.
2200 ObjPtr<mirror::Class> klass(root.Read());
2201 // Do not update class loader for boot image classes where the app image
2202 // class loader is only the initiating loader but not the defining loader.
2203 // Avoid read barrier since we are comparing against null.
2204 if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
2205 klass->SetClassLoader</*kCheckTransaction=*/ false>(loader);
2206 }
2207 }
2208 }
2209
2210 if (kBitstringSubtypeCheckEnabled) {
2211 // Every class in the app image has initially SubtypeCheckInfo in the
2212 // Uninitialized state.
2213 //
2214 // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2215 // after class initialization is complete. The app image ClassStatus as-is
2216 // are almost all ClassStatus::Initialized, and being in the
2217 // SubtypeCheckInfo::kUninitialized state is violating that invariant.
2218 //
2219       // Force every app image class's SubtypeCheck to be at least kInitialized.
2220 //
2221 // See also ImageWriter::FixupClass.
2222       ScopedTrace trace("AppImage:RecalculateSubtypeCheckBitstrings");
2223 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2224 for (const ClassTable::TableSlot& root : temp_set) {
2225 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
2226 }
2227 }
2228 }
2229 if (!oat_file->GetBssGcRoots().empty()) {
2230 // Insert oat file to class table for visiting .bss GC roots.
2231 class_table->InsertOatFile(oat_file);
2232 }
2233
2234 if (added_class_table) {
2235 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2236 class_table->AddClassSet(std::move(temp_set));
2237 }
2238
2239 if (kIsDebugBuild && app_image) {
2240     // This verification needs to happen after the classes have been added to the class loader,
2241     // since it ensures classes are in the class table.
2242 ScopedTrace trace("AppImage:Verify");
2243 VerifyAppImage(header, class_loader, dex_caches, class_table, space);
2244 }
2245
2246 VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
2247 return true;
2248 }
2249
2250 bool ClassLinker::ClassInClassTable(ObjPtr<mirror::Class> klass) {
2251 ClassTable* const class_table = ClassTableForClassLoader(klass->GetClassLoader());
2252 return class_table != nullptr && class_table->Contains(klass);
2253 }
2254
2255 void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
2256 // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
2257 // enabling tracing requires the mutator lock, there are no race conditions here.
2258 const bool tracing_enabled = Trace::IsTracingEnabled();
2259 Thread* const self = Thread::Current();
2260 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2261 if (kUseReadBarrier) {
2262 // We do not track new roots for CC.
2263 DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
2264 kVisitRootFlagClearRootLog |
2265 kVisitRootFlagStartLoggingNewRoots |
2266 kVisitRootFlagStopLoggingNewRoots));
2267 }
2268 if ((flags & kVisitRootFlagAllRoots) != 0) {
2269 // Argument for how root visiting deals with ArtField and ArtMethod roots.
2270     // There are 3 GC cases to handle:
2271 // Non moving concurrent:
2272 // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
2273 // live by the class and class roots.
2274 //
2275 // Moving non-concurrent:
2276     // This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
2277     // To prevent missing roots, this case needs to ensure that there are no
2278     // suspend points between the point at which we allocate ArtMethod arrays and place them in a
2279 // class which is in the class table.
2280 //
2281 // Moving concurrent:
2282 // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
2283 // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
2284 //
2285 // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
2286 // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
2287 // these objects.
2288 UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
2289 boot_class_table_->VisitRoots(root_visitor);
2290 // If tracing is enabled, then mark all the class loaders to prevent unloading.
2291 if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
2292 for (const ClassLoaderData& data : class_loaders_) {
2293 GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
2294 root.VisitRoot(visitor, RootInfo(kRootVMInternal));
2295 }
2296 }
2297 } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
2298 for (auto& root : new_class_roots_) {
2299 ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
2300 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2301 ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
2302 // Concurrent moving GC marked new roots through the to-space invariant.
2303 CHECK_EQ(new_ref, old_ref);
2304 }
2305 for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
2306 for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
2307 ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2308 if (old_ref != nullptr) {
2309 DCHECK(old_ref->IsClass());
2310 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2311 ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2312 // Concurrent moving GC marked new roots through the to-space invariant.
2313 CHECK_EQ(new_ref, old_ref);
2314 }
2315 }
2316 }
2317 }
2318 if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
2319 new_class_roots_.clear();
2320 new_bss_roots_boot_oat_files_.clear();
2321 }
2322 if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
2323 log_new_roots_ = true;
2324 } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
2325 log_new_roots_ = false;
2326 }
2327 // We deliberately ignore the class roots in the image since we
2328 // handle image roots by using the MS/CMS rescanning of dirty cards.
2329 }
2330
2331 // Keep in sync with InitCallback. Anything we visit, we need to
2332 // reinit references to when reinitializing a ClassLinker from a
2333 // mapped image.
2334 void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
2335 class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
2336 VisitClassRoots(visitor, flags);
2337 // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
2338 // unloading if we are marking roots.
2339 DropFindArrayClassCache();
2340 }
2341
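// Visits the classes of every registered class loader, skipping classes that were recorded for an
// initiating but not defining class loader (see DefiningClassLoaderFilterVisitor below).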
2342 class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2343 public:
2344   explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2345 : visitor_(visitor),
2346 done_(false) {}
2347
2348   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
2349 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
2350 ClassTable* const class_table = class_loader->GetClassTable();
2351 if (!done_ && class_table != nullptr) {
2352 DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2353 if (!class_table->Visit(visitor)) {
2354         // If the ClassTable visitor returns false it means that we don't need to continue.
2355 done_ = true;
2356 }
2357 }
2358 }
2359
2360 private:
2361 // Class visitor that limits the class visits from a ClassTable to the classes with
2362 // the provided defining class loader. This filter is used to avoid multiple visits
2363 // of the same class which can be recorded for multiple initiating class loaders.
2364 class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2365 public:
2366     DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2367 ClassVisitor* visitor)
2368 : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2369
2370     bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2371 if (klass->GetClassLoader() != defining_class_loader_) {
2372 return true;
2373 }
2374 return (*visitor_)(klass);
2375 }
2376
2377 const ObjPtr<mirror::ClassLoader> defining_class_loader_;
2378 ClassVisitor* const visitor_;
2379 };
2380
2381 ClassVisitor* const visitor_;
2382 // If done is true then we don't need to do any more visiting.
2383 bool done_;
2384 };
2385
2386 void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
2387 if (boot_class_table_->Visit(*visitor)) {
2388 VisitClassLoaderClassesVisitor loader_visitor(visitor);
2389 VisitClassLoaders(&loader_visitor);
2390 }
2391 }
2392
2393 void ClassLinker::VisitClasses(ClassVisitor* visitor) {
2394 Thread* const self = Thread::Current();
2395 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2396 // Not safe to have thread suspension when we are holding a lock.
2397 if (self != nullptr) {
2398 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2399 VisitClassesInternal(visitor);
2400 } else {
2401 VisitClassesInternal(visitor);
2402 }
2403 }
2404
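// ClassVisitor that accumulates all visited classes into a vector. Only used when classes cannot
// move, since the collected pointers are not visited by the GC.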
2405 class GetClassesInToVector : public ClassVisitor {
2406 public:
2407   bool operator()(ObjPtr<mirror::Class> klass) override {
2408 classes_.push_back(klass);
2409 return true;
2410 }
2411 std::vector<ObjPtr<mirror::Class>> classes_;
2412 };
2413
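// ClassVisitor that stores the visited classes into a pre-allocated ObjectArray. Succeeded()
// reports whether the array was large enough to hold all of them.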
2414 class GetClassInToObjectArray : public ClassVisitor {
2415 public:
2416   explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2417 : arr_(arr), index_(0) {}
2418
2419   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2420 ++index_;
2421 if (index_ <= arr_->GetLength()) {
2422 arr_->Set(index_ - 1, klass);
2423 return true;
2424 }
2425 return false;
2426 }
2427
2428   bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
2429 return index_ <= arr_->GetLength();
2430 }
2431
2432 private:
2433 mirror::ObjectArray<mirror::Class>* const arr_;
2434 int32_t index_;
2435 };
2436
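// Visits classes without holding classlinker_classes_lock_ across the callback, either through a
// temporary vector (when classes cannot move) or through a snapshot ObjectArray that keeps the
// visited classes live.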
2437 void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
2438 // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
2439 // is avoiding duplicates.
2440 if (!kMovingClasses) {
2441 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2442 GetClassesInToVector accumulator;
2443 VisitClasses(&accumulator);
2444 for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
2445 if (!visitor->operator()(klass)) {
2446 return;
2447 }
2448 }
2449 } else {
2450 Thread* const self = Thread::Current();
2451 StackHandleScope<1> hs(self);
2452 auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
2453 // We size the array assuming classes won't be added to the class table during the visit.
2454 // If this assumption fails we iterate again.
2455 while (true) {
2456 size_t class_table_size;
2457 {
2458 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2459 // Add 100 in case new classes get loaded when we are filling in the object array.
2460 class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
2461 }
2462 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
2463 classes.Assign(
2464 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
2465 CHECK(classes != nullptr); // OOME.
2466 GetClassInToObjectArray accumulator(classes.Get());
2467 VisitClasses(&accumulator);
2468 if (accumulator.Succeeded()) {
2469 break;
2470 }
2471 }
2472 for (int32_t i = 0; i < classes->GetLength(); ++i) {
2473       // If the class table shrank during creation of the classes array we expect null elements. If
2474 // the class table grew then the loop repeats. If classes are created after the loop has
2475 // finished then we don't visit.
2476 ObjPtr<mirror::Class> klass = classes->Get(i);
2477 if (klass != nullptr && !visitor->operator()(klass)) {
2478 return;
2479 }
2480 }
2481 }
2482 }
2483
2484 ClassLinker::~ClassLinker() {
2485 Thread* const self = Thread::Current();
2486 for (const ClassLoaderData& data : class_loaders_) {
2487 // CHA unloading analysis is not needed. No negative consequences are expected because
2488 // all the classloaders are deleted at the same time.
2489 DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
2490 }
2491 class_loaders_.clear();
2492 }
2493
2494 void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
2495 Runtime* const runtime = Runtime::Current();
2496 JavaVMExt* const vm = runtime->GetJavaVM();
2497 vm->DeleteWeakGlobalRef(self, data.weak_root);
2498 // Notify the JIT that we need to remove the methods and/or profiling info.
2499 if (runtime->GetJit() != nullptr) {
2500 jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
2501 if (code_cache != nullptr) {
2502 // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
2503 code_cache->RemoveMethodsIn(self, *data.allocator);
2504 }
2505 } else if (cha_ != nullptr) {
2506 // If we don't have a JIT, we need to manually remove the CHA dependencies.
2507 cha_->RemoveDependenciesForLinearAlloc(data.allocator);
2508 }
2509 // Cleanup references to single implementation ArtMethods that will be deleted.
2510 if (cleanup_cha) {
2511 CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
2512 data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
2513 }
2514
2515 delete data.allocator;
2516 delete data.class_table;
2517 }
2518
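// A PointerArray is backed by a long[] on 64-bit targets and an int[] on 32-bit targets,
// matching image_pointer_size_, so each slot can hold a native pointer.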
2519 ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2520 return ObjPtr<mirror::PointerArray>::DownCast(
2521 image_pointer_size_ == PointerSize::k64
2522 ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2523 : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
2524 }
2525
2526 ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(/*out*/ ObjPtr<mirror::String>* out_location,
2527 Thread* self,
2528 const DexFile& dex_file) {
2529 StackHandleScope<1> hs(self);
2530 DCHECK(out_location != nullptr);
2531 auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
2532 GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
2533 if (dex_cache == nullptr) {
2534 self->AssertPendingOOMException();
2535 return nullptr;
2536 }
2537 ObjPtr<mirror::String> location = intern_table_->InternStrong(dex_file.GetLocation().c_str());
2538 if (location == nullptr) {
2539 self->AssertPendingOOMException();
2540 return nullptr;
2541 }
2542 *out_location = location;
2543 return dex_cache.Get();
2544 }
2545
2546 ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
2547 const DexFile& dex_file,
2548 LinearAlloc* linear_alloc) {
2549 ObjPtr<mirror::String> location = nullptr;
2550 ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(&location, self, dex_file);
2551 if (dex_cache != nullptr) {
2552 WriterMutexLock mu(self, *Locks::dex_lock_);
2553 DCHECK(location != nullptr);
2554 mirror::DexCache::InitializeDexCache(self,
2555 dex_cache,
2556 location,
2557 &dex_file,
2558 linear_alloc,
2559 image_pointer_size_);
2560 }
2561 return dex_cache;
2562 }
2563
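// Allocates raw storage of `class_size` bytes for a Class object. With kMovable=false (or when
// classes never move) the object is placed in non-movable space; see AllocPrimitiveArrayClass
// below for an example of why some classes must not move.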
2564 template <bool kMovable>
2565 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2566 ObjPtr<mirror::Class> java_lang_Class,
2567 uint32_t class_size) {
2568 DCHECK_GE(class_size, sizeof(mirror::Class));
2569 gc::Heap* heap = Runtime::Current()->GetHeap();
2570 mirror::Class::InitializeClassVisitor visitor(class_size);
2571 ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
2572 heap->AllocObject<true>(self, java_lang_Class, class_size, visitor) :
2573 heap->AllocNonMovableObject<true>(self, java_lang_Class, class_size, visitor);
2574 if (UNLIKELY(k == nullptr)) {
2575 self->AssertPendingOOMException();
2576 return nullptr;
2577 }
2578 return k->AsClass();
2579 }
2580
2581 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
2582 return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
2583 }
2584
2585 ObjPtr<mirror::Class> ClassLinker::AllocPrimitiveArrayClass(Thread* self,
2586 ObjPtr<mirror::Class> java_lang_Class) {
2587 // We make this class non-movable for the unlikely case where it were to be
2588 // moved by a sticky-bit (minor) collection when using the Generational
2589 // Concurrent Copying (CC) collector, potentially creating a stale reference
2590 // in the `klass_` field of one of its instances allocated in the Large-Object
2591 // Space (LOS) -- see the comment about the dirty card scanning logic in
2592 // art::gc::collector::ConcurrentCopying::MarkingPhase.
2593 return AllocClass</* kMovable= */ false>(
2594 self, java_lang_Class, mirror::Array::ClassSize(image_pointer_size_));
2595 }
2596
2597 ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
2598 Thread* self,
2599 size_t length) {
2600 return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
2601 self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
2602 }
2603
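// Blocks until `klass` is resolved (or becomes erroneous): waits for temporary classes to be
// retired, detects class circularity via the clinit thread id, and then spins (yielding at
// first, later sleeping) until another thread finishes linking the class.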
2604 ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
2605 const char* descriptor,
2606 ObjPtr<mirror::Class> klass) {
2607 DCHECK(klass != nullptr);
2608 if (kIsDebugBuild) {
2609 StackHandleScope<1> hs(self);
2610 HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
2611 Thread::PoisonObjectPointersIfDebug();
2612 }
2613
2614 // For temporary classes we must wait for them to be retired.
2615 if (init_done_ && klass->IsTemp()) {
2616 CHECK(!klass->IsResolved());
2617 if (klass->IsErroneousUnresolved()) {
2618 ThrowEarlierClassFailure(klass);
2619 return nullptr;
2620 }
2621 StackHandleScope<1> hs(self);
2622 Handle<mirror::Class> h_class(hs.NewHandle(klass));
2623 ObjectLock<mirror::Class> lock(self, h_class);
2624 // Loop and wait for the resolving thread to retire this class.
2625 while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
2626 lock.WaitIgnoringInterrupts();
2627 }
2628 if (h_class->IsErroneousUnresolved()) {
2629 ThrowEarlierClassFailure(h_class.Get());
2630 return nullptr;
2631 }
2632 CHECK(h_class->IsRetired());
2633 // Get the updated class from class table.
2634 klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
2635 }
2636
2637 // Wait for the class if it has not already been linked.
2638 size_t index = 0;
2639 // Maximum number of yield iterations until we start sleeping.
2640 static const size_t kNumYieldIterations = 1000;
2641 // How long each sleep is in us.
2642 static const size_t kSleepDurationUS = 1000; // 1 ms.
2643 while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
2644 StackHandleScope<1> hs(self);
2645 HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
2646 {
2647 ObjectTryLock<mirror::Class> lock(self, h_class);
2648 // Cannot use a monitor wait here since it may block when returning and deadlock if another
2649 // thread has locked klass.
2650 if (lock.Acquired()) {
2651 // Check for circular dependencies between classes, the lock is required for SetStatus.
2652 if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
2653 ThrowClassCircularityError(h_class.Get());
2654 mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
2655 return nullptr;
2656 }
2657 }
2658 }
2659 {
2660 // Handle wrapper deals with klass moving.
2661 ScopedThreadSuspension sts(self, kSuspended);
2662 if (index < kNumYieldIterations) {
2663 sched_yield();
2664 } else {
2665 usleep(kSleepDurationUS);
2666 }
2667 }
2668 ++index;
2669 }
2670
2671 if (klass->IsErroneousUnresolved()) {
2672 ThrowEarlierClassFailure(klass);
2673 return nullptr;
2674 }
2675 // Return the loaded class. No exceptions should be pending.
2676 CHECK(klass->IsResolved()) << klass->PrettyClass();
2677 self->AssertNoPendingException();
2678 return klass;
2679 }
2680
2681 using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
2682
2683 // Search a collection of DexFiles for a descriptor
2684 ClassPathEntry FindInClassPath(const char* descriptor,
2685 size_t hash, const std::vector<const DexFile*>& class_path) {
2686 for (const DexFile* dex_file : class_path) {
2687 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
2688 if (dex_class_def != nullptr) {
2689 return ClassPathEntry(dex_file, dex_class_def);
2690 }
2691 }
2692 return ClassPathEntry(nullptr, nullptr);
2693 }
2694
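// Searches the class loader's shared library loaders (if any) in order. Returns false if one of
// them is of an unsupported type; otherwise returns true, with *result set to the class if it
// was found.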
2695 bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
2696 Thread* self,
2697 const char* descriptor,
2698 size_t hash,
2699 Handle<mirror::ClassLoader> class_loader,
2700 /*out*/ ObjPtr<mirror::Class>* result) {
2701 ArtField* field =
2702 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
2703 ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
2704 if (raw_shared_libraries == nullptr) {
2705 return true;
2706 }
2707
2708 StackHandleScope<2> hs(self);
2709 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
2710 hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
2711 MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
2712 for (int32_t i = 0; i < shared_libraries->GetLength(); ++i) {
2713 temp_loader.Assign(shared_libraries->Get(i));
2714 if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result)) {
2715 return false; // One of the shared libraries is not supported.
2716 }
2717 if (*result != nullptr) {
2718 return true; // Found the class up the chain.
2719 }
2720 }
2721 return true;
2722 }
2723
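// Walks a chain of boot/path/dex/in-memory/delegate-last class loaders entirely in native code.
// Returns false if the chain contains a loader type we cannot model (the caller then falls back
// to calling ClassLoader.loadClass in Java); otherwise *result holds the class, or null if it
// was not found.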
2724 bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
2725 Thread* self,
2726 const char* descriptor,
2727 size_t hash,
2728 Handle<mirror::ClassLoader> class_loader,
2729 /*out*/ ObjPtr<mirror::Class>* result) {
2730 // Termination case: boot class loader.
2731 if (IsBootClassLoader(soa, class_loader.Get())) {
2732 *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
2733 return true;
2734 }
2735
2736 if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
2737 // For regular path or dex class loader the search order is:
2738 // - parent
2739 // - shared libraries
2740 // - class loader dex files
2741
2742 // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2743 StackHandleScope<1> hs(self);
2744 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2745 if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result)) {
2746 return false; // One of the parents is not supported.
2747 }
2748 if (*result != nullptr) {
2749 return true; // Found the class up the chain.
2750 }
2751
2752 if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2753 return false; // One of the shared library loaders is not supported.
2754 }
2755 if (*result != nullptr) {
2756 return true; // Found the class in a shared library.
2757 }
2758
2759 // Search the current class loader classpath.
2760 *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
2761 return !soa.Self()->IsExceptionPending();
2762 }
2763
2764 if (IsDelegateLastClassLoader(soa, class_loader)) {
2765 // For delegate last, the search order is:
2766 // - boot class path
2767 // - shared libraries
2768 // - class loader dex files
2769 // - parent
2770 *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
2771 if (*result != nullptr) {
2772 return true; // The class is part of the boot class path.
2773 }
2774 if (self->IsExceptionPending()) {
2775 // Pending exception means there was an error other than ClassNotFound that must be returned
2776 // to the caller.
2777 return false;
2778 }
2779
2780 if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2781 return false; // One of the shared library loaders is not supported.
2782 }
2783 if (*result != nullptr) {
2784 return true; // Found the class in a shared library.
2785 }
2786
2787 *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
2788 if (*result != nullptr) {
2789 return true; // Found the class in the current class loader.
2790 }
2791 if (self->IsExceptionPending()) {
2792 // Pending exception means there was an error other than ClassNotFound that must be returned
2793 // to the caller.
2794 return false;
2795 }
2796
2797 // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2798 StackHandleScope<1> hs(self);
2799 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2800 return FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result);
2801 }
2802
2803 // Unsupported class loader.
2804 *result = nullptr;
2805 return false;
2806 }
2807
2808 namespace {
2809
2810 // Matches exceptions caught in DexFile.defineClass.
2811 ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2812 ClassLinker* class_linker)
2813 REQUIRES_SHARED(Locks::mutator_lock_) {
2814 return
2815 // ClassNotFoundException.
2816 throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2817 class_linker))
2818 ||
2819 // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2820 throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2821 }
2822
2823 // Clear exceptions caught in DexFile.defineClass.
2824 ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2825 REQUIRES_SHARED(Locks::mutator_lock_) {
2826 if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2827 self->ClearException();
2828 }
2829 }
2830
2831 } // namespace
2832
2833 // Finds the class in the boot class loader.
2834 // If the class is found the method returns the resolved class. Otherwise it returns null.
2835 ObjPtr<mirror::Class> ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
2836 const char* descriptor,
2837 size_t hash) {
2838 ObjPtr<mirror::Class> result = nullptr;
2839 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2840 if (pair.second != nullptr) {
2841 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
2842 if (klass != nullptr) {
2843 result = EnsureResolved(self, descriptor, klass);
2844 } else {
2845 result = DefineClass(self,
2846 descriptor,
2847 hash,
2848 ScopedNullHandle<mirror::ClassLoader>(),
2849 *pair.first,
2850 *pair.second);
2851 }
2852 if (result == nullptr) {
2853 CHECK(self->IsExceptionPending()) << descriptor;
2854 FilterDexFileCaughtExceptions(self, this);
2855 }
2856 }
2857 return result;
2858 }
2859
2860 ObjPtr<mirror::Class> ClassLinker::FindClassInBaseDexClassLoaderClassPath(
2861 ScopedObjectAccessAlreadyRunnable& soa,
2862 const char* descriptor,
2863 size_t hash,
2864 Handle<mirror::ClassLoader> class_loader) {
2865 DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
2866 IsInMemoryDexClassLoader(soa, class_loader) ||
2867 IsDelegateLastClassLoader(soa, class_loader))
2868 << "Unexpected class loader for descriptor " << descriptor;
2869
2870 ObjPtr<mirror::Class> ret;
2871 auto define_class = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
2872 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
2873 if (dex_class_def != nullptr) {
2874 ObjPtr<mirror::Class> klass = DefineClass(soa.Self(),
2875 descriptor,
2876 hash,
2877 class_loader,
2878 *cp_dex_file,
2879 *dex_class_def);
2880 if (klass == nullptr) {
2881 CHECK(soa.Self()->IsExceptionPending()) << descriptor;
2882 FilterDexFileCaughtExceptions(soa.Self(), this);
2883 // TODO: Is it really right to break here, and not check the other dex files?
2884 } else {
2885 DCHECK(!soa.Self()->IsExceptionPending());
2886 }
2887 ret = klass;
2888 return false; // Found a Class (or hit an error, leaving ret == nullptr); stop the visit.
2889 }
2890 return true; // Continue with the next DexFile.
2891 };
2892
2893 VisitClassLoaderDexFiles(soa, class_loader, define_class);
2894 return ret;
2895 }
2896
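// FindClass is the main native entry point for loading a class by descriptor. A minimal sketch
// of a call site (hypothetical, for illustration only; a real caller needs a runnable thread and
// a valid handle scope):
//
//   StackHandleScope<1> hs(self);
//   Handle<mirror::ClassLoader> loader = hs.NewHandle(some_loader);  // or ScopedNullHandle for boot
//   ObjPtr<mirror::Class> klass =
//       class_linker->FindClass(self, "Ljava/lang/String;", loader);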
2897 ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
2898 const char* descriptor,
2899 Handle<mirror::ClassLoader> class_loader) {
2900 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
2901 DCHECK(self != nullptr);
2902 self->AssertNoPendingException();
2903 self->PoisonObjectPointers(); // For DefineClass, CreateArrayClass, etc...
2904 if (descriptor[1] == '\0') {
2905 // Only the descriptors of primitive types should be 1 character long; also avoid class lookup
2906 // for primitive classes that aren't backed by dex files.
2907 return FindPrimitiveClass(descriptor[0]);
2908 }
2909 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
2910 // Find the class in the loaded classes table.
2911 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
2912 if (klass != nullptr) {
2913 return EnsureResolved(self, descriptor, klass);
2914 }
2915 // Class is not yet loaded.
2916 if (descriptor[0] != '[' && class_loader == nullptr) {
2917 // Non-array class and the boot class loader, search the boot class path.
2918 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2919 if (pair.second != nullptr) {
2920 return DefineClass(self,
2921 descriptor,
2922 hash,
2923 ScopedNullHandle<mirror::ClassLoader>(),
2924 *pair.first,
2925 *pair.second);
2926 } else {
2927 // The boot class loader is searched ahead of the application class loader; failures are
2928 // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
2929 // trigger the chaining with a proper stack trace.
2930 ObjPtr<mirror::Throwable> pre_allocated =
2931 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
2932 self->SetException(pre_allocated);
2933 return nullptr;
2934 }
2935 }
2936 ObjPtr<mirror::Class> result_ptr;
2937 bool descriptor_equals;
2938 if (descriptor[0] == '[') {
2939 result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
2940 DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
2941 DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
2942 descriptor_equals = true;
2943 } else {
2944 ScopedObjectAccessUnchecked soa(self);
2945 bool known_hierarchy =
2946 FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
2947 if (result_ptr != nullptr) {
2948 // The chain was understood and we found the class. We still need to add the class to
2949 // the class table to protect from racy programs that can try and redefine the path list
2950 // which would change the Class<?> returned for subsequent evaluation of const-class.
2951 DCHECK(known_hierarchy);
2952 DCHECK(result_ptr->DescriptorEquals(descriptor));
2953 descriptor_equals = true;
2954 } else if (!self->IsExceptionPending()) {
2955 // Either the chain wasn't understood or the class wasn't found.
2956 // If there is a pending exception we didn't clear, it is not a ClassNotFoundException and
2957 // we should return it instead of silently clearing and retrying.
2958 //
2959 // If the chain was understood but we did not find the class, let the Java-side
2960 // rediscover all this and throw the exception with the right stack trace. Note that
2961 // the Java-side could still succeed for racy programs if another thread is actively
2962 // modifying the class loader's path list.
2963
2964 // The runtime is not allowed to call into java from a runtime-thread so just abort.
2965 if (self->IsRuntimeThread()) {
2966 // Oops, we can't call into java so we can't run actual class-loader code.
2967 // This is true, e.g., for the compiler (JIT or AOT).
2968 ObjPtr<mirror::Throwable> pre_allocated =
2969 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
2970 self->SetException(pre_allocated);
2971 return nullptr;
2972 }
2973
2974 // Inlined DescriptorToDot(descriptor) with extra validation.
2975 //
2976 // Throw NoClassDefFoundError early rather than potentially load a class only to fail
2977 // the DescriptorEquals() check below and give a confusing error message. For example,
2978 // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
2979 // instead of "Ljava/lang/String;", the message below using the "dot" names would be
2980 // "class loader [...] returned class java.lang.String instead of java.lang.String".
2981 size_t descriptor_length = strlen(descriptor);
2982 if (UNLIKELY(descriptor[0] != 'L') ||
2983 UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
2984 UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
2985 ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
2986 return nullptr;
2987 }
2988
2989 std::string class_name_string(descriptor + 1, descriptor_length - 2);
2990 std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
2991 if (known_hierarchy &&
2992 fast_class_not_found_exceptions_ &&
2993 !Runtime::Current()->IsJavaDebuggable()) {
2994 // For known hierarchy, we know that the class is going to throw an exception. If we aren't
2995 // debuggable, optimize this path by throwing directly here without going back to Java
2996 // language. This reduces how many ClassNotFoundExceptions happen.
2997 self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
2998 "%s",
2999 class_name_string.c_str());
3000 } else {
3001 ScopedLocalRef<jobject> class_loader_object(
3002 soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
3003 ScopedLocalRef<jobject> result(soa.Env(), nullptr);
3004 {
3005 ScopedThreadStateChange tsc(self, kNative);
3006 ScopedLocalRef<jobject> class_name_object(
3007 soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
3008 if (class_name_object.get() == nullptr) {
3009 DCHECK(self->IsExceptionPending()); // OOME.
3010 return nullptr;
3011 }
3012 CHECK(class_loader_object.get() != nullptr);
3013 result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
3014 WellKnownClasses::java_lang_ClassLoader_loadClass,
3015 class_name_object.get()));
3016 }
3017 if (result.get() == nullptr && !self->IsExceptionPending()) {
3018 // Broken loader - throw an NPE to be compatible with Dalvik.
3019 ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
3020 class_name_string.c_str()).c_str());
3021 return nullptr;
3022 }
3023 result_ptr = soa.Decode<mirror::Class>(result.get());
3024 // Check the name of the returned class.
3025 descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
3026 }
3027 } else {
3028 DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
3029 }
3030 }
3031
3032 if (self->IsExceptionPending()) {
3033 // If the ClassLoader threw or array class allocation failed, pass that exception up.
3034 // However, to comply with the RI behavior, first check if another thread succeeded.
3035 result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
3036 if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
3037 self->ClearException();
3038 return EnsureResolved(self, descriptor, result_ptr);
3039 }
3040 return nullptr;
3041 }
3042
3043 // Try to insert the class to the class table, checking for mismatch.
3044 ObjPtr<mirror::Class> old;
3045 {
3046 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3047 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
3048 old = class_table->Lookup(descriptor, hash);
3049 if (old == nullptr) {
3050 old = result_ptr; // For the comparison below, after releasing the lock.
3051 if (descriptor_equals) {
3052 class_table->InsertWithHash(result_ptr, hash);
3053 WriteBarrier::ForEveryFieldWrite(class_loader.Get());
3054 } // else throw below, after releasing the lock.
3055 }
3056 }
3057 if (UNLIKELY(old != result_ptr)) {
3058 // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
3059 // capable class loaders. (All class loaders are considered parallel capable on Android.)
3060 ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
3061 const char* loader_class_name =
3062 loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
3063 LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
3064 << " is not well-behaved; it returned a different Class for racing loadClass(\""
3065 << DescriptorToDot(descriptor) << "\").";
3066 return EnsureResolved(self, descriptor, old);
3067 }
3068 if (UNLIKELY(!descriptor_equals)) {
3069 std::string result_storage;
3070 const char* result_name = result_ptr->GetDescriptor(&result_storage);
3071 std::string loader_storage;
3072 const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
3073 ThrowNoClassDefFoundError(
3074 "Initiating class loader of type %s returned class %s instead of %s.",
3075 DescriptorToDot(loader_class_name).c_str(),
3076 DescriptorToDot(result_name).c_str(),
3077 DescriptorToDot(descriptor).c_str());
3078 return nullptr;
3079 }
3080 // Success.
3081 return result_ptr;
3082 }
3083
3084 static bool IsReservedBootClassPathDescriptor(const char* descriptor) {
3085 std::string_view descriptor_sv(descriptor);
3086 return
3087 // Reserved conscrypt packages (includes sub-packages under these paths).
3088 StartsWith(descriptor_sv, "Landroid/net/ssl/") ||
3089 StartsWith(descriptor_sv, "Lcom/android/org/conscrypt/") ||
3090 // Reserved updatable-media package (includes sub-packages under this path).
3091 StartsWith(descriptor_sv, "Landroid/media/");
3092 }
3093
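// DefineClass drives the load of a single class from a dex file: allocate the (possibly
// temporary) Class object, run the ClassPreDefine callbacks, register the dex cache, insert the
// class into the class table, load its fields and methods, resolve its superclass and
// interfaces, link it, and finally publish the ClassLoad/ClassPrepare events.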
3094 ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
3095 const char* descriptor,
3096 size_t hash,
3097 Handle<mirror::ClassLoader> class_loader,
3098 const DexFile& dex_file,
3099 const dex::ClassDef& dex_class_def) {
3100 StackHandleScope<3> hs(self);
3101 auto klass = hs.NewHandle<mirror::Class>(nullptr);
3102
3103 // Load the class from the dex file.
3104 if (UNLIKELY(!init_done_)) {
3105 // Finish up the init of the hand-crafted class_roots_.
3106 if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
3107 klass.Assign(GetClassRoot<mirror::Object>(this));
3108 } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
3109 klass.Assign(GetClassRoot<mirror::Class>(this));
3110 } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3111 klass.Assign(GetClassRoot<mirror::String>(this));
3112 } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
3113 klass.Assign(GetClassRoot<mirror::Reference>(this));
3114 } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
3115 klass.Assign(GetClassRoot<mirror::DexCache>(this));
3116 } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
3117 klass.Assign(GetClassRoot<mirror::ClassExt>(this));
3118 }
3119 }
3120
3121 // For AOT-compilation of an app, we may use a shortened boot class path that excludes
3122 // some runtime modules. Prevent definition of classes in app class loader that could clash
3123 // with these modules as these classes could be resolved differently during execution.
3124 if (class_loader != nullptr &&
3125 Runtime::Current()->IsAotCompiler() &&
3126 IsReservedBootClassPathDescriptor(descriptor)) {
3127 ObjPtr<mirror::Throwable> pre_allocated =
3128 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3129 self->SetException(pre_allocated);
3130 return nullptr;
3131 }
3132
3133 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
3134 // code to be executed. We put it up here so we can avoid all the allocations associated with
3135 // creating the class. This can happen with, e.g., JIT threads.
3136 if (!self->CanLoadClasses()) {
3137 // Make sure we don't try to load anything, potentially causing an infinite loop.
3138 ObjPtr<mirror::Throwable> pre_allocated =
3139 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3140 self->SetException(pre_allocated);
3141 return nullptr;
3142 }
3143
3144 if (klass == nullptr) {
3145 // Allocate a class with the status of not ready.
3146 // An interface object gets the right size here. A regular class will
3147 // figure out the right size later and be replaced with one of the right
3148 // size when the class becomes resolved.
3149 klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
3150 }
3151 if (UNLIKELY(klass == nullptr)) {
3152 self->AssertPendingOOMException();
3153 return nullptr;
3154 }
3155 // Get the real dex file. This will return the input if there aren't any callbacks or they do
3156 // nothing.
3157 DexFile const* new_dex_file = nullptr;
3158 dex::ClassDef const* new_class_def = nullptr;
3159 // TODO: We should ideally figure out some way to move this after we get a lock on the klass so it
3160 // will only be called once.
3161 Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
3162 klass,
3163 class_loader,
3164 dex_file,
3165 dex_class_def,
3166 &new_dex_file,
3167 &new_class_def);
3168 // Check to see if an exception happened during runtime callbacks. Return if so.
3169 if (self->IsExceptionPending()) {
3170 return nullptr;
3171 }
3172 ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
3173 if (dex_cache == nullptr) {
3174 self->AssertPendingException();
3175 return nullptr;
3176 }
3177 klass->SetDexCache(dex_cache);
3178 SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
3179
3180 // Mark the string class by setting its access flag.
3181 if (UNLIKELY(!init_done_)) {
3182 if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3183 klass->SetStringClass();
3184 }
3185 }
3186
3187 ObjectLock<mirror::Class> lock(self, klass);
3188 klass->SetClinitThreadId(self->GetTid());
3189 // Make sure we have a valid empty iftable even if there are errors.
3190 klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
3191
3192 // Add the newly loaded class to the loaded classes table.
3193 ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
3194 if (existing != nullptr) {
3195 // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
3196 // this thread to block.
3197 return EnsureResolved(self, descriptor, existing);
3198 }
3199
3200 // Load the fields and other things after we are inserted in the table. This is so that we don't
3201 // end up allocating unfree-able linear alloc resources and then lose the race. The
3202 // other reason is that the field roots are only visited from the class table. So we need to be
3203 // inserted before we allocate / fill in these fields.
3204 LoadClass(self, *new_dex_file, *new_class_def, klass);
3205 if (self->IsExceptionPending()) {
3206 VLOG(class_linker) << self->GetException()->Dump();
3207 // An exception occurred during load; set status to erroneous while holding klass' lock in case
3208 // notification is necessary.
3209 if (!klass->IsErroneous()) {
3210 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3211 }
3212 return nullptr;
3213 }
3214
3215 // Finish loading (if necessary) by finding parents
3216 CHECK(!klass->IsLoaded());
3217 if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
3218 // Loading failed.
3219 if (!klass->IsErroneous()) {
3220 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3221 }
3222 return nullptr;
3223 }
3224 CHECK(klass->IsLoaded());
3225
3226 // At this point the class is loaded. Publish a ClassLoad event.
3227 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
3228 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);
3229
3230 // Link the class (if necessary)
3231 CHECK(!klass->IsResolved());
3232 // TODO: Use fast jobjects?
3233 auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
3234
3235 MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
3236 if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
3237 // Linking failed.
3238 if (!klass->IsErroneous()) {
3239 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3240 }
3241 return nullptr;
3242 }
3243 self->AssertNoPendingException();
3244 CHECK(h_new_class != nullptr) << descriptor;
3245 CHECK(h_new_class->IsResolved() && !h_new_class->IsErroneousResolved()) << descriptor;
3246
3247 // Instrumentation may have updated entrypoints for all methods of all
3248 // classes. However it could not update methods of this class while we
3249 // were loading it. Now the class is resolved, we can update entrypoints
3250 // as required by instrumentation.
3251 if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
3252 // We must be in the kRunnable state to prevent instrumentation from
3253 // suspending all threads to update entrypoints while we are doing it
3254 // for this class.
3255 DCHECK_EQ(self->GetState(), kRunnable);
3256 Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
3257 }
3258
3259 /*
3260 * We send CLASS_PREPARE events to the debugger from here. The
3261 * definition of "preparation" is creating the static fields for a
3262 * class and initializing them to the standard default values, but not
3263 * executing any code (that comes later, during "initialization").
3264 *
3265 * We did the static preparation in LinkClass.
3266 *
3267 * The class has been prepared and resolved but possibly not yet verified
3268 * at this point.
3269 */
3270 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);
3271
3272 // Notify native debugger of the new class and its layout.
3273 jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
3274
3275 return h_new_class.Get();
3276 }
3277
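// Computes the size of the Class object needed to hold this class's static fields (without an
// embedded vtable). For illustration only: a class whose static fields are one 'J' (long) and
// two reference fields ends up calling
//   mirror::Class::ComputeClassSize(false, 0, /*num_8=*/0, /*num_16=*/0, /*num_32=*/0,
//                                   /*num_64=*/1, /*num_ref=*/2, image_pointer_size_);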
3278 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
3279 const dex::ClassDef& dex_class_def) {
3280 size_t num_ref = 0;
3281 size_t num_8 = 0;
3282 size_t num_16 = 0;
3283 size_t num_32 = 0;
3284 size_t num_64 = 0;
3285 ClassAccessor accessor(dex_file, dex_class_def);
3286 // We allow duplicate definitions of the same field in a class_data_item
3287 // but ignore the repeated indexes here, b/21868015.
3288 uint32_t last_field_idx = dex::kDexNoIndex;
3289 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3290 uint32_t field_idx = field.GetIndex();
3291 // Ordering enforced by DexFileVerifier.
3292 DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3293 if (UNLIKELY(field_idx == last_field_idx)) {
3294 continue;
3295 }
3296 last_field_idx = field_idx;
3297 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
3298 const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3299 char c = descriptor[0];
3300 switch (c) {
3301 case 'L':
3302 case '[':
3303 num_ref++;
3304 break;
3305 case 'J':
3306 case 'D':
3307 num_64++;
3308 break;
3309 case 'I':
3310 case 'F':
3311 num_32++;
3312 break;
3313 case 'S':
3314 case 'C':
3315 num_16++;
3316 break;
3317 case 'B':
3318 case 'Z':
3319 num_8++;
3320 break;
3321 default:
3322 LOG(FATAL) << "Unknown descriptor: " << c;
3323 UNREACHABLE();
3324 }
3325 }
3326 return mirror::Class::ComputeClassSize(false,
3327 0,
3328 num_8,
3329 num_16,
3330 num_32,
3331 num_64,
3332 num_ref,
3333 image_pointer_size_);
3334 }
3335
3336 // Special case to get oat code without overwriting a trampoline.
3337 const void* ClassLinker::GetQuickOatCodeFor(ArtMethod* method) {
3338 CHECK(method->IsInvokable()) << method->PrettyMethod();
3339 if (method->IsProxyMethod()) {
3340 return GetQuickProxyInvokeHandler();
3341 }
3342 auto* code = method->GetOatMethodQuickCode(GetImagePointerSize());
3343 if (code != nullptr) {
3344 return code;
3345 }
3346 if (method->IsNative()) {
3347 // No code and native? Use generic trampoline.
3348 return GetQuickGenericJniStub();
3349 }
3350 return GetQuickToInterpreterBridge();
3351 }
3352
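// Decides whether `method` must run through the interpreter instead of `quick_code`: native and
// proxy methods never do; otherwise missing code, interpret-only instrumentation, debugger
// requirements, pending async exceptions, and (java-)debuggable runtimes without JIT code all
// force the interpreter.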
3353 bool ClassLinker::ShouldUseInterpreterEntrypoint(ArtMethod* method, const void* quick_code) {
3354 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3355 if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
3356 return false;
3357 }
3358
3359 if (quick_code == nullptr) {
3360 return true;
3361 }
3362
3363 Runtime* runtime = Runtime::Current();
3364 instrumentation::Instrumentation* instr = runtime->GetInstrumentation();
3365 if (instr->InterpretOnly()) {
3366 return true;
3367 }
3368
3369 if (runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
3370 // Doing this check avoids doing compiled/interpreter transitions.
3371 return true;
3372 }
3373
3374 if (Thread::Current()->IsForceInterpreter() ||
3375 Dbg::IsForcedInterpreterNeededForCalling(Thread::Current(), method)) {
3376 // Force the use of interpreter when it is required by the debugger.
3377 return true;
3378 }
3379
3380 if (Thread::Current()->IsAsyncExceptionPending()) {
3381 // Force use of interpreter to handle async-exceptions
3382 return true;
3383 }
3384
3385 if (quick_code == GetQuickInstrumentationEntryPoint()) {
3386 const void* instr_target = instr->GetCodeForInvoke(method);
3387 DCHECK_NE(instr_target, GetQuickInstrumentationEntryPoint()) << method->PrettyMethod();
3388 return ShouldUseInterpreterEntrypoint(method, instr_target);
3389 }
3390
3391 if (runtime->IsJavaDebuggable()) {
3392 // For simplicity, we ignore precompiled code and go to the interpreter
3393 // assuming we don't already have jitted code.
3394 // We could look at the oat file where `quick_code` is being defined,
3395 // and check whether it's been compiled debuggable, but we decided to
3396 // only rely on the JIT for debuggable apps.
3397 jit::Jit* jit = Runtime::Current()->GetJit();
3398 return (jit == nullptr) || !jit->GetCodeCache()->ContainsPc(quick_code);
3399 }
3400
3401 if (runtime->IsNativeDebuggable()) {
3402 DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
3403 // If we are doing native debugging, ignore application's AOT code,
3404 // since we want to JIT it (at first use) with extra stackmaps for native
3405 // debugging. We keep however all AOT code from the boot image,
3406 // since the JIT-at-first-use is blocking and would result in non-negligible
3407 // startup performance impact.
3408 return !runtime->GetHeap()->IsInBootImageOatFile(quick_code);
3409 }
3410
3411 return false;
3412 }
3413
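// Once a class is initialized, replace the resolution stubs that LinkCode installed on its
// static methods with the real entry points: oat code, zygote-saved JIT code, the generic JNI
// stub for native methods, or the interpreter bridge.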
3414 void ClassLinker::FixupStaticTrampolines(ObjPtr<mirror::Class> klass) {
3415 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3416 DCHECK(klass->IsInitialized()) << klass->PrettyDescriptor();
3417 if (klass->NumDirectMethods() == 0) {
3418 return; // No direct methods => no static methods.
3419 }
3420 Runtime* runtime = Runtime::Current();
3421 if (!runtime->IsStarted()) {
3422 if (runtime->IsAotCompiler() || runtime->GetHeap()->HasBootImageSpace()) {
3423 return; // OAT file unavailable.
3424 }
3425 }
3426
3427 const DexFile& dex_file = klass->GetDexFile();
3428 const uint16_t class_def_idx = klass->GetDexClassDefIndex();
3429 CHECK_NE(class_def_idx, DexFile::kDexNoIndex16);
3430 ClassAccessor accessor(dex_file, class_def_idx);
3431 // There should always be class data if there were direct methods.
3432 CHECK(accessor.HasClassData()) << klass->PrettyDescriptor();
3433 bool has_oat_class;
3434 OatFile::OatClass oat_class = OatFile::FindOatClass(dex_file,
3435 klass->GetDexClassDefIndex(),
3436 &has_oat_class);
3437 // Link the code of methods skipped by LinkCode.
3438 for (size_t method_index = 0; method_index < accessor.NumDirectMethods(); ++method_index) {
3439 ArtMethod* method = klass->GetDirectMethod(method_index, image_pointer_size_);
3440 if (!method->IsStatic()) {
3441 // Only update static methods.
3442 continue;
3443 }
3444 const void* quick_code = nullptr;
3445 if (has_oat_class) {
3446 OatFile::OatMethod oat_method = oat_class.GetOatMethod(method_index);
3447 quick_code = oat_method.GetQuickCode();
3448 }
3449 // Check if we have JIT compiled code for it.
3450 if (quick_code == nullptr && Runtime::Current()->GetJit() != nullptr) {
3451 quick_code = Runtime::Current()->GetJit()->GetCodeCache()->GetZygoteSavedEntryPoint(method);
3452 }
3453 // Check whether the method is native, in which case it's generic JNI.
3454 if (quick_code == nullptr && method->IsNative()) {
3455 quick_code = GetQuickGenericJniStub();
3456 } else if (ShouldUseInterpreterEntrypoint(method, quick_code)) {
3457 // Use interpreter entry point.
3458 quick_code = GetQuickToInterpreterBridge();
3459 }
3460 runtime->GetInstrumentation()->UpdateMethodsCode(method, quick_code);
3461 }
3462 // Ignore virtual methods on the iterator.
3463 }
3464
3465 // Does whatever is needed to make sure that the compiler will not generate a direct invoke to this
3466 // method. Should only be called on non-invokable methods.
3467 inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method) {
3468 DCHECK(method != nullptr);
3469 DCHECK(!method->IsInvokable());
3470 method->SetEntryPointFromQuickCompiledCodePtrSize(
3471 class_linker->GetQuickToInterpreterBridgeTrampoline(),
3472 class_linker->GetImagePointerSize());
3473 }
3474
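// Installs the initial entry point for a freshly loaded method at runtime (a no-op for the AOT
// compiler): the resolution stub for static non-constructor methods, the generic JNI stub for
// native methods without code, and the interpreter bridge when interpretation is required.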
3475 static void LinkCode(ClassLinker* class_linker,
3476 ArtMethod* method,
3477 const OatFile::OatClass* oat_class,
3478 uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
3479 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3480 Runtime* const runtime = Runtime::Current();
3481 if (runtime->IsAotCompiler()) {
3482 // The following code only applies to a non-compiler runtime.
3483 return;
3484 }
3485 // Method shouldn't have already been linked.
3486 DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
3487 if (oat_class != nullptr) {
3488 // Every kind of method should at least get an invoke stub from the oat_method.
3489 // Non-abstract methods also get their code pointers.
3490 const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
3491 oat_method.LinkMethod(method);
3492 }
3493
3494 // Install entry point from interpreter.
3495 const void* quick_code = method->GetEntryPointFromQuickCompiledCode();
3496 bool enter_interpreter = class_linker->ShouldUseInterpreterEntrypoint(method, quick_code);
3497
3498 if (!method->IsInvokable()) {
3499 EnsureThrowsInvocationError(class_linker, method);
3500 return;
3501 }
3502
3503 if (method->IsStatic() && !method->IsConstructor()) {
3504 // For static methods excluding the class initializer, install the trampoline.
3505 // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
3506 // after initializing class (see ClassLinker::InitializeClass method).
3507 method->SetEntryPointFromQuickCompiledCode(GetQuickResolutionStub());
3508 } else if (quick_code == nullptr && method->IsNative()) {
3509 method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
3510 } else if (enter_interpreter) {
3511 // Use the interpreter bridge when there is no code or when running in interpreter-only mode.
3512 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
3513 }
3514
3515 if (method->IsNative()) {
3516 // Unregistering restores the dlsym lookup stub.
3517 method->UnregisterNative();
3518
3519 if (enter_interpreter || quick_code == nullptr) {
3520 // We have a native method here without code. Then it should have either the generic JNI
3521 // trampoline as entrypoint (non-static), or the resolution trampoline (static).
3522 // TODO: this doesn't handle all the cases where trampolines may be installed.
3523 const void* entry_point = method->GetEntryPointFromQuickCompiledCode();
3524 DCHECK(class_linker->IsQuickGenericJniStub(entry_point) ||
3525 class_linker->IsQuickResolutionStub(entry_point));
3526 }
3527 }
3528 }
3529
3530 void ClassLinker::SetupClass(const DexFile& dex_file,
3531 const dex::ClassDef& dex_class_def,
3532 Handle<mirror::Class> klass,
3533 ObjPtr<mirror::ClassLoader> class_loader) {
3534 CHECK(klass != nullptr);
3535 CHECK(klass->GetDexCache() != nullptr);
3536 CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
3537 const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
3538 CHECK(descriptor != nullptr);
3539
3540 klass->SetClass(GetClassRoot<mirror::Class>(this));
3541 uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
3542 CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
3543 klass->SetAccessFlags(access_flags);
3544 klass->SetClassLoader(class_loader);
3545 DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
3546 mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
3547
3548 klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
3549 klass->SetDexTypeIndex(dex_class_def.class_idx_);
3550 }
3551
3552 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3553 LinearAlloc* allocator,
3554 size_t length) {
3555 if (length == 0) {
3556 return nullptr;
3557 }
3558 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3559 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3560 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3561 void* array_storage = allocator->Alloc(self, storage_size);
3562 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3563 CHECK(ret != nullptr);
3564 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3565 return ret;
3566 }
3567
3568 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3569 LinearAlloc* allocator,
3570 size_t length) {
3571 if (length == 0) {
3572 return nullptr;
3573 }
3574 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3575 const size_t method_size = ArtMethod::Size(image_pointer_size_);
3576 const size_t storage_size =
3577 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3578 void* array_storage = allocator->Alloc(self, storage_size);
3579 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3580 CHECK(ret != nullptr);
3581 for (size_t i = 0; i < length; ++i) {
3582 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3583 }
3584 return ret;
3585 }
3586
3587 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3588 if (class_loader == nullptr) {
3589 return Runtime::Current()->GetLinearAlloc();
3590 }
3591 LinearAlloc* allocator = class_loader->GetAllocator();
3592 DCHECK(allocator != nullptr);
3593 return allocator;
3594 }
3595
3596 LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3597 if (class_loader == nullptr) {
3598 return Runtime::Current()->GetLinearAlloc();
3599 }
3600 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3601 LinearAlloc* allocator = class_loader->GetAllocator();
3602 if (allocator == nullptr) {
3603 RegisterClassLoader(class_loader);
3604 allocator = class_loader->GetAllocator();
3605 CHECK(allocator != nullptr);
3606 }
3607 return allocator;
3608 }
3609
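// Populates `klass` with its ArtField and ArtMethod arrays from the dex class data, linking each
// method's entry points as it goes. The field/method storage comes from the class loader's
// LinearAlloc, so it lives until the class loader itself is unloaded.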
3610 void ClassLinker::LoadClass(Thread* self,
3611 const DexFile& dex_file,
3612 const dex::ClassDef& dex_class_def,
3613 Handle<mirror::Class> klass) {
3614 ClassAccessor accessor(dex_file,
3615 dex_class_def,
3616 /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
3617 if (!accessor.HasClassData()) {
3618 return;
3619 }
3620 Runtime* const runtime = Runtime::Current();
3621 {
3622 // Note: We cannot have thread suspension until the field and method arrays are setup or else
3623 // Class::VisitFieldRoots may miss some fields or methods.
3624 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
3625 // Load static fields.
3626 // We allow duplicate definitions of the same field in a class_data_item
3627 // but ignore the repeated indexes here, b/21868015.
3628 LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
3629 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3630 allocator,
3631 accessor.NumStaticFields());
3632 LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3633 allocator,
3634 accessor.NumInstanceFields());
3635 size_t num_sfields = 0u;
3636 size_t num_ifields = 0u;
3637 uint32_t last_static_field_idx = 0u;
3638 uint32_t last_instance_field_idx = 0u;
3639
3640 // Methods
3641 bool has_oat_class = false;
3642 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3643 ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3644 : OatFile::OatClass::Invalid();
3645 const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
3646 klass->SetMethodsPtr(
3647 AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3648 accessor.NumDirectMethods(),
3649 accessor.NumVirtualMethods());
3650 size_t class_def_method_index = 0;
3651 uint32_t last_dex_method_index = dex::kDexNoIndex;
3652 size_t last_class_def_method_index = 0;
3653
3654 // Use the visitor since the range-based loops are a bit slower from seeking. Seeking to the
3655 // methods needs to decode all of the fields.
3656 accessor.VisitFieldsAndMethods([&](
3657 const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3658 uint32_t field_idx = field.GetIndex();
3659 DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier.
3660 if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
3661 LoadField(field, klass, &sfields->At(num_sfields));
3662 ++num_sfields;
3663 last_static_field_idx = field_idx;
3664 }
3665 }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3666 uint32_t field_idx = field.GetIndex();
3667 DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier.
3668 if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
3669 LoadField(field, klass, &ifields->At(num_ifields));
3670 ++num_ifields;
3671 last_instance_field_idx = field_idx;
3672 }
3673 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3674 ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
3675 image_pointer_size_);
3676 LoadMethod(dex_file, method, klass, art_method);
3677 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3678 uint32_t it_method_index = method.GetIndex();
3679 if (last_dex_method_index == it_method_index) {
3680 // Duplicate dex method index: reuse the class def method index of the first occurrence.
3681 art_method->SetMethodIndex(last_class_def_method_index);
3682 } else {
3683 art_method->SetMethodIndex(class_def_method_index);
3684 last_dex_method_index = it_method_index;
3685 last_class_def_method_index = class_def_method_index;
3686 }
3687 ++class_def_method_index;
3688 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3689 ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
3690 class_def_method_index - accessor.NumDirectMethods(),
3691 image_pointer_size_);
3692 LoadMethod(dex_file, method, klass, art_method);
3693 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3694 ++class_def_method_index;
3695 });
3696
3697 if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
3698 LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
3699 << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
3700 << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
3701 << ")";
3702 // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
3703 if (sfields != nullptr) {
3704 sfields->SetSize(num_sfields);
3705 }
3706 if (ifields != nullptr) {
3707 ifields->SetSize(num_ifields);
3708 }
3709 }
3710 // Set the field arrays.
3711 klass->SetSFieldsPtr(sfields);
3712 DCHECK_EQ(klass->NumStaticFields(), num_sfields);
3713 klass->SetIFieldsPtr(ifields);
3714 DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
3715 }
3716 // Ensure that the card is marked so that remembered sets pick up native roots.
3717 WriteBarrier::ForEveryFieldWrite(klass.Get());
3718 self->AllowThreadSuspension();
3719 }
3720
3721 void ClassLinker::LoadField(const ClassAccessor::Field& field,
3722 Handle<mirror::Class> klass,
3723 ArtField* dst) {
3724 const uint32_t field_idx = field.GetIndex();
3725 dst->SetDexFieldIndex(field_idx);
3726 dst->SetDeclaringClass(klass.Get());
3727
3728 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3729 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
3730 }
3731
3732 void ClassLinker::LoadMethod(const DexFile& dex_file,
3733 const ClassAccessor::Method& method,
3734 Handle<mirror::Class> klass,
3735 ArtMethod* dst) {
3736 const uint32_t dex_method_idx = method.GetIndex();
3737 const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
3738 const char* method_name = dex_file.StringDataByIdx(method_id.name_idx_);
3739
3740 ScopedAssertNoThreadSuspension ants("LoadMethod");
3741 dst->SetDexMethodIndex(dex_method_idx);
3742 dst->SetDeclaringClass(klass.Get());
3743 dst->SetCodeItemOffset(method.GetCodeItemOffset());
3744
3745 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3746 uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
3747
3748 if (UNLIKELY(strcmp("finalize", method_name) == 0)) {
3749 // Set finalizable flag on declaring class.
3750 if (strcmp("V", dex_file.GetShorty(method_id.proto_idx_)) == 0) {
3751 // Void return type.
3752 if (klass->GetClassLoader() != nullptr) { // All non-boot finalizer methods are flagged.
3753 klass->SetFinalizable();
3754 } else {
3755 std::string temp;
3756 const char* klass_descriptor = klass->GetDescriptor(&temp);
3757 // The Enum class declares a "final" finalize() method to prevent subclasses from
3758 // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
3759 // subclasses, so we exclude it here.
3760 // We also want to avoid setting the flag on Object, where we know that finalize() is
3761 // empty.
3762 if (strcmp(klass_descriptor, "Ljava/lang/Object;") != 0 &&
3763 strcmp(klass_descriptor, "Ljava/lang/Enum;") != 0) {
3764 klass->SetFinalizable();
3765 }
3766 }
3767 }
3768 } else if (method_name[0] == '<') {
3769 // Fix broken access flags for initializers. Bug 11157540.
3770 bool is_init = (strcmp("<init>", method_name) == 0);
3771 bool is_clinit = !is_init && (strcmp("<clinit>", method_name) == 0);
3772 if (UNLIKELY(!is_init && !is_clinit)) {
3773 LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
3774 } else {
3775 if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
3776 LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
3777 << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
3778 access_flags |= kAccConstructor;
3779 }
3780 }
3781 }
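// Illustration (assumed example, not taken from any particular dex file): for a method declared
// in Java as "@CriticalNative static native int nativeAdd(int a, int b);", the block below is
// where the matching runtime access flag would be OR-ed in, based on the @FastNative /
// @CriticalNative annotations read from the dex file.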
3782 if (UNLIKELY((access_flags & kAccNative) != 0u)) {
3783 // Check if the native method is annotated with @FastNative or @CriticalNative.
3784 access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
3785 dex_file, dst->GetClassDef(), dex_method_idx);
3786 }
3787 dst->SetAccessFlags(access_flags);
3788 // Must be done after SetAccessFlags since IsAbstract depends on it.
3789 if (klass->IsInterface() && dst->IsAbstract()) {
3790 dst->CalculateAndSetImtIndex();
3791 }
3792 }
3793
3794 void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile& dex_file) {
3795 ObjPtr<mirror::DexCache> dex_cache = AllocAndInitializeDexCache(
3796 self,
3797 dex_file,
3798 Runtime::Current()->GetLinearAlloc());
3799 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file.GetLocation();
3800 AppendToBootClassPath(dex_file, dex_cache);
3801 }
3802
3803 void ClassLinker::AppendToBootClassPath(const DexFile& dex_file,
3804 ObjPtr<mirror::DexCache> dex_cache) {
3805 CHECK(dex_cache != nullptr) << dex_file.GetLocation();
3806 boot_class_path_.push_back(&dex_file);
3807 WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
3808 RegisterDexFileLocked(dex_file, dex_cache, /* class_loader= */ nullptr);
3809 }
3810
3811 void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
3812 ObjPtr<mirror::DexCache> dex_cache,
3813 ObjPtr<mirror::ClassLoader> class_loader) {
3814 Thread* const self = Thread::Current();
3815 Locks::dex_lock_->AssertExclusiveHeld(self);
3816 CHECK(dex_cache != nullptr) << dex_file.GetLocation();
3817 // For app images, the dex cache location may be a suffix of the dex file location since the
3818 // dex file location is an absolute path.
3819 const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
3820 const size_t dex_cache_length = dex_cache_location.length();
3821 CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
3822 std::string dex_file_location = dex_file.GetLocation();
3823 // The following path checks don't work during preopt when using boot dex files, where the dex
3824 // cache location is the one on device and the dex_file's location is the one on host.
3825 if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
3826 CHECK_GE(dex_file_location.length(), dex_cache_length)
3827 << dex_cache_location << " " << dex_file.GetLocation();
3828 const std::string dex_file_suffix = dex_file_location.substr(
3829 dex_file_location.length() - dex_cache_length,
3830 dex_cache_length);
3831 // Example dex_cache location is SettingsProvider.apk and
3832 // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
3833 CHECK_EQ(dex_cache_location, dex_file_suffix);
3834 }
3835 const OatFile* oat_file =
3836 (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
3837 // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
3838 // and we are lazily removing null entries. Also check if we need to initialize OatFile data
3839 // (.data.bimg.rel.ro and .bss sections) needed for code execution.
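// Hedged summary of the loop below (no new behavior implied): it prunes DexCacheData entries
// whose weak roots have been cleared and, at the same time, checks whether some still-live
// registered dex file already belongs to this OatFile; only when none does is
// InitializeRelocations() invoked afterwards, so the .data.bimg.rel.ro/.bss setup runs at most
// once per executable OatFile.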
3840 bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
3841 JavaVMExt* const vm = self->GetJniEnv()->GetVm();
3842 for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
3843 DexCacheData data = *it;
3844 if (self->IsJWeakCleared(data.weak_root)) {
3845 vm->DeleteWeakGlobalRef(self, data.weak_root);
3846 it = dex_caches_.erase(it);
3847 } else {
3848 if (initialize_oat_file_data &&
3849 it->dex_file->GetOatDexFile() != nullptr &&
3850 it->dex_file->GetOatDexFile()->GetOatFile() == oat_file) {
3851 initialize_oat_file_data = false; // Already initialized.
3852 }
3853 ++it;
3854 }
3855 }
3856 if (initialize_oat_file_data) {
3857 oat_file->InitializeRelocations();
3858 }
3859 // Let hiddenapi assign a domain to the newly registered dex file.
3860 hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
3861
3862 jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
3863 dex_cache->SetDexFile(&dex_file);
3864 DexCacheData data;
3865 data.weak_root = dex_cache_jweak;
3866 data.dex_file = dex_cache->GetDexFile();
3867 data.class_table = ClassTableForClassLoader(class_loader);
3868 AddNativeDebugInfoForDex(self, data.dex_file);
3869 DCHECK(data.class_table != nullptr);
3870 // Make sure to hold the dex cache live in the class table. This case happens for the boot class
3871 // path dex caches without an image.
3872 data.class_table->InsertStrongRoot(dex_cache);
3873 if (class_loader != nullptr) {
3874 // Since we added a strong root to the class table, do the write barrier as required for
3875 // remembered sets and generational GCs.
3876 WriteBarrier::ForEveryFieldWrite(class_loader);
3877 }
3878 dex_caches_.push_back(data);
3879 }
3880
3881 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCache(Thread* self, const DexCacheData& data) {
3882 return data.IsValid()
3883 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root))
3884 : nullptr;
3885 }
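// Typical use of DecodeDexCache() (see FindDexCache() and RegisterDexFile() below): look up the
// DexCacheData under dex_lock_ and then decode its weak root; a null result means the DexCache
// was never registered or has since been collected, e.g. after class loader unloading.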
3886
3887 ObjPtr<mirror::DexCache> ClassLinker::EnsureSameClassLoader(
3888 Thread* self,
3889 ObjPtr<mirror::DexCache> dex_cache,
3890 const DexCacheData& data,
3891 ObjPtr<mirror::ClassLoader> class_loader) {
3892 DCHECK_EQ(dex_cache->GetDexFile(), data.dex_file);
3893 if (data.class_table != ClassTableForClassLoader(class_loader)) {
3894 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
3895 "Attempt to register dex file %s with multiple class loaders",
3896 data.dex_file->GetLocation().c_str());
3897 return nullptr;
3898 }
3899 return dex_cache;
3900 }
3901
3902 void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
3903 ObjPtr<mirror::ClassLoader> class_loader) {
3904 SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
3905 Thread* self = Thread::Current();
3906 StackHandleScope<2> hs(self);
3907 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
3908 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
3909 const DexFile* dex_file = dex_cache->GetDexFile();
3910 DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
3911 if (kIsDebugBuild) {
3912 DexCacheData old_data;
3913 {
3914 ReaderMutexLock mu(self, *Locks::dex_lock_);
3915 old_data = FindDexCacheDataLocked(*dex_file);
3916 }
3917 ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCache(self, old_data);
3918 DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache that's already "
3919 << "been registered on dex file " << dex_file->GetLocation();
3920 }
3921 ClassTable* table;
3922 {
3923 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3924 table = InsertClassTableForClassLoader(h_class_loader.Get());
3925 }
3926 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
3927 // a thread holding the dex lock and blocking on a condition variable regarding
3928 // weak references access, and a thread blocking on the dex lock.
3929 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
3930 WriterMutexLock mu(self, *Locks::dex_lock_);
3931 RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
3932 table->InsertStrongRoot(h_dex_cache.Get());
3933 if (h_class_loader.Get() != nullptr) {
3934 // Since we added a strong root to the class table, do the write barrier as required for
3935 // remembered sets and generational GCs.
3936 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
3937 }
3938 }
3939
3940 ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
3941 ObjPtr<mirror::ClassLoader> class_loader) {
3942 Thread* self = Thread::Current();
3943 DexCacheData old_data;
3944 {
3945 ReaderMutexLock mu(self, *Locks::dex_lock_);
3946 old_data = FindDexCacheDataLocked(dex_file);
3947 }
3948 ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCache(self, old_data);
3949 if (old_dex_cache != nullptr) {
3950 return EnsureSameClassLoader(self, old_dex_cache, old_data, class_loader);
3951 }
3952 SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
3953 LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
3954 DCHECK(linear_alloc != nullptr);
3955 ClassTable* table;
3956 {
3957 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3958 table = InsertClassTableForClassLoader(class_loader);
3959 }
3960 // Don't alloc while holding the lock, since allocation may need to
3961 // suspend all threads and another thread may need the dex_lock_ to
3962 // get to a suspend point.
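// In effect this is a double-checked registration: the DexCache is allocated outside dex_lock_,
// the lookup is repeated under the lock below, and if another thread won the race its DexCache
// is returned via EnsureSameClassLoader() instead of the one allocated here.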
3963 StackHandleScope<3> hs(self);
3964 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
3965 ObjPtr<mirror::String> location;
3966 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(/*out*/&location,
3967 self,
3968 dex_file)));
3969 Handle<mirror::String> h_location(hs.NewHandle(location));
3970 {
3971 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
3972 // a thread holding the dex lock and blocking on a condition variable regarding
3973 // weak references access, and a thread blocking on the dex lock.
3974 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
3975 WriterMutexLock mu(self, *Locks::dex_lock_);
3976 old_data = FindDexCacheDataLocked(dex_file);
3977 old_dex_cache = DecodeDexCache(self, old_data);
3978 if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
3979 // Do InitializeDexCache while holding dex lock to make sure two threads don't call it at the
3980 // same time with the same dex cache. Since the .bss is shared this can cause failing DCHECK
3981 // that the arrays are null.
3982 mirror::DexCache::InitializeDexCache(self,
3983 h_dex_cache.Get(),
3984 h_location.Get(),
3985 &dex_file,
3986 linear_alloc,
3987 image_pointer_size_);
3988 RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
3989 }
3990 }
3991 if (old_dex_cache != nullptr) {
3992 // Another thread managed to initialize the dex cache faster, so use that DexCache.
3993 // If this thread encountered OOME, ignore it.
3994 DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
3995 self->ClearException();
3996 // We cannot call EnsureSameClassLoader() while holding the dex_lock_.
3997 return EnsureSameClassLoader(self, old_dex_cache, old_data, h_class_loader.Get());
3998 }
3999 if (h_dex_cache == nullptr) {
4000 self->AssertPendingOOMException();
4001 return nullptr;
4002 }
4003 table->InsertStrongRoot(h_dex_cache.Get());
4004 if (h_class_loader.Get() != nullptr) {
4005 // Since we added a strong root to the class table, do the write barrier as required for
4006 // remembered sets and generational GCs.
4007 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
4008 }
4009 return h_dex_cache.Get();
4010 }
4011
4012 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4013 ReaderMutexLock mu(self, *Locks::dex_lock_);
4014 return DecodeDexCache(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4015 }
4016
4017 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4018 ReaderMutexLock mu(self, *Locks::dex_lock_);
4019 DexCacheData dex_cache_data = FindDexCacheDataLocked(dex_file);
4020 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCache(self, dex_cache_data);
4021 if (dex_cache != nullptr) {
4022 return dex_cache;
4023 }
4024 // Failure, dump diagnostic and abort.
4025 for (const DexCacheData& data : dex_caches_) {
4026 if (DecodeDexCache(self, data) != nullptr) {
4027 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << data.dex_file->GetLocation();
4028 }
4029 }
4030 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4031 << " " << &dex_file << " " << dex_cache_data.dex_file;
4032 UNREACHABLE();
4033 }
4034
4035 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4036 const DexFile* dex_file = dex_cache->GetDexFile();
4037 DCHECK(dex_file != nullptr);
4038 ReaderMutexLock mu(self, *Locks::dex_lock_);
4039 // Search assuming uniqueness of the dex file.
4040 for (const DexCacheData& data : dex_caches_) {
4041 // Avoid decoding (and read barriers) other unrelated dex caches.
4042 if (data.dex_file == dex_file) {
4043 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCache(self, data);
4044 if (registered_dex_cache != nullptr) {
4045 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4046 return data.class_table;
4047 }
4048 }
4049 }
4050 return nullptr;
4051 }
4052
4053 ClassLinker::DexCacheData ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4054 // Search assuming uniqueness of the dex file.
4055 for (const DexCacheData& data : dex_caches_) {
4056 // Avoid decoding (and read barriers) other unrelated dex caches.
4057 if (data.dex_file == &dex_file) {
4058 return data;
4059 }
4060 }
4061 return DexCacheData();
4062 }
4063
4064 ObjPtr<mirror::Class> ClassLinker::CreatePrimitiveClass(Thread* self, Primitive::Type type) {
4065 ObjPtr<mirror::Class> primitive_class =
4066 AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
4067 if (UNLIKELY(primitive_class == nullptr)) {
4068 self->AssertPendingOOMException();
4069 return nullptr;
4070 }
4071 // Must hold lock on object when initializing.
4072 StackHandleScope<1> hs(self);
4073 Handle<mirror::Class> h_class(hs.NewHandle(primitive_class));
4074 ObjectLock<mirror::Class> lock(self, h_class);
4075 h_class->SetAccessFlags(kAccPublic | kAccFinal | kAccAbstract);
4076 h_class->SetPrimitiveType(type);
4077 h_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4078 EnsureSkipAccessChecksMethods</* kNeedsVerified= */ true>(h_class, image_pointer_size_);
4079 mirror::Class::SetStatus(h_class, ClassStatus::kInitialized, self);
4080 const char* descriptor = Primitive::Descriptor(type);
4081 ObjPtr<mirror::Class> existing = InsertClass(descriptor,
4082 h_class.Get(),
4083 ComputeModifiedUtf8Hash(descriptor));
4084 CHECK(existing == nullptr) << "CreatePrimitiveClass(" << type << ") failed";
4085 return h_class.Get();
4086 }
4087
4088 inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4089 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4090 }
4091
4092 // Create an array class (i.e. the class object for the array, not the
4093 // array itself). "descriptor" looks like "[C" or "[[[[B" or
4094 // "[Ljava/lang/String;".
4095 //
4096 // If "descriptor" refers to an array of primitives, look up the
4097 // primitive type's internally-generated class object.
4098 //
4099 // "class_loader" is the class loader of the class that's referring to
4100 // us. It's used to ensure that we're looking for the element type in
4101 // the right context. It does NOT become the class loader for the
4102 // array class; that always comes from the base element class.
4103 //
4104 // Returns null with an exception raised on failure.
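// A minimal usage sketch (hypothetical helper, not part of this file; the name
// FindStringArrayClass is an assumption for illustration): callers normally reach
// CreateArrayClass() indirectly through FindClass() with an array descriptor.
//
//   // Returns the class object for String[] as seen from `loader`.
//   ObjPtr<mirror::Class> FindStringArrayClass(ClassLinker* linker,
//                                              Thread* self,
//                                              Handle<mirror::ClassLoader> loader)
//       REQUIRES_SHARED(Locks::mutator_lock_) {
//     return linker->FindClass(self, "[Ljava/lang/String;", loader);
//   }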
4105 ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4106 const char* descriptor,
4107 size_t hash,
4108 Handle<mirror::ClassLoader> class_loader) {
4109 // Identify the underlying component type
4110 CHECK_EQ('[', descriptor[0]);
4111 StackHandleScope<2> hs(self);
4112
4113 // This is to prevent the calls to ClassLoad and ClassPrepare, which can cause java/user-supplied
4114 // code to be executed. We put it up here so we can avoid all the allocations associated with
4115 // creating the class. This can happen with (e.g.) JIT threads.
4116 if (!self->CanLoadClasses()) {
4117 // Make sure we don't try to load anything, potentially causing an infinite loop.
4118 ObjPtr<mirror::Throwable> pre_allocated =
4119 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4120 self->SetException(pre_allocated);
4121 return nullptr;
4122 }
4123
4124 MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4125 class_loader)));
4126 if (component_type == nullptr) {
4127 DCHECK(self->IsExceptionPending());
4128 // We need to accept erroneous classes as component types.
4129 const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4130 component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
4131 if (component_type == nullptr) {
4132 DCHECK(self->IsExceptionPending());
4133 return nullptr;
4134 } else {
4135 self->ClearException();
4136 }
4137 }
4138 if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4139 ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4140 return nullptr;
4141 }
4142 // See if the component type is already loaded. Array classes are
4143 // always associated with the class loader of their underlying
4144 // element type -- an array of Strings goes with the loader for
4145 // java/lang/String -- so we need to look for it there. (The
4146 // caller should have checked for the existence of the class
4147 // before calling here, but they did so with *their* class loader,
4148 // not the component type's loader.)
4149 //
4150 // If we find it, the caller adds "loader" to the class' initiating
4151 // loader list, which should prevent us from going through this again.
4152 //
4153 // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
4154 // are the same, because our caller (FindClass) just did the
4155 // lookup. (Even if we get this wrong we still have correct behavior,
4156 // because we effectively do this lookup again when we add the new
4157 // class to the hash table --- necessary because of possible races with
4158 // other threads.)
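// Concretely (illustrative): if an app class loader asks for "[Ljava/lang/String;" while
// java/lang/String is defined by the boot class loader, the lookup below runs against the boot
// loader's class table, where the array class may already exist.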
4159 if (class_loader.Get() != component_type->GetClassLoader()) {
4160 ObjPtr<mirror::Class> new_class =
4161 LookupClass(self, descriptor, hash, component_type->GetClassLoader());
4162 if (new_class != nullptr) {
4163 return new_class;
4164 }
4165 }
4166
4167 // Fill out the fields in the Class.
4168 //
4169 // It is possible to execute some methods against arrays, because
4170 // all arrays are subclasses of java_lang_Object_, so we need to set
4171 // up a vtable. We can just point at the one in java_lang_Object_.
4172 //
4173 // Array classes are simple enough that we don't need to do a full
4174 // link step.
4175 auto new_class = hs.NewHandle<mirror::Class>(nullptr);
4176 if (UNLIKELY(!init_done_)) {
4177 // Classes that were hand-created, i.e. not by FindSystemClass.
4178 if (strcmp(descriptor, "[Ljava/lang/Class;") == 0) {
4179 new_class.Assign(GetClassRoot<mirror::ObjectArray<mirror::Class>>(this));
4180 } else if (strcmp(descriptor, "[Ljava/lang/Object;") == 0) {
4181 new_class.Assign(GetClassRoot<mirror::ObjectArray<mirror::Object>>(this));
4182 } else if (strcmp(descriptor, "[Ljava/lang/String;") == 0) {
4183 new_class.Assign(GetClassRoot<mirror::ObjectArray<mirror::String>>(this));
4184 } else if (strcmp(descriptor, "[Z") == 0) {
4185 new_class.Assign(GetClassRoot<mirror::BooleanArray>(this));
4186 } else if (strcmp(descriptor, "[B") == 0) {
4187 new_class.Assign(GetClassRoot<mirror::ByteArray>(this));
4188 } else if (strcmp(descriptor, "[C") == 0) {
4189 new_class.Assign(GetClassRoot<mirror::CharArray>(this));
4190 } else if (strcmp(descriptor, "[S") == 0) {
4191 new_class.Assign(GetClassRoot<mirror::ShortArray>(this));
4192 } else if (strcmp(descriptor, "[I") == 0) {
4193 new_class.Assign(GetClassRoot<mirror::IntArray>(this));
4194 } else if (strcmp(descriptor, "[J") == 0) {
4195 new_class.Assign(GetClassRoot<mirror::LongArray>(this));
4196 } else if (strcmp(descriptor, "[F") == 0) {
4197 new_class.Assign(GetClassRoot<mirror::FloatArray>(this));
4198 } else if (strcmp(descriptor, "[D") == 0) {
4199 new_class.Assign(GetClassRoot<mirror::DoubleArray>(this));
4200 }
4201 }
4202 if (new_class == nullptr) {
4203 new_class.Assign(AllocClass(self, mirror::Array::ClassSize(image_pointer_size_)));
4204 if (new_class == nullptr) {
4205 self->AssertPendingOOMException();
4206 return nullptr;
4207 }
4208 new_class->SetComponentType(component_type.Get());
4209 }
4210 ObjectLock<mirror::Class> lock(self, new_class); // Must hold lock on object when initializing.
4211 DCHECK(new_class->GetComponentType() != nullptr);
4212 ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
4213 new_class->SetSuperClass(java_lang_Object);
4214 new_class->SetVTable(java_lang_Object->GetVTable());
4215 new_class->SetPrimitiveType(Primitive::kPrimNot);
4216 new_class->SetClassLoader(component_type->GetClassLoader());
4217 if (component_type->IsPrimitive()) {
4218 new_class->SetClassFlags(mirror::kClassFlagNoReferenceFields);
4219 } else {
4220 new_class->SetClassFlags(mirror::kClassFlagObjectArray);
4221 }
4222 mirror::Class::SetStatus(new_class, ClassStatus::kLoaded, self);
4223 new_class->PopulateEmbeddedVTable(image_pointer_size_);
4224 ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
4225 new_class->SetImt(object_imt, image_pointer_size_);
4226 EnsureSkipAccessChecksMethods</* kNeedsVerified= */ true>(new_class, image_pointer_size_);
4227 mirror::Class::SetStatus(new_class, ClassStatus::kInitialized, self);
4228 // We don't need to call new_class->SetObjectSize(..)
4229 // because Object::SizeOf delegates to Array::SizeOf.
4230
4231 // All arrays have java/lang/Cloneable and java/io/Serializable as
4232 // interfaces. We need to set that up here, so that stuff like
4233 // "instanceof" works right.
4234 //
4235 // Note: The GC could run during the call to FindSystemClass,
4236 // so we need to make sure the class object is GC-valid while we're in
4237 // there. Do this by clearing the interface list so the GC will just
4238 // think that the entries are null.
4239
4240
4241 // Use the single, global copies of "interfaces" and "iftable"
4242 // (remember not to free them for arrays).
4243 {
4244 ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
4245 CHECK(array_iftable != nullptr);
4246 new_class->SetIfTable(array_iftable);
4247 }
4248
4249 // Inherit access flags from the component type.
4250 int access_flags = new_class->GetComponentType()->GetAccessFlags();
4251 // Lose any implementation detail flags; in particular, arrays aren't finalizable.
4252 access_flags &= kAccJavaFlagsMask;
4253 // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
4254 // and remove "interface".
4255 access_flags |= kAccAbstract | kAccFinal;
4256 access_flags &= ~kAccInterface;
4257 // Arrays are access-checks-clean and preverified.
4258 access_flags |= kAccVerificationAttempted;
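// Worked example (illustrative): for component type java.lang.String (public final),
// kAccJavaFlagsMask keeps public|final, then abstract|final are added and interface is cleared,
// so "[Ljava/lang/String;" ends up public|final|abstract with kAccVerificationAttempted set.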
4259
4260 new_class->SetAccessFlags(access_flags);
4261
4262 ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
4263 if (existing == nullptr) {
4264 // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4265 // duplicate events in case of races. Array classes don't really follow dedicated
4266 // load and prepare, anyway.
4267 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4268 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4269
4270 jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
4271 return new_class.Get();
4272 }
4273 // Another thread must have loaded the class after we
4274 // started but before we finished. Abandon what we've
4275 // done.
4276 //
4277 // (Yes, this happens.)
4278
4279 return existing;
4280 }
4281
4282 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4283 ClassRoot class_root;
4284 switch (type) {
4285 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4286 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4287 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4288 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4289 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4290 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4291 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4292 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4293 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4294 default:
4295 return nullptr;
4296 }
4297 return GetClassRoot(class_root, this);
4298 }
4299
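// FindPrimitiveClass() below is the throwing counterpart of LookupPrimitiveClass(): any type
// character outside "BCDFIJSZV" yields a NoClassDefFoundError rather than a silent null.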
4300 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4301 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4302 if (UNLIKELY(result == nullptr)) {
4303 std::string printable_type(PrintableChar(type));
4304 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4305 }
4306 return result;
4307 }
4308
4309 ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4310 ObjPtr<mirror::Class> klass,
4311 size_t hash) {
4312 DCHECK(Thread::Current()->CanLoadClasses());
4313 if (VLOG_IS_ON(class_linker)) {
4314 ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
4315 std::string source;
4316 if (dex_cache != nullptr) {
4317 source += " from ";
4318 source += dex_cache->GetLocation()->ToModifiedUtf8();
4319 }
4320 LOG(INFO) << "Loaded class " << descriptor << source;
4321 }
4322 {
4323 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4324 const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
4325 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
4326 ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
4327 if (existing != nullptr) {
4328 return existing;
4329 }
4330 VerifyObject(klass);
4331 class_table->InsertWithHash(klass, hash);
4332 if (class_loader != nullptr) {
4333 // This is necessary because we need to have the card dirtied for remembered sets.
4334 WriteBarrier::ForEveryFieldWrite(class_loader);
4335 }
4336 if (log_new_roots_) {
4337 new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
4338 }
4339 }
4340 if (kIsDebugBuild) {
4341 // Test that copied methods correctly can find their holder.
4342 for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4343 CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4344 }
4345 }
4346 return nullptr;
4347 }
4348
4349 void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
4350 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4351 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4352 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4353 new_bss_roots_boot_oat_files_.push_back(oat_file);
4354 }
4355 }
4356
4357 // TODO This should really be in mirror::Class.
4358 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
4359 LengthPrefixedArray<ArtMethod>* new_methods) {
4360 klass->SetMethodsPtrUnchecked(new_methods,
4361 klass->NumDirectMethods(),
4362 klass->NumDeclaredVirtualMethods());
4363 // Need to mark the card so that the remembered sets and mod union tables get updated.
4364 WriteBarrier::ForEveryFieldWrite(klass);
4365 }
4366
4367 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4368 const char* descriptor,
4369 ObjPtr<mirror::ClassLoader> class_loader) {
4370 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4371 }
4372
4373 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4374 const char* descriptor,
4375 size_t hash,
4376 ObjPtr<mirror::ClassLoader> class_loader) {
4377 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4378 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4379 if (class_table != nullptr) {
4380 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
4381 if (result != nullptr) {
4382 return result;
4383 }
4384 }
4385 return nullptr;
4386 }
4387
4388 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4389 public:
4390 MoveClassTableToPreZygoteVisitor() {}
4391
4392 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4393 REQUIRES(Locks::classlinker_classes_lock_)
4394 REQUIRES_SHARED(Locks::mutator_lock_) override {
4395 ClassTable* const class_table = class_loader->GetClassTable();
4396 if (class_table != nullptr) {
4397 class_table->FreezeSnapshot();
4398 }
4399 }
4400 };
4401
4402 void ClassLinker::MoveClassTableToPreZygote() {
4403 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4404 boot_class_table_->FreezeSnapshot();
4405 MoveClassTableToPreZygoteVisitor visitor;
4406 VisitClassLoaders(&visitor);
4407 }
4408
4409 // Look up classes by hash and descriptor and put all matching ones in the result array.
4410 class LookupClassesVisitor : public ClassLoaderVisitor {
4411 public:
4412 LookupClassesVisitor(const char* descriptor,
4413 size_t hash,
4414 std::vector<ObjPtr<mirror::Class>>* result)
4415 : descriptor_(descriptor),
4416 hash_(hash),
4417 result_(result) {}
4418
4419 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4420 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
4421 ClassTable* const class_table = class_loader->GetClassTable();
4422 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
4423 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4424 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
4425 result_->push_back(klass);
4426 }
4427 }
4428
4429 private:
4430 const char* const descriptor_;
4431 const size_t hash_;
4432 std::vector<ObjPtr<mirror::Class>>* const result_;
4433 };
4434
4435 void ClassLinker::LookupClasses(const char* descriptor,
4436 std::vector<ObjPtr<mirror::Class>>& result) {
4437 result.clear();
4438 Thread* const self = Thread::Current();
4439 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4440 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4441 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
4442 if (klass != nullptr) {
4443 DCHECK(klass->GetClassLoader() == nullptr);
4444 result.push_back(klass);
4445 }
4446 LookupClassesVisitor visitor(descriptor, hash, &result);
4447 VisitClassLoaders(&visitor);
4448 }
4449
4450 bool ClassLinker::AttemptSupertypeVerification(Thread* self,
4451 Handle<mirror::Class> klass,
4452 Handle<mirror::Class> supertype) {
4453 DCHECK(self != nullptr);
4454 DCHECK(klass != nullptr);
4455 DCHECK(supertype != nullptr);
4456
4457 if (!supertype->IsVerified() && !supertype->IsErroneous()) {
4458 VerifyClass(self, supertype);
4459 }
4460
4461 if (supertype->IsVerified() || supertype->ShouldVerifyAtRuntime()) {
4462 // The supertype is either verified, or we soft failed at AOT time.
4463 DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
4464 return true;
4465 }
4466 // If we got this far then we have a hard failure.
4467 std::string error_msg =
4468 StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
4469 klass->PrettyDescriptor().c_str(),
4470 supertype->PrettyDescriptor().c_str());
4471 LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4472 StackHandleScope<1> hs(self);
4473 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
4474 if (cause != nullptr) {
4475 // Set during VerifyClass call (if at all).
4476 self->ClearException();
4477 }
4478 // Change into a verify error.
4479 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4480 if (cause != nullptr) {
4481 self->GetException()->SetCause(cause.Get());
4482 }
4483 ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
4484 if (Runtime::Current()->IsAotCompiler()) {
4485 Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
4486 }
4487 // Need to grab the lock to change status.
4488 ObjectLock<mirror::Class> super_lock(self, klass);
4489 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4490 return false;
4491 }
4492
4493 verifier::FailureKind ClassLinker::VerifyClass(
4494 Thread* self, Handle<mirror::Class> klass, verifier::HardFailLogMode log_level) {
4495 {
4496 // TODO: assert that the monitor on the Class is held
4497 ObjectLock<mirror::Class> lock(self, klass);
4498
4499 // Is somebody verifying this now?
4500 ClassStatus old_status = klass->GetStatus();
4501 while (old_status == ClassStatus::kVerifying ||
4502 old_status == ClassStatus::kVerifyingAtRuntime) {
4503 lock.WaitIgnoringInterrupts();
4504 // WaitIgnoringInterrupts can still receive an interrupt and return early; in that
4505 // case we may see the same status again (b/62912904), which is why the check is
4506 // greater or equal.
4507 CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
4508 << "Class '" << klass->PrettyClass()
4509 << "' performed an illegal verification state transition from " << old_status
4510 << " to " << klass->GetStatus();
4511 old_status = klass->GetStatus();
4512 }
4513
4514 // The class might already be erroneous, for example at compile time if we attempted to verify
4515 // this class as a parent to another.
4516 if (klass->IsErroneous()) {
4517 ThrowEarlierClassFailure(klass.Get());
4518 return verifier::FailureKind::kHardFailure;
4519 }
4520
4521 // Don't attempt to re-verify if already verified.
4522 if (klass->IsVerified()) {
4523 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4524 return verifier::FailureKind::kNoFailure;
4525 }
4526
4527 // For AOT, don't attempt to re-verify if we have already found we should
4528 // verify at runtime.
4529 if (Runtime::Current()->IsAotCompiler() && klass->ShouldVerifyAtRuntime()) {
4530 return verifier::FailureKind::kSoftFailure;
4531 }
4532
4533 if (klass->GetStatus() == ClassStatus::kResolved) {
4534 mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
4535 } else {
4536 CHECK_EQ(klass->GetStatus(), ClassStatus::kRetryVerificationAtRuntime)
4537 << klass->PrettyClass();
4538 CHECK(!Runtime::Current()->IsAotCompiler());
4539 mirror::Class::SetStatus(klass, ClassStatus::kVerifyingAtRuntime, self);
4540 }
4541
4542 // Skip verification if disabled.
4543 if (!Runtime::Current()->IsVerificationEnabled()) {
4544 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4545 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4546 return verifier::FailureKind::kNoFailure;
4547 }
4548 }
4549
4550 VLOG(class_linker) << "Beginning verification for class: "
4551 << klass->PrettyDescriptor()
4552 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4553
4554 // Verify super class.
4555 StackHandleScope<2> hs(self);
4556 MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4557 // If we have a superclass and we get a hard verification failure we can return immediately.
4558 if (supertype != nullptr && !AttemptSupertypeVerification(self, klass, supertype)) {
4559 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4560 return verifier::FailureKind::kHardFailure;
4561 }
4562
4563 // Verify all default super-interfaces.
4564 //
4565 // (1) Don't bother if the superclass has already had a soft verification failure.
4566 //
4567 // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4568 // recursive initialization by themselves. This is because when an interface is initialized
4569 // directly it must not initialize its superinterfaces. We are allowed to verify regardless
4570 // but choose not to for an optimization. If the interface is being verified due to a class
4571 // initialization (which would need all the default interfaces to be verified) the class code
4572 // will trigger the recursive verification anyway.
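// Illustration of the loop below (assumed scenario): if klass implements an interface I that
// declares default methods and I is not yet verified, I is verified here via
// AttemptSupertypeVerification(); a soft failure on I is recorded in `supertype` so the status
// handling further down demotes klass to kRetryVerificationAtRuntime.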
4573 if ((supertype == nullptr || supertype->IsVerified()) // See (1)
4574 && !klass->IsInterface()) { // See (2)
4575 int32_t iftable_count = klass->GetIfTableCount();
4576 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
4577 // Loop through all interfaces this class has defined. It doesn't matter the order.
4578 for (int32_t i = 0; i < iftable_count; i++) {
4579 iface.Assign(klass->GetIfTable()->GetInterface(i));
4580 DCHECK(iface != nullptr);
4581 // We only care if we have default interfaces and can skip if we are already verified...
4582 if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
4583 continue;
4584 } else if (UNLIKELY(!AttemptSupertypeVerification(self, klass, iface))) {
4585 // We had a hard failure while verifying this interface. Just return immediately.
4586 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4587 return verifier::FailureKind::kHardFailure;
4588 } else if (UNLIKELY(!iface->IsVerified())) {
4589 // We softly failed to verify the iface. Stop checking and clean up.
4590 // Put the iface into the supertype handle so we know what caused us to fail.
4591 supertype.Assign(iface.Get());
4592 break;
4593 }
4594 }
4595 }
4596
4597 // At this point if verification failed, then supertype is the "first" supertype that failed
4598 // verification (without a specific order). If verification succeeded, then supertype is either
4599 // null or the original superclass of klass and is verified.
4600 DCHECK(supertype == nullptr ||
4601 supertype.Get() == klass->GetSuperClass() ||
4602 !supertype->IsVerified());
4603
4604 // Try to use verification information from the oat file, otherwise do runtime verification.
4605 const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
4606 ClassStatus oat_file_class_status(ClassStatus::kNotReady);
4607 bool preverified = VerifyClassUsingOatFile(dex_file, klass.Get(), oat_file_class_status);
4608
4609 VLOG(class_linker) << "Class preverified status for class "
4610 << klass->PrettyDescriptor()
4611 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4612 << ": "
4613 << preverified;
4614
4615 // If the oat file says the class had an error, re-run the verifier. That way we will get a
4616 // precise error message. To ensure a rerun, test:
4617 // mirror::Class::IsErroneous(oat_file_class_status) => !preverified
4618 DCHECK(!mirror::Class::IsErroneous(oat_file_class_status) || !preverified);
4619
4620 std::string error_msg;
4621 verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
4622 if (!preverified) {
4623 verifier_failure = PerformClassVerification(self, klass, log_level, &error_msg);
4624 }
4625
4626 // Verification is done, grab the lock again.
4627 ObjectLock<mirror::Class> lock(self, klass);
4628
4629 if (preverified || verifier_failure != verifier::FailureKind::kHardFailure) {
4630 if (!preverified && verifier_failure != verifier::FailureKind::kNoFailure) {
4631 VLOG(class_linker) << "Soft verification failure in class "
4632 << klass->PrettyDescriptor()
4633 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4634 << " because: " << error_msg;
4635 }
4636 self->AssertNoPendingException();
4637 // Make sure all classes referenced by catch blocks are resolved.
4638 ResolveClassExceptionHandlerTypes(klass);
4639 if (verifier_failure == verifier::FailureKind::kNoFailure) {
4640 // Even though there were no verifier failures we need to respect whether the super-class and
4641 // super-default-interfaces were verified or requiring runtime reverification.
4642 if (supertype == nullptr || supertype->IsVerified()) {
4643 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4644 } else {
4645 CHECK_EQ(supertype->GetStatus(), ClassStatus::kRetryVerificationAtRuntime);
4646 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4647 // Pretend a soft failure occurred so that we don't consider the class verified below.
4648 verifier_failure = verifier::FailureKind::kSoftFailure;
4649 }
4650 } else {
4651 CHECK_EQ(verifier_failure, verifier::FailureKind::kSoftFailure);
4652 // Soft failures at compile time should be retried at runtime. Soft
4653 // failures at runtime will be handled by slow paths in the generated
4654 // code. Set status accordingly.
4655 if (Runtime::Current()->IsAotCompiler()) {
4656 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4657 } else {
4658 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4659 // As this is a fake verified status, make sure the methods are _not_ marked
4660 // kAccSkipAccessChecks later.
4661 klass->SetVerificationAttempted();
4662 }
4663 }
4664 } else {
4665 VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
4666 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4667 << " because: " << error_msg;
4668 self->AssertNoPendingException();
4669 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4670 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4671 }
4672 if (preverified || verifier_failure == verifier::FailureKind::kNoFailure) {
4673 // Class is verified so we don't need to do any access check on its methods.
4674 // Let the interpreter know it by setting the kAccSkipAccessChecks flag onto each
4675 // method.
4676 // Note: we're going here during compilation and at runtime. When we set the
4677 // kAccSkipAccessChecks flag when compiling image classes, the flag is recorded
4678 // in the image and is set when loading the image.
4679
4680 if (UNLIKELY(Runtime::Current()->IsVerificationSoftFail())) {
4681 // Never skip access checks if the verification soft fail is forced.
4682 // Mark the class as having a verification attempt to avoid re-running the verifier.
4683 klass->SetVerificationAttempted();
4684 } else {
4685 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4686 }
4687 }
4688 // Done verifying. Notify the compiler about the verification status, in case the class
4689 // was verified implicitly (eg super class of a compiled class).
4690 if (Runtime::Current()->IsAotCompiler()) {
4691 Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
4692 ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
4693 }
4694 return verifier_failure;
4695 }
4696
4697 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
4698 Handle<mirror::Class> klass,
4699 verifier::HardFailLogMode log_level,
4700 std::string* error_msg) {
4701 Runtime* const runtime = Runtime::Current();
4702 return verifier::ClassVerifier::VerifyClass(self,
4703 klass.Get(),
4704 runtime->GetCompilerCallbacks(),
4705 runtime->IsAotCompiler(),
4706 log_level,
4707 Runtime::Current()->GetTargetSdkVersion(),
4708 error_msg);
4709 }
4710
4711 bool ClassLinker::VerifyClassUsingOatFile(const DexFile& dex_file,
4712 ObjPtr<mirror::Class> klass,
4713 ClassStatus& oat_file_class_status) {
4714 // If we're compiling, we can only verify the class using the oat file if
4715 // we are not compiling the image or if the class we're verifying is not part of
4716 // the compilation unit (app - dependencies). We will let the compiler callback
4717 // tell us about the latter.
4718 if (Runtime::Current()->IsAotCompiler()) {
4719 CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
4720 // Are we compiling the bootclasspath?
4721 if (callbacks->IsBootImage()) {
4722 return false;
4723 }
4724 // We are compiling an app (not the image).
4725 if (!callbacks->CanUseOatStatusForVerification(klass.Ptr())) {
4726 return false;
4727 }
4728 }
4729
4730 const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
4731 // In case we run without an image there won't be a backing oat file.
4732 if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
4733 if (!kIsDebugBuild && klass->GetClassLoader() == nullptr) {
4734 // For boot classpath classes in the case we're not using a default boot image:
4735 // we don't have the infrastructure yet to query verification data on individual
4736 // boot vdex files, so it's simpler for now to consider all boot classpath classes
4737 // verified. This should be taken into account when measuring boot time and app
4738 // startup compared to the (current) production system where both:
4739 // 1) updatable boot classpath classes, and
4740 // 2) classes in /system referencing updatable classes
4741 // will be verified at runtime.
4742 if (Runtime::Current()->IsUsingApexBootImageLocation()) {
4743 oat_file_class_status = ClassStatus::kVerified;
4744 return true;
4745 }
4746 }
4747 return false;
4748 }
4749
4750 uint16_t class_def_index = klass->GetDexClassDefIndex();
4751 oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
4752 if (oat_file_class_status >= ClassStatus::kVerified) {
4753 return true;
4754 }
4755 // If we only verified a subset of the classes at compile time, we can end up with classes that
4756 // were resolved by the verifier.
4757 if (oat_file_class_status == ClassStatus::kResolved) {
4758 return false;
4759 }
4760 if (oat_file_class_status == ClassStatus::kRetryVerificationAtRuntime) {
4761 // Compile time verification failed with a soft error. Compile time verification can fail
4762 // because we have incomplete type information. Consider the following:
4763 // class ... {
4764 // Foo x;
4765 // .... () {
4766 // if (...) {
4767 // v1 gets assigned a type of resolved class Foo
4768 // } else {
4769 // v1 gets assigned a type of unresolved class Bar
4770 // }
4771 // iput x = v1
4772 // } }
4773 // when we merge v1 following the if-then-else it results in Conflict
4774 // (see verifier::RegType::Merge) as we can't know the type of Bar and we could possibly be
4775 // allowing an unsafe assignment to the field x in the iput (javac may have compiled this as
4776 // it knew Bar was a sub-class of Foo, but for us this may have been moved into a separate apk
4777 // at compile time).
4778 return false;
4779 }
4780 if (mirror::Class::IsErroneous(oat_file_class_status)) {
4781 // Compile time verification failed with a hard error. This is caused by invalid instructions
4782 // in the class. These errors are unrecoverable.
4783 return false;
4784 }
4785 if (oat_file_class_status == ClassStatus::kNotReady) {
4786 // Status is uninitialized if we couldn't determine the status at compile time, for example,
4787 // not loading the class.
4788 // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
4789 // isn't a problem and this case shouldn't occur
4790 return false;
4791 }
4792 std::string temp;
4793 LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
4794 << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
4795 << klass->GetDescriptor(&temp);
4796 UNREACHABLE();
4797 }
4798
4799 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
4800 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
4801 ResolveMethodExceptionHandlerTypes(&method);
4802 }
4803 }
4804
4805 void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
4806 // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
4807 CodeItemDataAccessor accessor(method->DexInstructionData());
4808 if (!accessor.HasCodeItem()) {
4809 return; // native or abstract method
4810 }
4811 if (accessor.TriesSize() == 0) {
4812 return; // nothing to process
4813 }
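// Hedged summary of the encoding walked below: GetCatchHandlerData(0) points at a ULEB128 count
// of encoded handler lists; CatchHandlerIterator then iterates the typed handlers of one list,
// and EndDataPointer() advances handlers_ptr to the next list.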
4814 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
4815 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
4816 for (uint32_t idx = 0; idx < handlers_size; idx++) {
4817 CatchHandlerIterator iterator(handlers_ptr);
4818 for (; iterator.HasNext(); iterator.Next()) {
4819 // Ensure exception types are resolved so that they don't need resolution to be delivered;
4820 // unresolved exception types will be ignored by exception delivery.
4821 if (iterator.GetHandlerTypeIndex().IsValid()) {
4822 ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
4823 if (exception_type == nullptr) {
4824 DCHECK(Thread::Current()->IsExceptionPending());
4825 Thread::Current()->ClearException();
4826 }
4827 }
4828 }
4829 handlers_ptr = iterator.EndDataPointer();
4830 }
4831 }
4832
4833 ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
4834 jstring name,
4835 jobjectArray interfaces,
4836 jobject loader,
4837 jobjectArray methods,
4838 jobjectArray throws) {
4839 Thread* self = soa.Self();
4840
4841 // This is to prevent the calls to ClassLoad and ClassPrepare, which can cause java/user-supplied
4842 // code to be executed. We put it up here so we can avoid all the allocations associated with
4843 // creating the class. This can happen with (e.g.) JIT threads.
4844 if (!self->CanLoadClasses()) {
4845 // Make sure we don't try to load anything, potentially causing an infinite loop.
4846 ObjPtr<mirror::Throwable> pre_allocated =
4847 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4848 self->SetException(pre_allocated);
4849 return nullptr;
4850 }
4851
4852 StackHandleScope<10> hs(self);
4853 MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
4854 AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
4855 if (temp_klass == nullptr) {
4856 CHECK(self->IsExceptionPending()); // OOME.
4857 return nullptr;
4858 }
4859 DCHECK(temp_klass->GetClass() != nullptr);
4860 temp_klass->SetObjectSize(sizeof(mirror::Proxy));
4861 // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
4862 // the methods.
4863 temp_klass->SetAccessFlags(kAccClassIsProxy | kAccPublic | kAccFinal | kAccVerificationAttempted);
4864 temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
4865 DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
4866 temp_klass->SetName(soa.Decode<mirror::String>(name));
4867 temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
4868 // Object has an empty iftable, copy it for that reason.
4869 temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4870 mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
4871 std::string storage;
4872 const char* descriptor = temp_klass->GetDescriptor(&storage);
4873 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4874
4875 // Needs to be before we insert the class so that the allocator field is set.
4876 LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
4877
4878 // Insert the class before loading the fields as the field roots
4879 // (ArtField::declaring_class_) are only visited from the class
4880 // table. There can't be any suspend points between inserting the
4881 // class and setting the field arrays below.
4882 ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
4883 CHECK(existing == nullptr);
4884
4885 // Instance fields are inherited, but we add a couple of static fields...
4886 const size_t num_fields = 2;
4887 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
4888 temp_klass->SetSFieldsPtr(sfields);
4889
4890 // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
4891 // our proxy, so Class.getInterfaces doesn't return the flattened set.
4892 ArtField& interfaces_sfield = sfields->At(0);
4893 interfaces_sfield.SetDexFieldIndex(0);
4894 interfaces_sfield.SetDeclaringClass(temp_klass.Get());
4895 interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
4896
4897 // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
4898 ArtField& throws_sfield = sfields->At(1);
4899 throws_sfield.SetDexFieldIndex(1);
4900 throws_sfield.SetDeclaringClass(temp_klass.Get());
4901 throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
4902
4903 // Proxies have 1 direct method, the constructor
4904 const size_t num_direct_methods = 1;
4905
4906 // They have as many virtual methods as the array
4907 auto h_methods = hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
4908 DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
4909 << mirror::Class::PrettyClass(h_methods->GetClass());
4910 const size_t num_virtual_methods = h_methods->GetLength();
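// Resulting shape (summary of the code above and below): a proxy class carries exactly two
// static fields ("interfaces" at dex field index 0, "throws" at index 1), one direct method
// (the constructor created via CreateProxyConstructor) and one virtual method per entry of
// `methods`, each copied from its prototype via CreateProxyMethod.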
4911
4912 // Create the methods array.
4913 LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
4914 self, allocator, num_direct_methods + num_virtual_methods);
4915 // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
4916 // want to throw OOM in the future.
4917 if (UNLIKELY(proxy_class_methods == nullptr)) {
4918 self->AssertPendingOOMException();
4919 return nullptr;
4920 }
4921 temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
4922
4923 // Create the single direct method.
4924 CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
4925
4926 // Create the virtual methods using the specified prototypes.
4927 // TODO These should really use the iterators.
4928 for (size_t i = 0; i < num_virtual_methods; ++i) {
4929 auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
4930 auto* prototype = h_methods->Get(i)->GetArtMethod();
4931 CreateProxyMethod(temp_klass, prototype, virtual_method);
4932 DCHECK(virtual_method->GetDeclaringClass() != nullptr);
4933 DCHECK(prototype->GetDeclaringClass() != nullptr);
4934 }
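// Each proxy method records its interface prototype in its data pointer (set in
// CreateProxyMethod() via SetDataPtrSize()), which is how GetInterfaceMethodIfProxy() later maps
// the proxy method back to the interface method it implements.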
4935
4936 // The super class is java.lang.reflect.Proxy
4937 temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
4938 // Now effectively in the loaded state.
4939 mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
4940 self->AssertNoPendingException();
4941
4942 // At this point the class is loaded. Publish a ClassLoad event.
4943 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
4944 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
4945
4946 MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
4947 {
4948 // Must hold lock on object when resolved.
4949 ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
4950 // Link the fields and virtual methods, creating vtable and iftables.
4951 // The new class will replace the old one in the class table.
4952 Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
4953 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
4954 if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
4955 mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
4956 return nullptr;
4957 }
4958 }
4959 CHECK(temp_klass->IsRetired());
4960 CHECK_NE(temp_klass.Get(), klass.Get());
4961
4962 CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
4963 interfaces_sfield.SetObject<false>(
4964 klass.Get(),
4965 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
4966 CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
4967 throws_sfield.SetObject<false>(
4968 klass.Get(),
4969 soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws));
4970
4971 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
4972
4973 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
4974 // See also ClassLinker::EnsureInitialized().
4975 if (kBitstringSubtypeCheckEnabled) {
4976 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
4977 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
4978 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
4979 }
4980
4981 {
4982 // Lock on klass is released. Lock new class object.
4983 ObjectLock<mirror::Class> initialization_lock(self, klass);
4984 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4985 mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
4986 }
4987
4988 // sanity checks
4989 if (kIsDebugBuild) {
4990 CHECK(klass->GetIFieldsPtr() == nullptr);
4991 CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
4992
4993 for (size_t i = 0; i < num_virtual_methods; ++i) {
4994 auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
4995 auto* prototype = h_methods->Get(i)->GetArtMethod();
4996 CheckProxyMethod(virtual_method, prototype);
4997 }
4998
4999 StackHandleScope<1> hs2(self);
5000 Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
5001 std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
5002 decoded_name->ToModifiedUtf8().c_str()));
5003 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
5004
5005 std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
5006 decoded_name->ToModifiedUtf8().c_str()));
5007 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
5008
5009 CHECK_EQ(klass.Get()->GetProxyInterfaces(),
5010 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5011 CHECK_EQ(klass.Get()->GetProxyThrows(),
5012 soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws));
5013 }
5014 return klass.Get();
5015 }
5016
5017 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5018 // Create constructor for Proxy that must initialize the method.
5019 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5020 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5021
5022 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5023 // on which front-end compiler was used to build the libcore DEX files.
5024 ArtMethod* proxy_constructor =
5025 jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
5026 DCHECK(proxy_constructor != nullptr)
5027 << "Could not find <init> method in java.lang.reflect.Proxy";
5028
5029 // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5030 // code_ too)
5031 DCHECK(out != nullptr);
5032 out->CopyFrom(proxy_constructor, image_pointer_size_);
5033 // Make this constructor public and fix the class to be our Proxy version.
5034 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5035 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5036 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5037 kAccPublic |
5038 kAccCompileDontBother);
5039 out->SetDeclaringClass(klass.Get());
5040
5041 // Set the original constructor method.
5042 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5043 }
5044
5045 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5046 CHECK(constructor->IsConstructor());
5047 auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5048 CHECK_STREQ(np->GetName(), "<init>");
5049 CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5050 DCHECK(constructor->IsPublic());
5051 }
5052
5053 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5054 ArtMethod* out) {
5055 // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
5056 // as necessary
5057 DCHECK(out != nullptr);
5058 out->CopyFrom(prototype, image_pointer_size_);
5059
5060 // Set class to be the concrete proxy class.
5061 out->SetDeclaringClass(klass.Get());
5062 // Clear the abstract, default and conflict flags to ensure that defaults aren't picked in
5063 // preference to the invocation handler.
5064 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault | kAccDefaultConflict;
5065 // Make the method final.
5066 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5067 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5068 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5069
5070 // Clear the dex_code_item_offset_. It needs to be 0 since proxy methods have no CodeItems but the
5071 // method they copy might (if it's a default method).
5072 out->SetCodeItemOffset(0);
5073
5074 // Set the original interface method.
5075 out->SetDataPtrSize(prototype, image_pointer_size_);
5076
5077 // At runtime the method looks like a reference and argument saving method, clone the code
5078 // related parameters from this method.
5079 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
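// Every proxy virtual method shares this single entry point; at runtime it saves the incoming
// arguments and forwards the call to the proxy instance's InvocationHandler instead of running
// any compiled code of its own.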
5080 }
5081
5082 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5083 // Basic sanity
5084 CHECK(!prototype->IsFinal());
5085 CHECK(method->IsFinal());
5086 CHECK(method->IsInvokable());
5087
5088 // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5089 // interface prototype. The exceptions to this are constructors and the Class of the Proxy itself.
5090 CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5091 CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5092 }
5093
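// Decides whether InitializeClass() may proceed under the given restrictions: with
// !can_init_statics we refuse any class that has a <clinit>, encoded static values, or
// (transitively) an interface with default methods that cannot be initialized; with
// !can_init_parents we refuse if the superclass chain is not already initialized.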
5094 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
5095 bool can_init_parents) {
5096 if (can_init_statics && can_init_parents) {
5097 return true;
5098 }
5099 if (!can_init_statics) {
5100 // Check if there's a class initializer.
5101 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5102 if (clinit != nullptr) {
5103 return false;
5104 }
5105 // Check if there are encoded static values needing initialization.
5106 if (klass->NumStaticFields() != 0) {
5107 const dex::ClassDef* dex_class_def = klass->GetClassDef();
5108 DCHECK(dex_class_def != nullptr);
5109 if (dex_class_def->static_values_off_ != 0) {
5110 return false;
5111 }
5112 }
5113 // If we are a class we need to initialize all interfaces with default methods when we are
5114 // initialized. Check all of them.
5115 if (!klass->IsInterface()) {
5116 size_t num_interfaces = klass->GetIfTableCount();
5117 for (size_t i = 0; i < num_interfaces; i++) {
5118 ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5119 if (iface->HasDefaultMethods() &&
5120 !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5121 return false;
5122 }
5123 }
5124 }
5125 }
5126 if (klass->IsInterface() || !klass->HasSuperClass()) {
5127 return true;
5128 }
5129 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5130 if (!can_init_parents && !super_class->IsInitialized()) {
5131 return false;
5132 }
5133 return CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5134 }
5135
5136 bool ClassLinker::InitializeClass(Thread* self, Handle<mirror::Class> klass,
5137 bool can_init_statics, bool can_init_parents) {
5138 // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
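// The overall sequence below: verify if needed, mark the class kInitializing under the lock,
// initialize the superclass and any default-method interfaces, apply encoded static values,
// run <clinit>, then publish kInitialized (or an error status) under the lock again.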
5139
5140 // Are we already initialized and therefore done?
5141 // Note: we differ from the JLS here as we don't do this under the lock; this is benign since
5142 // an initialized class will never change its state.
5143 if (klass->IsInitialized()) {
5144 return true;
5145 }
5146
5147 // Fast fail if initialization requires a full runtime. Not part of the JLS.
5148 if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
5149 return false;
5150 }
5151
5152 self->AllowThreadSuspension();
5153 uint64_t t0;
5154 {
5155 ObjectLock<mirror::Class> lock(self, klass);
5156
5157 // Re-check under the lock in case another thread initialized ahead of us.
5158 if (klass->IsInitialized()) {
5159 return true;
5160 }
5161
5162 // Was the class already found to be erroneous? Done under the lock to match the JLS.
5163 if (klass->IsErroneous()) {
5164 ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
5165 VlogClassInitializationFailure(klass);
5166 return false;
5167 }
5168
5169 CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5170 << klass->PrettyClass() << ": state=" << klass->GetStatus();
5171
5172 if (!klass->IsVerified()) {
5173 VerifyClass(self, klass);
5174 if (!klass->IsVerified()) {
5175 // We failed to verify, expect either the klass to be erroneous or verification failed at
5176 // compile time.
5177 if (klass->IsErroneous()) {
5178 // The class is erroneous. This may be a verifier error, or another thread attempted
5179 // verification and/or initialization and failed. We can distinguish those cases by
5180 // whether an exception is already pending.
5181 if (self->IsExceptionPending()) {
5182 // Check that it's a VerifyError.
5183 DCHECK_EQ("java.lang.Class<java.lang.VerifyError>",
5184 mirror::Class::PrettyClass(self->GetException()->GetClass()));
5185 } else {
5186 // Check that another thread attempted initialization.
5187 DCHECK_NE(0, klass->GetClinitThreadId());
5188 DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5189 // Need to rethrow the previous failure now.
5190 ThrowEarlierClassFailure(klass.Get(), true);
5191 }
5192 VlogClassInitializationFailure(klass);
5193 } else {
5194 CHECK(Runtime::Current()->IsAotCompiler());
5195 CHECK_EQ(klass->GetStatus(), ClassStatus::kRetryVerificationAtRuntime);
5196 self->AssertNoPendingException();
5197 self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
5198 }
5199 self->AssertPendingException();
5200 return false;
5201 } else {
5202 self->AssertNoPendingException();
5203 }
5204
5205 // A separate thread could have moved us all the way to initialized. A "simple" example
5206 // involves a subclass of the current class being initialized at the same time (which
5207 // will implicitly initialize the superclass, if scheduled that way). b/28254258
5208 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
5209 if (klass->IsInitialized()) {
5210 return true;
5211 }
5212 }
5213
5214 // If the class is ClassStatus::kInitializing, either this thread is
5215 // initializing higher up the stack or another thread has beat us
5216 // to initializing and we need to wait. Either way, this
5217 // invocation of InitializeClass will not be responsible for
5218 // running <clinit> and will return.
5219 if (klass->GetStatus() == ClassStatus::kInitializing) {
5220 // Could have got an exception during verification.
5221 if (self->IsExceptionPending()) {
5222 VlogClassInitializationFailure(klass);
5223 return false;
5224 }
5225 // We caught somebody else in the act; was it us?
5226 if (klass->GetClinitThreadId() == self->GetTid()) {
5227 // Yes. That's fine. Return so we can continue initializing.
5228 return true;
5229 }
5230 // No. That's fine. Wait for another thread to finish initializing.
5231 return WaitForInitializeClass(klass, self, lock);
5232 }
5233
5234 // Try to get the oat class's status for this class if the oat file is present. The compiler
5235 // tries to validate superclass descriptors, and writes the result into the oat file.
5236 // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5237 // is different at runtime than it was at compile time, the oat file is rejected. So if the
5238 // oat file is present, the classpaths must match, and the runtime check can be skipped.
5239 bool has_oat_class = false;
5240 const Runtime* runtime = Runtime::Current();
5241 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5242 ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5243 : OatFile::OatClass::Invalid();
5244 if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
5245 !ValidateSuperClassDescriptors(klass)) {
5246 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5247 return false;
5248 }
5249 self->AllowThreadSuspension();
5250
5251 CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
5252 << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
5253
5254 // From here out other threads may observe that we're initializing and so changes of state
5255 // require a notification.
5256 klass->SetClinitThreadId(self->GetTid());
5257 mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
5258
5259 t0 = NanoTime();
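// t0 and t1 (taken after <clinit> returns) bracket the whole initialization; the delta feeds
// the class_init_time_ns statistics updated at the end of this function.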
5260 }
5261
5262 // Initialize super classes, must be done while initializing for the JLS.
5263 if (!klass->IsInterface() && klass->HasSuperClass()) {
5264 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5265 if (!super_class->IsInitialized()) {
5266 CHECK(!super_class->IsInterface());
5267 CHECK(can_init_parents);
5268 StackHandleScope<1> hs(self);
5269 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
5270 bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
5271 if (!super_initialized) {
5272 // The super class was verified before we entered the initializing state, so we should only
5273 // be here if the super class became erroneous due to initialization.
5274 // For the case of aot compiler, the super class might also be initializing but we don't
5275 // want to process circular dependencies in pre-compile.
5276 CHECK(self->IsExceptionPending())
5277 << "Super class initialization failed for "
5278 << handle_scope_super->PrettyDescriptor()
5279 << " that has unexpected status " << handle_scope_super->GetStatus()
5280 << "\nPending exception:\n"
5281 << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
5282 ObjectLock<mirror::Class> lock(self, klass);
5283 // Initialization failed because the super-class is erroneous.
5284 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5285 return false;
5286 }
5287 }
5288 }
5289
5290 if (!klass->IsInterface()) {
5291 // Initialize interfaces with default methods for the JLS.
5292 size_t num_direct_interfaces = klass->NumDirectInterfaces();
5293 // Only set up the (expensive) handle scope if we actually need to.
5294 if (UNLIKELY(num_direct_interfaces > 0)) {
5295 StackHandleScope<1> hs_iface(self);
5296 MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5297 for (size_t i = 0; i < num_direct_interfaces; i++) {
5298 handle_scope_iface.Assign(mirror::Class::GetDirectInterface(self, klass.Get(), i));
5299 CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
5300 CHECK(handle_scope_iface->IsInterface());
5301 if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5302 // We have already done this for this interface. Skip it.
5303 continue;
5304 }
5305 // We cannot just call InitializeClass() directly because we need to ensure that ALL
5306 // interfaces with default methods are initialized. Non-default interface initialization
5307 // will not affect other non-default super-interfaces.
5308 bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5309 handle_scope_iface,
5310 can_init_statics,
5311 can_init_parents);
5312 if (!iface_initialized) {
5313 ObjectLock<mirror::Class> lock(self, klass);
5314 // Initialization failed because one of our interfaces with default methods is erroneous.
5315 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5316 return false;
5317 }
5318 }
5319 }
5320 }
5321
5322 const size_t num_static_fields = klass->NumStaticFields();
5323 if (num_static_fields > 0) {
5324 const dex::ClassDef* dex_class_def = klass->GetClassDef();
5325 CHECK(dex_class_def != nullptr);
5326 StackHandleScope<3> hs(self);
5327 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5328 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5329
5330 // Eagerly fill in static fields so that we don't have to do as many expensive
5331 // Class::FindStaticField calls in ResolveField.
5332 for (size_t i = 0; i < num_static_fields; ++i) {
5333 ArtField* field = klass->GetStaticField(i);
5334 const uint32_t field_idx = field->GetDexFieldIndex();
5335 ArtField* resolved_field = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
5336 if (resolved_field == nullptr) {
5337 // Populating cache of a dex file which defines `klass` should always be allowed.
5338 DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5339 field,
5340 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5341 hiddenapi::AccessMethod::kNone));
5342 dex_cache->SetResolvedField(field_idx, field, image_pointer_size_);
5343 } else {
5344 DCHECK_EQ(field, resolved_field);
5345 }
5346 }
5347
5348 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5349 class_loader,
5350 this,
5351 *dex_class_def);
5352 const DexFile& dex_file = *dex_cache->GetDexFile();
5353
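// Apply any encoded static values. For example, a field such as "static final int X = 42;" is
// typically stored by the dex compiler in the class_def's encoded static values and receives its
// value here without executing code; fields without an encoded value are left for <clinit>.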
5354 if (value_it.HasNext()) {
5355 ClassAccessor accessor(dex_file, *dex_class_def);
5356 CHECK(can_init_statics);
5357 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5358 if (!value_it.HasNext()) {
5359 break;
5360 }
5361 ArtField* art_field = ResolveField(field.GetIndex(),
5362 dex_cache,
5363 class_loader,
5364 /* is_static= */ true);
5365 if (Runtime::Current()->IsActiveTransaction()) {
5366 value_it.ReadValueToField<true>(art_field);
5367 } else {
5368 value_it.ReadValueToField<false>(art_field);
5369 }
5370 if (self->IsExceptionPending()) {
5371 break;
5372 }
5373 value_it.Next();
5374 }
5375 DCHECK(self->IsExceptionPending() || !value_it.HasNext());
5376 }
5377 }
5378
5379
5380 if (!self->IsExceptionPending()) {
5381 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5382 if (clinit != nullptr) {
5383 CHECK(can_init_statics);
5384 JValue result;
5385 clinit->Invoke(self, nullptr, 0, &result, "V");
5386 }
5387 }
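// If <clinit> threw, the pending exception is handled below: WrapExceptionInInitializer() wraps
// it as required by the JLS and the class status is set to an error state.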
5388 self->AllowThreadSuspension();
5389 uint64_t t1 = NanoTime();
5390
5391 bool success = true;
5392 {
5393 ObjectLock<mirror::Class> lock(self, klass);
5394
5395 if (self->IsExceptionPending()) {
5396 WrapExceptionInInitializer(klass);
5397 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5398 success = false;
5399 } else if (Runtime::Current()->IsTransactionAborted()) {
5400 // The exception thrown when the transaction aborted has been caught and cleared
5401 // so we need to throw it again now.
5402 VLOG(compiler) << "Return from class initializer of "
5403 << mirror::Class::PrettyDescriptor(klass.Get())
5404 << " without exception while transaction was aborted: re-throw it now.";
5405 Runtime::Current()->ThrowTransactionAbortError(self);
5406 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5407 success = false;
5408 } else {
5409 RuntimeStats* global_stats = Runtime::Current()->GetStats();
5410 RuntimeStats* thread_stats = self->GetStats();
5411 ++global_stats->class_init_count;
5412 ++thread_stats->class_init_count;
5413 global_stats->class_init_time_ns += (t1 - t0);
5414 thread_stats->class_init_time_ns += (t1 - t0);
5415 // Set the class as initialized, except if we failed to initialize the static fields.
5416 mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
5417 if (VLOG_IS_ON(class_linker)) {
5418 std::string temp;
5419 LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
5420 klass->GetLocation();
5421 }
5422 // Opportunistically set static method trampolines to their destination.
5423 FixupStaticTrampolines(klass.Get());
5424 }
5425 }
5426 return success;
5427 }
5428
5429 // We recursively run down the tree of interfaces. We need to do this in the order they are declared
5430 // and perform the initialization only on those interfaces that contain default methods.
5431 bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5432 Handle<mirror::Class> iface,
5433 bool can_init_statics,
5434 bool can_init_parents) {
5435 CHECK(iface->IsInterface());
5436 size_t num_direct_ifaces = iface->NumDirectInterfaces();
5437 // Only create the (expensive) handle scope if we need it.
5438 if (UNLIKELY(num_direct_ifaces > 0)) {
5439 StackHandleScope<1> hs(self);
5440 MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5441 // First we initialize all of iface's super-interfaces recursively.
5442 for (size_t i = 0; i < num_direct_ifaces; i++) {
5443 ObjPtr<mirror::Class> super_iface = mirror::Class::GetDirectInterface(self, iface.Get(), i);
5444 CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
5445 if (!super_iface->HasBeenRecursivelyInitialized()) {
5446 // Recursive step
5447 handle_super_iface.Assign(super_iface);
5448 if (!InitializeDefaultInterfaceRecursive(self,
5449 handle_super_iface,
5450 can_init_statics,
5451 can_init_parents)) {
5452 return false;
5453 }
5454 }
5455 }
5456 }
5457
5458 bool result = true;
5459 // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5460 // initialize if we don't have default methods.
5461 if (iface->HasDefaultMethods()) {
5462 result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5463 }
5464
5465 // Mark that this interface has undergone recursive default interface initialization so we know we
5466 // can skip it on any later class initializations. We do this even if we are not a default
5467 // interface since we can still avoid the traversal. This is purely a performance optimization.
5468 if (result) {
5469 // TODO This should be done in a better way
5470 // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5471 // interface. It is bad (Java) style, but not impossible. Marking the recursive
5472 // initialization is a performance optimization (to avoid another idempotent visit
5473 // for other implementing classes/interfaces), and can be revisited later.
5474 ObjectTryLock<mirror::Class> lock(self, iface);
5475 if (lock.Acquired()) {
5476 iface->SetRecursivelyInitialized();
5477 }
5478 }
5479 return result;
5480 }
5481
5482 bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5483 Thread* self,
5484 ObjectLock<mirror::Class>& lock)
5485 REQUIRES_SHARED(Locks::mutator_lock_) {
5486 while (true) {
5487 self->AssertNoPendingException();
5488 CHECK(!klass->IsInitialized());
5489 lock.WaitIgnoringInterrupts();
5490
5491 // When we wake up, repeat the test for init-in-progress. If
5492 // there's an exception pending (only possible if
5493 // we were not using WaitIgnoringInterrupts), bail out.
5494 if (self->IsExceptionPending()) {
5495 WrapExceptionInInitializer(klass);
5496 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5497 return false;
5498 }
5499 // Spurious wakeup? Go back to waiting.
5500 if (klass->GetStatus() == ClassStatus::kInitializing) {
5501 continue;
5502 }
5503 if (klass->GetStatus() == ClassStatus::kVerified &&
5504 Runtime::Current()->IsAotCompiler()) {
5505 // Compile time initialization failed.
5506 return false;
5507 }
5508 if (klass->IsErroneous()) {
5509 // The caller wants an exception, but it was thrown in a
5510 // different thread. Synthesize one here.
5511 ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
5512 klass->PrettyDescriptor().c_str());
5513 VlogClassInitializationFailure(klass);
5514 return false;
5515 }
5516 if (klass->IsInitialized()) {
5517 return true;
5518 }
5519 LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
5520 << klass->GetStatus();
5521 }
5522 UNREACHABLE();
5523 }
5524
5525 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5526 Handle<mirror::Class> super_klass,
5527 ArtMethod* method,
5528 ArtMethod* m)
5529 REQUIRES_SHARED(Locks::mutator_lock_) {
5530 DCHECK(Thread::Current()->IsExceptionPending());
5531 DCHECK(!m->IsProxyMethod());
5532 const DexFile* dex_file = m->GetDexFile();
5533 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5534 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
5535 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
5536 std::string return_type = dex_file->PrettyType(return_type_idx);
5537 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5538 ThrowWrappedLinkageError(klass.Get(),
5539 "While checking class %s method %s signature against %s %s: "
5540 "Failed to resolve return type %s with %s",
5541 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5542 ArtMethod::PrettyMethod(method).c_str(),
5543 super_klass->IsInterface() ? "interface" : "superclass",
5544 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5545 return_type.c_str(), class_loader.c_str());
5546 }
5547
5548 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5549 Handle<mirror::Class> super_klass,
5550 ArtMethod* method,
5551 ArtMethod* m,
5552 uint32_t index,
5553 dex::TypeIndex arg_type_idx)
5554 REQUIRES_SHARED(Locks::mutator_lock_) {
5555 DCHECK(Thread::Current()->IsExceptionPending());
5556 DCHECK(!m->IsProxyMethod());
5557 const DexFile* dex_file = m->GetDexFile();
5558 std::string arg_type = dex_file->PrettyType(arg_type_idx);
5559 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5560 ThrowWrappedLinkageError(klass.Get(),
5561 "While checking class %s method %s signature against %s %s: "
5562 "Failed to resolve arg %u type %s with %s",
5563 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5564 ArtMethod::PrettyMethod(method).c_str(),
5565 super_klass->IsInterface() ? "interface" : "superclass",
5566 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5567 index, arg_type.c_str(), class_loader.c_str());
5568 }
5569
5570 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5571 Handle<mirror::Class> super_klass,
5572 ArtMethod* method,
5573 const std::string& error_msg)
5574 REQUIRES_SHARED(Locks::mutator_lock_) {
5575 ThrowLinkageError(klass.Get(),
5576 "Class %s method %s resolves differently in %s %s: %s",
5577 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5578 ArtMethod::PrettyMethod(method).c_str(),
5579 super_klass->IsInterface() ? "interface" : "superclass",
5580 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5581 error_msg.c_str());
5582 }
5583
5584 static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
5585 Handle<mirror::Class> klass,
5586 Handle<mirror::Class> super_klass,
5587 ArtMethod* method1,
5588 ArtMethod* method2)
5589 REQUIRES_SHARED(Locks::mutator_lock_) {
5590 {
5591 StackHandleScope<1> hs(self);
5592 Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
5593 if (UNLIKELY(return_type == nullptr)) {
5594 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
5595 return false;
5596 }
5597 ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
5598 if (UNLIKELY(other_return_type == nullptr)) {
5599 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
5600 return false;
5601 }
5602 if (UNLIKELY(other_return_type != return_type.Get())) {
5603 ThrowSignatureMismatch(klass, super_klass, method1,
5604 StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
5605 return_type->PrettyClassAndClassLoader().c_str(),
5606 return_type.Get(),
5607 other_return_type->PrettyClassAndClassLoader().c_str(),
5608 other_return_type.Ptr()));
5609 return false;
5610 }
5611 }
5612 const dex::TypeList* types1 = method1->GetParameterTypeList();
5613 const dex::TypeList* types2 = method2->GetParameterTypeList();
5614 if (types1 == nullptr) {
5615 if (types2 != nullptr && types2->Size() != 0) {
5616 ThrowSignatureMismatch(klass, super_klass, method1,
5617 StringPrintf("Type list mismatch with %s",
5618 method2->PrettyMethod(true).c_str()));
5619 return false;
5620 }
5621 return true;
5622 } else if (UNLIKELY(types2 == nullptr)) {
5623 if (types1->Size() != 0) {
5624 ThrowSignatureMismatch(klass, super_klass, method1,
5625 StringPrintf("Type list mismatch with %s",
5626 method2->PrettyMethod(true).c_str()));
5627 return false;
5628 }
5629 return true;
5630 }
5631 uint32_t num_types = types1->Size();
5632 if (UNLIKELY(num_types != types2->Size())) {
5633 ThrowSignatureMismatch(klass, super_klass, method1,
5634 StringPrintf("Type list mismatch with %s",
5635 method2->PrettyMethod(true).c_str()));
5636 return false;
5637 }
5638 for (uint32_t i = 0; i < num_types; ++i) {
5639 StackHandleScope<1> hs(self);
5640 dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
5641 Handle<mirror::Class> param_type(hs.NewHandle(
5642 method1->ResolveClassFromTypeIndex(param_type_idx)));
5643 if (UNLIKELY(param_type == nullptr)) {
5644 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5645 method1, i, param_type_idx);
5646 return false;
5647 }
5648 dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
5649 ObjPtr<mirror::Class> other_param_type =
5650 method2->ResolveClassFromTypeIndex(other_param_type_idx);
5651 if (UNLIKELY(other_param_type == nullptr)) {
5652 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5653 method2, i, other_param_type_idx);
5654 return false;
5655 }
5656 if (UNLIKELY(param_type.Get() != other_param_type)) {
5657 ThrowSignatureMismatch(klass, super_klass, method1,
5658 StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
5659 i,
5660 param_type->PrettyClassAndClassLoader().c_str(),
5661 param_type.Get(),
5662 other_param_type->PrettyClassAndClassLoader().c_str(),
5663 other_param_type.Ptr()));
5664 return false;
5665 }
5666 }
5667 return true;
5668 }
5669
5670
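// Checks that methods overridden or implemented across a class-loader boundary resolve their
// signatures to the same classes on both sides. Example of what this catches: class C (loader
// L1) overrides "T m()" declared in its superclass (loader L2); if "T" resolves to different
// classes in L1 and L2, a call through the superclass's vtable slot would not be type-safe, so
// a LinkageError is thrown instead.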
5671 bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
5672 if (klass->IsInterface()) {
5673 return true;
5674 }
5675 // Begin with the methods local to the superclass.
5676 Thread* self = Thread::Current();
5677 StackHandleScope<1> hs(self);
5678 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
5679 if (klass->HasSuperClass() &&
5680 klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
5681 super_klass.Assign(klass->GetSuperClass());
5682 for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
5683 auto* m = klass->GetVTableEntry(i, image_pointer_size_);
5684 auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
5685 if (m != super_m) {
5686 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5687 klass,
5688 super_klass,
5689 m,
5690 super_m))) {
5691 self->AssertPendingException();
5692 return false;
5693 }
5694 }
5695 }
5696 }
5697 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
5698 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
5699 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
5700 uint32_t num_methods = super_klass->NumVirtualMethods();
5701 for (uint32_t j = 0; j < num_methods; ++j) {
5702 auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
5703 j, image_pointer_size_);
5704 auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
5705 if (m != super_m) {
5706 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5707 klass,
5708 super_klass,
5709 m,
5710 super_m))) {
5711 self->AssertPendingException();
5712 return false;
5713 }
5714 }
5715 }
5716 }
5717 }
5718 return true;
5719 }
5720
5721 bool ClassLinker::EnsureInitialized(Thread* self,
5722 Handle<mirror::Class> c,
5723 bool can_init_fields,
5724 bool can_init_parents) {
5725 DCHECK(c != nullptr);
5726
5727 if (c->IsInitialized()) {
5728 DCHECK(c->WasVerificationAttempted()) << c->PrettyClassAndClassLoader();
5729 return true;
5730 }
5731 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5732 //
5733 // Ensure the bitstring is initialized before any of the class initialization
5734 // logic occurs. Once a class initializer starts running, objects can
5735 // escape into the heap and use the subtype checking code.
5736 //
5737 // Note: A class whose SubtypeCheckInfo is at least Initialized means it
5738 // can be used as a source for the IsSubClass check, and that all ancestors
5739 // of the class are Assigned (can be used as a target for IsSubClass check)
5740 // or Overflowed (can be used as a source for IsSubClass check).
5741 if (kBitstringSubtypeCheckEnabled) {
5742 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5743 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
5744 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
5745 }
5746 const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
5747 if (!success) {
5748 if (can_init_fields && can_init_parents) {
5749 CHECK(self->IsExceptionPending()) << c->PrettyClass();
5750 }
5751 } else {
5752 self->AssertNoPendingException();
5753 }
5754 return success;
5755 }
5756
5757 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
5758 ObjPtr<mirror::Class> new_class) {
5759 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
5760 for (ArtField& field : new_class->GetIFields()) {
5761 if (field.GetDeclaringClass() == temp_class) {
5762 field.SetDeclaringClass(new_class);
5763 }
5764 }
5765
5766 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
5767 for (ArtField& field : new_class->GetSFields()) {
5768 if (field.GetDeclaringClass() == temp_class) {
5769 field.SetDeclaringClass(new_class);
5770 }
5771 }
5772
5773 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
5774 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
5775 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
5776 if (method.GetDeclaringClass() == temp_class) {
5777 method.SetDeclaringClass(new_class);
5778 }
5779 }
5780
5781 // Make sure the remembered set and mod-union tables know that we updated some of the native
5782 // roots.
5783 WriteBarrier::ForEveryFieldWrite(new_class);
5784 }
5785
5786 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5787 CHECK(class_loader->GetAllocator() == nullptr);
5788 CHECK(class_loader->GetClassTable() == nullptr);
5789 Thread* const self = Thread::Current();
5790 ClassLoaderData data;
5791 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
5792 // Create and set the class table.
5793 data.class_table = new ClassTable;
5794 class_loader->SetClassTable(data.class_table);
5795 // Create and set the linear allocator.
5796 data.allocator = Runtime::Current()->CreateLinearAlloc();
5797 class_loader->SetAllocator(data.allocator);
5798 // Add to the list so that we know to free the data later.
5799 class_loaders_.push_back(data);
5800 }
5801
5802 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5803 if (class_loader == nullptr) {
5804 return boot_class_table_.get();
5805 }
5806 ClassTable* class_table = class_loader->GetClassTable();
5807 if (class_table == nullptr) {
5808 RegisterClassLoader(class_loader);
5809 class_table = class_loader->GetClassTable();
5810 DCHECK(class_table != nullptr);
5811 }
5812 return class_table;
5813 }
5814
5815 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5816 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
5817 }
5818
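// Returns the IMT of the closest ancestor that should have one (klass itself is not considered),
// or null if there is none; LinkClass() uses this to share the superclass's IMT when this class
// introduces no new conflicts and no differing entries.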
5819 static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
5820 REQUIRES_SHARED(Locks::mutator_lock_) {
5821 while (klass->HasSuperClass()) {
5822 klass = klass->GetSuperClass();
5823 if (klass->ShouldHaveImt()) {
5824 return klass->GetImt(pointer_size);
5825 }
5826 }
5827 return nullptr;
5828 }
5829
5830 bool ClassLinker::LinkClass(Thread* self,
5831 const char* descriptor,
5832 Handle<mirror::Class> klass,
5833 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
5834 MutableHandle<mirror::Class>* h_new_class_out) {
5835 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
5836
5837 if (!LinkSuperClass(klass)) {
5838 return false;
5839 }
5840 ArtMethod* imt_data[ImTable::kSize];
5841 // If there are any new conflicts compared to super class.
5842 bool new_conflict = false;
5843 std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
5844 if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
5845 return false;
5846 }
5847 if (!LinkInstanceFields(self, klass)) {
5848 return false;
5849 }
5850 size_t class_size;
5851 if (!LinkStaticFields(self, klass, &class_size)) {
5852 return false;
5853 }
5854 CreateReferenceInstanceOffsets(klass);
5855 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
5856
5857 ImTable* imt = nullptr;
5858 if (klass->ShouldHaveImt()) {
5859 // If there are any new conflicts compared to the super class we can not make a copy. There
5860 // can be cases where both will have a conflict method at the same slot without having the same
5861 // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
5862 // will possibly create a table that is incorrect for either of the classes.
5863 // Same IMT with new_conflict does not happen very often.
5864 if (!new_conflict) {
5865 ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
5866 if (super_imt != nullptr) {
5867 bool imt_equals = true;
5868 for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
5869 imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
5870 }
5871 if (imt_equals) {
5872 imt = super_imt;
5873 }
5874 }
5875 }
5876 if (imt == nullptr) {
5877 LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
5878 imt = reinterpret_cast<ImTable*>(
5879 allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
5880 if (imt == nullptr) {
5881 return false;
5882 }
5883 imt->Populate(imt_data, image_pointer_size_);
5884 }
5885 }
5886
5887 if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
5888 // We don't need to retire this class as it has no embedded tables or it was created the
5889 // correct size during class linker initialization.
5890 CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();
5891
5892 if (klass->ShouldHaveEmbeddedVTable()) {
5893 klass->PopulateEmbeddedVTable(image_pointer_size_);
5894 }
5895 if (klass->ShouldHaveImt()) {
5896 klass->SetImt(imt, image_pointer_size_);
5897 }
5898
5899 // Update CHA info based on whether we override methods.
5900 // Have to do this before setting the class as resolved which allows
5901 // instantiation of klass.
5902 if (cha_ != nullptr) {
5903 cha_->UpdateAfterLoadingOf(klass);
5904 }
5905
5906 // This will notify waiters on klass that saw the not yet resolved
5907 // class in the class_table_ during EnsureResolved.
5908 mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
5909 h_new_class_out->Assign(klass.Get());
5910 } else {
5911 CHECK(!klass->IsResolved());
5912 // Retire the temporary class and create the correctly sized resolved class.
5913 StackHandleScope<1> hs(self);
5914 auto h_new_class = hs.NewHandle(klass->CopyOf(self, class_size, imt, image_pointer_size_));
5915 // Set arrays to null since we don't want to have multiple classes with the same ArtField or
5916 // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
5917 // may not see any references to the target space and clean the card for a class if another
5918 // class had the same array pointer.
5919 klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
5920 klass->SetSFieldsPtrUnchecked(nullptr);
5921 klass->SetIFieldsPtrUnchecked(nullptr);
5922 if (UNLIKELY(h_new_class == nullptr)) {
5923 self->AssertPendingOOMException();
5924 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
5925 return false;
5926 }
5927
5928 CHECK_EQ(h_new_class->GetClassSize(), class_size);
5929 ObjectLock<mirror::Class> lock(self, h_new_class);
5930 FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());
5931
5932 {
5933 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
5934 const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
5935 ClassTable* const table = InsertClassTableForClassLoader(class_loader);
5936 const ObjPtr<mirror::Class> existing =
5937 table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
5938 if (class_loader != nullptr) {
5939 // We updated the class in the class table, perform the write barrier so that the GC knows
5940 // about the change.
5941 WriteBarrier::ForEveryFieldWrite(class_loader);
5942 }
5943 CHECK_EQ(existing, klass.Get());
5944 if (log_new_roots_) {
5945 new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
5946 }
5947 }
5948
5949 // Update CHA info based on whether we override methods.
5950 // Have to do this before setting the class as resolved which allows
5951 // instantiation of klass.
5952 if (cha_ != nullptr) {
5953 cha_->UpdateAfterLoadingOf(h_new_class);
5954 }
5955
5956 // This will notify waiters on temp class that saw the not yet resolved class in the
5957 // class_table_ during EnsureResolved.
5958 mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);
5959
5960 CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
5961 // This will notify waiters on new_class that saw the not yet resolved
5962 // class in the class_table_ during EnsureResolved.
5963 mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
5964 // Return the new class.
5965 h_new_class_out->Assign(h_new_class.Get());
5966 }
5967 return true;
5968 }
5969
5970 bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
5971 CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
5972 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
5973 dex::TypeIndex super_class_idx = class_def.superclass_idx_;
5974 if (super_class_idx.IsValid()) {
5975 // Check that a class does not inherit from itself directly.
5976 //
5977 // TODO: This is a cheap check to detect the straightforward case
5978 // of a class extending itself (b/28685551), but we should do a
5979 // proper cycle detection on loaded classes, to detect all cases
5980 // of class circularity errors (b/28830038).
5981 if (super_class_idx == class_def.class_idx_) {
5982 ThrowClassCircularityError(klass.Get(),
5983 "Class %s extends itself",
5984 klass->PrettyDescriptor().c_str());
5985 return false;
5986 }
5987
5988 ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
5989 if (super_class == nullptr) {
5990 DCHECK(Thread::Current()->IsExceptionPending());
5991 return false;
5992 }
5993 // Verify
5994 if (!klass->CanAccess(super_class)) {
5995 ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
5996 super_class->PrettyDescriptor().c_str(),
5997 klass->PrettyDescriptor().c_str());
5998 return false;
5999 }
6000 CHECK(super_class->IsResolved());
6001 klass->SetSuperClass(super_class);
6002 }
6003 const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
6004 if (interfaces != nullptr) {
6005 for (size_t i = 0; i < interfaces->Size(); i++) {
6006 dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
6007 ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
6008 if (interface == nullptr) {
6009 DCHECK(Thread::Current()->IsExceptionPending());
6010 return false;
6011 }
6012 // Verify
6013 if (!klass->CanAccess(interface)) {
6014 // TODO: the RI seemed to ignore this in my testing.
6015 ThrowIllegalAccessError(klass.Get(),
6016 "Interface %s implemented by class %s is inaccessible",
6017 interface->PrettyDescriptor().c_str(),
6018 klass->PrettyDescriptor().c_str());
6019 return false;
6020 }
6021 }
6022 }
6023 // Mark the class as loaded.
6024 mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
6025 return true;
6026 }
6027
6028 bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
6029 CHECK(!klass->IsPrimitive());
6030 ObjPtr<mirror::Class> super = klass->GetSuperClass();
6031 ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
6032 if (klass.Get() == object_class) {
6033 if (super != nullptr) {
6034 ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
6035 return false;
6036 }
6037 return true;
6038 }
6039 if (super == nullptr) {
6040 ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
6041 klass->PrettyDescriptor().c_str());
6042 return false;
6043 }
6044 // Verify
6045 if (klass->IsInterface() && super != object_class) {
6046 ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
6047 return false;
6048 }
6049 if (super->IsFinal()) {
6050 ThrowVerifyError(klass.Get(),
6051 "Superclass %s of %s is declared final",
6052 super->PrettyDescriptor().c_str(),
6053 klass->PrettyDescriptor().c_str());
6054 return false;
6055 }
6056 if (super->IsInterface()) {
6057 ThrowIncompatibleClassChangeError(klass.Get(),
6058 "Superclass %s of %s is an interface",
6059 super->PrettyDescriptor().c_str(),
6060 klass->PrettyDescriptor().c_str());
6061 return false;
6062 }
6063 if (!klass->CanAccess(super)) {
6064 ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
6065 super->PrettyDescriptor().c_str(),
6066 klass->PrettyDescriptor().c_str());
6067 return false;
6068 }
6069
6070 // Inherit kAccClassIsFinalizable from the superclass in case this
6071 // class doesn't override finalize.
6072 if (super->IsFinalizable()) {
6073 klass->SetFinalizable();
6074 }
6075
6076 // Inherit the class loader flag from the super class.
6077 if (super->IsClassLoaderClass()) {
6078 klass->SetClassLoaderClass();
6079 }
6080
6081 // Inherit reference flags (if any) from the superclass.
6082 uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
6083 if (reference_flags != 0) {
6084 CHECK_EQ(klass->GetClassFlags(), 0u);
6085 klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
6086 }
6087 // Disallow custom direct subclasses of java.lang.ref.Reference.
6088 if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
6089 ThrowLinkageError(klass.Get(),
6090 "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
6091 klass->PrettyDescriptor().c_str());
6092 return false;
6093 }
6094
6095 if (kIsDebugBuild) {
6096 // Ensure super classes are fully resolved prior to resolving fields.
6097 while (super != nullptr) {
6098 CHECK(super->IsResolved());
6099 super = super->GetSuperClass();
6100 }
6101 }
6102 return true;
6103 }
6104
6105 // Populate the class vtable and itable. Compute return type indices.
6106 bool ClassLinker::LinkMethods(Thread* self,
6107 Handle<mirror::Class> klass,
6108 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
6109 bool* out_new_conflict,
6110 ArtMethod** out_imt) {
6111 self->AllowThreadSuspension();
6112 // A map from vtable indexes to the method they need to be updated to point to. Used because we
6113 // need to have default methods be in the virtuals array of each class but we don't set that up
6114 // until LinkInterfaceMethods.
6115 std::unordered_map<size_t, ClassLinker::MethodTranslation> default_translations;
6116 // Link virtual methods then interface methods.
6117 // We set up the interface lookup table first because we need it to determine if we need to update
6118 // any vtable entries with new default method implementations.
6119 return SetupInterfaceLookupTable(self, klass, interfaces)
6120 && LinkVirtualMethods(self, klass, /*out*/ &default_translations)
6121 && LinkInterfaceMethods(self, klass, default_translations, out_new_conflict, out_imt);
6122 }
6123
6124 // Comparator for the name and signature of a method, used when finding overriding methods.
6125 // The implementation avoids the use of handles; if it didn't, then rather than comparing dex
6126 // files we could compare dex caches in the implementation below.
6127 class MethodNameAndSignatureComparator final : public ValueObject {
6128 public:
6129 explicit MethodNameAndSignatureComparator(ArtMethod* method)
6130 REQUIRES_SHARED(Locks::mutator_lock_) :
6131 dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
6132 name_(nullptr), name_len_(0) {
6133 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
6134 }
6135
6136 const char* GetName() {
6137 if (name_ == nullptr) {
6138 name_ = dex_file_->StringDataAndUtf16LengthByIdx(mid_->name_idx_, &name_len_);
6139 }
6140 return name_;
6141 }
6142
6143 bool HasSameNameAndSignature(ArtMethod* other)
6144 REQUIRES_SHARED(Locks::mutator_lock_) {
6145 DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
6146 const DexFile* other_dex_file = other->GetDexFile();
6147 const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
6148 if (dex_file_ == other_dex_file) {
6149 return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6150 }
6151 GetName(); // Only called to make sure it's calculated.
6152 uint32_t other_name_len;
6153 const char* other_name = other_dex_file->StringDataAndUtf16LengthByIdx(other_mid.name_idx_,
6154 &other_name_len);
6155 if (name_len_ != other_name_len || strcmp(name_, other_name) != 0) {
6156 return false;
6157 }
6158 return dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
6159 }
6160
6161 private:
6162 // Dex file for the method to compare against.
6163 const DexFile* const dex_file_;
6164 // MethodId for the method to compare against.
6165 const dex::MethodId* const mid_;
6166 // Lazily computed name from the dex file's strings.
6167 const char* name_;
6168 // Lazily computed name length.
6169 uint32_t name_len_;
6170 };
6171
6172 class LinkVirtualHashTable {
6173 public:
6174 LinkVirtualHashTable(Handle<mirror::Class> klass,
6175 size_t hash_size,
6176 uint32_t* hash_table,
6177 PointerSize image_pointer_size)
6178 : klass_(klass),
6179 hash_size_(hash_size),
6180 hash_table_(hash_table),
6181 image_pointer_size_(image_pointer_size) {
6182 std::fill(hash_table_, hash_table_ + hash_size_, invalid_index_);
6183 }
6184
6185 void Add(uint32_t virtual_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
6186 ArtMethod* local_method = klass_->GetVirtualMethodDuringLinking(
6187 virtual_method_index, image_pointer_size_);
6188 const char* name = local_method->GetInterfaceMethodIfProxy(image_pointer_size_)->GetName();
6189 uint32_t hash = ComputeModifiedUtf8Hash(name);
6190 uint32_t index = hash % hash_size_;
6191 // Linear probe until we have an empty slot.
6192 while (hash_table_[index] != invalid_index_) {
6193 if (++index == hash_size_) {
6194 index = 0;
6195 }
6196 }
6197 hash_table_[index] = virtual_method_index;
6198 }
6199
6200 uint32_t FindAndRemove(MethodNameAndSignatureComparator* comparator)
6201 REQUIRES_SHARED(Locks::mutator_lock_) {
6202 const char* name = comparator->GetName();
6203 uint32_t hash = ComputeModifiedUtf8Hash(name);
6204 size_t index = hash % hash_size_;
6205 while (true) {
6206 const uint32_t value = hash_table_[index];
6207 // Since linear probing creates contiguous blocks, hitting an invalid index means we have
6208 // reached the end of the block and can safely assume the method was not found.
6209 if (value == invalid_index_) {
6210 break;
6211 }
6212       if (value != removed_index_) {  // This signifies the method is not already overridden.
6213 ArtMethod* virtual_method =
6214 klass_->GetVirtualMethodDuringLinking(value, image_pointer_size_);
6215 if (comparator->HasSameNameAndSignature(
6216 virtual_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6217 hash_table_[index] = removed_index_;
6218 return value;
6219 }
6220 }
6221 if (++index == hash_size_) {
6222 index = 0;
6223 }
6224 }
6225 return GetNotFoundIndex();
6226 }
6227
6228   static uint32_t GetNotFoundIndex() {
6229 return invalid_index_;
6230 }
6231
6232 private:
6233 static const uint32_t invalid_index_;
6234 static const uint32_t removed_index_;
6235
6236 Handle<mirror::Class> klass_;
6237 const size_t hash_size_;
6238 uint32_t* const hash_table_;
6239 const PointerSize image_pointer_size_;
6240 };
6241
6242 const uint32_t LinkVirtualHashTable::invalid_index_ = std::numeric_limits<uint32_t>::max();
6243 const uint32_t LinkVirtualHashTable::removed_index_ = std::numeric_limits<uint32_t>::max() - 1;
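
// Illustrative sketch (editorial addition, not part of the runtime): LinkVirtualHashTable is a
// fixed-size open-addressing hash table with linear probing, where a removal writes a tombstone
// (removed_index_) instead of clearing the slot so that later probe runs do not stop early. The
// same scheme in a simplified standalone form, with hypothetical names and std::vector storage
// (the caller is assumed to size the table so it never fills, as the 3 * n + 1 sizing below does):
class SketchProbeTable {
 public:
  static constexpr uint32_t kEmpty = std::numeric_limits<uint32_t>::max();
  static constexpr uint32_t kTombstone = std::numeric_limits<uint32_t>::max() - 1u;

  explicit SketchProbeTable(size_t size) : slots_(size, kEmpty) {}

  void Add(uint32_t value, size_t hash) {
    size_t index = hash % slots_.size();
    while (slots_[index] != kEmpty) {  // Linear probe until an empty slot is found.
      if (++index == slots_.size()) {
        index = 0;
      }
    }
    slots_[index] = value;
  }

  // Returns kEmpty if no stored value satisfies `pred`; a match becomes a tombstone.
  template <typename Pred>
  uint32_t FindAndRemove(size_t hash, Pred pred) {
    size_t index = hash % slots_.size();
    while (slots_[index] != kEmpty) {  // Only a truly empty slot terminates the probe run.
      if (slots_[index] != kTombstone && pred(slots_[index])) {
        const uint32_t value = slots_[index];
        slots_[index] = kTombstone;
        return value;
      }
      if (++index == slots_.size()) {
        index = 0;
      }
    }
    return kEmpty;
  }

 private:
  std::vector<uint32_t> slots_;
};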
6244
6245 bool ClassLinker::LinkVirtualMethods(
6246 Thread* self,
6247 Handle<mirror::Class> klass,
6248 /*out*/std::unordered_map<size_t, ClassLinker::MethodTranslation>* default_translations) {
6249 const size_t num_virtual_methods = klass->NumVirtualMethods();
6250 if (klass->IsInterface()) {
6251 // No vtable.
6252 if (!IsUint<16>(num_virtual_methods)) {
6253 ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
6254 return false;
6255 }
6256 bool has_defaults = false;
6257 // Assign each method an IMT index and set the default flag.
6258 for (size_t i = 0; i < num_virtual_methods; ++i) {
6259 ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6260 m->SetMethodIndex(i);
6261 if (!m->IsAbstract()) {
6262 m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
6263 has_defaults = true;
6264 }
6265 }
6266 // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
6267 // during initialization. This is a performance optimization. We could simply traverse the
6268 // virtual_methods_ array again during initialization.
6269 if (has_defaults) {
6270 klass->SetHasDefaultMethods();
6271 }
6272 return true;
6273 } else if (klass->HasSuperClass()) {
6274 const size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
6275 const size_t max_count = num_virtual_methods + super_vtable_length;
6276 StackHandleScope<2> hs(self);
6277 Handle<mirror::Class> super_class(hs.NewHandle(klass->GetSuperClass()));
6278 MutableHandle<mirror::PointerArray> vtable;
6279 if (super_class->ShouldHaveEmbeddedVTable()) {
6280 vtable = hs.NewHandle(AllocPointerArray(self, max_count));
6281 if (UNLIKELY(vtable == nullptr)) {
6282 self->AssertPendingOOMException();
6283 return false;
6284 }
6285 for (size_t i = 0; i < super_vtable_length; i++) {
6286 vtable->SetElementPtrSize(
6287 i, super_class->GetEmbeddedVTableEntry(i, image_pointer_size_), image_pointer_size_);
6288 }
6289       // We might need to change the vtable if we have new virtual methods or new interfaces
6290       // (since that might give us new default methods). If there are no new virtual methods and
6291       // no new interfaces, we can skip the rest since the class cannot override any of the
6292       // super-class's methods. This is required for correctness since without it we might not
6293       // update overridden default method vtable entries correctly.
6294 if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
6295 klass->SetVTable(vtable.Get());
6296 return true;
6297 }
6298 } else {
6299 DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
6300 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
6301 CHECK(super_vtable != nullptr) << super_class->PrettyClass();
6302 // We might need to change vtable if we have new virtual methods or new interfaces (since that
6303 // might give us new default methods). See comment above.
6304 if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
6305 klass->SetVTable(super_vtable);
6306 return true;
6307 }
6308 vtable = hs.NewHandle(
6309 ObjPtr<mirror::PointerArray>::DownCast(super_vtable->CopyOf(self, max_count)));
6310 if (UNLIKELY(vtable == nullptr)) {
6311 self->AssertPendingOOMException();
6312 return false;
6313 }
6314 }
6315     // How the algorithm works:
6316     // 1. Populate the hash table with the indices of klass's virtual methods. The values in the
6317     //    hash table are: invalid_index for unused slots, the virtual method's index while it has
6318     //    not yet been matched to a super vtable entry, and removed_index once it has been matched.
6319     // 2. Loop through the super vtable methods; when entry j is overridden, write the overriding
6320     //    method into vtable slot j and mark its hash table entry as removed to avoid redundant
6321     //    checks. (TODO: maybe use this info to reduce the need for the initial over-sized vtable,
6322     //    which we later shrink back down.)
6323     // 3. Append the methods that did not override anything to the end of the vtable.
6324 static constexpr size_t kMaxStackHash = 250;
6325 // + 1 so that even if we only have new default methods we will still be able to use this hash
6326 // table (i.e. it will never have 0 size).
6327 const size_t hash_table_size = num_virtual_methods * 3 + 1;
6328 uint32_t* hash_table_ptr;
6329 std::unique_ptr<uint32_t[]> hash_heap_storage;
6330 if (hash_table_size <= kMaxStackHash) {
6331 hash_table_ptr = reinterpret_cast<uint32_t*>(
6332 alloca(hash_table_size * sizeof(*hash_table_ptr)));
6333 } else {
6334 hash_heap_storage.reset(new uint32_t[hash_table_size]);
6335 hash_table_ptr = hash_heap_storage.get();
6336 }
6337 LinkVirtualHashTable hash_table(klass, hash_table_size, hash_table_ptr, image_pointer_size_);
6338 // Add virtual methods to the hash table.
6339 for (size_t i = 0; i < num_virtual_methods; ++i) {
6340 DCHECK(klass->GetVirtualMethodDuringLinking(
6341 i, image_pointer_size_)->GetDeclaringClass() != nullptr);
6342 hash_table.Add(i);
6343 }
6344 // Loop through each super vtable method and see if they are overridden by a method we added to
6345 // the hash table.
6346 for (size_t j = 0; j < super_vtable_length; ++j) {
6347 // Search the hash table to see if we are overridden by any method.
6348 ArtMethod* super_method = vtable->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6349 if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
6350 super_method->GetAccessFlags())) {
6351         // Continue on to the next method since this one is package private and cannot be overridden.
6352 // Before Android 4.1, the package-private method super_method might have been incorrectly
6353 // overridden.
6354 continue;
6355 }
6356 MethodNameAndSignatureComparator super_method_name_comparator(
6357 super_method->GetInterfaceMethodIfProxy(image_pointer_size_));
6358 // We remove the method so that subsequent lookups will be faster by making the hash-map
6359 // smaller as we go on.
6360 uint32_t hash_index = hash_table.FindAndRemove(&super_method_name_comparator);
6361 if (hash_index != hash_table.GetNotFoundIndex()) {
6362 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(
6363 hash_index, image_pointer_size_);
6364 if (super_method->IsFinal()) {
6365 ThrowLinkageError(klass.Get(), "Method %s overrides final method in class %s",
6366 virtual_method->PrettyMethod().c_str(),
6367 super_method->GetDeclaringClassDescriptor());
6368 return false;
6369 }
6370 vtable->SetElementPtrSize(j, virtual_method, image_pointer_size_);
6371 virtual_method->SetMethodIndex(j);
6372 } else if (super_method->IsOverridableByDefaultMethod()) {
6373 // We didn't directly override this method but we might through default methods...
6374 // Check for default method update.
6375 ArtMethod* default_method = nullptr;
6376 switch (FindDefaultMethodImplementation(self,
6377 super_method,
6378 klass,
6379 /*out*/&default_method)) {
6380 case DefaultMethodSearchResult::kDefaultConflict: {
6381 // A conflict was found looking for default methods. Note this (assuming it wasn't
6382 // pre-existing) in the translations map.
6383 if (UNLIKELY(!super_method->IsDefaultConflicting())) {
6384 // Don't generate another conflict method to reduce memory use as an optimization.
6385 default_translations->insert(
6386 {j, ClassLinker::MethodTranslation::CreateConflictingMethod()});
6387 }
6388 break;
6389 }
6390 case DefaultMethodSearchResult::kAbstractFound: {
6391 // No conflict but method is abstract.
6392 // We note that this vtable entry must be made abstract.
6393 if (UNLIKELY(!super_method->IsAbstract())) {
6394 default_translations->insert(
6395 {j, ClassLinker::MethodTranslation::CreateAbstractMethod()});
6396 }
6397 break;
6398 }
6399 case DefaultMethodSearchResult::kDefaultFound: {
6400 if (UNLIKELY(super_method->IsDefaultConflicting() ||
6401 default_method->GetDeclaringClass() != super_method->GetDeclaringClass())) {
6402 // Found a default method implementation that is new.
6403             // TODO: Refactor this to add default methods to the virtuals here and not in
6404             // LinkInterfaceMethods, maybe.
6405 // The problem is default methods might override previously present
6406 // default-method or miranda-method vtable entries from the superclass.
6407 // Unfortunately we need these to be entries in this class's virtuals. We do not
6408 // give these entries there until LinkInterfaceMethods so we pass this map around
6409 // to let it know which vtable entries need to be updated.
6410 // Make a note that vtable entry j must be updated, store what it needs to be updated
6411 // to. We will allocate a virtual method slot in LinkInterfaceMethods and fix it up
6412 // then.
6413 default_translations->insert(
6414 {j, ClassLinker::MethodTranslation::CreateTranslatedMethod(default_method)});
6415 VLOG(class_linker) << "Method " << super_method->PrettyMethod()
6416 << " overridden by default "
6417 << default_method->PrettyMethod()
6418 << " in " << mirror::Class::PrettyClass(klass.Get());
6419 }
6420 break;
6421 }
6422 }
6423 }
6424 }
6425 size_t actual_count = super_vtable_length;
6426 // Add the non-overridden methods at the end.
6427 for (size_t i = 0; i < num_virtual_methods; ++i) {
6428 ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6429 size_t method_idx = local_method->GetMethodIndexDuringLinking();
6430 if (method_idx < super_vtable_length &&
6431 local_method == vtable->GetElementPtrSize<ArtMethod*>(method_idx, image_pointer_size_)) {
6432 continue;
6433 }
6434 vtable->SetElementPtrSize(actual_count, local_method, image_pointer_size_);
6435 local_method->SetMethodIndex(actual_count);
6436 ++actual_count;
6437 }
6438 if (!IsUint<16>(actual_count)) {
6439 ThrowClassFormatError(klass.Get(), "Too many methods defined on class: %zd", actual_count);
6440 return false;
6441 }
6442 // Shrink vtable if possible
6443 CHECK_LE(actual_count, max_count);
6444 if (actual_count < max_count) {
6445 vtable.Assign(ObjPtr<mirror::PointerArray>::DownCast(vtable->CopyOf(self, actual_count)));
6446 if (UNLIKELY(vtable == nullptr)) {
6447 self->AssertPendingOOMException();
6448 return false;
6449 }
6450 }
6451 klass->SetVTable(vtable.Get());
6452 } else {
6453 CHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(this));
6454 if (!IsUint<16>(num_virtual_methods)) {
6455 ThrowClassFormatError(klass.Get(), "Too many methods: %d",
6456 static_cast<int>(num_virtual_methods));
6457 return false;
6458 }
6459 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, num_virtual_methods);
6460 if (UNLIKELY(vtable == nullptr)) {
6461 self->AssertPendingOOMException();
6462 return false;
6463 }
6464 for (size_t i = 0; i < num_virtual_methods; ++i) {
6465 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6466 vtable->SetElementPtrSize(i, virtual_method, image_pointer_size_);
6467 virtual_method->SetMethodIndex(i & 0xFFFF);
6468 }
6469 klass->SetVTable(vtable);
6470 }
6471 return true;
6472 }
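
// Illustrative sketch (editorial addition, not part of the runtime): stripped of dex-file,
// access-check and default-method handling, the vtable construction above reduces to "copy the
// super vtable, overwrite the slots the new class overrides, then append the remaining new
// methods". A simplified standalone version, with a hypothetical SketchMethod type and a plain
// unordered_map standing in for LinkVirtualHashTable:
struct SketchMethod {
  std::string name_and_signature;
};
inline std::vector<const SketchMethod*> SketchLinkVtable(
    const std::vector<const SketchMethod*>& super_vtable,
    const std::vector<const SketchMethod*>& new_methods) {
  std::vector<const SketchMethod*> vtable = super_vtable;
  // Index the new methods by name/signature (assumed unique within the class for this sketch).
  std::unordered_map<std::string, const SketchMethod*> unmatched;
  for (const SketchMethod* m : new_methods) {
    unmatched.emplace(m->name_and_signature, m);
  }
  // Overwrite super-vtable slots that are overridden; the override keeps the slot index.
  for (size_t j = 0; j < vtable.size(); ++j) {
    auto it = unmatched.find(vtable[j]->name_and_signature);
    if (it != unmatched.end()) {
      vtable[j] = it->second;
      unmatched.erase(it);  // Mirrors FindAndRemove: each new method is matched at most once.
    }
  }
  // Anything that did not override a super method gets a fresh slot at the end, in declaration
  // order, just like the "actual_count" loop above.
  for (const SketchMethod* m : new_methods) {
    if (unmatched.count(m->name_and_signature) != 0) {
      vtable.push_back(m);
    }
  }
  return vtable;
}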
6473
6474 // Determine if the given iface has any subinterface in the given list that declares the method
6475 // specified by 'target'.
6476 //
6477 // Arguments
6478 // - self: The thread we are running on
6479 // - target: A comparator that will match any method that overrides the method we are checking for
6480 // - iftable: The iftable we are searching for an overriding method on.
6481 // - ifstart: The index of the interface we are checking to see if anything overrides
6482 // - iface: The interface we are checking to see if anything overrides.
6483 // - image_pointer_size:
6484 // The image pointer size.
6485 //
6486 // Returns
6487 // - True: There is some method that matches the target comparator defined in an interface that
6488 // is a subtype of iface.
6489 // - False: There is no method that matches the target comparator in any interface that is a subtype
6490 // of iface.
6491 static bool ContainsOverridingMethodOf(Thread* self,
6492 MethodNameAndSignatureComparator& target,
6493 Handle<mirror::IfTable> iftable,
6494 size_t ifstart,
6495 Handle<mirror::Class> iface,
6496 PointerSize image_pointer_size)
6497 REQUIRES_SHARED(Locks::mutator_lock_) {
6498 DCHECK(self != nullptr);
6499 DCHECK(iface != nullptr);
6500 DCHECK(iftable != nullptr);
6501 DCHECK_GE(ifstart, 0u);
6502 DCHECK_LT(ifstart, iftable->Count());
6503 DCHECK_EQ(iface.Get(), iftable->GetInterface(ifstart));
6504 DCHECK(iface->IsInterface());
6505
6506 size_t iftable_count = iftable->Count();
6507 StackHandleScope<1> hs(self);
6508 MutableHandle<mirror::Class> current_iface(hs.NewHandle<mirror::Class>(nullptr));
6509 for (size_t k = ifstart + 1; k < iftable_count; k++) {
6510 // Skip ifstart since our current interface obviously cannot override itself.
6511 current_iface.Assign(iftable->GetInterface(k));
6512 // Iterate through every method on this interface. The order does not matter.
6513 for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(image_pointer_size)) {
6514 if (UNLIKELY(target.HasSameNameAndSignature(
6515 current_method.GetInterfaceMethodIfProxy(image_pointer_size)))) {
6516 // Check if the i'th interface is a subtype of this one.
6517 if (iface->IsAssignableFrom(current_iface.Get())) {
6518 return true;
6519 }
6520 break;
6521 }
6522 }
6523 }
6524 return false;
6525 }
6526
6527 // Find the default method implementation for 'interface_method' in 'klass'. Stores it into
6528 // out_default_method and returns kDefaultFound on success. If no default method was found return
6529 // kAbstractFound and store nullptr into out_default_method. If an error occurs (such as a
6530 // default_method conflict) it will return kDefaultConflict.
6531 ClassLinker::DefaultMethodSearchResult ClassLinker::FindDefaultMethodImplementation(
6532 Thread* self,
6533 ArtMethod* target_method,
6534 Handle<mirror::Class> klass,
6535 /*out*/ArtMethod** out_default_method) const {
6536 DCHECK(self != nullptr);
6537 DCHECK(target_method != nullptr);
6538 DCHECK(out_default_method != nullptr);
6539
6540 *out_default_method = nullptr;
6541
6542 // We organize the interface table so that, for interface I any subinterfaces J follow it in the
6543 // table. This lets us walk the table backwards when searching for default methods. The first one
6544 // we encounter is the best candidate since it is the most specific. Once we have found it we keep
6545 // track of it and then continue checking all other interfaces, since we need to throw an error if
6546 // we encounter conflicting default method implementations (one is not a subtype of the other).
6547 //
6548 // The order of unrelated interfaces does not matter and is not defined.
6549 size_t iftable_count = klass->GetIfTableCount();
6550 if (iftable_count == 0) {
6551 // No interfaces. We have already reset out to null so just return kAbstractFound.
6552 return DefaultMethodSearchResult::kAbstractFound;
6553 }
6554
6555 StackHandleScope<3> hs(self);
6556 MutableHandle<mirror::Class> chosen_iface(hs.NewHandle<mirror::Class>(nullptr));
6557 MutableHandle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
6558 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
6559 MethodNameAndSignatureComparator target_name_comparator(
6560 target_method->GetInterfaceMethodIfProxy(image_pointer_size_));
6561 // Iterates over the klass's iftable in reverse
6562 for (size_t k = iftable_count; k != 0; ) {
6563 --k;
6564
6565 DCHECK_LT(k, iftable->Count());
6566
6567 iface.Assign(iftable->GetInterface(k));
6568 // Iterate through every declared method on this interface. The order does not matter.
6569 for (auto& method_iter : iface->GetDeclaredVirtualMethods(image_pointer_size_)) {
6570 ArtMethod* current_method = &method_iter;
6571 // Skip abstract methods and methods with different names.
6572 if (current_method->IsAbstract() ||
6573 !target_name_comparator.HasSameNameAndSignature(
6574 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6575 continue;
6576 } else if (!current_method->IsPublic()) {
6577 // The verifier should have caught the non-public method for dex version 37. Just warn and
6578 // skip it since this is from before default-methods so we don't really need to care that it
6579 // has code.
6580 LOG(WARNING) << "Interface method " << current_method->PrettyMethod()
6581 << " is not public! "
6582 << "This will be a fatal error in subsequent versions of android. "
6583 << "Continuing anyway.";
6584 }
6585 if (UNLIKELY(chosen_iface != nullptr)) {
6586 // We have multiple default impls of the same method. This is a potential default conflict.
6587 // We need to check if this possibly conflicting method is either a superclass of the chosen
6588 // default implementation or is overridden by a non-default interface method. In either case
6589 // there is no conflict.
6590 if (!iface->IsAssignableFrom(chosen_iface.Get()) &&
6591 !ContainsOverridingMethodOf(self,
6592 target_name_comparator,
6593 iftable,
6594 k,
6595 iface,
6596 image_pointer_size_)) {
6597 VLOG(class_linker) << "Conflicting default method implementations found: "
6598 << current_method->PrettyMethod() << " and "
6599 << ArtMethod::PrettyMethod(*out_default_method) << " in class "
6600 << klass->PrettyClass() << " conflict.";
6601 *out_default_method = nullptr;
6602 return DefaultMethodSearchResult::kDefaultConflict;
6603 } else {
6604 break; // Continue checking at the next interface.
6605 }
6606 } else {
6607 // chosen_iface == null
6608 if (!ContainsOverridingMethodOf(self,
6609 target_name_comparator,
6610 iftable,
6611 k,
6612 iface,
6613 image_pointer_size_)) {
6614 // Don't set this as the chosen interface if something else is overriding it (because that
6615 // other interface would be potentially chosen instead if it was default). If the other
6616 // interface was abstract then we wouldn't select this interface as chosen anyway since
6617 // the abstract method masks it.
6618 *out_default_method = current_method;
6619 chosen_iface.Assign(iface.Get());
6620 // We should now finish traversing the graph to find if we have default methods that
6621 // conflict.
6622 } else {
6623 VLOG(class_linker) << "A default method '" << current_method->PrettyMethod()
6624 << "' was "
6625 << "skipped because it was overridden by an abstract method in a "
6626 << "subinterface on class '" << klass->PrettyClass() << "'";
6627 }
6628 }
6629 break;
6630 }
6631 }
6632 if (*out_default_method != nullptr) {
6633 VLOG(class_linker) << "Default method '" << (*out_default_method)->PrettyMethod()
6634 << "' selected "
6635 << "as the implementation for '" << target_method->PrettyMethod()
6636 << "' in '" << klass->PrettyClass() << "'";
6637 return DefaultMethodSearchResult::kDefaultFound;
6638 } else {
6639 return DefaultMethodSearchResult::kAbstractFound;
6640 }
6641 }
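
// Illustrative sketch (editorial addition, not part of the runtime): ignoring proxies and the
// override-masking performed by ContainsOverridingMethodOf, the search above walks the flattened
// interface table backwards (subinterfaces come last, so they are visited first), keeps the first
// non-abstract match, and reports a conflict when another default implementation turns up in an
// interface the chosen one does not derive from. A simplified single-parent standalone version
// with hypothetical names:
enum class SketchSearchResult { kDefaultFound, kAbstractFound, kDefaultConflict };
struct SketchIface {
  const SketchIface* parent = nullptr;          // Single-inheritance simplification.
  std::map<std::string, bool> methods;          // Method name -> has a default body.
  bool DerivesFrom(const SketchIface* other) const {
    for (const SketchIface* it = this; it != nullptr; it = it->parent) {
      if (it == other) {
        return true;
      }
    }
    return false;
  }
};
inline SketchSearchResult SketchFindDefault(const std::vector<const SketchIface*>& iftable,
                                            const std::string& name,
                                            const SketchIface** out_chosen) {
  *out_chosen = nullptr;
  for (auto it = iftable.rbegin(); it != iftable.rend(); ++it) {  // Reverse: subinterfaces first.
    const SketchIface* iface = *it;
    auto found = iface->methods.find(name);
    if (found == iface->methods.end() || !found->second) {
      continue;                                 // Absent or abstract: keep searching.
    }
    if (*out_chosen == nullptr) {
      *out_chosen = iface;                      // The most specific default seen so far wins...
    } else if (!(*out_chosen)->DerivesFrom(iface)) {
      *out_chosen = nullptr;                    // ...unless an unrelated interface also has one.
      return SketchSearchResult::kDefaultConflict;
    }
  }
  return (*out_chosen != nullptr) ? SketchSearchResult::kDefaultFound
                                  : SketchSearchResult::kAbstractFound;
}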
6642
6643 ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
6644 ArtMethod* conflict_method,
6645 ArtMethod* interface_method,
6646 ArtMethod* method,
6647 bool force_new_conflict_method) {
6648 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
6649 Runtime* const runtime = Runtime::Current();
6650 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
6651 bool new_entry = conflict_method == runtime->GetImtConflictMethod() || force_new_conflict_method;
6652
6653 // Create a new entry if the existing one is the shared conflict method.
6654 ArtMethod* new_conflict_method = new_entry
6655 ? runtime->CreateImtConflictMethod(linear_alloc)
6656 : conflict_method;
6657
6658 // Allocate a new table. Note that we will leak this table at the next conflict,
6659 // but that's a tradeoff compared to making the table fixed size.
6660 void* data = linear_alloc->Alloc(
6661 Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
6662 image_pointer_size_));
6663 if (data == nullptr) {
6664 LOG(ERROR) << "Failed to allocate conflict table";
6665 return conflict_method;
6666 }
6667 ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
6668 interface_method,
6669 method,
6670 image_pointer_size_);
6671
6672 // Do a fence to ensure threads see the data in the table before it is assigned
6673 // to the conflict method.
6674 // Note that there is a race in the presence of multiple threads and we may leak
6675 // memory from the LinearAlloc, but that's a tradeoff compared to using
6676 // atomic operations.
6677 std::atomic_thread_fence(std::memory_order_release);
6678 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6679 return new_conflict_method;
6680 }
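
// Illustrative sketch (editorial addition, not part of the runtime): the update above follows a
// "copy-on-grow, then publish with release ordering" pattern -- a new, larger table is fully
// built before it becomes visible, so readers that observe the new pointer also observe its
// contents, at the cost of leaking the old table to concurrent readers. A minimal standalone
// analogue with hypothetical names, using std::atomic (available via the same header that
// provides the std::atomic_thread_fence used above):
struct SketchConflictEntry {
  const void* interface_method;
  const void* implementation_method;
};
struct SketchConflictTable {
  std::vector<SketchConflictEntry> entries;
};
inline void SketchPublishWithOneMoreEntry(std::atomic<const SketchConflictTable*>& slot,
                                          const void* interface_method,
                                          const void* implementation_method) {
  const SketchConflictTable* current = slot.load(std::memory_order_acquire);
  auto* grown = new SketchConflictTable();  // Never mutate the already-published table.
  if (current != nullptr) {
    grown->entries = current->entries;      // Copy the existing entries.
  }
  grown->entries.push_back({interface_method, implementation_method});
  // Release store: a reader that sees `grown` also sees its fully initialized entries.
  slot.store(grown, std::memory_order_release);
  // As in the runtime, the previous table is deliberately not freed here; a concurrent reader
  // may still be using it (memory is traded for lock-free simplicity).
}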
6681
6682 bool ClassLinker::AllocateIfTableMethodArrays(Thread* self,
6683 Handle<mirror::Class> klass,
6684 Handle<mirror::IfTable> iftable) {
6685 DCHECK(!klass->IsInterface());
6686 const bool has_superclass = klass->HasSuperClass();
6687 const bool extend_super_iftable = has_superclass;
6688 const size_t ifcount = klass->GetIfTableCount();
6689 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
6690 for (size_t i = 0; i < ifcount; ++i) {
6691 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
6692 if (num_methods > 0) {
6693 const bool is_super = i < super_ifcount;
6694 // This is an interface implemented by a super-class. Therefore we can just copy the method
6695 // array from the superclass.
6696 const bool super_interface = is_super && extend_super_iftable;
6697 ObjPtr<mirror::PointerArray> method_array;
6698 if (super_interface) {
6699 ObjPtr<mirror::IfTable> if_table = klass->GetSuperClass()->GetIfTable();
6700 DCHECK(if_table != nullptr);
6701 DCHECK(if_table->GetMethodArray(i) != nullptr);
6702 // If we are working on a super interface, try extending the existing method array.
6703 method_array = ObjPtr<mirror::PointerArray>::DownCast(
6704 if_table->GetMethodArray(i)->Clone(self));
6705 } else {
6706 method_array = AllocPointerArray(self, num_methods);
6707 }
6708 if (UNLIKELY(method_array == nullptr)) {
6709 self->AssertPendingOOMException();
6710 return false;
6711 }
6712 iftable->SetMethodArray(i, method_array);
6713 }
6714 }
6715 return true;
6716 }
6717
6718 void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6719 ArtMethod* imt_conflict_method,
6720 ArtMethod* current_method,
6721 /*out*/bool* new_conflict,
6722 /*out*/ArtMethod** imt_ref) {
6723 // Place method in imt if entry is empty, place conflict otherwise.
6724 if (*imt_ref == unimplemented_method) {
6725 *imt_ref = current_method;
6726 } else if (!(*imt_ref)->IsRuntimeMethod()) {
6727 // If we are not a conflict and we have the same signature and name as the imt
6728 // entry, it must be that we overwrote a superclass vtable entry.
6729 // Note that we have checked IsRuntimeMethod, as there may be multiple different
6730 // conflict methods.
6731 MethodNameAndSignatureComparator imt_comparator(
6732 (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
6733 if (imt_comparator.HasSameNameAndSignature(
6734 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6735 *imt_ref = current_method;
6736 } else {
6737 *imt_ref = imt_conflict_method;
6738 *new_conflict = true;
6739 }
6740 } else {
6741 // Place the default conflict method. Note that there may be an existing conflict
6742 // method in the IMT, but it could be one tailored to the super class, with a
6743 // specific ImtConflictTable.
6744 *imt_ref = imt_conflict_method;
6745 *new_conflict = true;
6746 }
6747 }
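
// Illustrative sketch (editorial addition, not part of the runtime): the update above is a
// three-way decision on the state of an IMT slot. A condensed standalone restatement of just the
// control flow, with hypothetical names:
enum class SketchSlotState { kUnimplemented, kSingleMethod, kConflict };
inline SketchSlotState SketchNextSlotState(SketchSlotState current,
                                           bool same_name_and_signature) {
  switch (current) {
    case SketchSlotState::kUnimplemented:
      return SketchSlotState::kSingleMethod;  // Empty slot: the new method simply takes it.
    case SketchSlotState::kSingleMethod:
      // A matching name/signature means we merely overrode a superclass entry, so the slot stays
      // a single method; otherwise two unrelated methods now share the slot.
      return same_name_and_signature ? SketchSlotState::kSingleMethod
                                     : SketchSlotState::kConflict;
    case SketchSlotState::kConflict:
      return SketchSlotState::kConflict;      // An already-conflicting slot stays conflicting.
  }
  return current;                             // Unreachable; silences -Wreturn-type.
}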
6748
6749 void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
6750 DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
6751 DCHECK(!klass->IsTemp()) << klass->PrettyClass();
6752 ArtMethod* imt_data[ImTable::kSize];
6753 Runtime* const runtime = Runtime::Current();
6754 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
6755 ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
6756 std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
6757 if (klass->GetIfTable() != nullptr) {
6758 bool new_conflict = false;
6759 FillIMTFromIfTable(klass->GetIfTable(),
6760 unimplemented_method,
6761 conflict_method,
6762 klass,
6763 /*create_conflict_tables=*/true,
6764 /*ignore_copied_methods=*/false,
6765 &new_conflict,
6766 &imt_data[0]);
6767 }
6768 if (!klass->ShouldHaveImt()) {
6769 return;
6770 }
6771 // Compare the IMT with the super class including the conflict methods. If they are equivalent,
6772 // we can just use the same pointer.
6773 ImTable* imt = nullptr;
6774 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
6775 if (super_class != nullptr && super_class->ShouldHaveImt()) {
6776 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
6777 bool same = true;
6778 for (size_t i = 0; same && i < ImTable::kSize; ++i) {
6779 ArtMethod* method = imt_data[i];
6780 ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
6781 if (method != super_method) {
6782 bool is_conflict_table = method->IsRuntimeMethod() &&
6783 method != unimplemented_method &&
6784 method != conflict_method;
6785 // Verify conflict contents.
6786 bool super_conflict_table = super_method->IsRuntimeMethod() &&
6787 super_method != unimplemented_method &&
6788 super_method != conflict_method;
6789 if (!is_conflict_table || !super_conflict_table) {
6790 same = false;
6791 } else {
6792 ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
6793 ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
6794 same = same && table1->Equals(table2, image_pointer_size_);
6795 }
6796 }
6797 }
6798 if (same) {
6799 imt = super_imt;
6800 }
6801 }
6802 if (imt == nullptr) {
6803 imt = klass->GetImt(image_pointer_size_);
6804 DCHECK(imt != nullptr);
6805 imt->Populate(imt_data, image_pointer_size_);
6806 } else {
6807 klass->SetImt(imt, image_pointer_size_);
6808 }
6809 }
6810
6811 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
6812 LinearAlloc* linear_alloc,
6813 PointerSize image_pointer_size) {
6814 void* data = linear_alloc->Alloc(Thread::Current(),
6815 ImtConflictTable::ComputeSize(count,
6816 image_pointer_size));
6817 return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
6818 }
6819
6820 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
6821 return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
6822 }
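
// Illustrative sketch (editorial addition, not part of the runtime): the overloads above combine
// a bump-style arena allocation with placement new -- memory comes from a LinearAlloc, the table
// is constructed in place, and it is never destroyed individually. The same shape against a plain
// byte buffer, with hypothetical names (alignment handling is omitted for brevity):
struct SketchArena {
  std::vector<uint8_t> storage;
  size_t used = 0;
  void* Alloc(size_t bytes) {
    if (used + bytes > storage.size()) {
      return nullptr;                          // Mirrors the allocation-failure path above.
    }
    void* result = storage.data() + used;
    used += bytes;
    return result;
  }
};
struct SketchTable {
  explicit SketchTable(size_t count) : count(count) {}
  size_t count;
};
inline SketchTable* SketchCreateTable(size_t count, SketchArena* arena) {
  void* data = arena->Alloc(sizeof(SketchTable));
  // Construct in place in arena-owned memory; the arena owns the bytes, so the object is never
  // deleted on its own, just like the LinearAlloc-backed conflict tables.
  return (data != nullptr) ? new (data) SketchTable(count) : nullptr;
}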
6823
6824 void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
6825 ArtMethod* unimplemented_method,
6826 ArtMethod* imt_conflict_method,
6827 ObjPtr<mirror::Class> klass,
6828 bool create_conflict_tables,
6829 bool ignore_copied_methods,
6830 /*out*/bool* new_conflict,
6831 /*out*/ArtMethod** imt) {
6832 uint32_t conflict_counts[ImTable::kSize] = {};
6833 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
6834 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
6835 const size_t num_virtuals = interface->NumVirtualMethods();
6836 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6837 // Virtual methods can be larger than the if table methods if there are default methods.
6838 DCHECK_GE(num_virtuals, method_array_count);
6839 if (kIsDebugBuild) {
6840 if (klass->IsInterface()) {
6841 DCHECK_EQ(method_array_count, 0u);
6842 } else {
6843 DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
6844 }
6845 }
6846 if (method_array_count == 0) {
6847 continue;
6848 }
6849 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
6850 for (size_t j = 0; j < method_array_count; ++j) {
6851 ArtMethod* implementation_method =
6852 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6853 if (ignore_copied_methods && implementation_method->IsCopied()) {
6854 continue;
6855 }
6856 DCHECK(implementation_method != nullptr);
6857 // Miranda methods cannot be used to implement an interface method, but they are safe to put
6858 // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
6859 // or interface methods in the IMT here they will not create extra conflicts since we compare
6860 // names and signatures in SetIMTRef.
6861 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
6862 const uint32_t imt_index = interface_method->GetImtIndex();
6863
6864       // There are only conflicts if the interface methods for an IMT slot do not all have the
6865       // same implementation method. Keep track of this to avoid creating a conflict table in
6866       // that case.
6867
6868 // Conflict table size for each IMT slot.
6869 ++conflict_counts[imt_index];
6870
6871 SetIMTRef(unimplemented_method,
6872 imt_conflict_method,
6873 implementation_method,
6874 /*out*/new_conflict,
6875 /*out*/&imt[imt_index]);
6876 }
6877 }
6878
6879 if (create_conflict_tables) {
6880 // Create the conflict tables.
6881 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
6882 for (size_t i = 0; i < ImTable::kSize; ++i) {
6883 size_t conflicts = conflict_counts[i];
6884 if (imt[i] == imt_conflict_method) {
6885 ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
6886 if (new_table != nullptr) {
6887 ArtMethod* new_conflict_method =
6888 Runtime::Current()->CreateImtConflictMethod(linear_alloc);
6889 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6890 imt[i] = new_conflict_method;
6891 } else {
6892 LOG(ERROR) << "Failed to allocate conflict table";
6893 imt[i] = imt_conflict_method;
6894 }
6895 } else {
6896 DCHECK_NE(imt[i], imt_conflict_method);
6897 }
6898 }
6899
6900 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
6901 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
6902 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6903 // Virtual methods can be larger than the if table methods if there are default methods.
6904 if (method_array_count == 0) {
6905 continue;
6906 }
6907 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
6908 for (size_t j = 0; j < method_array_count; ++j) {
6909 ArtMethod* implementation_method =
6910 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6911 if (ignore_copied_methods && implementation_method->IsCopied()) {
6912 continue;
6913 }
6914 DCHECK(implementation_method != nullptr);
6915 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
6916 const uint32_t imt_index = interface_method->GetImtIndex();
6917 if (!imt[imt_index]->IsRuntimeMethod() ||
6918 imt[imt_index] == unimplemented_method ||
6919 imt[imt_index] == imt_conflict_method) {
6920 continue;
6921 }
6922 ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
6923 const size_t num_entries = table->NumEntries(image_pointer_size_);
6924 table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
6925 table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
6926 }
6927 }
6928 }
6929 }
6930
6931 // Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
6932 // set.
6933 static bool NotSubinterfaceOfAny(
6934 const std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr>& classes,
6935 ObjPtr<mirror::Class> val)
6936 REQUIRES(Roles::uninterruptible_)
6937 REQUIRES_SHARED(Locks::mutator_lock_) {
6938 DCHECK(val != nullptr);
6939 for (ObjPtr<mirror::Class> c : classes) {
6940 if (val->IsAssignableFrom(c)) {
6941 return false;
6942 }
6943 }
6944 return true;
6945 }
6946
6947 // Fills in and flattens the interface inheritance hierarchy.
6948 //
6949 // By the end of this function all interfaces in the transitive closure of to_process are added to
6950 // the iftable and every interface precedes all of its sub-interfaces in this list.
6951 //
6952 // all I, J: Interface | I <: J implies J precedes I
6953 //
6954 // (note A <: B means that A is a subtype of B)
6955 //
6956 // This returns the total number of items in the iftable. The iftable might be resized down after
6957 // this call.
6958 //
6959 // We order this backwards so that we do not need to reorder superclass interfaces when new
6960 // interfaces are added in subclass's interface tables.
6961 //
6962 // Upon entry into this function iftable is a copy of the superclass's iftable with the first
6963 // super_ifcount entries filled in with the transitive closure of the interfaces of the superclass.
6964 // The other entries are uninitialized. We will fill in the remaining entries in this function. The
6965 // iftable must be large enough to hold all interfaces without changing its size.
6966 static size_t FillIfTable(ObjPtr<mirror::IfTable> iftable,
6967 size_t super_ifcount,
6968 const std::vector<ObjPtr<mirror::Class>>& to_process)
6969 REQUIRES(Roles::uninterruptible_)
6970 REQUIRES_SHARED(Locks::mutator_lock_) {
6971   // This is the set of all classes already in the iftable. Used to make checking whether a
6972   // class has already been added quicker.
6973 std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr> classes_in_iftable;
6974 // The first super_ifcount elements are from the superclass. We note that they are already added.
6975 for (size_t i = 0; i < super_ifcount; i++) {
6976 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
6977 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
6978 classes_in_iftable.insert(iface);
6979 }
6980 size_t filled_ifcount = super_ifcount;
6981 for (ObjPtr<mirror::Class> interface : to_process) {
6982 // Let us call the first filled_ifcount elements of iftable the current-iface-list.
6983 // At this point in the loop current-iface-list has the invariant that:
6984 // for every pair of interfaces I,J within it:
6985 // if index_of(I) < index_of(J) then I is not a subtype of J
6986
6987 // If we have already seen this element then all of its super-interfaces must already be in the
6988 // current-iface-list so we can skip adding it.
6989 if (!ContainsElement(classes_in_iftable, interface)) {
6990 // We haven't seen this interface so add all of its super-interfaces onto the
6991 // current-iface-list, skipping those already on it.
6992 int32_t ifcount = interface->GetIfTableCount();
6993 for (int32_t j = 0; j < ifcount; j++) {
6994 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
6995 if (!ContainsElement(classes_in_iftable, super_interface)) {
6996 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
6997 classes_in_iftable.insert(super_interface);
6998 iftable->SetInterface(filled_ifcount, super_interface);
6999 filled_ifcount++;
7000 }
7001 }
7002 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
7003 // Place this interface onto the current-iface-list after all of its super-interfaces.
7004 classes_in_iftable.insert(interface);
7005 iftable->SetInterface(filled_ifcount, interface);
7006 filled_ifcount++;
7007 } else if (kIsDebugBuild) {
7008 // Check all super-interfaces are already in the list.
7009 int32_t ifcount = interface->GetIfTableCount();
7010 for (int32_t j = 0; j < ifcount; j++) {
7011 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
7012 DCHECK(ContainsElement(classes_in_iftable, super_interface))
7013 << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
7014 << ", a superinterface of " << interface->PrettyClass();
7015 }
7016 }
7017 }
7018 if (kIsDebugBuild) {
7019 // Check that the iftable is ordered correctly.
7020 for (size_t i = 0; i < filled_ifcount; i++) {
7021 ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
7022 for (size_t j = i + 1; j < filled_ifcount; j++) {
7023 ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
7024 // !(if_a <: if_b)
7025 CHECK(!if_b->IsAssignableFrom(if_a))
7026 << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
7027 << ") extends "
7028 << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
7029 << "interface list.";
7030 }
7031 }
7032 }
7033 return filled_ifcount;
7034 }
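
// Illustrative sketch (editorial addition, not part of the runtime): FillIfTable produces a list
// in which every interface appears exactly once and only after all of its superinterfaces.
// Ignoring the pre-filled superclass prefix, the core is "emit the supers first, then the
// interface itself, skipping anything already present". A standalone version with a hypothetical
// SketchIfaceNode whose `supers` field is assumed to already be flattened supers-first, just like
// an already-linked interface's own iftable:
struct SketchIfaceNode {
  std::vector<const SketchIfaceNode*> supers;  // Transitive closure, supers before subs.
};
inline void SketchAppendWithSupers(const SketchIfaceNode* iface,
                                   std::vector<const SketchIfaceNode*>* out,
                                   std::unordered_set<const SketchIfaceNode*>* seen) {
  if (seen->count(iface) != 0) {
    return;                                    // Already placed, together with all of its supers.
  }
  for (const SketchIfaceNode* super_iface : iface->supers) {
    if (seen->insert(super_iface).second) {
      out->push_back(super_iface);             // Superinterfaces are appended first...
    }
  }
  seen->insert(iface);
  out->push_back(iface);                       // ...then the interface itself.
}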
7035
7036 bool ClassLinker::SetupInterfaceLookupTable(Thread* self, Handle<mirror::Class> klass,
7037 Handle<mirror::ObjectArray<mirror::Class>> interfaces) {
7038 StackHandleScope<1> hs(self);
7039 const bool has_superclass = klass->HasSuperClass();
7040 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
7041 const bool have_interfaces = interfaces != nullptr;
7042 const size_t num_interfaces =
7043 have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces();
7044 if (num_interfaces == 0) {
7045 if (super_ifcount == 0) {
7046 if (LIKELY(has_superclass)) {
7047 klass->SetIfTable(klass->GetSuperClass()->GetIfTable());
7048 }
7049 // Class implements no interfaces.
7050 DCHECK_EQ(klass->GetIfTableCount(), 0);
7051 return true;
7052 }
7053     // Class implements the same interfaces as its parent; are any of them non-marker interfaces?
7054 bool has_non_marker_interface = false;
7055 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
7056 for (size_t i = 0; i < super_ifcount; ++i) {
7057 if (super_iftable->GetMethodArrayCount(i) > 0) {
7058 has_non_marker_interface = true;
7059 break;
7060 }
7061 }
7062 // Class just inherits marker interfaces from parent so recycle parent's iftable.
7063 if (!has_non_marker_interface) {
7064 klass->SetIfTable(super_iftable);
7065 return true;
7066 }
7067 }
7068 size_t ifcount = super_ifcount + num_interfaces;
7069 // Check that every class being implemented is an interface.
7070 for (size_t i = 0; i < num_interfaces; i++) {
7071 ObjPtr<mirror::Class> interface = have_interfaces
7072 ? interfaces->GetWithoutChecks(i)
7073 : mirror::Class::GetDirectInterface(self, klass.Get(), i);
7074 DCHECK(interface != nullptr);
7075 if (UNLIKELY(!interface->IsInterface())) {
7076 std::string temp;
7077 ThrowIncompatibleClassChangeError(klass.Get(),
7078 "Class %s implements non-interface class %s",
7079 klass->PrettyDescriptor().c_str(),
7080 PrettyDescriptor(interface->GetDescriptor(&temp)).c_str());
7081 return false;
7082 }
7083 ifcount += interface->GetIfTableCount();
7084 }
7085 // Create the interface function table.
7086 MutableHandle<mirror::IfTable> iftable(hs.NewHandle(AllocIfTable(self, ifcount)));
7087 if (UNLIKELY(iftable == nullptr)) {
7088 self->AssertPendingOOMException();
7089 return false;
7090 }
7091 // Fill in table with superclass's iftable.
7092 if (super_ifcount != 0) {
7093 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
7094 for (size_t i = 0; i < super_ifcount; i++) {
7095 ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
7096 iftable->SetInterface(i, super_interface);
7097 }
7098 }
7099
7100 // Note that AllowThreadSuspension is to thread suspension as pthread_testcancel is to pthread
7101   // cancellation. That is, it will suspend if one has a pending suspend request but otherwise
7102 // doesn't really do anything.
7103 self->AllowThreadSuspension();
7104
7105 size_t new_ifcount;
7106 {
7107 ScopedAssertNoThreadSuspension nts("Copying mirror::Class*'s for FillIfTable");
7108 std::vector<ObjPtr<mirror::Class>> to_add;
7109 for (size_t i = 0; i < num_interfaces; i++) {
7110 ObjPtr<mirror::Class> interface = have_interfaces ? interfaces->Get(i) :
7111 mirror::Class::GetDirectInterface(self, klass.Get(), i);
7112 to_add.push_back(interface);
7113 }
7114
7115 new_ifcount = FillIfTable(iftable.Get(), super_ifcount, std::move(to_add));
7116 }
7117
7118 self->AllowThreadSuspension();
7119
7120 // Shrink iftable in case duplicates were found
7121 if (new_ifcount < ifcount) {
7122 DCHECK_NE(num_interfaces, 0U);
7123 iftable.Assign(ObjPtr<mirror::IfTable>::DownCast(
7124 iftable->CopyOf(self, new_ifcount * mirror::IfTable::kMax)));
7125 if (UNLIKELY(iftable == nullptr)) {
7126 self->AssertPendingOOMException();
7127 return false;
7128 }
7129 ifcount = new_ifcount;
7130 } else {
7131 DCHECK_EQ(new_ifcount, ifcount);
7132 }
7133 klass->SetIfTable(iftable.Get());
7134 return true;
7135 }
7136
7137 // Finds the method with a name/signature that matches cmp in the given lists of methods. Each
7138 // list of methods must be free of duplicates.
7139 static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp ATTRIBUTE_UNUSED) {
7140 return nullptr;
7141 }
7142
7143 template <typename ... Types>
7144 static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp,
7145 const ScopedArenaVector<ArtMethod*>& list,
7146 const Types& ... rest)
7147 REQUIRES_SHARED(Locks::mutator_lock_) {
7148 for (ArtMethod* method : list) {
7149 if (cmp.HasSameNameAndSignature(method)) {
7150 return method;
7151 }
7152 }
7153 return FindSameNameAndSignature(cmp, rest...);
7154 }
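
// Illustrative sketch (editorial addition, not part of the runtime): the pair of overloads above
// is the usual variadic-template recursion idiom -- a trivial base case plus a recursive case
// that peels off one list per call. The same shape over plain integers, with hypothetical names:
inline const int* SketchFindNegative() {
  return nullptr;                              // Base case: no lists left, nothing found.
}
template <typename... Rest>
const int* SketchFindNegative(const std::vector<int>& list, const Rest&... rest) {
  for (const int& value : list) {
    if (value < 0) {
      return &value;                           // Found in the current list.
    }
  }
  return SketchFindNegative(rest...);          // Otherwise recurse into the remaining lists.
}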
7155
7156 namespace {
7157
7158 // Check that all vtable entries are present in this class's virtuals or are the same as a
7159 // superclass's vtable entry.
7160 void CheckClassOwnsVTableEntries(Thread* self,
7161 Handle<mirror::Class> klass,
7162 PointerSize pointer_size)
7163 REQUIRES_SHARED(Locks::mutator_lock_) {
7164 StackHandleScope<2> hs(self);
7165 Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7166 ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
7167 Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
7168 int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
7169 for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7170 ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7171 CHECK(m != nullptr);
7172
7173 if (m->GetMethodIndexDuringLinking() != i) {
7174 LOG(WARNING) << m->PrettyMethod()
7175                    << " has an unexpected method index for its spot in the vtable for class "
7176 << klass->PrettyClass();
7177 }
7178 ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7179 auto is_same_method = [m] (const ArtMethod& meth) {
7180 return &meth == m;
7181 };
7182 if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7183 std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7184 LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7185 << klass->PrettyClass() << " or any of its superclasses!";
7186 }
7187 }
7188 }
7189
7190 // Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7191 // method is overridden in a subclass.
7192 template <PointerSize kPointerSize>
7193 void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
7194 REQUIRES_SHARED(Locks::mutator_lock_) {
7195 StackHandleScope<1> hs(self);
7196 Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7197 int32_t num_entries = vtable->GetLength();
7198
7199 // Observations:
7200 // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7201 // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7202 // for many classes outside of libcore a cross-dexfile check has to be run anyways.
7203 // * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
7204 // to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
7205 // * The single-pass algorithm will trade memory for speed, but that is OK.
7206
7207 CHECK_GT(num_entries, 0);
7208
7209 auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7210 ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7211 ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7212 LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7213 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
7214                  << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m1) << ") and "
7215 << m2->PrettyMethod() << " (0x" << std::hex
7216 << reinterpret_cast<uintptr_t>(m2) << ")";
7217 };
7218 struct BaseHashType {
7219 static size_t HashCombine(size_t seed, size_t val) {
7220 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7221 }
7222 };
7223
7224 // Check assuming all entries come from the same dex file.
7225 {
7226 // Find the first interesting method and its dex file.
7227 int32_t start = 0;
7228 for (; start < num_entries; ++start) {
7229 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7230 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7231 // maybe).
7232 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7233 vtable_entry->GetAccessFlags())) {
7234 continue;
7235 }
7236 break;
7237 }
7238 if (start == num_entries) {
7239 return;
7240 }
7241 const DexFile* dex_file =
7242 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7243 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7244
7245 // Helper function to avoid logging if we have to run the cross-file checks.
7246 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7247 // Use a map to store seen entries, as the storage space is too large for a bitvector.
7248 using PairType = std::pair<uint32_t, uint16_t>;
7249 struct PairHash : BaseHashType {
7250 size_t operator()(const PairType& key) const {
7251 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7252 }
7253 };
7254 std::unordered_map<PairType, int32_t, PairHash> seen;
7255 seen.reserve(2 * num_entries);
7256 bool need_slow_path = false;
7257 bool found_dup = false;
7258 for (int i = start; i < num_entries; ++i) {
7259 // Can use Unchecked here as the start loop already ensured that the arrays are correct
7260 // wrt/ kPointerSize.
7261 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7262 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7263 vtable_entry->GetAccessFlags())) {
7264 continue;
7265 }
7266 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7267 if (dex_file != m->GetDexFile()) {
7268 need_slow_path = true;
7269 break;
7270 }
7271 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7272 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7273 auto it = seen.find(pair);
7274 if (it != seen.end()) {
7275 found_dup = true;
7276 if (log_warn) {
7277 log_fn(it->second, i);
7278 }
7279 } else {
7280 seen.emplace(pair, i);
7281 }
7282 }
7283 return std::make_pair(need_slow_path, found_dup);
7284 };
7285 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7286 if (!result.first) {
7287 if (result.second) {
7288 check_fn(/* log_warn= */ true);
7289 }
7290 return;
7291 }
7292 }
7293
7294 // Need to check across dex files.
7295 struct Entry {
7296 size_t cached_hash = 0;
7297 const char* name = nullptr;
7298 Signature signature = Signature::NoSignature();
7299 uint32_t name_len = 0;
7300
7301 Entry(const DexFile* dex_file, const dex::MethodId& mid)
7302 : name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
7303 signature(dex_file->GetMethodSignature(mid)) {
7304 }
7305
7306 bool operator==(const Entry& other) const {
7307 if (name_len != other.name_len || strcmp(name, other.name) != 0) {
7308 return false;
7309 }
7310 return signature == other.signature;
7311 }
7312 };
7313 struct EntryHash {
7314 size_t operator()(const Entry& key) const {
7315 return key.cached_hash;
7316 }
7317 };
7318 std::unordered_map<Entry, int32_t, EntryHash> map;
7319 for (int32_t i = 0; i < num_entries; ++i) {
7320 // Can use Unchecked here as the first loop already ensured that the arrays are correct
7321 // wrt/ kPointerSize.
7322 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7323 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7324 // maybe).
7325 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7326 vtable_entry->GetAccessFlags())) {
7327 continue;
7328 }
7329 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7330 const DexFile* dex_file = m->GetDexFile();
7331 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7332
7333 Entry e(dex_file, mid);
7334
7335 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7336 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7337 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7338 sig_hash);
7339
7340 auto it = map.find(e);
7341 if (it != map.end()) {
7342 log_fn(it->second, i);
7343 } else {
7344 map.emplace(e, i);
7345 }
7346 }
7347 }
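
// Illustrative sketch (editorial addition, not part of the runtime): both the single-dex-file
// pass and the cross-dex-file pass above are instances of the standard single-pass duplicate
// check -- hash every entry, remember the first index seen per key, and report any later index
// that hits an existing key. A compact standalone form over strings, with hypothetical names:
inline std::vector<std::pair<int32_t, int32_t>> SketchFindDuplicates(
    const std::vector<std::string>& entries) {
  std::vector<std::pair<int32_t, int32_t>> duplicates;  // Pairs of (first index, later index).
  std::unordered_map<std::string, int32_t> first_seen;
  first_seen.reserve(entries.size());
  for (int32_t i = 0; i < static_cast<int32_t>(entries.size()); ++i) {
    auto it = first_seen.find(entries[i]);
    if (it != first_seen.end()) {
      duplicates.emplace_back(it->second, i);            // Mirrors log_fn(it->second, i) above.
    } else {
      first_seen.emplace(entries[i], i);
    }
  }
  return duplicates;
}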
7348
7349 void CheckVTableHasNoDuplicates(Thread* self,
7350 Handle<mirror::Class> klass,
7351 PointerSize pointer_size)
7352 REQUIRES_SHARED(Locks::mutator_lock_) {
7353 switch (pointer_size) {
7354 case PointerSize::k64:
7355 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7356 break;
7357 case PointerSize::k32:
7358 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7359 break;
7360 }
7361 }
7362
7363 static void SanityCheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
7364 REQUIRES_SHARED(Locks::mutator_lock_) {
7365 CheckClassOwnsVTableEntries(self, klass, pointer_size);
7366 CheckVTableHasNoDuplicates(self, klass, pointer_size);
7367 }
7368
7369 } // namespace
7370
7371 void ClassLinker::FillImtFromSuperClass(Handle<mirror::Class> klass,
7372 ArtMethod* unimplemented_method,
7373 ArtMethod* imt_conflict_method,
7374 bool* new_conflict,
7375 ArtMethod** imt) {
7376 DCHECK(klass->HasSuperClass());
7377 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
7378 if (super_class->ShouldHaveImt()) {
7379 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
7380 for (size_t i = 0; i < ImTable::kSize; ++i) {
7381 imt[i] = super_imt->Get(i, image_pointer_size_);
7382 }
7383 } else {
7384 // No imt in the super class, need to reconstruct from the iftable.
7385 ObjPtr<mirror::IfTable> if_table = super_class->GetIfTable();
7386 if (if_table->Count() != 0) {
7387 // Ignore copied methods since we will handle these in LinkInterfaceMethods.
7388 FillIMTFromIfTable(if_table,
7389 unimplemented_method,
7390 imt_conflict_method,
7391 klass.Get(),
7392 /*create_conflict_tables=*/false,
7393 /*ignore_copied_methods=*/true,
7394 /*out*/new_conflict,
7395 /*out*/imt);
7396 }
7397 }
7398 }
7399
7400 class ClassLinker::LinkInterfaceMethodsHelper {
7401 public:
7402   LinkInterfaceMethodsHelper(ClassLinker* class_linker,
7403 Handle<mirror::Class> klass,
7404 Thread* self,
7405 Runtime* runtime)
7406 : class_linker_(class_linker),
7407 klass_(klass),
7408 method_alignment_(ArtMethod::Alignment(class_linker->GetImagePointerSize())),
7409 method_size_(ArtMethod::Size(class_linker->GetImagePointerSize())),
7410 self_(self),
7411 stack_(runtime->GetLinearAlloc()->GetArenaPool()),
7412 allocator_(&stack_),
7413 default_conflict_methods_(allocator_.Adapter()),
7414 overriding_default_conflict_methods_(allocator_.Adapter()),
7415 miranda_methods_(allocator_.Adapter()),
7416 default_methods_(allocator_.Adapter()),
7417 overriding_default_methods_(allocator_.Adapter()),
7418 move_table_(allocator_.Adapter()) {
7419 }
7420
7421 ArtMethod* FindMethod(ArtMethod* interface_method,
7422 MethodNameAndSignatureComparator& interface_name_comparator,
7423 ArtMethod* vtable_impl)
7424 REQUIRES_SHARED(Locks::mutator_lock_);
7425
7426 ArtMethod* GetOrCreateMirandaMethod(ArtMethod* interface_method,
7427 MethodNameAndSignatureComparator& interface_name_comparator)
7428 REQUIRES_SHARED(Locks::mutator_lock_);
7429
7430   bool HasNewVirtuals() const {
7431 return !(miranda_methods_.empty() &&
7432 default_methods_.empty() &&
7433 overriding_default_methods_.empty() &&
7434 overriding_default_conflict_methods_.empty() &&
7435 default_conflict_methods_.empty());
7436 }
7437
7438 void ReallocMethods() REQUIRES_SHARED(Locks::mutator_lock_);
7439
7440 ObjPtr<mirror::PointerArray> UpdateVtable(
7441 const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
7442 ObjPtr<mirror::PointerArray> old_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
7443
7444 void UpdateIfTable(Handle<mirror::IfTable> iftable) REQUIRES_SHARED(Locks::mutator_lock_);
7445
7446 void UpdateIMT(ArtMethod** out_imt);
7447
7448   void CheckNoStaleMethodsInDexCache() REQUIRES_SHARED(Locks::mutator_lock_) {
7449 if (kIsDebugBuild) {
7450 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7451       // Check that there are no stale methods in the dex cache array.
7452 auto* resolved_methods = klass_->GetDexCache()->GetResolvedMethods();
7453 for (size_t i = 0, count = klass_->GetDexCache()->NumResolvedMethods(); i < count; ++i) {
7454 auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_methods, i, pointer_size);
7455 ArtMethod* m = pair.object;
7456 CHECK(move_table_.find(m) == move_table_.end() ||
7457 // The original versions of copied methods will still be present so allow those too.
7458 // Note that if the first check passes this might fail to GetDeclaringClass().
7459 std::find_if(m->GetDeclaringClass()->GetMethods(pointer_size).begin(),
7460 m->GetDeclaringClass()->GetMethods(pointer_size).end(),
7461 [m] (ArtMethod& meth) {
7462 return &meth == m;
7463 }) != m->GetDeclaringClass()->GetMethods(pointer_size).end())
7464 << "Obsolete method " << m->PrettyMethod() << " is in dex cache!";
7465 }
7466 }
7467 }
7468
7469 void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
7470 LengthPrefixedArray<ArtMethod>* methods) {
7471 if (kIsDebugBuild) {
7472 CHECK(methods != nullptr);
7473 // Put some random garbage in old methods to help find stale pointers.
7474 if (methods != old_methods && old_methods != nullptr) {
7475 // Need to make sure the GC is not running since it could be scanning the methods we are
7476 // about to overwrite.
7477 ScopedThreadStateChange tsc(self_, kSuspended);
7478 gc::ScopedGCCriticalSection gcs(self_,
7479 gc::kGcCauseClassLinker,
7480 gc::kCollectorTypeClassLinker);
7481 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
7482 method_size_,
7483 method_alignment_);
7484 memset(old_methods, 0xFEu, old_size);
7485 }
7486 }
7487 }
7488
7489 private:
7490 size_t NumberOfNewVirtuals() const {
7491 return miranda_methods_.size() +
7492 default_methods_.size() +
7493 overriding_default_conflict_methods_.size() +
7494 overriding_default_methods_.size() +
7495 default_conflict_methods_.size();
7496 }
7497
7498 bool FillTables() REQUIRES_SHARED(Locks::mutator_lock_) {
7499 return !klass_->IsInterface();
7500 }
7501
7502 void LogNewVirtuals() const REQUIRES_SHARED(Locks::mutator_lock_) {
7503 DCHECK(!klass_->IsInterface() || (default_methods_.empty() && miranda_methods_.empty()))
7504 << "Interfaces should only have default-conflict methods appended to them.";
7505 VLOG(class_linker) << mirror::Class::PrettyClass(klass_.Get()) << ": miranda_methods="
7506 << miranda_methods_.size()
7507 << " default_methods=" << default_methods_.size()
7508 << " overriding_default_methods=" << overriding_default_methods_.size()
7509 << " default_conflict_methods=" << default_conflict_methods_.size()
7510 << " overriding_default_conflict_methods="
7511 << overriding_default_conflict_methods_.size();
7512 }
7513
7514 ClassLinker* class_linker_;
7515 Handle<mirror::Class> klass_;
7516 size_t method_alignment_;
7517 size_t method_size_;
7518 Thread* const self_;
7519
7520 // These are allocated on the heap to begin with; we then transfer them to linear alloc when we
7521 // re-create the virtual methods array.
7522 // Need to use low 4GB arenas for the compiler or else the pointers won't fit in the 32-bit
7523 // method array during cross compilation.
7524 // Use the linear alloc pool since it is in the low 4GB for the compiler.
7525 ArenaStack stack_;
7526 ScopedArenaAllocator allocator_;
7527
7528 ScopedArenaVector<ArtMethod*> default_conflict_methods_;
7529 ScopedArenaVector<ArtMethod*> overriding_default_conflict_methods_;
7530 ScopedArenaVector<ArtMethod*> miranda_methods_;
7531 ScopedArenaVector<ArtMethod*> default_methods_;
7532 ScopedArenaVector<ArtMethod*> overriding_default_methods_;
7533
7534 ScopedArenaUnorderedMap<ArtMethod*, ArtMethod*> move_table_;
7535 };
7536
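// Selects the implementation for an interface method that has no direct implementation in the
// class: reuses or creates a default-conflict method, records a newly found default method, or
// returns the (possibly null) abstract vtable entry when an abstract method masks all defaults.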
7537 ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::FindMethod(
7538 ArtMethod* interface_method,
7539 MethodNameAndSignatureComparator& interface_name_comparator,
7540 ArtMethod* vtable_impl) {
7541 ArtMethod* current_method = nullptr;
7542 switch (class_linker_->FindDefaultMethodImplementation(self_,
7543 interface_method,
7544 klass_,
7545 /*out*/&current_method)) {
7546 case DefaultMethodSearchResult::kDefaultConflict: {
7547 // Default method conflict.
7548 DCHECK(current_method == nullptr);
7549 ArtMethod* default_conflict_method = nullptr;
7550 if (vtable_impl != nullptr && vtable_impl->IsDefaultConflicting()) {
7551 // We can reuse the method from the superclass, don't bother adding it to virtuals.
7552 default_conflict_method = vtable_impl;
7553 } else {
7554 // See if we already have a conflict method for this method.
7555 ArtMethod* preexisting_conflict = FindSameNameAndSignature(
7556 interface_name_comparator,
7557 default_conflict_methods_,
7558 overriding_default_conflict_methods_);
7559 if (LIKELY(preexisting_conflict != nullptr)) {
7560 // We already have another conflict we can reuse.
7561 default_conflict_method = preexisting_conflict;
7562 } else {
7563 // Note that we do this even if we are an interface, since we need to create this and
7564 // cannot reuse another class's.
7565 // Create a new conflict method for this to use.
7566 default_conflict_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
7567 new(default_conflict_method) ArtMethod(interface_method,
7568 class_linker_->GetImagePointerSize());
7569 if (vtable_impl == nullptr) {
7570 // Save the conflict method. We need to add it to the vtable.
7571 default_conflict_methods_.push_back(default_conflict_method);
7572 } else {
7573 // Save the conflict method but it is already in the vtable.
7574 overriding_default_conflict_methods_.push_back(default_conflict_method);
7575 }
7576 }
7577 }
7578 current_method = default_conflict_method;
7579 break;
7580 } // case kDefaultConflict
7581 case DefaultMethodSearchResult::kDefaultFound: {
7582 DCHECK(current_method != nullptr);
7583 // Found a default method.
7584 if (vtable_impl != nullptr &&
7585 current_method->GetDeclaringClass() == vtable_impl->GetDeclaringClass()) {
7586 // We found a default method but it was the same one we already have from our
7587 // superclass. Don't bother adding it to our vtable again.
7588 current_method = vtable_impl;
7589 } else if (LIKELY(FillTables())) {
7590 // Interfaces don't need to copy default methods since they don't have vtables.
7591 // Only record this default method if it is new to save space.
7592 // TODO It might be worthwhile to copy default methods on interfaces anyway since it
7593 // would make lookup for interface super much faster. (We would only need to scan
7594 // the iftable to find if there is a NSME or AME.)
7595 ArtMethod* old = FindSameNameAndSignature(interface_name_comparator,
7596 default_methods_,
7597 overriding_default_methods_);
7598 if (old == nullptr) {
7599 // We found a default method implementation and there were no conflicts.
7600 if (vtable_impl == nullptr) {
7601 // Save the default method. We need to add it to the vtable.
7602 default_methods_.push_back(current_method);
7603 } else {
7604 // Save the default method but it is already in the vtable.
7605 overriding_default_methods_.push_back(current_method);
7606 }
7607 } else {
7608 CHECK(old == current_method) << "Multiple default implementations selected!";
7609 }
7610 }
7611 break;
7612 } // case kDefaultFound
7613 case DefaultMethodSearchResult::kAbstractFound: {
7614 DCHECK(current_method == nullptr);
7615 // Abstract method masks all defaults.
7616 if (vtable_impl != nullptr &&
7617 vtable_impl->IsAbstract() &&
7618 !vtable_impl->IsDefaultConflicting()) {
7619 // We need to make this an abstract method but the version in the vtable already is so
7620 // don't do anything.
7621 current_method = vtable_impl;
7622 }
7623 break;
7624 } // case kAbstractFound
7625 }
7626 return current_method;
7627 }
7628
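// Returns the placeholder ("miranda") method for an abstract interface method with no
// implementation, creating and recording it in miranda_methods_ on first use so the same
// placeholder is reused for later interfaces with the same name and signature.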
7629 ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::GetOrCreateMirandaMethod(
7630 ArtMethod* interface_method,
7631 MethodNameAndSignatureComparator& interface_name_comparator) {
7632 // Find out if there is already a miranda method we can use.
7633 ArtMethod* miranda_method = FindSameNameAndSignature(interface_name_comparator,
7634 miranda_methods_);
7635 if (miranda_method == nullptr) {
7636 DCHECK(interface_method->IsAbstract()) << interface_method->PrettyMethod();
7637 miranda_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
7638 CHECK(miranda_method != nullptr);
7639 // Point the interface table at a phantom slot.
7640 new(miranda_method) ArtMethod(interface_method, class_linker_->GetImagePointerSize());
7641 miranda_methods_.push_back(miranda_method);
7642 }
7643 return miranda_method;
7644 }
7645
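// Grows the class's method array so the collected miranda, default and default-conflict methods
// can be copied next to the existing methods. Every moved or copied ArtMethod is recorded in
// move_table_ so stale pointers in the vtable, iftable and IMT can be fixed up afterwards.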
7646 void ClassLinker::LinkInterfaceMethodsHelper::ReallocMethods() {
7647 LogNewVirtuals();
7648
7649 const size_t old_method_count = klass_->NumMethods();
7650 const size_t new_method_count = old_method_count + NumberOfNewVirtuals();
7651 DCHECK_NE(old_method_count, new_method_count);
7652
7653 // Attempt to realloc to save RAM if possible.
7654 LengthPrefixedArray<ArtMethod>* old_methods = klass_->GetMethodsPtr();
7655 // The Realloced virtual methods aren't visible from the class roots, so there is no issue
7656 // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
7657 // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
7658 // CopyFrom has internal read barriers.
7659 //
7660 // TODO We should maybe move some of this into mirror::Class or at least into another method.
7661 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
7662 method_size_,
7663 method_alignment_);
7664 const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
7665 method_size_,
7666 method_alignment_);
7667 const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
7668 auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
7669 class_linker_->GetAllocatorForClassLoader(klass_->GetClassLoader())->Realloc(
7670 self_, old_methods, old_methods_ptr_size, new_size));
7671 CHECK(methods != nullptr); // Native allocation failure aborts.
7672
7673 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7674 if (methods != old_methods) {
7675 // Maps from heap allocated miranda method to linear alloc miranda method.
7676 StrideIterator<ArtMethod> out = methods->begin(method_size_, method_alignment_);
7677 // Copy over the old methods.
7678 for (auto& m : klass_->GetMethods(pointer_size)) {
7679 move_table_.emplace(&m, &*out);
7680 // The CopyFrom is only necessary to not miss read barriers since Realloc won't do read
7681 // barriers when it copies.
7682 out->CopyFrom(&m, pointer_size);
7683 ++out;
7684 }
7685 }
7686 StrideIterator<ArtMethod> out(methods->begin(method_size_, method_alignment_) + old_method_count);
7687 // Copy over miranda methods before copying vtable since CopyOf may cause thread suspension and
7688 // we want the roots of the miranda methods to get visited.
7689 for (size_t i = 0; i < miranda_methods_.size(); ++i) {
7690 ArtMethod* mir_method = miranda_methods_[i];
7691 ArtMethod& new_method = *out;
7692 new_method.CopyFrom(mir_method, pointer_size);
7693 new_method.SetAccessFlags(new_method.GetAccessFlags() | kAccMiranda | kAccCopied);
7694 DCHECK_NE(new_method.GetAccessFlags() & kAccAbstract, 0u)
7695 << "Miranda method should be abstract!";
7696 move_table_.emplace(mir_method, &new_method);
7697 // Update the entry in the method array, as the array will be used for future lookups,
7698 // where thread suspension is allowed.
7699 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7700 // would not see them.
7701 miranda_methods_[i] = &new_method;
7702 ++out;
7703 }
7704 // We need to copy the default methods into our own method table since the runtime requires that
7705 // every method on a class's vtable be in that respective class's virtual method table.
7706 // NOTE This means that two classes might have the same implementation of a method from the same
7707 // interface but will have different ArtMethod*s for them. This also means we cannot compare a
7708 // default method found on a class with one found on the declaring interface directly and must
7709 // look at the declaring class to determine if they are the same.
7710 for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_methods_,
7711 &overriding_default_methods_}) {
7712 for (size_t i = 0; i < methods_vec->size(); ++i) {
7713 ArtMethod* def_method = (*methods_vec)[i];
7714 ArtMethod& new_method = *out;
7715 new_method.CopyFrom(def_method, pointer_size);
7716 // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
7717 // verified yet, it shouldn't have methods that are skipping access checks.
7718 // TODO This is rather arbitrary. We should maybe support classes where only some of its
7719 // methods are skip_access_checks.
7720 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
7721 constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
7722 constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
7723 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
7724 move_table_.emplace(def_method, &new_method);
7725 // Update the entry in the method array, as the array will be used for future lookups,
7726 // where thread suspension is allowed.
7727 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7728 // would not see them.
7729 (*methods_vec)[i] = &new_method;
7730 ++out;
7731 }
7732 }
7733 for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_conflict_methods_,
7734 &overriding_default_conflict_methods_}) {
7735 for (size_t i = 0; i < methods_vec->size(); ++i) {
7736 ArtMethod* conf_method = (*methods_vec)[i];
7737 ArtMethod& new_method = *out;
7738 new_method.CopyFrom(conf_method, pointer_size);
7739 // This is a type of default method (there are default method impls, just a conflict), so
7740 // mark this as a default, non-abstract method, since that's what it is. Also clear the
7741 // kAccSkipAccessChecks bit; since this class hasn't been verified yet, it shouldn't have
7742 // methods that are skipping access checks.
7743 // Also clear potential kAccSingleImplementation to avoid CHA trying to inline
7744 // the default method.
7745 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
7746 constexpr uint32_t kSetFlags = kAccDefault | kAccDefaultConflict | kAccCopied;
7747 constexpr uint32_t kMaskFlags =
7748 ~(kAccAbstract | kAccSkipAccessChecks | kAccSingleImplementation);
7749 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
7750 DCHECK(new_method.IsDefaultConflicting());
7751 // The actual method might or might not be marked abstract since we just copied it from a
7752 // (possibly default) interface method. We need to set its entry point to be the bridge so
7753 // that the compiler will not invoke the implementation of whatever method we copied from.
7754 EnsureThrowsInvocationError(class_linker_, &new_method);
7755 move_table_.emplace(conf_method, &new_method);
7756 // Update the entry in the method array, as the array will be used for future lookups,
7757 // where thread suspension is allowed.
7758 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7759 // would not see them.
7760 (*methods_vec)[i] = &new_method;
7761 ++out;
7762 }
7763 }
7764 methods->SetSize(new_method_count);
7765 class_linker_->UpdateClassMethods(klass_.Get(), methods);
7766 }
7767
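// Copies the old vtable into a larger one, appends the newly copied default, conflict and
// miranda methods, and rewrites existing entries through default_translations and move_table_
// so they point at the methods now owned by this class.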
7768 ObjPtr<mirror::PointerArray> ClassLinker::LinkInterfaceMethodsHelper::UpdateVtable(
7769 const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
7770 ObjPtr<mirror::PointerArray> old_vtable) {
7771 // Update the vtable to the new method structures. We can skip this for interfaces since they
7772 // do not have vtables.
7773 const size_t old_vtable_count = old_vtable->GetLength();
7774 const size_t new_vtable_count = old_vtable_count +
7775 miranda_methods_.size() +
7776 default_methods_.size() +
7777 default_conflict_methods_.size();
7778
7779 ObjPtr<mirror::PointerArray> vtable =
7780 ObjPtr<mirror::PointerArray>::DownCast(old_vtable->CopyOf(self_, new_vtable_count));
7781 if (UNLIKELY(vtable == nullptr)) {
7782 self_->AssertPendingOOMException();
7783 return nullptr;
7784 }
7785
7786 size_t vtable_pos = old_vtable_count;
7787 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7788 // Update all the newly copied methods' indexes so they denote their placement in the vtable.
7789 for (const ScopedArenaVector<ArtMethod*>& methods_vec : {default_methods_,
7790 default_conflict_methods_,
7791 miranda_methods_}) {
7792 // These are the functions that are not already in the vtable!
7793 for (ArtMethod* new_vtable_method : methods_vec) {
7794 // Leave the declaring class alone; the method's dex_code_item_offset_ and dex_method_index_
7795 // fields are references into the dex file the method was defined in. Since the ArtMethod
7796 // does not store that information, it uses declaring_class_->dex_cache_.
7797 new_vtable_method->SetMethodIndex(0xFFFF & vtable_pos);
7798 vtable->SetElementPtrSize(vtable_pos, new_vtable_method, pointer_size);
7799 ++vtable_pos;
7800 }
7801 }
7802 DCHECK_EQ(vtable_pos, new_vtable_count);
7803
7804 // Update old vtable methods. We use the default_translations map to figure out what each
7805 // vtable entry should be updated to, if they need to be at all.
7806 for (size_t i = 0; i < old_vtable_count; ++i) {
7807 ArtMethod* translated_method = vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7808 // Try and find what we need to change this method to.
7809 auto translation_it = default_translations.find(i);
7810 if (translation_it != default_translations.end()) {
7811 if (translation_it->second.IsInConflict()) {
7812 // Find which conflict method we are to use for this method.
7813 MethodNameAndSignatureComparator old_method_comparator(
7814 translated_method->GetInterfaceMethodIfProxy(pointer_size));
7815 // We only need to look through overriding_default_conflict_methods since this is an
7816 // overridden method we are fixing up here.
7817 ArtMethod* new_conflict_method = FindSameNameAndSignature(
7818 old_method_comparator, overriding_default_conflict_methods_);
7819 CHECK(new_conflict_method != nullptr) << "Expected a conflict method!";
7820 translated_method = new_conflict_method;
7821 } else if (translation_it->second.IsAbstract()) {
7822 // Find which miranda method we are to use for this method.
7823 MethodNameAndSignatureComparator old_method_comparator(
7824 translated_method->GetInterfaceMethodIfProxy(pointer_size));
7825 ArtMethod* miranda_method = FindSameNameAndSignature(old_method_comparator,
7826 miranda_methods_);
7827 DCHECK(miranda_method != nullptr);
7828 translated_method = miranda_method;
7829 } else {
7830 // Normal default method (changed from an older default or abstract interface method).
7831 DCHECK(translation_it->second.IsTranslation());
7832 translated_method = translation_it->second.GetTranslation();
7833 auto it = move_table_.find(translated_method);
7834 DCHECK(it != move_table_.end());
7835 translated_method = it->second;
7836 }
7837 } else {
7838 auto it = move_table_.find(translated_method);
7839 translated_method = (it != move_table_.end()) ? it->second : nullptr;
7840 }
7841
7842 if (translated_method != nullptr) {
7843 // Make sure the new_methods index is set.
7844 if (translated_method->GetMethodIndexDuringLinking() != i) {
7845 if (kIsDebugBuild) {
7846 auto* methods = klass_->GetMethodsPtr();
7847 CHECK_LE(reinterpret_cast<uintptr_t>(&*methods->begin(method_size_, method_alignment_)),
7848 reinterpret_cast<uintptr_t>(translated_method));
7849 CHECK_LT(reinterpret_cast<uintptr_t>(translated_method),
7850 reinterpret_cast<uintptr_t>(&*methods->end(method_size_, method_alignment_)));
7851 }
7852 translated_method->SetMethodIndex(0xFFFF & i);
7853 }
7854 vtable->SetElementPtrSize(i, translated_method, pointer_size);
7855 }
7856 }
7857 klass_->SetVTable(vtable);
7858 return vtable;
7859 }
7860
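// Rewrites iftable method-array entries that still point at pre-realloc ArtMethods so they
// reference the relocated copies recorded in move_table_.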
7861 void ClassLinker::LinkInterfaceMethodsHelper::UpdateIfTable(Handle<mirror::IfTable> iftable) {
7862 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7863 const size_t ifcount = klass_->GetIfTableCount();
7864 // Go fix up all the stale iftable pointers.
7865 for (size_t i = 0; i < ifcount; ++i) {
7866 for (size_t j = 0, count = iftable->GetMethodArrayCount(i); j < count; ++j) {
7867 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArray(i);
7868 ArtMethod* m = method_array->GetElementPtrSize<ArtMethod*>(j, pointer_size);
7869 DCHECK(m != nullptr) << klass_->PrettyClass();
7870 auto it = move_table_.find(m);
7871 if (it != move_table_.end()) {
7872 auto* new_m = it->second;
7873 DCHECK(new_m != nullptr) << klass_->PrettyClass();
7874 method_array->SetElementPtrSize(j, new_m, pointer_size);
7875 }
7876 }
7877 }
7878 }
7879
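// Redirects IMT entries that point at methods relocated by ReallocMethods to their new copies.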
7880 void ClassLinker::LinkInterfaceMethodsHelper::UpdateIMT(ArtMethod** out_imt) {
7881 // Fix up IMT next.
7882 for (size_t i = 0; i < ImTable::kSize; ++i) {
7883 auto it = move_table_.find(out_imt[i]);
7884 if (it != move_table_.end()) {
7885 out_imt[i] = it->second;
7886 }
7887 }
7888 }
7889
7890 // TODO This method needs to be split up into several smaller methods.
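// Links the interface methods of `klass`: for every method of every interface in the iftable it
// finds a concrete implementation in the class or its superclasses, falls back to default-method
// resolution, and creates miranda or conflict methods where needed, finally updating the vtable,
// iftable and IMT.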
7891 bool ClassLinker::LinkInterfaceMethods(
7892 Thread* self,
7893 Handle<mirror::Class> klass,
7894 const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
7895 bool* out_new_conflict,
7896 ArtMethod** out_imt) {
7897 StackHandleScope<3> hs(self);
7898 Runtime* const runtime = Runtime::Current();
7899
7900 const bool is_interface = klass->IsInterface();
7901 const bool has_superclass = klass->HasSuperClass();
7902 const bool fill_tables = !is_interface;
7903 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
7904 const size_t ifcount = klass->GetIfTableCount();
7905
7906 Handle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
7907
7908 MutableHandle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7909 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
7910 ArtMethod* const imt_conflict_method = runtime->GetImtConflictMethod();
7911 // Copy the IMT from the super class if possible.
7912 const bool extend_super_iftable = has_superclass;
7913 if (has_superclass && fill_tables) {
7914 FillImtFromSuperClass(klass,
7915 unimplemented_method,
7916 imt_conflict_method,
7917 out_new_conflict,
7918 out_imt);
7919 }
7920 // Allocate method arrays beforehand since we don't want to miss visiting miranda method roots
7921 // due to thread suspension.
7922 if (fill_tables) {
7923 if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
7924 return false;
7925 }
7926 }
7927
7928 LinkInterfaceMethodsHelper helper(this, klass, self, runtime);
7929
7930 auto* old_cause = self->StartAssertNoThreadSuspension(
7931 "Copying ArtMethods for LinkInterfaceMethods");
7932 // Going in reverse to ensure that we will hit abstract methods that override defaults before the
7933 // defaults. This means we don't need to do any trickery when creating the Miranda methods, since
7934 // they will already be null. This has the additional benefit that the declarer of a miranda
7935 // method will actually declare an abstract method.
7936 for (size_t i = ifcount; i != 0u; ) {
7937 --i;
7938 DCHECK_LT(i, ifcount);
7939
7940 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
7941 if (num_methods > 0) {
7942 StackHandleScope<2> hs2(self);
7943 const bool is_super = i < super_ifcount;
7944 const bool super_interface = is_super && extend_super_iftable;
7945 // We don't actually create or fill these tables for interfaces, we just copy some methods for
7946 // conflict methods. Just set this as nullptr in those cases.
7947 Handle<mirror::PointerArray> method_array(fill_tables
7948 ? hs2.NewHandle(iftable->GetMethodArray(i))
7949 : hs2.NewHandle<mirror::PointerArray>(nullptr));
7950
7951 ArraySlice<ArtMethod> input_virtual_methods;
7952 ScopedNullHandle<mirror::PointerArray> null_handle;
7953 Handle<mirror::PointerArray> input_vtable_array(null_handle);
7954 int32_t input_array_length = 0;
7955
7956 // TODO Cleanup Needed: In the presence of default methods this optimization is rather dirty
7957 // and confusing. Default methods should always look through all the superclasses
7958 // because they are the last choice of an implementation. We get around this by looking
7959 // at the super-classes iftable methods (copied into method_array previously) when we are
7960 // looking for the implementation of a super-interface method but that is rather dirty.
7961 bool using_virtuals;
7962 if (super_interface || is_interface) {
7963 // If we are overwriting a super class interface, try to use only the virtual methods instead
7964 // of the whole vtable.
7965 using_virtuals = true;
7966 input_virtual_methods = klass->GetDeclaredMethodsSlice(image_pointer_size_);
7967 input_array_length = input_virtual_methods.size();
7968 } else {
7969 // For a new interface, however, we need the whole vtable in case a new
7970 // interface method is implemented anywhere in the superclass hierarchy.
7971 using_virtuals = false;
7972 DCHECK(vtable != nullptr);
7973 input_vtable_array = vtable;
7974 input_array_length = input_vtable_array->GetLength();
7975 }
7976
7977 // For each method in interface
7978 for (size_t j = 0; j < num_methods; ++j) {
7979 auto* interface_method = iftable->GetInterface(i)->GetVirtualMethod(j, image_pointer_size_);
7980 MethodNameAndSignatureComparator interface_name_comparator(
7981 interface_method->GetInterfaceMethodIfProxy(image_pointer_size_));
7982 uint32_t imt_index = interface_method->GetImtIndex();
7983 ArtMethod** imt_ptr = &out_imt[imt_index];
7984 // For each method listed in the interface's method list, find the
7985 // matching method in our class's method list. We want to favor the
7986 // subclass over the superclass, which just requires walking
7987 // back from the end of the vtable. (This only matters if the
7988 // superclass defines a private method and this class redefines
7989 // it -- otherwise it would use the same vtable slot. In .dex files
7990 // those don't end up in the virtual method table, so it shouldn't
7991 // matter which direction we go. We walk it backward anyway.)
7992 //
7993 // To find defaults we need to do the same but also go over interfaces.
7994 bool found_impl = false;
7995 ArtMethod* vtable_impl = nullptr;
7996 for (int32_t k = input_array_length - 1; k >= 0; --k) {
7997 ArtMethod* vtable_method = using_virtuals ?
7998 &input_virtual_methods[k] :
7999 input_vtable_array->GetElementPtrSize<ArtMethod*>(k, image_pointer_size_);
8000 ArtMethod* vtable_method_for_name_comparison =
8001 vtable_method->GetInterfaceMethodIfProxy(image_pointer_size_);
8002 if (interface_name_comparator.HasSameNameAndSignature(
8003 vtable_method_for_name_comparison)) {
8004 if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
8005 // Must do EndAssertNoThreadSuspension before throw since the throw can cause
8006 // allocations.
8007 self->EndAssertNoThreadSuspension(old_cause);
8008 ThrowIllegalAccessError(klass.Get(),
8009 "Method '%s' implementing interface method '%s' is not public",
8010 vtable_method->PrettyMethod().c_str(),
8011 interface_method->PrettyMethod().c_str());
8012 return false;
8013 } else if (UNLIKELY(vtable_method->IsOverridableByDefaultMethod())) {
8014 // We might have a newer, better, default method for this, so we just skip it. If we
8015 // are still using this we will select it again when scanning for default methods. To
8016 // obviate the need to copy the method again we will make a note that we already found
8017 // a default here.
8018 // TODO This should be much cleaner.
8019 vtable_impl = vtable_method;
8020 break;
8021 } else {
8022 found_impl = true;
8023 if (LIKELY(fill_tables)) {
8024 method_array->SetElementPtrSize(j, vtable_method, image_pointer_size_);
8025 // Place method in imt if entry is empty, place conflict otherwise.
8026 SetIMTRef(unimplemented_method,
8027 imt_conflict_method,
8028 vtable_method,
8029 /*out*/out_new_conflict,
8030 /*out*/imt_ptr);
8031 }
8032 break;
8033 }
8034 }
8035 }
8036 // Continue on to the next method if we are done.
8037 if (LIKELY(found_impl)) {
8038 continue;
8039 } else if (LIKELY(super_interface)) {
8040 // Don't look for a default implementation when the super-method is implemented directly
8041 // by the class.
8042 //
8043 // See if we can use the superclasses method and skip searching everything else.
8044 // Note: !found_impl && super_interface
8045 CHECK(extend_super_iftable);
8046 // If this is a super_interface method it is possible we shouldn't override it because a
8047 // superclass could have implemented it directly. We get the method the superclass used
8048 // to implement this to know if we can override it with a default method. Doing this is
8049 // safe since we know that the super_iftable is filled in so we can simply pull it from
8050 // there. We don't bother if this is not a super-class's interface since in that case we
8051 // have scanned the entire vtable anyway and would have found it.
8052 // TODO This is rather dirty but it is faster than searching through the entire vtable
8053 // every time.
8054 ArtMethod* supers_method =
8055 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
8056 DCHECK(supers_method != nullptr);
8057 DCHECK(interface_name_comparator.HasSameNameAndSignature(supers_method));
8058 if (LIKELY(!supers_method->IsOverridableByDefaultMethod())) {
8059 // The method is not overridable by a default method (i.e. it is directly implemented
8060 // in some class). Therefore move onto the next interface method.
8061 continue;
8062 } else {
8063 // If the super-class's method is override-able by a default method, we need to keep
8064 // track of it since, though it is override-able, it is not guaranteed to be 'overridden'.
8065 // If it turns out not to be overridden and we did not keep track of it we might add it
8066 // to the vtable twice, causing corruption (vtable entries having inconsistent and
8067 // illegal states, incorrect vtable size, and incorrect or inconsistent iftable entries)
8068 // in this class and any subclasses.
8069 DCHECK(vtable_impl == nullptr || vtable_impl == supers_method)
8070 << "vtable_impl was " << ArtMethod::PrettyMethod(vtable_impl)
8071 << " and not 'nullptr' or "
8072 << supers_method->PrettyMethod()
8073 << " as expected. IFTable appears to be corrupt!";
8074 vtable_impl = supers_method;
8075 }
8076 }
8077 // If we haven't found it yet we should search through the interfaces for default methods.
8078 ArtMethod* current_method = helper.FindMethod(interface_method,
8079 interface_name_comparator,
8080 vtable_impl);
8081 if (LIKELY(fill_tables)) {
8082 if (current_method == nullptr && !super_interface) {
8083 // We could not find an implementation for this method and since it is a brand new
8084 // interface we searched the entire vtable (and all default methods) for an
8085 // implementation but couldn't find one. We therefore need to make a miranda method.
8086 current_method = helper.GetOrCreateMirandaMethod(interface_method,
8087 interface_name_comparator);
8088 }
8089
8090 if (current_method != nullptr) {
8091 // We found a default method implementation. Record it in the iftable and IMT.
8092 method_array->SetElementPtrSize(j, current_method, image_pointer_size_);
8093 SetIMTRef(unimplemented_method,
8094 imt_conflict_method,
8095 current_method,
8096 /*out*/out_new_conflict,
8097 /*out*/imt_ptr);
8098 }
8099 }
8100 } // For each method in interface end.
8101 } // if (num_methods > 0)
8102 } // For each interface.
8103 // TODO don't extend virtuals of interface unless necessary (when is it?).
8104 if (helper.HasNewVirtuals()) {
8105 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8106 helper.ReallocMethods(); // No return value to check. Native allocation failure aborts.
8107 LengthPrefixedArray<ArtMethod>* methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8108
8109 // Done copying methods, they are all roots in the class now, so we can end the no thread
8110 // suspension assert.
8111 self->EndAssertNoThreadSuspension(old_cause);
8112
8113 if (fill_tables) {
8114 vtable.Assign(helper.UpdateVtable(default_translations, vtable.Get()));
8115 if (UNLIKELY(vtable == nullptr)) {
8116 // The helper has already called self->AssertPendingOOMException();
8117 return false;
8118 }
8119 helper.UpdateIfTable(iftable);
8120 helper.UpdateIMT(out_imt);
8121 }
8122
8123 helper.CheckNoStaleMethodsInDexCache();
8124 helper.ClobberOldMethods(old_methods, methods);
8125 } else {
8126 self->EndAssertNoThreadSuspension(old_cause);
8127 }
8128 if (kIsDebugBuild && !is_interface) {
8129 SanityCheckVTable(self, klass, image_pointer_size_);
8130 }
8131 return true;
8132 }
8133
8134 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
8135 CHECK(klass != nullptr);
8136 return LinkFields(self, klass, false, nullptr);
8137 }
8138
8139 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
8140 CHECK(klass != nullptr);
8141 return LinkFields(self, klass, true, class_size);
8142 }
8143
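// Orders fields for layout: references first, then primitives by decreasing size, with ties
// broken by primitive type and finally by dex field index (which follows name order).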
8144 struct LinkFieldsComparator {
8145 LinkFieldsComparator() REQUIRES_SHARED(Locks::mutator_lock_) {
8146 }
8147 // No thread safety analysis as will be called from STL. Checked lock held in constructor.
8148 bool operator()(ArtField* field1, ArtField* field2)
8149 NO_THREAD_SAFETY_ANALYSIS {
8150 // First come reference fields, then 64-bit, then 32-bit, and then 16-bit, then finally 8-bit.
8151 Primitive::Type type1 = field1->GetTypeAsPrimitiveType();
8152 Primitive::Type type2 = field2->GetTypeAsPrimitiveType();
8153 if (type1 != type2) {
8154 if (type1 == Primitive::kPrimNot) {
8155 // Reference always goes first.
8156 return true;
8157 }
8158 if (type2 == Primitive::kPrimNot) {
8159 // Reference always goes first.
8160 return false;
8161 }
8162 size_t size1 = Primitive::ComponentSize(type1);
8163 size_t size2 = Primitive::ComponentSize(type2);
8164 if (size1 != size2) {
8165 // Larger primitive types go first.
8166 return size1 > size2;
8167 }
8168 // Primitive types differ but sizes match. Arbitrarily order by primitive type.
8169 return type1 < type2;
8170 }
8171 // Same basic group? Then sort by dex field index. This is guaranteed to be sorted
8172 // by name and for equal names by type id index.
8173 // NOTE: This works also for proxies. Their static fields are assigned appropriate indexes.
8174 return field1->GetDexFieldIndex() < field2->GetDexFieldIndex();
8175 }
8176 };
8177
8178 bool ClassLinker::LinkFields(Thread* self,
8179 Handle<mirror::Class> klass,
8180 bool is_static,
8181 size_t* class_size) {
8182 self->AllowThreadSuspension();
8183 const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
8184 LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
8185 klass->GetIFieldsPtr();
8186
8187 // Initialize field_offset
8188 MemberOffset field_offset(0);
8189 if (is_static) {
8190 field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(image_pointer_size_);
8191 } else {
8192 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8193 if (super_class != nullptr) {
8194 CHECK(super_class->IsResolved())
8195 << klass->PrettyClass() << " " << super_class->PrettyClass();
8196 field_offset = MemberOffset(super_class->GetObjectSize());
8197 }
8198 }
8199
8200 CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();
8201
8202 // We want a relatively stable order so that adding new fields
8203 // minimizes disruption of the C++ versions such as Class and Method.
8204 //
8205 // The overall sort order is:
8206 // 1) All object reference fields, sorted alphabetically.
8207 // 2) All java long (64-bit) integer fields, sorted alphabetically.
8208 // 3) All java double (64-bit) floating point fields, sorted alphabetically.
8209 // 4) All java int (32-bit) integer fields, sorted alphabetically.
8210 // 5) All java float (32-bit) floating point fields, sorted alphabetically.
8211 // 6) All java char (16-bit) integer fields, sorted alphabetically.
8212 // 7) All java short (16-bit) integer fields, sorted alphabetically.
8213 // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
8214 // 9) All java byte (8-bit) integer fields, sorted alphabetically.
8215 //
8216 // Once the fields are sorted in this order we will attempt to fill any gaps that might be present
8217 // in the memory layout of the structure. See ShuffleForward for how this is done.
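// For example, for a hypothetical class with fields {Object a; long b; char c; byte d;} the
// groups are laid out as a (reference), then b (8 bytes), then c (2 bytes), then d (1 byte);
// if aligning the 8-byte group leaves a gap after the references, ShuffleForward back-fills it
// with the smaller fields where they fit.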
8218 std::deque<ArtField*> grouped_and_sorted_fields;
8219 const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
8220 "Naked ArtField references in deque");
8221 for (size_t i = 0; i < num_fields; i++) {
8222 grouped_and_sorted_fields.push_back(&fields->At(i));
8223 }
8224 std::sort(grouped_and_sorted_fields.begin(), grouped_and_sorted_fields.end(),
8225 LinkFieldsComparator());
8226
8227 // References should be at the front.
8228 size_t current_field = 0;
8229 size_t num_reference_fields = 0;
8230 FieldGaps gaps;
8231
8232 for (; current_field < num_fields; current_field++) {
8233 ArtField* field = grouped_and_sorted_fields.front();
8234 Primitive::Type type = field->GetTypeAsPrimitiveType();
8235 bool isPrimitive = type != Primitive::kPrimNot;
8236 if (isPrimitive) {
8237 break; // past last reference, move on to the next phase
8238 }
8239 if (UNLIKELY(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(
8240 field_offset.Uint32Value()))) {
8241 MemberOffset old_offset = field_offset;
8242 field_offset = MemberOffset(RoundUp(field_offset.Uint32Value(), 4));
8243 AddFieldGap(old_offset.Uint32Value(), field_offset.Uint32Value(), &gaps);
8244 }
8245 DCHECK_ALIGNED(field_offset.Uint32Value(), sizeof(mirror::HeapReference<mirror::Object>));
8246 grouped_and_sorted_fields.pop_front();
8247 num_reference_fields++;
8248 field->SetOffset(field_offset);
8249 field_offset = MemberOffset(field_offset.Uint32Value() +
8250 sizeof(mirror::HeapReference<mirror::Object>));
8251 }
8252 // Gaps are stored as a max heap which means that we must shuffle from largest to smallest
8253 // otherwise we could end up with suboptimal gap fills.
8254 ShuffleForward<8>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8255 ShuffleForward<4>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8256 ShuffleForward<2>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8257 ShuffleForward<1>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8258 CHECK(grouped_and_sorted_fields.empty()) << "Missed " << grouped_and_sorted_fields.size() <<
8259 " fields.";
8260 self->EndAssertNoThreadSuspension(old_no_suspend_cause);
8261
8262 // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
8263 if (!is_static && klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
8264 // We know there are no non-reference fields in the Reference classes, and we know
8265 // that 'referent' is alphabetically last, so this is easy...
8266 CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
8267 CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
8268 << klass->PrettyClass();
8269 --num_reference_fields;
8270 }
8271
8272 size_t size = field_offset.Uint32Value();
8273 // Update klass
8274 if (is_static) {
8275 klass->SetNumReferenceStaticFields(num_reference_fields);
8276 *class_size = size;
8277 } else {
8278 klass->SetNumReferenceInstanceFields(num_reference_fields);
8279 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8280 if (num_reference_fields == 0 || super_class == nullptr) {
8281 // object has one reference field, klass, but we ignore it since we always visit the class.
8282 // super_class is null iff the class is java.lang.Object.
8283 if (super_class == nullptr ||
8284 (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
8285 klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
8286 }
8287 }
8288 if (kIsDebugBuild) {
8289 DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
8290 size_t total_reference_instance_fields = 0;
8291 ObjPtr<mirror::Class> cur_super = klass.Get();
8292 while (cur_super != nullptr) {
8293 total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
8294 cur_super = cur_super->GetSuperClass();
8295 }
8296 if (super_class == nullptr) {
8297 CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
8298 } else {
8299 // Check that there is at least num_reference_fields other than Object.class.
8300 CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
8301 << klass->PrettyClass();
8302 }
8303 }
8304 if (!klass->IsVariableSize()) {
8305 std::string temp;
8306 DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
8307 size_t previous_size = klass->GetObjectSize();
8308 if (previous_size != 0) {
8309 // Make sure that we didn't originally have an incorrect size.
8310 CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
8311 }
8312 klass->SetObjectSize(size);
8313 }
8314 }
8315
8316 if (kIsDebugBuild) {
8317 // Make sure that the fields array is ordered by name but all reference
8318 // offsets are at the beginning as far as alignment allows.
8319 MemberOffset start_ref_offset = is_static
8320 ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(image_pointer_size_)
8321 : klass->GetFirstReferenceInstanceFieldOffset();
8322 MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
8323 num_reference_fields *
8324 sizeof(mirror::HeapReference<mirror::Object>));
8325 MemberOffset current_ref_offset = start_ref_offset;
8326 for (size_t i = 0; i < num_fields; i++) {
8327 ArtField* field = &fields->At(i);
8328 VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
8329 << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
8330 << " offset=" << field->GetOffsetDuringLinking();
8331 if (i != 0) {
8332 ArtField* const prev_field = &fields->At(i - 1);
8333 // NOTE: The field names can be the same. This is not possible in the Java language
8334 // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
8335 DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
8336 }
8337 Primitive::Type type = field->GetTypeAsPrimitiveType();
8338 bool is_primitive = type != Primitive::kPrimNot;
8339 if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
8340 strcmp("referent", field->GetName()) == 0) {
8341 is_primitive = true; // We lied above, so we have to expect a lie here.
8342 }
8343 MemberOffset offset = field->GetOffsetDuringLinking();
8344 if (is_primitive) {
8345 if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
8346 // Shuffled before references.
8347 size_t type_size = Primitive::ComponentSize(type);
8348 CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
8349 CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
8350 CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
8351 CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
8352 }
8353 } else {
8354 CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
8355 current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
8356 sizeof(mirror::HeapReference<mirror::Object>));
8357 }
8358 }
8359 CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
8360 }
8361 return true;
8362 }
8363
8364 // Set the bitmap of reference instance field offsets.
8365 void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
8366 uint32_t reference_offsets = 0;
8367 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8368 // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
8369 if (super_class != nullptr) {
8370 reference_offsets = super_class->GetReferenceInstanceOffsets();
8371 // Compute reference offsets unless our superclass overflowed.
8372 if (reference_offsets != mirror::Class::kClassWalkSuper) {
8373 size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
8374 if (num_reference_fields != 0u) {
8375 // All of the fields that contain object references are guaranteed be grouped in memory
8376 // starting at an appropriately aligned address after super class object data.
8377 uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
8378 sizeof(mirror::HeapReference<mirror::Object>));
8379 uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
8380 sizeof(mirror::HeapReference<mirror::Object>);
8381 if (start_bit + num_reference_fields > 32) {
8382 reference_offsets = mirror::Class::kClassWalkSuper;
8383 } else {
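// Set bits [start_bit, start_bit + num_reference_fields) of the bitmap. For example,
// with start_bit == 2 and num_reference_fields == 3 this ORs in 0b11100.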
8384 reference_offsets |= (0xffffffffu << start_bit) &
8385 (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
8386 }
8387 }
8388 }
8389 }
8390 klass->SetReferenceInstanceOffsets(reference_offsets);
8391 }
8392
8393 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
8394 ObjPtr<mirror::DexCache> dex_cache) {
8395 StackHandleScope<1> hs(Thread::Current());
8396 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
8397 return DoResolveString(string_idx, h_dex_cache);
8398 }
8399
8400 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
8401 Handle<mirror::DexCache> dex_cache) {
8402 const DexFile& dex_file = *dex_cache->GetDexFile();
8403 uint32_t utf16_length;
8404 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
8405 ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
8406 if (string != nullptr) {
8407 dex_cache->SetResolvedString(string_idx, string);
8408 }
8409 return string;
8410 }
8411
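// Unlike DoResolveString above, this only looks up an already interned string (LookupStrong)
// and never creates or interns a new one, so a null result is not an error.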
8412 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
8413 ObjPtr<mirror::DexCache> dex_cache) {
8414 DCHECK(dex_cache != nullptr);
8415 const DexFile& dex_file = *dex_cache->GetDexFile();
8416 uint32_t utf16_length;
8417 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
8418 ObjPtr<mirror::String> string =
8419 intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
8420 if (string != nullptr) {
8421 dex_cache->SetResolvedString(string_idx, string);
8422 }
8423 return string;
8424 }
8425
8426 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
8427 ObjPtr<mirror::Class> referrer) {
8428 return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
8429 }
8430
8431 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
8432 ObjPtr<mirror::DexCache> dex_cache,
8433 ObjPtr<mirror::ClassLoader> class_loader) {
8434 const DexFile& dex_file = *dex_cache->GetDexFile();
8435 const char* descriptor = dex_file.StringByTypeIdx(type_idx);
8436 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
8437 ObjPtr<mirror::Class> type = nullptr;
8438 if (descriptor[1] == '\0') {
8439 // Only the descriptors of primitive types should be 1 character long; this also avoids class
8440 // lookup for primitive classes that aren't backed by dex files.
8441 type = LookupPrimitiveClass(descriptor[0]);
8442 } else {
8443 Thread* const self = Thread::Current();
8444 DCHECK(self != nullptr);
8445 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
8446 // Find the class in the loaded classes table.
8447 type = LookupClass(self, descriptor, hash, class_loader);
8448 }
8449 if (type != nullptr) {
8450 if (type->IsResolved()) {
8451 dex_cache->SetResolvedType(type_idx, type);
8452 } else {
8453 type = nullptr;
8454 }
8455 }
8456 return type;
8457 }
8458
8459 template <typename T>
8460 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, T referrer) {
8461 StackHandleScope<2> hs(Thread::Current());
8462 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
8463 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
8464 return DoResolveType(type_idx, dex_cache, class_loader);
8465 }
8466
8467 // Instantiate the above.
8468 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8469 ArtField* referrer);
8470 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8471 ArtMethod* referrer);
8472 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8473 ObjPtr<mirror::Class> referrer);
8474
8475 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8476 Handle<mirror::DexCache> dex_cache,
8477 Handle<mirror::ClassLoader> class_loader) {
8478 Thread* self = Thread::Current();
8479 const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
8480 ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
8481 if (resolved != nullptr) {
8482 // TODO: we used to throw here if resolved's class loader was not the
8483 // boot class loader. This was to permit different classes with the
8484 // same name to be loaded simultaneously by different loaders
8485 dex_cache->SetResolvedType(type_idx, resolved);
8486 } else {
8487 CHECK(self->IsExceptionPending())
8488 << "Expected pending exception for failed resolution of: " << descriptor;
8489 // Convert a ClassNotFoundException to a NoClassDefFoundError.
8490 StackHandleScope<1> hs(self);
8491 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
8492 if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
8493 DCHECK(resolved == nullptr); // No Handle needed to preserve resolved.
8494 self->ClearException();
8495 ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
8496 self->GetException()->SetCause(cause.Get());
8497 }
8498 }
8499 DCHECK((resolved == nullptr) || resolved->IsResolved())
8500 << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
8501 return resolved;
8502 }
8503
8504 ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
8505 ObjPtr<mirror::DexCache> dex_cache,
8506 ObjPtr<mirror::ClassLoader> class_loader,
8507 uint32_t method_idx) {
8508 // Search for the method using dex_cache and method_idx. The Class::Find*Method()
8509 // functions can optimize the search if the dex_cache is the same as the DexCache
8510 // of the class, with fall-back to name and signature search otherwise.
8511 ArtMethod* resolved = nullptr;
8512 if (klass->IsInterface()) {
8513 resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
8514 } else {
8515 resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
8516 }
8517 DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
8518 if (resolved != nullptr &&
8519 hiddenapi::ShouldDenyAccessToMember(resolved,
8520 hiddenapi::AccessContext(class_loader, dex_cache),
8521 hiddenapi::AccessMethod::kLinking)) {
8522 resolved = nullptr;
8523 }
8524 if (resolved != nullptr) {
8525 // In the case of jvmti, the dex file gets verified before being registered, so first
8526 // check if it's registered before checking class tables.
8527 const DexFile& dex_file = *dex_cache->GetDexFile();
8528 DCHECK(!IsDexFileRegistered(Thread::Current(), dex_file) ||
8529 FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
8530 << "DexFile referrer: " << dex_file.GetLocation()
8531 << " ClassLoader: " << DescribeLoaders(class_loader, "");
8532 // Be a good citizen and update the dex cache to speed subsequent calls.
8533 dex_cache->SetResolvedMethod(method_idx, resolved, image_pointer_size_);
8534 // Disable the following invariant check as the verifier breaks it. b/73760543
8535 // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
8536 // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
8537 // << "Method: " << resolved->PrettyMethod() << ", "
8538 // << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
8539 // << "DexFile referrer: " << dex_file.GetLocation();
8540 }
8541 return resolved;
8542 }
8543
8544 // Returns true if `method` is either null or hidden.
8545 // Does not print any warnings if it is hidden.
8546 static bool CheckNoSuchMethod(ArtMethod* method,
8547 ObjPtr<mirror::DexCache> dex_cache,
8548 ObjPtr<mirror::ClassLoader> class_loader)
8549 REQUIRES_SHARED(Locks::mutator_lock_) {
8550 return method == nullptr ||
8551 hiddenapi::ShouldDenyAccessToMember(method,
8552 hiddenapi::AccessContext(class_loader, dex_cache),
8553 hiddenapi::AccessMethod::kNone); // no warnings
8554 }
8555
8556 ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
8557 ObjPtr<mirror::DexCache> dex_cache,
8558 ObjPtr<mirror::ClassLoader> class_loader,
8559 uint32_t method_idx) {
8560 if (klass->IsInterface()) {
8561 ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
8562 return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
8563 } else {
8564 // If there was an interface method with the same signature, we would have
8565 // found it in the "copied" methods. Only DCHECK that the interface method
8566 // really does not exist.
8567 if (kIsDebugBuild) {
8568 ArtMethod* method =
8569 klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
8570 DCHECK(CheckNoSuchMethod(method, dex_cache, class_loader));
8571 }
8572 return nullptr;
8573 }
8574 }
8575
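// Resolves a method id to an ArtMethod. With kCheckICCEAndIAE this also validates the invoke
// type against the resolved class and method and the referrer's access rights, throwing
// IncompatibleClassChangeError, IllegalAccessError or NoSuchMethodError as appropriate.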
8576 template <ClassLinker::ResolveMode kResolveMode>
8577 ArtMethod* ClassLinker::ResolveMethod(uint32_t method_idx,
8578 Handle<mirror::DexCache> dex_cache,
8579 Handle<mirror::ClassLoader> class_loader,
8580 ArtMethod* referrer,
8581 InvokeType type) {
8582 DCHECK(dex_cache != nullptr);
8583 DCHECK(referrer == nullptr || !referrer->IsProxyMethod());
8584 // Check for hit in the dex cache.
8585 PointerSize pointer_size = image_pointer_size_;
8586 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, pointer_size);
8587 Thread::PoisonObjectPointersIfDebug();
8588 DCHECK(resolved == nullptr || !resolved->IsRuntimeMethod());
8589 bool valid_dex_cache_method = resolved != nullptr;
8590 if (kResolveMode == ResolveMode::kNoChecks && valid_dex_cache_method) {
8591 // We have a valid method from the DexCache and no checks to perform.
8592 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
8593 return resolved;
8594 }
8595 const DexFile& dex_file = *dex_cache->GetDexFile();
8596 const dex::MethodId& method_id = dex_file.GetMethodId(method_idx);
8597 ObjPtr<mirror::Class> klass = nullptr;
8598 if (valid_dex_cache_method) {
8599 // We have a valid method from the DexCache but we need to perform ICCE and IAE checks.
8600 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
8601 klass = LookupResolvedType(method_id.class_idx_, dex_cache.Get(), class_loader.Get());
8602 if (UNLIKELY(klass == nullptr)) {
8603 // We normally should not end up here. However, the verifier currently doesn't guarantee
8604 // the invariant of having the klass in the class table. b/73760543
8605 klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
8606 }
8607 } else {
8608 // The method was not in the DexCache, resolve the declaring class.
8609 klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
8610 if (klass == nullptr) {
8611 DCHECK(Thread::Current()->IsExceptionPending());
8612 return nullptr;
8613 }
8614 }
8615
8616 // Check if the invoke type matches the class type.
8617 if (kResolveMode == ResolveMode::kCheckICCEAndIAE &&
8618 CheckInvokeClassMismatch</* kThrow= */ true>(
8619 dex_cache.Get(), type, [klass]() { return klass; })) {
8620 DCHECK(Thread::Current()->IsExceptionPending());
8621 return nullptr;
8622 }
8623
8624 if (!valid_dex_cache_method) {
8625 resolved = FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
8626 }
8627
8628 // Note: We can check for IllegalAccessError only if we have a referrer.
8629 if (kResolveMode == ResolveMode::kCheckICCEAndIAE && resolved != nullptr && referrer != nullptr) {
8630 ObjPtr<mirror::Class> methods_class = resolved->GetDeclaringClass();
8631 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
8632 if (!referring_class->CheckResolvedMethodAccess(methods_class,
8633 resolved,
8634 dex_cache.Get(),
8635 method_idx,
8636 type)) {
8637 DCHECK(Thread::Current()->IsExceptionPending());
8638 return nullptr;
8639 }
8640 }
8641
8642 // If we found a method, check for incompatible class changes.
8643 if (LIKELY(resolved != nullptr) &&
8644 LIKELY(kResolveMode == ResolveMode::kNoChecks ||
8645 !resolved->CheckIncompatibleClassChange(type))) {
8646 return resolved;
8647 } else {
8648 // If we had a method, or if we can find one with another lookup type,
8649 // it's an incompatible-class-change error.
8650 if (resolved == nullptr) {
8651 resolved = FindIncompatibleMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
8652 }
8653 if (resolved != nullptr) {
8654 ThrowIncompatibleClassChangeError(type, resolved->GetInvokeType(), resolved, referrer);
8655 } else {
8656 // We failed to find the method (using all lookup types), so throw a NoSuchMethodError.
8657 const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
8658 const Signature signature = dex_file.GetMethodSignature(method_id);
8659 ThrowNoSuchMethodError(type, klass, name, signature);
8660 }
8661 Thread::Current()->AssertPendingException();
8662 return nullptr;
8663 }
8664 }
8665
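// Resolves a method without an invoke type: checks the dex cache, otherwise resolves the
// declaring class and searches its interface or class methods directly. Hidden-API policy is
// still enforced, but no ICCE/IAE checks are performed and no NoSuchMethodError is thrown; a
// failed lookup simply returns null.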
8666 ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
8667 Handle<mirror::DexCache> dex_cache,
8668 Handle<mirror::ClassLoader> class_loader) {
8669 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, image_pointer_size_);
8670 Thread::PoisonObjectPointersIfDebug();
8671 if (resolved != nullptr) {
8672 DCHECK(!resolved->IsRuntimeMethod());
8673 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
8674 return resolved;
8675 }
8676 // The dex cache lookup failed; resolve the declaring class.
8677 const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
8678 ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
8679 if (klass == nullptr) {
8680 Thread::Current()->AssertPendingException();
8681 return nullptr;
8682 }
8683 if (klass->IsInterface()) {
8684 resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
8685 } else {
8686 resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
8687 }
8688 if (resolved != nullptr &&
8689 hiddenapi::ShouldDenyAccessToMember(
8690 resolved,
8691 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
8692 hiddenapi::AccessMethod::kLinking)) {
8693 resolved = nullptr;
8694 }
8695 return resolved;
8696 }
8697
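// Looks up an already-resolved field without triggering class resolution: if the declaring
// class has not been resolved yet, this returns null rather than resolving it or throwing.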
8698 ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
8699 ObjPtr<mirror::DexCache> dex_cache,
8700 ObjPtr<mirror::ClassLoader> class_loader,
8701 bool is_static) {
8702 const DexFile& dex_file = *dex_cache->GetDexFile();
8703 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
8704 ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
8705 if (klass == nullptr) {
8706 klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
8707 }
8708 if (klass == nullptr) {
8709 // The class has not been resolved yet, so the field is also unresolved.
8710 return nullptr;
8711 }
8712 DCHECK(klass->IsResolved());
8713
8714 return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
8715 }
8716
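// Resolves the field referenced by `field_idx`, resolving the declaring class first if needed,
// and throws NoSuchFieldError if no matching static/instance field is found. A rough usage
// sketch (the local names are illustrative only):
//   ArtField* f = class_linker->ResolveField(field_idx, dex_cache, class_loader, /*is_static=*/ true);
//   if (f == nullptr) { /* an exception is pending */ }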
8717 ArtField* ClassLinker::ResolveField(uint32_t field_idx,
8718 Handle<mirror::DexCache> dex_cache,
8719 Handle<mirror::ClassLoader> class_loader,
8720 bool is_static) {
8721 DCHECK(dex_cache != nullptr);
8722 ArtField* resolved = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
8723 Thread::PoisonObjectPointersIfDebug();
8724 if (resolved != nullptr) {
8725 return resolved;
8726 }
8727 const DexFile& dex_file = *dex_cache->GetDexFile();
8728 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
8729 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
8730 if (klass == nullptr) {
8731 DCHECK(Thread::Current()->IsExceptionPending());
8732 return nullptr;
8733 }
8734
8735 resolved = FindResolvedField(klass, dex_cache.Get(), class_loader.Get(), field_idx, is_static);
8736 if (resolved == nullptr) {
8737 const char* name = dex_file.GetFieldName(field_id);
8738 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
8739 ThrowNoSuchFieldError(is_static ? "static " : "instance ", klass, type, name);
8740 }
8741 return resolved;
8742 }
8743
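// Like ResolveField() but does not require the caller to know whether the field is static;
// the lookup (FindResolvedFieldJLS) searches by name and type descriptor across both instance
// and static fields, following the JLS-style lookup order.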
8744 ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
8745 Handle<mirror::DexCache> dex_cache,
8746 Handle<mirror::ClassLoader> class_loader) {
8747 DCHECK(dex_cache != nullptr);
8748 ArtField* resolved = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
8749 Thread::PoisonObjectPointersIfDebug();
8750 if (resolved != nullptr) {
8751 return resolved;
8752 }
8753 const DexFile& dex_file = *dex_cache->GetDexFile();
8754 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
8755 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
8756 if (klass == nullptr) {
8757 DCHECK(Thread::Current()->IsExceptionPending());
8758 return nullptr;
8759 }
8760
8761 resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
8762 if (resolved == nullptr) {
8763 const char* name = dex_file.GetFieldName(field_id);
8764 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
8765 ThrowNoSuchFieldError("", klass, type, name);
8766 }
8767 return resolved;
8768 }
8769
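// Searches `klass` for the field, first by dex cache/field index and then by name and type
// descriptor, applies hidden-API checks, and caches a successful result in the dex cache.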
8770 ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
8771 ObjPtr<mirror::DexCache> dex_cache,
8772 ObjPtr<mirror::ClassLoader> class_loader,
8773 uint32_t field_idx,
8774 bool is_static) {
8775 ArtField* resolved = nullptr;
8776 Thread* self = is_static ? Thread::Current() : nullptr;
8777 const DexFile& dex_file = *dex_cache->GetDexFile();
8778
8779 resolved = is_static ? mirror::Class::FindStaticField(self, klass, dex_cache, field_idx)
8780 : klass->FindInstanceField(dex_cache, field_idx);
8781
8782 if (resolved == nullptr) {
8783 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
8784 const char* name = dex_file.GetFieldName(field_id);
8785 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
8786 resolved = is_static ? mirror::Class::FindStaticField(self, klass, name, type)
8787 : klass->FindInstanceField(name, type);
8788 }
8789
8790 if (resolved != nullptr &&
8791 hiddenapi::ShouldDenyAccessToMember(resolved,
8792 hiddenapi::AccessContext(class_loader, dex_cache),
8793 hiddenapi::AccessMethod::kLinking)) {
8794 resolved = nullptr;
8795 }
8796
8797 if (resolved != nullptr) {
8798 dex_cache->SetResolvedField(field_idx, resolved, image_pointer_size_);
8799 }
8800
8801 return resolved;
8802 }
8803
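// Name-and-type based field search used by ResolveFieldJLS(); also applies hidden-API checks
// and caches a successful result in the dex cache.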
8804 ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
8805 ObjPtr<mirror::DexCache> dex_cache,
8806 ObjPtr<mirror::ClassLoader> class_loader,
8807 uint32_t field_idx) {
8808 ArtField* resolved = nullptr;
8809 Thread* self = Thread::Current();
8810 const DexFile& dex_file = *dex_cache->GetDexFile();
8811 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
8812
8813 const char* name = dex_file.GetFieldName(field_id);
8814 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
8815 resolved = mirror::Class::FindField(self, klass, name, type);
8816
8817 if (resolved != nullptr &&
8818 hiddenapi::ShouldDenyAccessToMember(resolved,
8819 hiddenapi::AccessContext(class_loader, dex_cache),
8820 hiddenapi::AccessMethod::kLinking)) {
8821 resolved = nullptr;
8822 }
8823
8824 if (resolved != nullptr) {
8825 dex_cache->SetResolvedField(field_idx, resolved, image_pointer_size_);
8826 }
8827
8828 return resolved;
8829 }
8830
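// Resolves a method prototype (dex::ProtoIndex) into a java.lang.invoke.MethodType by
// resolving the return type and every parameter type, and caches the result in the dex cache.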
8831 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
8832 Thread* self,
8833 dex::ProtoIndex proto_idx,
8834 Handle<mirror::DexCache> dex_cache,
8835 Handle<mirror::ClassLoader> class_loader) {
8836 DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
8837 DCHECK(dex_cache != nullptr);
8838
8839 ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
8840 if (resolved != nullptr) {
8841 return resolved;
8842 }
8843
8844 StackHandleScope<4> hs(self);
8845
8846 // First resolve the return type.
8847 const DexFile& dex_file = *dex_cache->GetDexFile();
8848 const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
8849 Handle<mirror::Class> return_type(hs.NewHandle(
8850 ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
8851 if (return_type == nullptr) {
8852 DCHECK(self->IsExceptionPending());
8853 return nullptr;
8854 }
8855
8856 // Then resolve the argument types.
8857 //
8858 // TODO: Is there a better way to figure out the number of method arguments
8859 // other than by looking at the shorty?
8860 const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;
8861
8862 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
8863 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
8864 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
8865 if (method_params == nullptr) {
8866 DCHECK(self->IsExceptionPending());
8867 return nullptr;
8868 }
8869
8870 DexFileParameterIterator it(dex_file, proto_id);
8871 int32_t i = 0;
8872 MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
8873 for (; it.HasNext(); it.Next()) {
8874 const dex::TypeIndex type_idx = it.GetTypeIdx();
8875 param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
8876 if (param_class == nullptr) {
8877 DCHECK(self->IsExceptionPending());
8878 return nullptr;
8879 }
8880
8881 method_params->Set(i++, param_class.Get());
8882 }
8883
8884 DCHECK(!it.HasNext());
8885
8886 Handle<mirror::MethodType> type = hs.NewHandle(
8887 mirror::MethodType::Create(self, return_type, method_params));
8888 dex_cache->SetResolvedMethodType(proto_idx, type.Get());
8889
8890 return type.Get();
8891 }
8892
8893 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
8894 dex::ProtoIndex proto_idx,
8895 ArtMethod* referrer) {
8896 StackHandleScope<2> hs(self);
8897 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
8898 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
8899 return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
8900 }
8901
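// Builds a MethodHandle for a field accessor constant (static/instance get/put): resolves the
// target field, performs access and final-field checks, and constructs the corresponding
// MethodType (e.g. a static setter gets (FieldType)void, an instance getter gets
// (DeclaringClass)FieldType).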
8902 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
8903 Thread* self,
8904 const dex::MethodHandleItem& method_handle,
8905 ArtMethod* referrer) {
8906 DexFile::MethodHandleType handle_type =
8907 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
8908 mirror::MethodHandle::Kind kind;
8909 bool is_put;
8910 bool is_static;
8911 int32_t num_params;
8912 switch (handle_type) {
8913 case DexFile::MethodHandleType::kStaticPut: {
8914 kind = mirror::MethodHandle::Kind::kStaticPut;
8915 is_put = true;
8916 is_static = true;
8917 num_params = 1;
8918 break;
8919 }
8920 case DexFile::MethodHandleType::kStaticGet: {
8921 kind = mirror::MethodHandle::Kind::kStaticGet;
8922 is_put = false;
8923 is_static = true;
8924 num_params = 0;
8925 break;
8926 }
8927 case DexFile::MethodHandleType::kInstancePut: {
8928 kind = mirror::MethodHandle::Kind::kInstancePut;
8929 is_put = true;
8930 is_static = false;
8931 num_params = 2;
8932 break;
8933 }
8934 case DexFile::MethodHandleType::kInstanceGet: {
8935 kind = mirror::MethodHandle::Kind::kInstanceGet;
8936 is_put = false;
8937 is_static = false;
8938 num_params = 1;
8939 break;
8940 }
8941 case DexFile::MethodHandleType::kInvokeStatic:
8942 case DexFile::MethodHandleType::kInvokeInstance:
8943 case DexFile::MethodHandleType::kInvokeConstructor:
8944 case DexFile::MethodHandleType::kInvokeDirect:
8945 case DexFile::MethodHandleType::kInvokeInterface:
8946 UNREACHABLE();
8947 }
8948
8949 ArtField* target_field =
8950 ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
8951 if (LIKELY(target_field != nullptr)) {
8952 ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
8953 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
8954 if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
8955 ThrowIllegalAccessErrorField(referring_class, target_field);
8956 return nullptr;
8957 }
8958 if (UNLIKELY(is_put && target_field->IsFinal())) {
8959 ThrowIllegalAccessErrorField(referring_class, target_field);
8960 return nullptr;
8961 }
8962 } else {
8963 DCHECK(Thread::Current()->IsExceptionPending());
8964 return nullptr;
8965 }
8966
8967 StackHandleScope<4> hs(self);
8968 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
8969 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
8970 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
8971 if (UNLIKELY(method_params == nullptr)) {
8972 DCHECK(self->IsExceptionPending());
8973 return nullptr;
8974 }
8975
8976 Handle<mirror::Class> constructor_class;
8977 Handle<mirror::Class> return_type;
8978 switch (handle_type) {
8979 case DexFile::MethodHandleType::kStaticPut: {
8980 method_params->Set(0, target_field->ResolveType());
8981 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
8982 break;
8983 }
8984 case DexFile::MethodHandleType::kStaticGet: {
8985 return_type = hs.NewHandle(target_field->ResolveType());
8986 break;
8987 }
8988 case DexFile::MethodHandleType::kInstancePut: {
8989 method_params->Set(0, target_field->GetDeclaringClass());
8990 method_params->Set(1, target_field->ResolveType());
8991 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
8992 break;
8993 }
8994 case DexFile::MethodHandleType::kInstanceGet: {
8995 method_params->Set(0, target_field->GetDeclaringClass());
8996 return_type = hs.NewHandle(target_field->ResolveType());
8997 break;
8998 }
8999 case DexFile::MethodHandleType::kInvokeStatic:
9000 case DexFile::MethodHandleType::kInvokeInstance:
9001 case DexFile::MethodHandleType::kInvokeConstructor:
9002 case DexFile::MethodHandleType::kInvokeDirect:
9003 case DexFile::MethodHandleType::kInvokeInterface:
9004 UNREACHABLE();
9005 }
9006
9007 for (int32_t i = 0; i < num_params; ++i) {
9008 if (UNLIKELY(method_params->Get(i) == nullptr)) {
9009 DCHECK(self->IsExceptionPending());
9010 return nullptr;
9011 }
9012 }
9013
9014 if (UNLIKELY(return_type.IsNull())) {
9015 DCHECK(self->IsExceptionPending());
9016 return nullptr;
9017 }
9018
9019 Handle<mirror::MethodType>
9020 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9021 if (UNLIKELY(method_type.IsNull())) {
9022 DCHECK(self->IsExceptionPending());
9023 return nullptr;
9024 }
9025
9026 uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9027 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9028 }
9029
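// Builds a MethodHandle for a method-invoking constant. The target method is resolved
// according to the handle type (static, virtual, direct/super, interface, or constructor),
// an access check is performed against the referrer, and the MethodType is derived from the
// method prototype plus an optional receiver. Constructor handles are special-cased through
// MethodHandles.Lookup.findConstructor at the end of this method.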
9030 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
9031 Thread* self,
9032 const dex::MethodHandleItem& method_handle,
9033 ArtMethod* referrer) {
9034 DexFile::MethodHandleType handle_type =
9035 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9036 mirror::MethodHandle::Kind kind;
9037 uint32_t receiver_count = 0;
9038 ArtMethod* target_method = nullptr;
9039 switch (handle_type) {
9040 case DexFile::MethodHandleType::kStaticPut:
9041 case DexFile::MethodHandleType::kStaticGet:
9042 case DexFile::MethodHandleType::kInstancePut:
9043 case DexFile::MethodHandleType::kInstanceGet:
9044 UNREACHABLE();
9045 case DexFile::MethodHandleType::kInvokeStatic: {
9046 kind = mirror::MethodHandle::Kind::kInvokeStatic;
9047 receiver_count = 0;
9048 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9049 method_handle.field_or_method_idx_,
9050 referrer,
9051 InvokeType::kStatic);
9052 break;
9053 }
9054 case DexFile::MethodHandleType::kInvokeInstance: {
9055 kind = mirror::MethodHandle::Kind::kInvokeVirtual;
9056 receiver_count = 1;
9057 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9058 method_handle.field_or_method_idx_,
9059 referrer,
9060 InvokeType::kVirtual);
9061 break;
9062 }
9063 case DexFile::MethodHandleType::kInvokeConstructor: {
9064 // Constructors are currently implemented as a transform. They
9065 // are special cased later in this method.
9066 kind = mirror::MethodHandle::Kind::kInvokeTransform;
9067 receiver_count = 0;
9068 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9069 method_handle.field_or_method_idx_,
9070 referrer,
9071 InvokeType::kDirect);
9072 break;
9073 }
9074 case DexFile::MethodHandleType::kInvokeDirect: {
9075 kind = mirror::MethodHandle::Kind::kInvokeDirect;
9076 receiver_count = 1;
9077 StackHandleScope<2> hs(self);
9078 // A constant method handle with type kInvokeDirect can refer to
9079 // a method that is private or to a method in a super class. To
9080 // disambiguate the two options, we resolve the method ignoring
9081 // the invocation type to determine if the method is private. We
9082 // then resolve again specifying the intended invocation type to
9083 // force the appropriate checks.
9084 target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
9085 hs.NewHandle(referrer->GetDexCache()),
9086 hs.NewHandle(referrer->GetClassLoader()));
9087 if (UNLIKELY(target_method == nullptr)) {
9088 break;
9089 }
9090
9091 if (target_method->IsPrivate()) {
9092 kind = mirror::MethodHandle::Kind::kInvokeDirect;
9093 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9094 method_handle.field_or_method_idx_,
9095 referrer,
9096 InvokeType::kDirect);
9097 } else {
9098 kind = mirror::MethodHandle::Kind::kInvokeSuper;
9099 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9100 method_handle.field_or_method_idx_,
9101 referrer,
9102 InvokeType::kSuper);
9103 if (UNLIKELY(target_method == nullptr)) {
9104 break;
9105 }
9106 // Find the method specified in the parent of the referring class,
9107 // so that invoke-super invokes the method in the parent of the
9108 // referrer.
9109 target_method =
9110 referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
9111 kRuntimePointerSize);
9112 }
9113 break;
9114 }
9115 case DexFile::MethodHandleType::kInvokeInterface: {
9116 kind = mirror::MethodHandle::Kind::kInvokeInterface;
9117 receiver_count = 1;
9118 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9119 method_handle.field_or_method_idx_,
9120 referrer,
9121 InvokeType::kInterface);
9122 break;
9123 }
9124 }
9125
9126 if (UNLIKELY(target_method == nullptr)) {
9127 DCHECK(Thread::Current()->IsExceptionPending());
9128 return nullptr;
9129 }
9130
9131 ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
9132 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9133 uint32_t access_flags = target_method->GetAccessFlags();
9134 if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
9135 ThrowIllegalAccessErrorMethod(referring_class, target_method);
9136 return nullptr;
9137 }
9138
9139 // Calculate the number of parameters from the method shorty. We add the
9140 // receiver count (0 or 1) and deduct one for the return value.
9141 uint32_t shorty_length;
9142 target_method->GetShorty(&shorty_length);
9143 int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
9144
9145 StackHandleScope<5> hs(self);
9146 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
9147 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9148 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9149 if (method_params.Get() == nullptr) {
9150 DCHECK(self->IsExceptionPending());
9151 return nullptr;
9152 }
9153
9154 const DexFile* dex_file = referrer->GetDexFile();
9155 const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
9156 int32_t index = 0;
9157 if (receiver_count != 0) {
9158 // Insert receiver. Use the class identified in the method handle rather than the declaring
9159 // class of the resolved method, which may be a superclass or a default interface method's
9160 // class (b/115964401).
9161 ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
9162 // receiver_class should have been resolved when resolving the target method.
9163 DCHECK(receiver_class != nullptr);
9164 method_params->Set(index++, receiver_class);
9165 }
9166
9167 const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
9168 DexFileParameterIterator it(*dex_file, proto_id);
9169 while (it.HasNext()) {
9170 DCHECK_LT(index, num_params);
9171 const dex::TypeIndex type_idx = it.GetTypeIdx();
9172 ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
9173 if (klass == nullptr) {
9174 DCHECK(self->IsExceptionPending());
9175 return nullptr;
9176 }
9177 method_params->Set(index++, klass);
9178 it.Next();
9179 }
9180
9181 Handle<mirror::Class> return_type =
9182 hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
9183 if (UNLIKELY(return_type.IsNull())) {
9184 DCHECK(self->IsExceptionPending());
9185 return nullptr;
9186 }
9187
9188 Handle<mirror::MethodType>
9189 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9190 if (UNLIKELY(method_type.IsNull())) {
9191 DCHECK(self->IsExceptionPending());
9192 return nullptr;
9193 }
9194
9195 if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
9196 Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
9197 Handle<mirror::MethodHandlesLookup> lookup =
9198 hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
9199 return lookup->FindConstructor(self, constructor_class, method_type);
9200 }
9201
9202 uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
9203 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9204 }
9205
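// Dispatches to the field or method flavor of method handle resolution based on the
// method_handle_type_ recorded in the dex file.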
9206 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
9207 uint32_t method_handle_idx,
9208 ArtMethod* referrer)
9209 REQUIRES_SHARED(Locks::mutator_lock_) {
9210 const DexFile* const dex_file = referrer->GetDexFile();
9211 const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
9212 switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
9213 case DexFile::MethodHandleType::kStaticPut:
9214 case DexFile::MethodHandleType::kStaticGet:
9215 case DexFile::MethodHandleType::kInstancePut:
9216 case DexFile::MethodHandleType::kInstanceGet:
9217 return ResolveMethodHandleForField(self, method_handle, referrer);
9218 case DexFile::MethodHandleType::kInvokeStatic:
9219 case DexFile::MethodHandleType::kInvokeInstance:
9220 case DexFile::MethodHandleType::kInvokeConstructor:
9221 case DexFile::MethodHandleType::kInvokeDirect:
9222 case DexFile::MethodHandleType::kInvokeInterface:
9223 return ResolveMethodHandleForMethod(self, method_handle, referrer);
9224 }
9225 }
9226
9227 bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9228 return (entry_point == GetQuickResolutionStub()) ||
9229 (quick_resolution_trampoline_ == entry_point);
9230 }
9231
9232 bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9233 return (entry_point == GetQuickToInterpreterBridge()) ||
9234 (quick_to_interpreter_bridge_trampoline_ == entry_point);
9235 }
9236
9237 bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9238 return (entry_point == GetQuickGenericJniStub()) ||
9239 (quick_generic_jni_trampoline_ == entry_point);
9240 }
9241
9242 bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
9243 return entry_point == GetJniDlsymLookupStub();
9244 }
9245
9246 const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
9247 return GetQuickGenericJniStub();
9248 }
9249
9250 void ClassLinker::SetEntryPointsToInterpreter(ArtMethod* method) const {
9251 if (!method->IsNative()) {
9252 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
9253 } else {
9254 method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
9255 }
9256 }
9257
9258 void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9259 DCHECK(method->IsObsolete());
9260 // We cannot mess with the entrypoints of native methods because they are used to determine how
9261 // large the method's quick stack frame is. Without this information we cannot walk the stacks.
9262 if (!method->IsNative()) {
9263 method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
9264 }
9265 }
9266
9267 void ClassLinker::DumpForSigQuit(std::ostream& os) {
9268 ScopedObjectAccess soa(Thread::Current());
9269 ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
9270 os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
9271 << NumNonZygoteClasses() << "\n";
9272 ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
9273 os << "Dumping registered class loaders\n";
9274 size_t class_loader_index = 0;
9275 for (const ClassLoaderData& class_loader : class_loaders_) {
9276 ObjPtr<mirror::ClassLoader> loader =
9277 ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
9278 if (loader != nullptr) {
9279 os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
9280 bool saw_one_dex_file = false;
9281 for (const DexCacheData& dex_cache : dex_caches_) {
9282 if (dex_cache.IsValid() && dex_cache.class_table == class_loader.class_table) {
9283 if (saw_one_dex_file) {
9284 os << ":";
9285 }
9286 saw_one_dex_file = true;
9287 os << dex_cache.dex_file->GetLocation();
9288 }
9289 }
9290 os << "]";
9291 bool found_parent = false;
9292 if (loader->GetParent() != nullptr) {
9293 size_t parent_index = 0;
9294 for (const ClassLoaderData& class_loader2 : class_loaders_) {
9295 ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
9296 soa.Self()->DecodeJObject(class_loader2.weak_root));
9297 if (loader2 == loader->GetParent()) {
9298 os << ", parent #" << parent_index;
9299 found_parent = true;
9300 break;
9301 }
9302 parent_index++;
9303 }
9304 if (!found_parent) {
9305 os << ", unregistered parent of type "
9306 << loader->GetParent()->GetClass()->PrettyDescriptor();
9307 }
9308 } else {
9309 os << ", no parent";
9310 }
9311 os << "\n";
9312 }
9313 }
9314 os << "Done dumping class loaders\n";
9315 }
9316
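// Visitor used by NumZygoteClasses()/NumNonZygoteClasses() to sum class counts across all
// registered class loader tables.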
9317 class CountClassesVisitor : public ClassLoaderVisitor {
9318 public:
9319 CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
9320
9321 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
9322 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
9323 ClassTable* const class_table = class_loader->GetClassTable();
9324 if (class_table != nullptr) {
9325 num_zygote_classes += class_table->NumZygoteClasses(class_loader);
9326 num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
9327 }
9328 }
9329
9330 size_t num_zygote_classes;
9331 size_t num_non_zygote_classes;
9332 };
9333
9334 size_t ClassLinker::NumZygoteClasses() const {
9335 CountClassesVisitor visitor;
9336 VisitClassLoaders(&visitor);
9337 return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
9338 }
9339
9340 size_t ClassLinker::NumNonZygoteClasses() const {
9341 CountClassesVisitor visitor;
9342 VisitClassLoaders(&visitor);
9343 return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
9344 }
9345
9346 size_t ClassLinker::NumLoadedClasses() {
9347 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
9348 // Only return non-zygote classes since these are the ones that apps care about.
9349 return NumNonZygoteClasses();
9350 }
9351
9352 pid_t ClassLinker::GetClassesLockOwner() {
9353 return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
9354 }
9355
9356 pid_t ClassLinker::GetDexLockOwner() {
9357 return Locks::dex_lock_->GetExclusiveOwnerTid();
9358 }
9359
9360 void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
9361 DCHECK(!init_done_);
9362
9363 DCHECK(klass != nullptr);
9364 DCHECK(klass->GetClassLoader() == nullptr);
9365
9366 mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
9367 DCHECK(class_roots != nullptr);
9368 DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
9369 int32_t index = static_cast<int32_t>(class_root);
9370 DCHECK(class_roots->Get(index) == nullptr);
9371 class_roots->Set<false>(index, klass);
9372 }
9373
9374 void ClassLinker::AllocAndSetPrimitiveArrayClassRoot(Thread* self,
9375 ObjPtr<mirror::Class> java_lang_Class,
9376 ClassRoot primitive_array_class_root,
9377 ClassRoot primitive_class_root,
9378 const char* descriptor) {
9379 StackHandleScope<1> hs(self);
9380 Handle<mirror::Class> primitive_array_class(hs.NewHandle(
9381 AllocPrimitiveArrayClass(self, java_lang_Class)));
9382 primitive_array_class->SetComponentType(GetClassRoot(primitive_class_root, this));
9383 SetClassRoot(primitive_array_class_root, primitive_array_class.Get());
9384 CheckSystemClass(self, primitive_array_class, descriptor);
9385 }
9386
9387 ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
9388 Thread* self,
9389 const std::vector<const DexFile*>& dex_files,
9390 Handle<mirror::Class> loader_class,
9391 Handle<mirror::ClassLoader> parent_loader,
9392 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries) {
9393
9394 StackHandleScope<5> hs(self);
9395
9396 ArtField* dex_elements_field =
9397 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
9398
9399 Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
9400 DCHECK(dex_elements_class != nullptr);
9401 DCHECK(dex_elements_class->IsArrayClass());
9402 Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
9403 mirror::ObjectArray<mirror::Object>::Alloc(self,
9404 dex_elements_class.Get(),
9405 dex_files.size())));
9406 Handle<mirror::Class> h_dex_element_class =
9407 hs.NewHandle(dex_elements_class->GetComponentType());
9408
9409 ArtField* element_file_field =
9410 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
9411 DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
9412
9413 ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
9414 DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
9415
9416 ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
9417 DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
9418
9419 // Fill the elements array.
9420 int32_t index = 0;
9421 for (const DexFile* dex_file : dex_files) {
9422 StackHandleScope<4> hs2(self);
9423
9424 // CreateWellKnownClassLoader is only used by gtests and the compiler.
9425 // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
9426 Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
9427 self,
9428 kDexFileIndexStart + 1));
9429 DCHECK(h_long_array != nullptr);
9430 h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
9431
9432 // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
9433 // FinalizerReference which will never get cleaned up without a started runtime.
9434 Handle<mirror::Object> h_dex_file = hs2.NewHandle(
9435 cookie_field->GetDeclaringClass()->AllocObject(self));
9436 DCHECK(h_dex_file != nullptr);
9437 cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
9438
9439 Handle<mirror::String> h_file_name = hs2.NewHandle(
9440 mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
9441 DCHECK(h_file_name != nullptr);
9442 file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
9443
9444 Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
9445 DCHECK(h_element != nullptr);
9446 element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
9447
9448 h_dex_elements->Set(index, h_element.Get());
9449 index++;
9450 }
9451 DCHECK_EQ(index, h_dex_elements->GetLength());
9452
9453 // Create DexPathList.
9454 Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
9455 dex_elements_field->GetDeclaringClass()->AllocObject(self));
9456 DCHECK(h_dex_path_list != nullptr);
9457 // Set elements.
9458 dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
9459 // Create an empty List for "nativeLibraryDirectories", required for native tests.
9460 // Note: this code is uncommon (oatdump) / testing-only, so don't add further
9461 // WellKnownClasses elements.
9462 {
9463 ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
9464 FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
9465 DCHECK(native_lib_dirs != nullptr);
9466 ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
9467 DCHECK(list_class != nullptr);
9468 {
9469 StackHandleScope<1> h_list_scope(self);
9470 Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
9471 bool list_init = EnsureInitialized(self, h_list_class, true, true);
9472 DCHECK(list_init);
9473 list_class = h_list_class.Get();
9474 }
9475 ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
9476 // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
9477 // is fine for testing. While it violates a Java-code invariant (the elementData field is
9478 // normally never null), as long as one does not try to add elements, this will still
9479 // work.
9480 native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
9481 }
9482
9483 // Create the class loader.
9484 Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
9485 ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
9486 DCHECK(h_class_loader != nullptr);
9487 // Set DexPathList.
9488 ArtField* path_list_field =
9489 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
9490 DCHECK(path_list_field != nullptr);
9491 path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
9492
9493 // Make a pretend boot-classpath.
9494 // TODO: Should we scan the image?
9495 ArtField* const parent_field =
9496 mirror::Class::FindField(self,
9497 h_class_loader->GetClass(),
9498 "parent",
9499 "Ljava/lang/ClassLoader;");
9500 DCHECK(parent_field != nullptr);
9501 if (parent_loader.Get() == nullptr) {
9502 ScopedObjectAccessUnchecked soa(self);
9503 ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
9504 WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
9505 parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
9506 } else {
9507 parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
9508 }
9509
9510 ArtField* shared_libraries_field =
9511 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
9512 DCHECK(shared_libraries_field != nullptr);
9513 shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
9514
9515 return h_class_loader.Get();
9516 }
9517
9518 jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
9519 const std::vector<const DexFile*>& dex_files,
9520 jclass loader_class,
9521 jobject parent_loader,
9522 jobject shared_libraries) {
9523 CHECK(self->GetJniEnv()->IsSameObject(loader_class,
9524 WellKnownClasses::dalvik_system_PathClassLoader) ||
9525 self->GetJniEnv()->IsSameObject(loader_class,
9526 WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
9527 self->GetJniEnv()->IsSameObject(loader_class,
9528 WellKnownClasses::dalvik_system_InMemoryDexClassLoader));
9529
9530 // SOAAlreadyRunnable is protected, and we need something to add a global reference.
9531 // We could move the jobject to the callers, but all call-sites do this...
9532 ScopedObjectAccessUnchecked soa(self);
9533
9534 // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
9535 StackHandleScope<4> hs(self);
9536
9537 Handle<mirror::Class> h_loader_class =
9538 hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
9539 Handle<mirror::ClassLoader> h_parent =
9540 hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
9541 Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
9542 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
9543
9544 ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
9545 self,
9546 dex_files,
9547 h_loader_class,
9548 h_parent,
9549 h_shared_libraries);
9550
9551 // Make it a global ref and return.
9552 ScopedLocalRef<jobject> local_ref(
9553 soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
9554 return soa.Env()->NewGlobalRef(local_ref.get());
9555 }
9556
9557 jobject ClassLinker::CreatePathClassLoader(Thread* self,
9558 const std::vector<const DexFile*>& dex_files) {
9559 return CreateWellKnownClassLoader(self,
9560 dex_files,
9561 WellKnownClasses::dalvik_system_PathClassLoader,
9562 nullptr);
9563 }
9564
9565 void ClassLinker::DropFindArrayClassCache() {
9566 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
9567 find_array_class_cache_next_victim_ = 0;
9568 }
9569
9570 void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
9571 Thread* const self = Thread::Current();
9572 for (const ClassLoaderData& data : class_loaders_) {
9573 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
9574 ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
9575 self->DecodeJObject(data.weak_root));
9576 if (class_loader != nullptr) {
9577 visitor->Visit(class_loader);
9578 }
9579 }
9580 }
9581
9582 void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
9583 for (const ClassLoaderData& data : class_loaders_) {
9584 LinearAlloc* alloc = data.allocator;
9585 if (alloc != nullptr && !visitor->Visit(alloc)) {
9586 break;
9587 }
9588 }
9589 }
9590
9591 void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
9592 ObjPtr<mirror::ClassLoader> class_loader) {
9593 DCHECK(dex_file != nullptr);
9594 Thread* const self = Thread::Current();
9595 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
9596 ClassTable* const table = ClassTableForClassLoader(class_loader);
9597 DCHECK(table != nullptr);
9598 if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
9599 // The dex file was newly inserted, so perform the write barrier to let the GC know the
9600 // class loader's class table was modified.
9601 WriteBarrier::ForEveryFieldWrite(class_loader);
9602 }
9603 }
9604
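// Removes bookkeeping for class loaders whose weak roots have been cleared by the GC. The
// ClassLoaderData entries are collected under the lock and deleted outside of it via
// DeleteClassLoader, including the CHA cleanup.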
9605 void ClassLinker::CleanupClassLoaders() {
9606 Thread* const self = Thread::Current();
9607 std::vector<ClassLoaderData> to_delete;
9608 // Do the delete outside the lock to avoid a lock violation in the JIT code cache.
9609 {
9610 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
9611 for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
9612 const ClassLoaderData& data = *it;
9613 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
9614 ObjPtr<mirror::ClassLoader> class_loader =
9615 ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
9616 if (class_loader != nullptr) {
9617 ++it;
9618 } else {
9619 VLOG(class_linker) << "Freeing class loader";
9620 to_delete.push_back(data);
9621 it = class_loaders_.erase(it);
9622 }
9623 }
9624 }
9625 for (ClassLoaderData& data : to_delete) {
9626 // CHA unloading analysis and SingleImplementation cleanups are required.
9627 DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
9628 }
9629 }
9630
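// Visitor used by GetResolvedClasses() to record, per dex file, which type indices have been
// resolved. Proxy, array, erroneous, and (optionally) boot classpath classes are skipped.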
9631 class GetResolvedClassesVisitor : public ClassVisitor {
9632 public:
9633 GetResolvedClassesVisitor(std::set<DexCacheResolvedClasses>* result, bool ignore_boot_classes)
9634 : result_(result),
9635 ignore_boot_classes_(ignore_boot_classes),
9636 last_resolved_classes_(result->end()),
9637 last_dex_file_(nullptr),
9638 vlog_is_on_(VLOG_IS_ON(class_linker)),
9639 extra_stats_(),
9640 last_extra_stats_(extra_stats_.end()) { }
9641
9642 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
9643 if (!klass->IsProxyClass() &&
9644 !klass->IsArrayClass() &&
9645 klass->IsResolved() &&
9646 !klass->IsErroneousResolved() &&
9647 (!ignore_boot_classes_ || klass->GetClassLoader() != nullptr)) {
9648 const DexFile& dex_file = klass->GetDexFile();
9649 if (&dex_file != last_dex_file_) {
9650 last_dex_file_ = &dex_file;
9651 DexCacheResolvedClasses resolved_classes(
9652 dex_file.GetLocation(),
9653 DexFileLoader::GetBaseLocation(dex_file.GetLocation()),
9654 dex_file.GetLocationChecksum(),
9655 dex_file.NumMethodIds());
9656 last_resolved_classes_ = result_->find(resolved_classes);
9657 if (last_resolved_classes_ == result_->end()) {
9658 last_resolved_classes_ = result_->insert(resolved_classes).first;
9659 }
9660 }
9661 bool added = last_resolved_classes_->AddClass(klass->GetDexTypeIndex());
9662 if (UNLIKELY(vlog_is_on_) && added) {
9663 const DexCacheResolvedClasses* resolved_classes = std::addressof(*last_resolved_classes_);
9664 if (last_extra_stats_ == extra_stats_.end() ||
9665 last_extra_stats_->first != resolved_classes) {
9666 last_extra_stats_ = extra_stats_.find(resolved_classes);
9667 if (last_extra_stats_ == extra_stats_.end()) {
9668 last_extra_stats_ =
9669 extra_stats_.emplace(resolved_classes, ExtraStats(dex_file.NumClassDefs())).first;
9670 }
9671 }
9672 }
9673 }
9674 return true;
9675 }
9676
9677 void PrintStatistics() const {
9678 if (vlog_is_on_) {
9679 for (const DexCacheResolvedClasses& resolved_classes : *result_) {
9680 auto it = extra_stats_.find(std::addressof(resolved_classes));
9681 DCHECK(it != extra_stats_.end());
9682 const ExtraStats& extra_stats = it->second;
9683 LOG(INFO) << "Dex location " << resolved_classes.GetDexLocation()
9684 << " has " << resolved_classes.GetClasses().size() << " / "
9685 << extra_stats.number_of_class_defs_ << " resolved classes";
9686 }
9687 }
9688 }
9689
9690 private:
9691 struct ExtraStats {
9692 explicit ExtraStats(uint32_t number_of_class_defs)
9693 : number_of_class_defs_(number_of_class_defs) {}
9694 uint32_t number_of_class_defs_;
9695 };
9696
9697 std::set<DexCacheResolvedClasses>* result_;
9698 bool ignore_boot_classes_;
9699 std::set<DexCacheResolvedClasses>::iterator last_resolved_classes_;
9700 const DexFile* last_dex_file_;
9701
9702 // Statistics.
9703 bool vlog_is_on_;
9704 std::map<const DexCacheResolvedClasses*, ExtraStats> extra_stats_;
9705 std::map<const DexCacheResolvedClasses*, ExtraStats>::iterator last_extra_stats_;
9706 };
9707
9708 std::set<DexCacheResolvedClasses> ClassLinker::GetResolvedClasses(bool ignore_boot_classes) {
9709 ScopedTrace trace(__PRETTY_FUNCTION__);
9710 ScopedObjectAccess soa(Thread::Current());
9711 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
9712 std::set<DexCacheResolvedClasses> ret;
9713 VLOG(class_linker) << "Collecting resolved classes";
9714 const uint64_t start_time = NanoTime();
9715 GetResolvedClassesVisitor visitor(&ret, ignore_boot_classes);
9716 VisitClasses(&visitor);
9717 if (VLOG_IS_ON(class_linker)) {
9718 visitor.PrintStatistics();
9719 LOG(INFO) << "Collecting class profile took " << PrettyDuration(NanoTime() - start_time);
9720 }
9721 return ret;
9722 }
9723
9724 class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
9725 public:
9726 FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
9727 : method_(method),
9728 pointer_size_(pointer_size) {}
9729
9730 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
9731 if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
9732 holder_ = klass;
9733 }
9734 // Return false to stop searching if holder_ is not null.
9735 return holder_ == nullptr;
9736 }
9737
9738 ObjPtr<mirror::Class> holder_ = nullptr;
9739 const ArtMethod* const method_;
9740 const PointerSize pointer_size_;
9741 };
9742
9743 ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
9744 ScopedTrace trace(__FUNCTION__); // Since this function is slow, have a trace to notify people.
9745 CHECK(method->IsCopied());
9746 FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
9747 VisitClasses(&visitor);
9748 return visitor.holder_;
9749 }
9750
9751 ObjPtr<mirror::IfTable> ClassLinker::AllocIfTable(Thread* self, size_t ifcount) {
9752 return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
9753 mirror::IfTable::Alloc(self,
9754 GetClassRoot<mirror::ObjectArray<mirror::Object>>(this),
9755 ifcount * mirror::IfTable::kMax)));
9756 }
9757
9758 // Instantiate ClassLinker::ResolveMethod.
9759 template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
9760 uint32_t method_idx,
9761 Handle<mirror::DexCache> dex_cache,
9762 Handle<mirror::ClassLoader> class_loader,
9763 ArtMethod* referrer,
9764 InvokeType type);
9765 template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
9766 uint32_t method_idx,
9767 Handle<mirror::DexCache> dex_cache,
9768 Handle<mirror::ClassLoader> class_loader,
9769 ArtMethod* referrer,
9770 InvokeType type);
9771
9772 // Instantiate ClassLinker::AllocClass.
9773 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
9774 Thread* self,
9775 ObjPtr<mirror::Class> java_lang_Class,
9776 uint32_t class_size);
9777 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
9778 Thread* self,
9779 ObjPtr<mirror::Class> java_lang_Class,
9780 uint32_t class_size);
9781
9782 } // namespace art
9783