/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "stack_map.h"
#include "thread.h"
#include "well_known_classes.h"

namespace art {
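// Walks the inline info of the physical (outer) frame to find the ArtMethod executing at the
// given inlining depth, resolving it through the caller's dex cache and the class linker when
// it is not encoded directly in the stack map.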
inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    const MethodInfo& method_info,
                                    const InlineInfo& inline_info,
                                    const InlineInfoEncoding& encoding,
                                    uint8_t inlining_depth)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!outer_method->IsObsolete());

  // This method is used by artQuickResolutionTrampoline before it sets up the passed
  // parameters in a GC-friendly way. Therefore we must never be suspended while executing it.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  if (inline_info.EncodesArtMethodAtDepth(encoding, inlining_depth)) {
    return inline_info.GetArtMethodAtDepth(encoding, inlining_depth);
  }

  uint32_t method_index = inline_info.GetMethodIndexAtDepth(encoding, method_info, inlining_depth);
  if (inline_info.GetDexPcAtDepth(encoding, inlining_depth) == static_cast<uint32_t>(-1)) {
    // "charAt" special case. It is the only non-leaf method we inline across dex files.
    ArtMethod* inlined_method = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
    DCHECK_EQ(inlined_method->GetDexMethodIndex(), method_index);
    return inlined_method;
  }

  // Find which method did the call in the inlining hierarchy.
  ArtMethod* caller = outer_method;
  if (inlining_depth != 0) {
    caller = GetResolvedMethod(outer_method,
                               method_info,
                               inline_info,
                               encoding,
                               inlining_depth - 1);
  }

  // Lookup the declaring class of the inlined method.
  ObjPtr<mirror::DexCache> dex_cache = caller->GetDexCache();
  const DexFile* dex_file = dex_cache->GetDexFile();
  const DexFile::MethodId& method_id = dex_file->GetMethodId(method_index);
  ArtMethod* inlined_method = caller->GetDexCacheResolvedMethod(method_index, kRuntimePointerSize);
  if (inlined_method != nullptr) {
    DCHECK(!inlined_method->IsRuntimeMethod());
    return inlined_method;
  }
  const char* descriptor = dex_file->StringByTypeIdx(method_id.class_idx_);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  mirror::ClassLoader* class_loader = caller->GetDeclaringClass()->GetClassLoader();
  mirror::Class* klass = class_linker->LookupClass(self, descriptor, class_loader);
  if (klass == nullptr) {
    LOG(FATAL) << "Could not find an inlined method from an .oat file: the class " << descriptor
               << " was not found in the class loader of " << caller->PrettyMethod() << ". "
               << "This must be due to playing wrongly with class loaders";
  }

  inlined_method = klass->FindClassMethod(dex_cache, method_index, kRuntimePointerSize);
  if (inlined_method == nullptr) {
    LOG(FATAL) << "Could not find an inlined method from an .oat file: the class " << descriptor
               << " does not have " << dex_file->GetMethodName(method_id)
               << dex_file->GetMethodSignature(method_id) << " declared. "
               << "This must be due to duplicate classes or playing wrongly with class loaders";
  }
  caller->SetDexCacheResolvedMethod(method_index, inlined_method, kRuntimePointerSize);

  return inlined_method;
}
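// Checks that klass may be instantiated from compiled code: throws if it is not instantiable or
// is java.lang.Class, and ensures it is initialized. Sets *slow_path when the caller must
// re-check the allocator type and null-check the result.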
ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(mirror::Class* klass,
                                                     Thread* self,
                                                     bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInstantiable())) {
    self->ThrowNewException("Ljava/lang/InstantiationError;", klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(klass->IsClassClass())) {
    ThrowIllegalAccessError(nullptr, "Class %s is inaccessible",
                            klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and may cause us to suspend,
    // meaning that another thread may try to change the allocator while we are stuck in the
    // entrypoints of an old allocator. Also, the class initialization may fail. To handle these
    // cases we mark the slow path boolean as true so that the caller knows to check the
    // allocator type to see if it has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}
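// Variant of CheckObjectAlloc for classes already known to be instantiable: only performs the
// initialization check before allocation.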
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and may cause us to suspend,
    // meaning that another thread may try to change the allocator while we are stuck in the
    // entrypoints of an old allocator. Also, the class initialization may fail. To handle these
    // cases we mark the slow path boolean as true so that the caller knows to check the
    // allocator type to see if it has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}
// Allocate an instance of klass. Throws InstantiationError if klass is not instantiable,
// or IllegalAccessError if klass is j.l.Class. Performs a clinit check too.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(mirror::Class* klass,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  klass = CheckObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc can cause thread suspension, which means we may now be instrumented.
    return klass->Alloc</*kInstrumented*/true>(
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator()).Ptr();
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type).Ptr();
}

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    // CheckClassInitializedForObjectAlloc can cause thread suspension, which means we may now be
    // instrumented.
    return klass->Alloc</*kInstrumented*/true, false>(self, heap->GetCurrentAllocator()).Ptr();
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type).Ptr();
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type).Ptr();
}

template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(dex::TypeIndex type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCache()->GetResolvedType(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << klass->PrettyClass();
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(dex::TypeIndex type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc can cause thread suspension, which means we may now be instrumented.
    return mirror::Array::Alloc</*kInstrumented*/true>(self,
                                                       klass,
                                                       component_count,
                                                       klass->GetComponentSizeShift(),
                                                       heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

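// Slow-path field resolution used by the field access entrypoints: resolves the field, optionally
// performs access and static-ness checks, and for static fields ensures the declaring class is
// initialized.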
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx,
                                   ArtMethod* referrer,
                                   Thread* self,
                                   size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  ArtField* resolved_field;
  if (access_check) {
    // Slow path: According to JLS 13.4.8, a linkage error may occur if the compile-time
    // qualifying type of a field and the resolved run-time qualifying type of the field differ
    // in their static-ness.
    //
    // In particular, don't assume the dex instruction already correctly knows if the
    // real field is static or not. The resolution must not be aware of this.
    ArtMethod* method = referrer->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    StackHandleScope<2> hs(self);
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(method->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(method->GetClassLoader()));

    resolved_field = class_linker->ResolveFieldJLS(*method->GetDexFile(),
                                                   field_idx,
                                                   h_dex_cache,
                                                   h_class_loader);
  } else {
    // Fast path: The verifier would already have called ResolveFieldJLS and we wouldn't
    // be executing here if there was a static/non-static mismatch.
    resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  }

  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class,
                                                            resolved_field,
                                                            referrer->GetDexCache(),
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 resolved_field->PrettyField(true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields must be accessed on an already-initialized class.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      if (LIKELY(class_linker->EnsureInitialized(self, hs.NewHandle(fields_class), true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

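// Slow-path method resolution used by the invoke entrypoints: resolves the method, null-checks
// the receiver for non-static calls, and dispatches according to the invoke type (direct,
// vtable lookup for virtual/super, or IMT/interface lookup for interface calls).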
template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                     ObjPtr<mirror::Object>* this_object,
                                     ArtMethod* referrer,
                                     Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  constexpr ClassLinker::ResolveMode resolve_mode =
      access_check ? ClassLinker::ResolveMode::kCheckICCEAndIAE
                   : ClassLinker::ResolveMode::kNoChecks;
  ArtMethod* resolved_method;
  if (type == kStatic) {
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  } else {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_this(hs.NewHandleWrapper(this_object));
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  // Next, null pointer check.
  if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    if (UNLIKELY(resolved_method->GetDeclaringClass()->IsStringClass() &&
                 resolved_method->IsConstructor())) {
      // Hack for String init:
      //
      // We assume that the input of String.<init> in verified code is always
      // an uninitialized reference. If it is a null constant, it must have been
      // optimized out by the compiler. Do not throw NullPointerException.
    } else {
      // Maintain interpreter-like semantics where NullPointerException is thrown
      // after potential NoSuchMethodError from class linker.
      ThrowNullPointerExceptionForMethodAccess(method_idx, type);
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      ObjPtr<mirror::Class> klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << klass->PrettyClass();
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      // TODO This lookup is quite slow.
      // NB This is actually quite tricky to do any other way. We cannot use GetDeclaringClass since
      //    that will actually not be what we want in some cases where there are miranda methods or
      //    defaults. What we actually need is a GetContainingClass that says which class's virtuals
      //    this method is coming from.
      StackHandleScope<2> hs2(self);
      HandleWrapperObjPtr<mirror::Object> h_this(hs2.NewHandleWrapper(this_object));
      Handle<mirror::Class> h_referring_class(hs2.NewHandle(referrer->GetDeclaringClass()));
      const dex::TypeIndex method_type_idx =
          referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
      mirror::Class* method_reference_class = class_linker->ResolveType(method_type_idx, referrer);
      if (UNLIKELY(method_reference_class == nullptr)) {
        // Bad type idx.
        CHECK(self->IsExceptionPending());
        return nullptr;
      } else if (!method_reference_class->IsInterface()) {
        // It is not an interface. If the referring class is in the class hierarchy of the
        // referenced class in the bytecode, we use its super class. Otherwise, we throw
        // a NoSuchMethodError.
        ObjPtr<mirror::Class> super_class = nullptr;
        if (method_reference_class->IsAssignableFrom(h_referring_class.Get())) {
          super_class = h_referring_class->GetSuperClass();
        }
        uint16_t vtable_index = resolved_method->GetMethodIndex();
        if (access_check) {
          // Check existence of super class.
          if (super_class == nullptr ||
              !super_class->HasVTable() ||
              vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
            // Behavior to agree with that of the verifier.
            ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                   resolved_method->GetName(), resolved_method->GetSignature());
            return nullptr;  // Failure.
          }
        }
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
      } else {
        // It is an interface.
        if (access_check) {
          if (!method_reference_class->IsAssignableFrom(h_this->GetClass())) {
            ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
                                                                    method_reference_class,
                                                                    h_this.Get(),
                                                                    referrer);
            return nullptr;  // Failure.
          }
        }
        // TODO We can do better than this for a (compiled) fastpath.
        ArtMethod* result = method_reference_class->FindVirtualMethodForInterfaceSuper(
            resolved_method, class_linker->GetImagePointerSize());
        // Throw a NoSuchMethodError if the lookup returned nullptr.
        if (result == nullptr) {
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
        }
        return result;
      }
      UNREACHABLE();
    }
    case kInterface: {
      uint32_t imt_index = ImTable::GetImtIndex(resolved_method);
      PointerSize pointer_size = class_linker->GetImagePointerSize();
      ObjPtr<mirror::Class> klass = (*this_object)->GetClass();
      ArtMethod* imt_method = klass->GetImt(pointer_size)->Get(imt_index, pointer_size);
      if (!imt_method->IsRuntimeMethod()) {
        if (kIsDebugBuild) {
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << ArtMethod::PrettyMethod(resolved_method) << " / "
                                       << imt_method->PrettyMethod() << " / "
                                       << ArtMethod::PrettyMethod(method) << " / "
                                       << klass->PrettyClass();
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = klass->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check)                 \
  template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE                       \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx,         \
                                                      ObjPtr<mirror::Object>* this_object, \
                                                      ArtMethod* referrer, \
                                                      Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false);   \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ArtField* resolved_field =
      referrer->GetDexCache()->GetResolvedField(field_idx, kRuntimePointerSize);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check that the class is initialized; otherwise fail so that we can contend to initialize
    // the class with other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
template <InvokeType type, bool access_check>
inline ArtMethod* FindMethodFast(uint32_t method_idx,
                                 ObjPtr<mirror::Object> this_object,
                                 ArtMethod* referrer) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  ObjPtr<mirror::DexCache> dex_cache = referrer->GetDexCache();
  constexpr ClassLinker::ResolveMode resolve_mode = access_check
      ? ClassLinker::ResolveMode::kCheckICCEAndIAE
      : ClassLinker::ResolveMode::kNoChecks;
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = linker->GetResolvedMethod<type, resolve_mode>(method_idx, referrer);
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method,
                                                                  kRuntimePointerSize);
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    // TODO This lookup is rather slow.
    dex::TypeIndex method_type_idx = dex_cache->GetDexFile()->GetMethodId(method_idx).class_idx_;
    ObjPtr<mirror::Class> method_reference_class = ClassLinker::LookupResolvedType(
        method_type_idx, dex_cache, referrer->GetClassLoader());
    if (method_reference_class == nullptr) {
      // Need to do full type resolution...
      return nullptr;
    } else if (!method_reference_class->IsInterface()) {
      // It is not an interface. If the referring class is in the class hierarchy of the
      // referenced class in the bytecode, we use its super class. Otherwise, we cannot
      // resolve the method.
      if (!method_reference_class->IsAssignableFrom(referring_class)) {
        return nullptr;
      }
      ObjPtr<mirror::Class> super_class = referring_class->GetSuperClass();
      if (resolved_method->GetMethodIndex() >= super_class->GetVTableLength()) {
        // The super class does not have the method.
        return nullptr;
      }
      return super_class->GetVTableEntry(resolved_method->GetMethodIndex(), kRuntimePointerSize);
    } else {
      return method_reference_class->FindVirtualMethodForInterfaceSuper(
          resolved_method, kRuntimePointerSize);
    }
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), kRuntimePointerSize);
  }
}

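// Resolves a type from its dex index on behalf of compiled code, optionally performing an access
// check against the referrer and running the class initializer unless can_run_clinit is false
// (e.g. for const-class).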
inline mirror::Class* ResolveVerifyAndClinit(dex::TypeIndex type_idx,
                                             ArtMethod* referrer,
                                             Thread* self,
                                             bool can_run_clinit,
                                             bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

static inline mirror::String* ResolveString(ClassLinker* class_linker,
                                            dex::StringIndex string_idx,
                                            ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread::PoisonObjectPointersIfDebug();
  ObjPtr<mirror::String> string = referrer->GetDexCache()->GetResolvedString(string_idx);
  if (UNLIKELY(string == nullptr)) {
    StackHandleScope<1> hs(Thread::Current());
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
    const DexFile& dex_file = *dex_cache->GetDexFile();
    string = class_linker->ResolveString(dex_file, string_idx, dex_cache);
  }
  return string.Ptr();
}

inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, dex::StringIndex string_idx) {
  Thread::PoisonObjectPointersIfDebug();
  ObjPtr<mirror::String> string = referrer->GetDexCache()->GetResolvedString(string_idx);
  if (UNLIKELY(string == nullptr)) {
    StackHandleScope<1> hs(Thread::Current());
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
    const DexFile& dex_file = *dex_cache->GetDexFile();
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    string = class_linker->ResolveString(dex_file, string_idx, dex_cache);
  }
  return string.Ptr();
}

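// Called when returning from a synchronized JNI method: exits the monitor on the locked object
// while preserving any exception the native code left pending.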
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
        << saved_exception->Dump()
        << "\nEncountered second exception during implicit MonitorExit:\n"
        << self->GetException()->Dump();
  }
  // Restore pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}

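// Converts a floating-point value to an integral type, saturating at the type's minimum and
// maximum values and mapping NaN to 0 (matching Java's float/double-to-int/long semantics).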
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_