/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>
#include <limits>

#include <android-base/logging.h>
#include <jni.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/runtime_debug.h"
#include "dex/dex_file_structs.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "interpreter/mterp/nterp.h"
#include "gc_root.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art {

class CodeItemDataAccessor;
class CodeItemDebugInfoAccessor;
class CodeItemInstructionAccessor;
class DexFile;
template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class ShadowFrame;
class Signature;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
}  // namespace mirror

class ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() const {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency, so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
    // The following check ensures that we do not set `Intrinsics::kNone` (see b/228049006).
    DCHECK_IMPLIES((new_access_flags & kAccIntrinsic) != 0,
                   (new_access_flags & kAccIntrinsicBits) != 0);
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }
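
  // Illustrative note (not part of the original header): `SetAccessFlags()` is a plain
  // relaxed store, so two racing writers could silently drop each other's bits, while
  // the private `AddAccessFlags()`/`ClearAccessFlags()` below use atomic read-modify-write
  // operations (`fetch_or`/`fetch_and`) and cannot lose updates. A hypothetical race:
  //
  //   // Thread A                         // Thread B
  //   m->SetAccessFlags(f | kAccX);       m->SetAccessFlags(f | kAccY);
  //   // One of kAccX/kAccY may be lost. With AddAccessFlags(kAccX) and
  //   // AddAccessFlags(kAccY) instead, both flags always end up set.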

  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() const {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    constexpr uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsIntrinsic() const {
    return (GetAccessFlags() & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() const {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not continuous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
  }
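
  // Illustrative sketch (not in the original header): how the extraction above works,
  // using a hypothetical bit layout rather than the real values in dex/modifiers.h.
  // If kAccIntrinsicBits were 0x00FF0000, then CTZ(kAccIntrinsicBits) == 16, the
  // contiguity static_assert holds ((0x00FF0000 >> 16) + 1 == 0x100, a power of two),
  // and an intrinsic ordinal of 5 would be stored as (5 << 16) and read back as
  // (flags & 0x00FF0000) >> 16 == 5.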

  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsCopied() const {
    // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
    // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
    static_assert((kAccCopied & kAccIntrinsicBits) != 0,
                  "kAccCopied deliberately overlaps intrinsic bits");
    const bool copied = (GetAccessFlags() & (kAccIntrinsic | kAccCopied)) == kAccCopied;
    // (IsMiranda() || IsDefaultConflicting()) implies copied.
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() const {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check `kAccIntrinsic` too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
    return (GetAccessFlags() & kMask) == kValue;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError
  // if one attempts to do so.
  bool IsDefaultConflicting() const {
    // Default conflict methods are marked as copied, abstract and default.
    // We need to check `kAccIntrinsic` too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
    return (GetAccessFlags() & kMask) == kValue;
  }
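
  // Summary (added for clarity, derived from the checks above): assuming kAccIntrinsic is
  // clear, the copied-method kinds differ only in these flag combinations:
  //
  //   kind                      kAccCopied  kAccAbstract  kAccDefault
  //   copied default method        set         clear          set
  //   miranda method               set          set          clear
  //   default conflict method      set          set           set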

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() const {
    // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
    // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
    DCHECK_EQ((GetAccessFlags() & kAccAbstract) == 0, !IsDefaultConflicting() && !IsAbstract());
    return (GetAccessFlags() & kAccAbstract) == 0;
  }

  bool IsPreCompiled() const {
    // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
    static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
    static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
    static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
    return (GetAccessFlags() & kMask) == kValue;
  }

  void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsInvokable());
    DCHECK(IsCompilable());
    // kAccPreCompiled and kAccCompileDontBother overlap with kAccIntrinsicBits.
    // We don't mark the intrinsics as precompiled, which means in JIT zygote
    // mode, compiled code for intrinsics will not be shared, and apps will
    // compile intrinsics themselves if needed.
    if (IsIntrinsic()) {
      return;
    }
    AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  bool IsMemorySharedMethod() {
    return (GetAccessFlags() & kAccMemorySharedMethod) != 0;
  }

  void SetMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    // Disable until we make sure critical code is AOTed.
    static constexpr bool kEnabledMemorySharedMethod = false;
    if (kEnabledMemorySharedMethod && !IsIntrinsic() && !IsAbstract()) {
      AddAccessFlags(kAccMemorySharedMethod);
      SetHotCounter();
    }
  }

  void ClearMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic() || IsAbstract()) {
      return;
    }
    if (IsMemorySharedMethod()) {
      ClearAccessFlags(kAccMemorySharedMethod);
    }
  }

  void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  bool IsCompilable() const {
    if (IsIntrinsic()) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    if (IsPreCompiled()) {
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    ClearAccessFlags(kAccCompileDontBother);
  }

  void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    AddAccessFlags(kAccCompileDontBother);
  }

  // This is set by the class linker.
  bool IsDefault() const {
    static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  bool IsObsolete() const {
    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccObsoleteMethod);
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() const {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() const {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }
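
  // Illustrative example (not part of the original header): on the Java side these
  // annotations mark native methods for faster JNI transitions; @CriticalNative in
  // particular is limited to static methods with primitive-only signatures. A sketch:
  //
  //   import dalvik.annotation.optimization.CriticalNative;
  //   class NativeOps {
  //     @CriticalNative
  //     static native int add(int a, int b);  // native impl receives no JNIEnv*/jclass
  //   }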

  bool IsAbstract() const {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
    return (GetAccessFlags() & kAccAbstract) != 0 && !IsDefaultConflicting();
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsVarargs() const {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() const {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }
  void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  bool PreviouslyWarm() const {
    // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
    constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
    return (GetAccessFlags() & mask) != 0u;
  }

  void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() const {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccMustCountLocks);
  }

  void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccMustCountLocks);
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  bool HasNterpEntryPointFastPathFlag() const {
    constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
    return (GetAccessFlags() & mask) == kAccNterpEntryPointFastPathFlag;
  }

  void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    AddAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  static constexpr MemberOffset ImtIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const char* shorty);
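
  // Worked example (added for clarity; the counting rule lives in art_method.cc): each
  // argument occupies one 32-bit register, except J (long) and D (double), which occupy
  // two; the leading return-type character of the shorty is not counted. For a method
  // long f(int, double, Object) the shorty is "JIDL", so the arguments need
  // 1 (I) + 2 (D) + 1 (L) = 4 registers.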

  ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() const {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE
  const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }
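
  // Worked example (added for clarity): OFFSETOF_MEMBER(PtrSizedFields, field) is a host
  // offset, so dividing by sizeof(void*) turns it into a field index, which is then
  // rescaled by the *target* pointer size. E.g., for entry_point_from_quick_compiled_code_
  // (the second pointer, index 1), when a 64-bit host builds a 32-bit image the offset
  // becomes PtrSizedFieldsOffset(PointerSize::k32) + 1 * 4 rather than + 1 * 8.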

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  ALWAYS_INLINE bool HasSingleImplementationFlag() const {
    return (GetAccessFlags() & kAccSingleImplementation) != 0;
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    DCHECK(method == nullptr || method->IsInvokable());
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() const {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // The resolution method also has a JNI entrypoint for direct calls from
    // compiled code to the JNI dlsym lookup stub for @CriticalNative.
    DCHECK(IsNative() || IsRuntimeMethod());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() const {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
    return !IsRuntimeMethod() && !IsNative() && !IsProxyMethod() && !IsAbstract();
  }

  // We need to explicitly indicate whether the code item is obtained from the compact dex file,
  // because in JVMTI, we obtain the code item from the standard dex file to update the method.
  void SetCodeItem(const dex::CodeItem* code_item, bool is_compact_dex_code_item)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // *has_no_move_exception indicates whether the found catch block is responsible for clearing
  // the exception or whether a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);

  bool NameEquals(ObjPtr<mirror::String> name) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx calling
  // ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static constexpr size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }
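
  // Worked example (added for clarity): PtrSizedFields holds two pointers, so
  // sizeof(PtrSizedFields) / sizeof(void*) == 2 on the host, and the size of an
  // ArtMethod in a 32-bit image is PtrSizedFieldsOffset(PointerSize::k32) + 2 * 4,
  // regardless of the host's own pointer width.
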

  // Alignment of an instance of this native class.
  static constexpr size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void ResetCounter(uint16_t new_value);
  ALWAYS_INLINE void UpdateCounter(int32_t new_samples);
  ALWAYS_INLINE void SetHotCounter();
  ALWAYS_INLINE bool CounterIsHot();
  ALWAYS_INLINE bool CounterHasReached(uint16_t samples, uint16_t threshold);
  ALWAYS_INLINE uint16_t GetCounter();
  ALWAYS_INLINE bool CounterHasChanged(uint16_t threshold);

  ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
    return std::numeric_limits<decltype(hotness_count_)>::max();
  }

  ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat-based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the non-overloaded method 'm'.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the overloaded method 'm'.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the individual members of an ArtMethod. Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this flag needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked, such as when setting
  // verifier flags and the single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is
  // into the declaringClass.directMethods, for virtual methods the vtable, and for interface
  // methods the ifTable.
  uint16_t method_index_;

  union {
    // Non-abstract methods: The hotness we measure for this method. Not atomic,
    // as we allow missing increments: if the method is hot, we will see it eventually.
    uint16_t hotness_count_;
    // Abstract methods: IMT index.
    uint16_t imt_index_;
  };

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //                    or a function to resolve the JNI function,
    //   - resolution method: pointer to a function to resolve the method and
    //                        the JNI function for @CriticalNative,
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - other methods: during AOT the code item offset, at runtime a pointer
    //                    to the code item.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }
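
  // Illustrative note (not part of the original header): these two helpers let a 64-bit
  // host read and write pointer fields laid out for a 32-bit target image. A sketch of
  // the round trip, assuming a method laid out for PointerSize::k32:
  //
  //   method->SetNativePointer(offset, table, PointerSize::k32);  // stored as 4 bytes
  //   auto* t = method->GetNativePointer<ImtConflictTable*>(offset, PointerSize::k32);
  //   // dchecked_integral_cast fails loudly if `table` does not fit in 32 bits.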

  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            ((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
            ((modifier & kAccIntrinsicBits) != 0));  // b/228049006: ensure intrinsic is not `kNone`
  }

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
    access_flags_.fetch_or(flag, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    access_flags_.fetch_and(~flag, std::memory_order_relaxed);
  }

  // Used by GetName and GetNameView to share common code.
  const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_