/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>
#include <limits>

#include <android-base/logging.h>
#include <jni.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/pointer_size.h"
#include "base/runtime_debug.h"
#include "dex/dex_file_structs.h"
#include "dex/dex_file_types.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "gc_root.h"
#include "interpreter/mterp/nterp.h"
#include "intrinsics_enum.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art HIDDEN {

class CodeItemDataAccessor;
class CodeItemDebugInfoAccessor;
class CodeItemInstructionAccessor;
class DexFile;
template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
template<typename T> class LengthPrefixedArray;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class ShadowFrame;
class Signature;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
}  // namespace mirror

namespace detail {
template <char Shorty> struct ShortyTraits;
template <> struct ShortyTraits<'V'>;
template <> struct ShortyTraits<'Z'>;
template <> struct ShortyTraits<'B'>;
template <> struct ShortyTraits<'C'>;
template <> struct ShortyTraits<'S'>;
template <> struct ShortyTraits<'I'>;
template <> struct ShortyTraits<'J'>;
template <> struct ShortyTraits<'F'>;
template <> struct ShortyTraits<'D'>;
template <> struct ShortyTraits<'L'>;
template <char Shorty> struct HandleShortyTraits;
template <> struct HandleShortyTraits<'L'>;
}  // namespace detail

class EXPORT ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we repeat the
  // value in this constexpr; a check in art_method.cc ensures that it stays correct.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the declaring class in 'method' if it is within [start_boundary, end_boundary).
  template<typename RootVisitorType>
  static void VisitRoots(RootVisitorType& visitor,
                         uint8_t* start_boundary,
                         uint8_t* end_boundary,
                         ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit declaring classes of all the art-methods in 'array' that reside
  // in [start_boundary, end_boundary).
  template<PointerSize kPointerSize, typename RootVisitorType>
  static void VisitArrayRoots(RootVisitorType& visitor,
                              uint8_t* start_boundary,
                              uint8_t* end_boundary,
                              LengthPrefixedArray<ArtMethod>* array)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() const {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
    // The following check ensures that we do not set `Intrinsics::kNone` (see b/228049006).
    DCHECK_IMPLIES((new_access_flags & kAccIntrinsic) != 0,
                   (new_access_flags & kAccIntrinsicBits) != 0);
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return IsPublic(GetAccessFlags());
  }

  static bool IsPublic(uint32_t access_flags) {
    return (access_flags & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return IsPrivate(GetAccessFlags());
  }

  static bool IsPrivate(uint32_t access_flags) {
    return (access_flags & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return IsStatic(GetAccessFlags());
  }

  static bool IsStatic(uint32_t access_flags) {
    return (access_flags & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() const {
    return IsConstructor(GetAccessFlags());
  }

  static bool IsConstructor(uint32_t access_flags) {
    return (access_flags & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() const {
    return IsClassInitializer(GetAccessFlags());
  }

  static bool IsClassInitializer(uint32_t access_flags) {
    return IsConstructor(access_flags) && IsStatic(access_flags);
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    return IsSynchronized(GetAccessFlags());
  }

  static bool IsSynchronized(uint32_t access_flags) {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (access_flags & synchronized_flags) != 0;
  }

  // Returns true if the method is declared final.
  bool IsFinal() const {
    return IsFinal(GetAccessFlags());
  }

  static bool IsFinal(uint32_t access_flags) {
    return (access_flags & kAccFinal) != 0;
  }

  // Returns true if the method is an intrinsic.
  bool IsIntrinsic() const {
    return IsIntrinsic(GetAccessFlags());
  }

  static bool IsIntrinsic(uint32_t access_flags) {
    return (access_flags & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(Intrinsics intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  Intrinsics GetIntrinsic() const {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not contiguous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return static_cast<Intrinsics>((GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift);
  }

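  // Encoding sketch (illustrative only; dex/modifiers.h holds the authoritative flag values):
  // SetIntrinsic() packs the intrinsic ordinal `k` into the contiguous `kAccIntrinsicBits`
  // field, roughly
  //
  //   uint32_t shift = CTZ(kAccIntrinsicBits);
  //   uint32_t flags = (old_flags & ~kAccIntrinsicBits) | kAccIntrinsic |
  //                    (static_cast<uint32_t>(k) << shift);
  //
  // and GetIntrinsic() above recovers `k` by masking and shifting back down.
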
  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is a copied method.
  bool IsCopied() const {
    return IsCopied(GetAccessFlags());
  }

  static bool IsCopied(uint32_t access_flags) {
    // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
    // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
    static_assert((kAccCopied & kAccIntrinsicBits) != 0,
                  "kAccCopied deliberately overlaps intrinsic bits");
    const bool copied = (access_flags & (kAccIntrinsic | kAccCopied)) == kAccCopied;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda(access_flags) || IsDefaultConflicting(access_flags)) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() const {
    return IsMiranda(GetAccessFlags());
  }

  static bool IsMiranda(uint32_t access_flags) {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check the kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
    return (access_flags & kMask) == kValue;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError
  // if one attempts to do so.
  bool IsDefaultConflicting() const {
    return IsDefaultConflicting(GetAccessFlags());
  }

  static bool IsDefaultConflicting(uint32_t access_flags) {
    // Default conflict methods are marked as copied, abstract and default.
    // We need to check the kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
    return (access_flags & kMask) == kValue;
  }

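  // Worked example of the mask/value idiom used above: with
  //   kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault,
  // the masked flags decode as
  //   kAccCopied | kAccAbstract               -> Miranda method,
  //   kAccCopied | kAccAbstract | kAccDefault -> default conflict method.
  // Keeping `kAccIntrinsic` in the mask filters out intrinsics, whose encoding
  // reuses these bit positions for the intrinsic ordinal.
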
  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() const {
    return IsInvokable(GetAccessFlags());
  }

  static bool IsInvokable(uint32_t access_flags) {
    // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
    // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
    DCHECK_EQ((access_flags & kAccAbstract) == 0,
              !IsDefaultConflicting(access_flags) && !IsAbstract(access_flags));
    return (access_flags & kAccAbstract) == 0;
  }

  // Returns true if the method is marked as pre-compiled.
  bool IsPreCompiled() const {
    return IsPreCompiled(GetAccessFlags());
  }

  static bool IsPreCompiled(uint32_t access_flags) {
    // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
    static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
    static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
    static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
    return (access_flags & kMask) == kValue;
  }

  void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsInvokable());
    DCHECK(IsCompilable());
    // kAccPreCompiled and kAccCompileDontBother overlap with kAccIntrinsicBits.
    // We don't mark the intrinsics as precompiled, which means in JIT zygote
    // mode, compiled code for intrinsics will not be shared, and apps will
    // compile intrinsics themselves if needed.
    if (IsIntrinsic()) {
      return;
    }
    AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  // Returns true if the method resides in shared memory.
  bool IsMemorySharedMethod() {
    return IsMemorySharedMethod(GetAccessFlags());
  }

  static bool IsMemorySharedMethod(uint32_t access_flags) {
    // There's an overlap between `kAccMemorySharedMethod` and `kAccIntrinsicBits`, but that's
    // OK because intrinsics are always in the boot image and therefore memory shared.
    static_assert((kAccMemorySharedMethod & kAccIntrinsicBits) != 0,
                  "kAccMemorySharedMethod deliberately overlaps intrinsic bits");
    if (IsIntrinsic(access_flags)) {
      return true;
    }

    return (access_flags & kAccMemorySharedMethod) != 0;
  }

  void SetMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic());
    DCHECK(!IsAbstract());
    AddAccessFlags(kAccMemorySharedMethod);
  }

  static uint32_t SetMemorySharedMethod(uint32_t access_flags) {
    DCHECK(!IsIntrinsic(access_flags));
    DCHECK(!IsAbstract(access_flags));
    return access_flags | kAccMemorySharedMethod;
  }

  void ClearMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t access_flags = GetAccessFlags();
    if (IsIntrinsic(access_flags) || IsAbstract(access_flags)) {
      return;
    }
    if (IsMemorySharedMethod(access_flags)) {
      ClearAccessFlags(kAccMemorySharedMethod);
    }
  }

  // Returns true if the method can be compiled.
  bool IsCompilable() const {
    return IsCompilable(GetAccessFlags());
  }

  static bool IsCompilable(uint32_t access_flags) {
    if (IsIntrinsic(access_flags)) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    if (IsPreCompiled(access_flags)) {
      return true;
    }
    return (access_flags & kAccCompileDontBother) == 0;
  }

  void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    ClearAccessFlags(kAccCompileDontBother);
  }

  void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    AddAccessFlags(kAccCompileDontBother);
  }

  // This is set by the class linker.
  bool IsDefault() const {
    return IsDefault(GetAccessFlags());
  }

  static bool IsDefault(uint32_t access_flags) {
    // The intrinsic bits use `kAccDefault`. However, we don't generate intrinsics for default
    // methods. Therefore, we check that `kAccDefault` is set and `kAccIntrinsic` is unset.
    static_assert((kAccDefault & kAccIntrinsicBits) != 0,
                  "kAccDefault deliberately overlaps intrinsic bits");
    static constexpr uint32_t kMask = kAccIntrinsic | kAccDefault;
    static constexpr uint32_t kValue = kAccDefault;
    return (access_flags & kMask) == kValue;
  }

  // Returns true if the method is obsolete.
  bool IsObsolete() const {
    return IsObsolete(GetAccessFlags());
  }

  static bool IsObsolete(uint32_t access_flags) {
    return (access_flags & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccObsoleteMethod);
  }

  // Returns true if the method is native.
  bool IsNative() const {
    return IsNative(GetAccessFlags());
  }

  static bool IsNative(uint32_t access_flags) {
    return (access_flags & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() const {
    return IsFastNative(GetAccessFlags());
  }

  static bool IsFastNative(uint32_t access_flags) {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (access_flags & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() const {
    return IsCriticalNative(GetAccessFlags());
  }

  static bool IsCriticalNative([[maybe_unused]] uint32_t access_flags) {
#ifdef ART_USE_RESTRICTED_MODE
    // Return false to treat all critical native methods as normal native methods,
    // i.e., they will use the generic JNI trampoline instead.
    // TODO(Simulator): support critical native methods
    return false;
#else
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (access_flags & mask) == mask;
#endif
  }

  // Returns true if the method is managed (not native).
  bool IsManaged() const {
    return IsManaged(GetAccessFlags());
  }

  static bool IsManaged(uint32_t access_flags) {
    return !IsNative(access_flags);
  }

  // Returns true if the method is managed (not native) and invokable.
  bool IsManagedAndInvokable() const {
    return IsManagedAndInvokable(GetAccessFlags());
  }

  static bool IsManagedAndInvokable(uint32_t access_flags) {
    return IsManaged(access_flags) && IsInvokable(access_flags);
  }

  // Returns true if the method is abstract.
  bool IsAbstract() const {
    return IsAbstract(GetAccessFlags());
  }

  static bool IsAbstract(uint32_t access_flags) {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
    return (access_flags & kAccAbstract) != 0 && !IsDefaultConflicting(access_flags);
  }

  // Returns true if the method is declared synthetic.
  bool IsSynthetic() const {
    return IsSynthetic(GetAccessFlags());
  }

  static bool IsSynthetic(uint32_t access_flags) {
    return (access_flags & kAccSynthetic) != 0;
  }

  // Returns true if the method is declared varargs.
  bool IsVarargs() const {
    return IsVarargs(GetAccessFlags());
  }

  static bool IsVarargs(uint32_t access_flags) {
    return (access_flags & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() const {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }
  void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  // Returns true if the method has previously been warm.
  bool PreviouslyWarm() const {
    return PreviouslyWarm(GetAccessFlags());
  }

  static bool PreviouslyWarm(uint32_t access_flags) {
    // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
    constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
    return (access_flags & mask) != 0u;
  }

  void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() const {
    return MustCountLocks(GetAccessFlags());
  }

  static bool MustCountLocks(uint32_t access_flags) {
    if (IsIntrinsic(access_flags)) {
      return false;
    }
    return (access_flags & kAccMustCountLocks) != 0;
  }

  void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccMustCountLocks);
  }

  void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccSkipAccessChecks);
    AddAccessFlags(kAccMustCountLocks);
  }

  // Returns true if the method is using the nterp entrypoint fast path.
  bool HasNterpEntryPointFastPathFlag() const {
    return HasNterpEntryPointFastPathFlag(GetAccessFlags());
  }

  static bool HasNterpEntryPointFastPathFlag(uint32_t access_flags) {
    constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
    return (access_flags & mask) == kAccNterpEntryPointFastPathFlag;
  }

  void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    AddAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void ClearNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    ClearAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  void ClearNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  static uint32_t ClearNterpFastPathFlags(uint32_t access_flags) {
    // `kAccNterpEntryPointFastPathFlag` has a different use for native methods.
    if (!IsNative(access_flags)) {
      access_flags &= ~kAccNterpEntryPointFastPathFlag;
    }
    access_flags &= ~kAccNterpInvokeFastPathFlag;
    return access_flags;
  }

  // Returns whether the method is a string constructor. The method must not
  // be a class initializer. (Class initializers are called from a different
  // context where we do not need to check for string constructors.)
  bool IsStringConstructor() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError, AbstractMethodError, or IllegalAccessError).
  // Only call if !IsInvokable().
  void ThrowInvocationTimeError(ObjPtr<mirror::Object> receiver)
      REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  static constexpr MemberOffset ImtIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(std::string_view shorty);

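  // For example (a sketch; wide types take two registers and, assuming the usual
  // shorty convention, the leading return-type character is not counted):
  //   NumArgRegisters("VIJ") == 3  // int (1) + long (2)
  //   NumArgRegisters("ZLD") == 3  // reference (1) + double (2)
  // The count covers only the declared arguments, not an implicit `this`.
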
  ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeStatic(Thread* self, typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeInstance(Thread* self,
                 ObjPtr<mirror::Object> receiver,
                 typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeFinal(Thread* self,
              ObjPtr<mirror::Object> receiver,
              typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeVirtual(Thread* self,
                ObjPtr<mirror::Object> receiver,
                typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeInterface(Thread* self,
                  ObjPtr<mirror::Object> receiver,
                  typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

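  // Usage sketch for the typed invoke helpers above (hypothetical method; shorty
  // characters give the return type followed by the argument types, e.g. 'V' void,
  // 'Z' boolean, 'I' int, 'J' long, 'L' reference):
  //
  //   // Call a resolved `static int max(int, int)` via ArtMethod* m:
  //   //   int32_t r = m->InvokeStatic<'I', 'I', 'I'>(self, 3, 7);
  //
  // The template shorty must match the method's actual signature.
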
  template <char... ArgType, typename HandleScopeType>
  Handle<mirror::Object> NewObject(HandleScopeType& hs,
                                   Thread* self,
                                   typename detail::HandleShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char... ArgType>
  ObjPtr<mirror::Object> NewObject(Thread* self,
                                   typename detail::HandleShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method needs a class initialization check according to access flags.
  // Only static methods other than the class initializer need this check.
  // The caller is responsible for performing the actual check.
  bool NeedsClinitCheckBeforeCall() const {
    return NeedsClinitCheckBeforeCall(GetAccessFlags());
  }

  static bool NeedsClinitCheckBeforeCall(uint32_t access_flags) {
    // The class initializer is special as it is invoked during initialization
    // and does not need the check.
    return IsStatic(access_flags) && !IsConstructor(access_flags);
  }

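  // For example: a static method `Foo.bar()` needs the check until `Foo` is
  // visibly initialized, whereas `Foo.<clinit>` itself (static + constructor
  // flags) and all instance methods never do.
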
  // Check if the method needs a class initialization check before call
  // and its declaring class is not yet visibly initialized.
  // (The class needs to be visibly initialized before we can use entrypoints
  // to compiled code for static methods. See b/18161648.)
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool StillNeedsClinitCheck() REQUIRES_SHARED(Locks::mutator_lock_);

  // Similar to `StillNeedsClinitCheck()` but the method's declaring class may
  // be dead but not yet reclaimed by the GC, so we cannot do a full read barrier
  // but we still want to check the class status in the to-space class if any.
  // Note: JIT can hold and use such methods during managed heap GC.
  bool StillNeedsClinitCheckMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  // Check if the declaring class has been verified and look at the to-space
  // class object, if any, as in `StillNeedsClinitCheckMayBeDead()`.
  bool IsDeclaringClassVerifiedMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() const {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE
  const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }

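  // Note on the offset arithmetic above (a cross-compilation sketch):
  // OFFSETOF_MEMBER yields a *host* byte offset, so dividing by sizeof(void*)
  // turns it into a pointer-slot index, which is then rescaled by the *target*
  // pointer size. E.g. when compiling a 32-bit image on a 64-bit host, the
  // second pointer slot maps to 8 / 8 * 4 == 4 bytes into PtrSizedFields.
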
  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  ALWAYS_INLINE bool HasSingleImplementationFlag() const {
    return (GetAccessFlags() & kAccSingleImplementation) != 0;
  }

  static uint32_t SetHasSingleImplementation(uint32_t access_flags, bool single_impl) {
    DCHECK(!IsIntrinsic(access_flags)) << "conflict with intrinsic bits";
    if (single_impl) {
      return access_flags | kAccSingleImplementation;
    } else {
      return access_flags & ~kAccSingleImplementation;
    }
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    DCHECK(method == nullptr || method->IsInvokable());
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() const {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // The resolution method also has a JNI entrypoint for direct calls from
    // compiled code to the JNI dlsym lookup stub for @CriticalNative.
    DCHECK(IsNative() || IsRuntimeMethod());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, which therefore doesn't adhere to the
  // normal conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() const {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
    return NeedsCodeItem(GetAccessFlags()) && !IsRuntimeMethod() && !IsProxyMethod();
  }

  static bool NeedsCodeItem(uint32_t access_flags) {
    return !IsNative(access_flags) &&
           !IsAbstract(access_flags) &&
           !IsDefaultConflicting(access_flags);
  }

  void SetCodeItem(const dex::CodeItem* code_item)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(HasCodeItem());
    SetDataPtrSize(code_item, kRuntimePointerSize);
  }

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           bool kVisitProxyMethod = true,
           typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
  std::string_view GetDeclaringClassDescriptorView() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  std::string_view GetShortyView() REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);

  bool NameEquals(ObjPtr<mirror::String> name) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoIndex GetProtoIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
  std::string_view GetReturnTypeDescriptorView() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the return type. May cause thread suspension because GetClassFromTypeIdx
  // calls ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  template <ReadBarrierOption kReadBarrierOption>
  ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static constexpr size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }

  // Alignment of an instance of this native class.
  static constexpr size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

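  // Worked example (assuming GcRoot<> is a 32-bit compressed reference, per the
  // field layout below): the fixed-size fields end at offset 16 after
  // hotness_count_, which is already pointer-aligned for both sizes, and
  // PtrSizedFields holds two pointers, so
  //   Size(PointerSize::k32) == 16 + 2 * 4 == 24,
  //   Size(PointerSize::k64) == 16 + 2 * 8 == 32.
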
  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void ResetCounter(uint16_t new_value);
  ALWAYS_INLINE void UpdateCounter(int32_t new_samples);
  ALWAYS_INLINE void SetHotCounter();
  ALWAYS_INLINE bool CounterIsHot();
  ALWAYS_INLINE uint16_t GetCounter();
  ALWAYS_INLINE bool CounterHasChanged(uint16_t threshold);

  ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
    return std::numeric_limits<decltype(hotness_count_)>::max();
  }

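  // I.e. MaxCounter() == std::numeric_limits<uint16_t>::max() == 65535,
  // since hotness_count_ is declared as a uint16_t below.
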
  ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void SetImtIndex(uint16_t imt_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    imt_index_ = imt_index;
  }

  void SetHotnessCount(uint16_t hotness_count) REQUIRES_SHARED(Locks::mutator_lock_) {
    hotness_count_ = hotness_count;
  }

  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods return null from this function, as they are not oat-based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for this method when it is not overloaded.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for this method when it is overloaded.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the individual members of an ArtMethod.  Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this field needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked, such as when setting
  // the verifier flags or the single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // interface's method array in `IfTable`s of implementing classes.
  uint16_t method_index_;

  union {
    // Non-abstract methods: The hotness we measure for this method. Not atomic,
    // as we allow missing increments: if the method is hot, we will see it eventually.
    uint16_t hotness_count_;
    // Abstract interface methods: IMT index.
    // Abstract class (non-interface) methods: Unused (zero-initialized).
    uint16_t imt_index_;
  };

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //                    or a function to resolve the JNI function,
    //   - resolution method: pointer to a function to resolve the method and
    //                        the JNI function for @CriticalNative.
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - default conflict method: null
    //   - other methods: during AOT the code item offset, at runtime a pointer
    //                    to the code item.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

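  // E.g. when storing into a 32-bit image slot on a 64-bit host, the pointer
  // value goes through dchecked_integral_cast<uint32_t>, which DCHECKs that it
  // fits in 32 bits; the matching read widens it back losslessly.
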
  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            ((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
            ((modifier & kAccIntrinsicBits) != 0));  // b/228049006: ensure intrinsic is not `kNone`
  }

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
    access_flags_.fetch_or(flag, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    access_flags_.fetch_and(~flag, std::memory_order_relaxed);
  }
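
  // Why the atomic RMW matters: two threads concurrently adding one flag and
  // clearing another each apply their own bit exactly once, whereas a plain
  // load/modify/store could lose one of the updates. Relaxed ordering suffices
  // because, as noted above, no reader pairs these bits with other memory.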

  // Helper method for checking the class status of a possibly dead declaring class.
  // See `StillNeedsClinitCheckMayBeDead()` and `IsDeclaringClassVerifiedMayBeDead()`.
  ObjPtr<mirror::Class> GetDeclaringClassMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  // Used by GetName and GetNameView to share common code.
  const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_