/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>
#include <limits>

#include <android-base/logging.h>
#include <jni.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/runtime_debug.h"
#include "dex/dex_file_structs.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "gc_root.h"
#include "interpreter/mterp/nterp.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art {

class CodeItemDataAccessor;
class CodeItemDebugInfoAccessor;
class CodeItemInstructionAccessor;
class DexFile;
template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
template<typename T> class LengthPrefixedArray;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class ShadowFrame;
class Signature;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
}  // namespace mirror

namespace detail {
template <char Shorty> struct ShortyTraits;
template <> struct ShortyTraits<'V'>;
template <> struct ShortyTraits<'Z'>;
template <> struct ShortyTraits<'B'>;
template <> struct ShortyTraits<'C'>;
template <> struct ShortyTraits<'S'>;
template <> struct ShortyTraits<'I'>;
template <> struct ShortyTraits<'J'>;
template <> struct ShortyTraits<'F'>;
template <> struct ShortyTraits<'D'>;
template <> struct ShortyTraits<'L'>;
template <char Shorty> struct HandleShortyTraits;
template <> struct HandleShortyTraits<'L'>;
}  // namespace detail

class ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the declaring class in 'method' if it is within [start_boundary, end_boundary).
  template<typename RootVisitorType>
  static void VisitRoots(RootVisitorType& visitor,
                         uint8_t* start_boundary,
                         uint8_t* end_boundary,
                         ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit declaring classes of all the art-methods in 'array' that reside
  // in [start_boundary, end_boundary).
  template<PointerSize kPointerSize, typename RootVisitorType>
  static void VisitArrayRoots(RootVisitorType& visitor,
                              uint8_t* start_boundary,
                              uint8_t* end_boundary,
                              LengthPrefixedArray<ArtMethod>* array)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() const {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
    // The following check ensures that we do not set `Intrinsics::kNone` (see b/228049006).
    DCHECK_IMPLIES((new_access_flags & kAccIntrinsic) != 0,
                   (new_access_flags & kAccIntrinsicBits) != 0);
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }
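
  // Illustrative contrast (a sketch, not part of the API): during class
  // linking, before the method is visible to other threads, the plain store
  // above suffices:
  //
  //   method->SetAccessFlags(method->GetAccessFlags() | kAccSkipAccessChecks);
  //
  // After linking, concurrent flag updates must instead go through the atomic
  // read-modify-write helpers `AddAccessFlags()` / `ClearAccessFlags()` below.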

  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return IsPublic(GetAccessFlags());
  }

  static bool IsPublic(uint32_t access_flags) {
    return (access_flags & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return IsPrivate(GetAccessFlags());
  }

  static bool IsPrivate(uint32_t access_flags) {
    return (access_flags & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return IsStatic(GetAccessFlags());
  }

  static bool IsStatic(uint32_t access_flags) {
    return (access_flags & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() const {
    return IsConstructor(GetAccessFlags());
  }

  static bool IsConstructor(uint32_t access_flags) {
    return (access_flags & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() const {
    return IsClassInitializer(GetAccessFlags());
  }

  static bool IsClassInitializer(uint32_t access_flags) {
    return IsConstructor(access_flags) && IsStatic(access_flags);
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    return IsSynchronized(GetAccessFlags());
  }

  static bool IsSynchronized(uint32_t access_flags) {
    constexpr uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (access_flags & synchronized) != 0;
  }

  // Returns true if the method is declared final.
  bool IsFinal() const {
    return IsFinal(GetAccessFlags());
  }

  static bool IsFinal(uint32_t access_flags) {
    return (access_flags & kAccFinal) != 0;
  }

  // Returns true if the method is an intrinsic.
  bool IsIntrinsic() const {
    return IsIntrinsic(GetAccessFlags());
  }

  static bool IsIntrinsic(uint32_t access_flags) {
    return (access_flags & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() const {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not continuous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
  }
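
  // Worked example (illustrative bit positions): if kAccIntrinsicBits were
  // 0x000F0000, then kAccFlagsShift == CTZ(0x000F0000) == 16, and an access
  // flags word of kAccIntrinsic | 0x00050000 would decode to intrinsic
  // ordinal (0x00050000 >> 16) == 5. The static_asserts above only require
  // that the ordinal bits form one contiguous run disjoint from kAccIntrinsic.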

  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is a copied method.
  bool IsCopied() const {
    return IsCopied(GetAccessFlags());
  }

  static bool IsCopied(uint32_t access_flags) {
    // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
    // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
    static_assert((kAccCopied & kAccIntrinsicBits) != 0,
                  "kAccCopied deliberately overlaps intrinsic bits");
    const bool copied = (access_flags & (kAccIntrinsic | kAccCopied)) == kAccCopied;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda(access_flags) || IsDefaultConflicting(access_flags)) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() const {
    return IsMiranda(GetAccessFlags());
  }

  static bool IsMiranda(uint32_t access_flags) {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check the kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
    return (access_flags & kMask) == kValue;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError
  // if one attempts to do so.
  bool IsDefaultConflicting() const {
    return IsDefaultConflicting(GetAccessFlags());
  }

  static bool IsDefaultConflicting(uint32_t access_flags) {
    // Default conflict methods are marked as copied, abstract, and default.
    // We need to check the kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
    return (access_flags & kMask) == kValue;
  }
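
  // Summary of the copied-method encodings above, all checked under the mask
  // kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault:
  //
  //   plain copied method:      kAccCopied
  //   miranda method:           kAccCopied | kAccAbstract
  //   default conflict method:  kAccCopied | kAccAbstract | kAccDefault
  //
  // kAccIntrinsic must be clear in all three cases because kAccCopied reuses
  // a bit from the intrinsic ordinal range.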

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() const {
    return IsInvokable(GetAccessFlags());
  }

  static bool IsInvokable(uint32_t access_flags) {
    // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
    // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
    DCHECK_EQ((access_flags & kAccAbstract) == 0,
              !IsDefaultConflicting(access_flags) && !IsAbstract(access_flags));
    return (access_flags & kAccAbstract) == 0;
  }

  // Returns true if the method is marked as pre-compiled.
  bool IsPreCompiled() const {
    return IsPreCompiled(GetAccessFlags());
  }

  static bool IsPreCompiled(uint32_t access_flags) {
    // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
    static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
    static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
    static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
    return (access_flags & kMask) == kValue;
  }

  void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsInvokable());
    DCHECK(IsCompilable());
    // kAccPreCompiled and kAccCompileDontBother overlap with kAccIntrinsicBits.
    // We don't mark the intrinsics as precompiled, which means in JIT zygote
    // mode, compiled code for intrinsics will not be shared, and apps will
    // compile intrinsics themselves if needed.
    if (IsIntrinsic()) {
      return;
    }
    AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  // Returns true if the method resides in shared memory.
  bool IsMemorySharedMethod() {
    return IsMemorySharedMethod(GetAccessFlags());
  }

  static bool IsMemorySharedMethod(uint32_t access_flags) {
    return (access_flags & kAccMemorySharedMethod) != 0;
  }

  void SetMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t access_flags = GetAccessFlags();
    if (!IsIntrinsic(access_flags) && !IsAbstract(access_flags)) {
      AddAccessFlags(kAccMemorySharedMethod);
      SetHotCounter();
    }
  }

  void ClearMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t access_flags = GetAccessFlags();
    if (IsIntrinsic(access_flags) || IsAbstract(access_flags)) {
      return;
    }
    if (IsMemorySharedMethod(access_flags)) {
      ClearAccessFlags(kAccMemorySharedMethod);
    }
  }

  // Returns true if the method can be compiled.
  bool IsCompilable() const {
    return IsCompilable(GetAccessFlags());
  }

  static bool IsCompilable(uint32_t access_flags) {
    if (IsIntrinsic(access_flags)) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    if (IsPreCompiled(access_flags)) {
      return true;
    }
    return (access_flags & kAccCompileDontBother) == 0;
  }

  void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    ClearAccessFlags(kAccCompileDontBother);
  }

  void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    AddAccessFlags(kAccCompileDontBother);
  }

  // This is set by the class linker.
  bool IsDefault() const {
    return IsDefault(GetAccessFlags());
  }

  static bool IsDefault(uint32_t access_flags) {
    static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccDefault conflicts with intrinsic modifier");
    return (access_flags & kAccDefault) != 0;
  }

  // Returns true if the method is obsolete.
  bool IsObsolete() const {
    return IsObsolete(GetAccessFlags());
  }

  static bool IsObsolete(uint32_t access_flags) {
    return (access_flags & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccObsoleteMethod);
  }

  // Returns true if the method is native.
  bool IsNative() const {
    return IsNative(GetAccessFlags());
  }

  static bool IsNative(uint32_t access_flags) {
    return (access_flags & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() const {
    return IsFastNative(GetAccessFlags());
  }

  static bool IsFastNative(uint32_t access_flags) {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (access_flags & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() const {
    return IsCriticalNative(GetAccessFlags());
  }

  static bool IsCriticalNative(uint32_t access_flags) {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (access_flags & mask) == mask;
  }

  // Returns true if the method is managed (not native).
  bool IsManaged() const {
    return IsManaged(GetAccessFlags());
  }

  static bool IsManaged(uint32_t access_flags) {
    return !IsNative(access_flags);
  }

  // Returns true if the method is managed (not native) and invokable.
  bool IsManagedAndInvokable() const {
    return IsManagedAndInvokable(GetAccessFlags());
  }

  static bool IsManagedAndInvokable(uint32_t access_flags) {
    return IsManaged(access_flags) && IsInvokable(access_flags);
  }

  // Returns true if the method is abstract.
  bool IsAbstract() const {
    return IsAbstract(GetAccessFlags());
  }

  static bool IsAbstract(uint32_t access_flags) {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
    return (access_flags & kAccAbstract) != 0 && !IsDefaultConflicting(access_flags);
  }

  // Returns true if the method is declared synthetic.
  bool IsSynthetic() const {
    return IsSynthetic(GetAccessFlags());
  }

  static bool IsSynthetic(uint32_t access_flags) {
    return (access_flags & kAccSynthetic) != 0;
  }

  // Returns true if the method is declared varargs.
  bool IsVarargs() const {
    return IsVarargs(GetAccessFlags());
  }

  static bool IsVarargs(uint32_t access_flags) {
    return (access_flags & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() const {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }
  void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  // Returns true if the method has previously been warm.
  bool PreviouslyWarm() const {
    return PreviouslyWarm(GetAccessFlags());
  }

  static bool PreviouslyWarm(uint32_t access_flags) {
    // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
    constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
    return (access_flags & mask) != 0u;
  }

  void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() const {
    return MustCountLocks(GetAccessFlags());
  }

  static bool MustCountLocks(uint32_t access_flags) {
    if (IsIntrinsic(access_flags)) {
      return false;
    }
    return (access_flags & kAccMustCountLocks) != 0;
  }

  void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccMustCountLocks);
  }

  void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccSkipAccessChecks);
    AddAccessFlags(kAccMustCountLocks);
  }

  // Returns true if the method is using the nterp entrypoint fast path.
  bool HasNterpEntryPointFastPathFlag() const {
    return HasNterpEntryPointFastPathFlag(GetAccessFlags());
  }

  static bool HasNterpEntryPointFastPathFlag(uint32_t access_flags) {
    constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
    return (access_flags & mask) == kAccNterpEntryPointFastPathFlag;
  }

  void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    AddAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  // Returns whether the method is a string constructor. The method must not
  // be a class initializer. (Class initializers are called from a different
  // context where we do not need to check for string constructors.)
  bool IsStringConstructor() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError, AbstractMethodError, or IllegalAccessError).
  // Only call if !IsInvokable().
  void ThrowInvocationTimeError(ObjPtr<mirror::Object> receiver)
      REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  static constexpr MemberOffset ImtIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const char* shorty);
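  // For example (illustrative): a method with shorty "VJIL", i.e. taking
  // (long, int, reference) and returning void, needs 2 + 1 + 1 = 4 registers,
  // since 'J' (long) and 'D' (double) occupy two 32-bit registers each and
  // the leading return-type character is not counted.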

  ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature of the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeStatic(Thread* self, typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeInstance(Thread* self,
                 ObjPtr<mirror::Object> receiver,
                 typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeFinal(Thread* self,
              ObjPtr<mirror::Object> receiver,
              typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeVirtual(Thread* self,
                ObjPtr<mirror::Object> receiver,
                typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeInterface(Thread* self,
                  ObjPtr<mirror::Object> receiver,
                  typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char... ArgType, typename HandleScopeType>
  Handle<mirror::Object> NewObject(HandleScopeType& hs,
                                   Thread* self,
                                   typename detail::HandleShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char... ArgType>
  ObjPtr<mirror::Object> NewObject(Thread* self,
                                   typename detail::HandleShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);
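
  // Illustrative use of the shorty-templated invokers above (a sketch; the
  // method and variables named here are hypothetical):
  //
  //   // static int max(int a, int b) has shorty "III".
  //   int32_t r = method->InvokeStatic<'I', 'I', 'I'>(self, a, b);
  //   // virtual void run() on `receiver` has shorty "V".
  //   method->InvokeVirtual<'V'>(self, receiver);
  //   // Constructing an object via a no-arg constructor:
  //   Handle<mirror::Object> obj = ctor->NewObject<>(hs, self);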

  // Returns true if the method needs a class initialization check according to access flags.
  // Only static methods other than the class initializer need this check.
  // The caller is responsible for performing the actual check.
  bool NeedsClinitCheckBeforeCall() const {
    return NeedsClinitCheckBeforeCall(GetAccessFlags());
  }

  static bool NeedsClinitCheckBeforeCall(uint32_t access_flags) {
    // The class initializer is special as it is invoked during initialization
    // and does not need the check.
    return IsStatic(access_flags) && !IsConstructor(access_flags);
  }

  // Check if the method needs a class initialization check before call
  // and its declaring class is not yet visibly initialized.
  // (The class needs to be visibly initialized before we can use entrypoints
  // to compiled code for static methods. See b/18161648 .)
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool StillNeedsClinitCheck() REQUIRES_SHARED(Locks::mutator_lock_);

  // Similar to `StillNeedsClinitCheck()` but the method's declaring class may
  // be dead but not yet reclaimed by the GC, so we cannot do a full read barrier
  // but we still want to check the class status in the to-space class if any.
  // Note: JIT can hold and use such methods during managed heap GC.
  bool StillNeedsClinitCheckMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  // Check if the declaring class has been verified and look at the to-space
  // class object, if any, as in `StillNeedsClinitCheckMayBeDead()`.
  bool IsDeclaringClassVerifiedMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() const {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE
  const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }
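
  // Worked example (a sketch, assuming the field layout below yields
  // PtrSizedFieldsOffset(pointer_size) == 16 for both pointer sizes):
  // dividing the host offset by sizeof(void*) converts it to a slot index,
  // which is then rescaled by the target pointer size. `data_` is slot 0 and
  // the quick entrypoint is slot 1, so
  // DataOffset(PointerSize::k64) == 16 + 0 * 8 == 16 and
  // EntryPointFromQuickCompiledCodeOffset(PointerSize::k64) == 16 + 1 * 8 == 24;
  // for PointerSize::k32 the offsets are 16 and 20.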

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  ALWAYS_INLINE bool HasSingleImplementationFlag() const {
    return (GetAccessFlags() & kAccSingleImplementation) != 0;
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    DCHECK(method == nullptr || method->IsInvokable());
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() const {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // The resolution method also has a JNI entrypoint for direct calls from
    // compiled code to the JNI dlsym lookup stub for @CriticalNative.
    DCHECK(IsNative() || IsRuntimeMethod());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }
  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore not adhering to the normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() const {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t access_flags = GetAccessFlags();
    return !IsNative(access_flags) &&
           !IsAbstract(access_flags) &&
           !IsDefaultConflicting(access_flags) &&
           !IsRuntimeMethod() &&
           !IsProxyMethod();
  }

  // We need to explicitly indicate whether the code item is obtained from the compact dex file,
  // because in JVMTI, we obtain the code item from the standard dex file to update the method.
  void SetCodeItem(const dex::CodeItem* code_item, bool is_compact_dex_code_item)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           bool kVisitProxyMethod = true,
           typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);

  bool NameEquals(ObjPtr<mirror::String> name) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx
  // calling ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  template <ReadBarrierOption kReadBarrierOption>
  ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static constexpr size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }
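
  // Worked example (a sketch, assuming the field layout below): the fixed
  // fields end at byte 16, and PtrSizedFields holds two pointer-sized slots,
  // so Size(PointerSize::k64) == 16 + 2 * 8 == 32 and
  // Size(PointerSize::k32) == 16 + 2 * 4 == 24, independent of the pointer
  // size of the host running this code.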

  // Alignment of an instance of this native class.
  static constexpr size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void ResetCounter(uint16_t new_value);
  ALWAYS_INLINE void UpdateCounter(int32_t new_samples);
  ALWAYS_INLINE void SetHotCounter();
  ALWAYS_INLINE bool CounterIsHot();
  ALWAYS_INLINE bool CounterHasReached(uint16_t samples, uint16_t threshold);
  ALWAYS_INLINE uint16_t GetCounter();
  ALWAYS_INLINE bool CounterHasChanged(uint16_t threshold);

  ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
    return std::numeric_limits<decltype(hotness_count_)>::max();
  }

  ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the non-overloaded method 'm'.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the overloaded method 'm'.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the individual members of an ArtMethod.  Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }
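
  // Illustrative caller (a sketch): because the visited fields have mixed
  // types, a generic lambda is a convenient visitor:
  //
  //   auto dumper = [](ArtMethod* m, const auto* field, const char* name) {
  //     LOG(INFO) << name << " at offset "
  //               << (reinterpret_cast<uintptr_t>(field) -
  //                   reinterpret_cast<uintptr_t>(m));
  //   };
  //   method->VisitMembers(dumper);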

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this flag needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked. Such as when setting
  // verifier flags and single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // interface's method array in `IfTable`s of implementing classes.
  uint16_t method_index_;

  union {
    // Non-abstract methods: The hotness we measure for this method. Not atomic,
    // as we allow missing increments: if the method is hot, we will see it eventually.
    uint16_t hotness_count_;
    // Abstract methods: IMT index.
    uint16_t imt_index_;
  };

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //                    or a function to resolve the JNI function,
    //   - resolution method: pointer to a function to resolve the method and
    //                        the JNI function for @CriticalNative.
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - default conflict method: null
    //   - other methods: during AOT the code item offset, at runtime a pointer
    //                    to the code item.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }
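
  // Illustrative use (a sketch): a 64-bit host tool inspecting a 32-bit image
  // reads the quick entrypoint with an explicit pointer size rather than the
  // host's own:
  //
  //   const void* ep =
  //       method->GetEntryPointFromQuickCompiledCodePtrSize(PointerSize::k32);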

  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            ((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
            ((modifier & kAccIntrinsicBits) != 0));  // b/228049006: ensure intrinsic is not `kNone`
  }
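
  // For example (illustrative): a valid intrinsic update is exactly
  // kAccIntrinsic plus a non-zero ordinal packed into kAccIntrinsicBits,
  // i.e. something of the form
  //
  //   kAccIntrinsic | (ordinal << CTZ(kAccIntrinsicBits))
  //
  // with ordinal != 0; any bit outside those two fields makes it invalid.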

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
    access_flags_.fetch_or(flag, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    access_flags_.fetch_and(~flag, std::memory_order_relaxed);
  }

  // Helper method for checking the class status of a possibly dead declaring class.
  // See `StillNeedsClinitCheckMayBeDead()` and `IsDeclaringClassVerifiedMayBeDead()`.
  ObjPtr<mirror::Class> GetDeclaringClassMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  // Used by GetName and GetNameView to share common code.
  const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_