/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>

#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/logging.h"
#include "dex_file.h"
#include "gc_root.h"
#include "modifiers.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "primitive.h"
#include "read_barrier_option.h"

namespace art {

template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;

template <typename T> struct NativeDexCachePair;
using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;
}  // namespace mirror

class ArtMethod FINAL {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
  // a proxy method.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  uint32_t GetAccessFlags() {
    if (kCheckDeclaringClassState) {
      GetAccessFlagsDCheck<kReadBarrierOption>();
    }
    return access_flags_.load(std::memory_order_relaxed);
  }
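
  // (A relaxed load suffices here because callers only need an atomic snapshot
  // of the flag bits; ordering with respect to other method state is presumably
  // provided by the surrounding synchronization, e.g. class linking, rather
  // than by this load itself.)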

  // This version should only be called when it's certain there is no
  // concurrency so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) {
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) {
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags | flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) {
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags & ~flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }
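
  // Both atomic setters above use the standard weak compare-and-swap retry
  // loop: load the current flags, compute the new value, and retry if another
  // thread raced in between. compare_exchange_weak may also fail spuriously,
  // which is harmless since the loop simply retries. Illustrative use with
  // flags defined below:
  //
  //   method->AddAccessFlags(kAccSkipAccessChecks);    // set a bit atomically
  //   method->ClearAccessFlags(kAccSkipAccessChecks);  // clear it again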

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized_mask = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_mask) != 0;
  }

  bool IsFinal() {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsIntrinsic() {
    return (GetAccessFlags() & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() {
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() >> POPCOUNT(kAccFlagsNotUsedByIntrinsic)) & kAccMaxIntrinsic;
  }
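
  // The intrinsic ordinal is packed into the high access-flag bits that no
  // regular modifier uses. Assuming kAccFlagsNotUsedByIntrinsic masks a
  // contiguous run of low bits (as defined in modifiers.h), POPCOUNT of that
  // mask doubles as the shift amount, and kAccMaxIntrinsic keeps only the
  // ordinal bits. A sketch of the inverse packing (assumed; see SetIntrinsic
  // in art_method.cc for the authoritative version):
  //
  //   new_flags = (flags & kAccFlagsNotUsedByIntrinsic) |
  //               kAccIntrinsic |
  //               (intrinsic << POPCOUNT(kAccFlagsNotUsedByIntrinsic));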

  bool IsCopied() {
    static_assert((kAccCopied & kAccFlagsNotUsedByIntrinsic) == kAccCopied,
                  "kAccCopied conflicts with intrinsic modifier");
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    static_assert((kAccMiranda & kAccFlagsNotUsedByIntrinsic) == kAccMiranda,
                  "kAccMiranda conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    if (IsIntrinsic()) {
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void SetDontCompile() {
    AddAccessFlags(kAccCompileDontBother);
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError if one
  // attempts to do so.
  bool IsDefaultConflicting() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  bool IsDefault() {
    static_assert((kAccDefault & kAccFlagsNotUsedByIntrinsic) == kAccDefault,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  bool IsObsolete() {
    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() {
    AddAccessFlags(kAccObsoleteMethod);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsNative() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
  }

  bool IsFastNative() {
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsVarargs() {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() {
    return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
  }

  void SetSkipAccessChecks() {
    AddAccessFlags(kAccSkipAccessChecks);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative
  // -- Independent of kAccFastNative access flags.
  bool IsAnnotatedWithFastNative();

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative
  // -- Unrelated to the GC notion of "critical".
  bool IsAnnotatedWithCriticalNative();

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);
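
  // Worked example: for the shorty "VJI" (returns void, takes a long and an
  // int), the long needs two 32-bit registers and the int one, so this
  // returns 3. The return-type character of the shorty is not counted.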

  ALWAYS_INLINE uint32_t GetDexMethodIndexUnchecked() {
    return dex_method_index_;
  }
  ALWAYS_INLINE uint32_t GetDexMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  ALWAYS_INLINE mirror::MethodDexCacheType* GetDexCacheResolvedMethods(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
                                                     PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
                                               ArtMethod* new_method,
                                               PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
                                                PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
                                      PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get the Class* from the type index into this method's dex cache.
  mirror::Class* GetClassFromTypeIndex(dex::TypeIndex type_idx, bool resolve)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  // Registers the native method and returns the new entry point. NB The returned entry point might
  // be different from the native_method argument if some MethodCallback modifies it.
  const void* RegisterNative(const void* native_method, bool is_fast)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;

  void UnregisterNative() REQUIRES_SHARED(Locks::mutator_lock_);

  static MemberOffset DexCacheResolvedMethodsOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }

  static MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }
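
  // The OFFSETOF_MEMBER(...) / sizeof(void*) * pointer_size computation above
  // converts a host-layout offset into a target-layout offset: dividing by the
  // host pointer width yields the field's ordinal within PtrSizedFields, and
  // multiplying by the target pointer width rebuilds the byte offset for the
  // target. For example, data_ is the second pointer-sized field (ordinal 1),
  // so for a 32-bit target it lives at PtrSizedFieldsOffset(pointer_size) + 4.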

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(PointerSize pointer_size) {
    // Don't do a read barrier in the DCHECK, as GetProfilingInfo is called in places
    // where the declaring class is treated as a weak reference (accessing it with
    // a read barrier would either prevent unloading the class, or crash the runtime if
    // the GC wants to unload it).
    DCHECK(!IsNative<kWithoutReadBarrier>());
    return reinterpret_cast<ProfilingInfo*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetDataPtrSize(info, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, PointerSize pointer_size) {
    SetDataPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    return DataOffset(kRuntimePointerSize);
  }

  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size) {
    DCHECK(!IsNative());
    DCHECK(IsAbstract());  // Non-abstract method's single implementation is just itself.
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }
  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore one that does not adhere to the
  // normal conventions of a managed-code method? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::String* GetNameAsString(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension, since GetClassFromTypeIdx calls ResolveType; this has caused a
  // large number of bugs at call sites.
  mirror::Class* GetReturnType(bool resolve) REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  mirror::DexCache* GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::DexCache* GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }
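
  // Worked example: with three pointer-sized fields in PtrSizedFields,
  // sizeof(PtrSizedFields) / sizeof(void*) is 3, so Size(PointerSize::k32) is
  // PtrSizedFieldsOffset(k32) + 12 and Size(PointerSize::k64) is
  // PtrSizedFieldsOffset(k64) + 24 (see PtrSizedFieldsOffset below).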

  // Alignment of an instance of this native class.
  static size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Note, hotness_count_ updates are non-atomic but they don't need to be precise.  Also,
  // given that the counter is only 16 bits wide we can expect wrap-around in some
  // situations.  Consumers of hotness_count_ must be able to deal with that.
  uint16_t IncrementCounter() {
    return ++hotness_count_;
  }

  void ClearCounter() {
    hotness_count_ = 0;
  }

  void SetCounter(int16_t hotness_count) {
    hotness_count_ = hotness_count;
  }

  uint16_t GetCounter() const {
    return hotness_count_;
  }
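
  // Illustrative (hypothetical) consumer pattern that tolerates wrap-around:
  //
  //   if (method->IncrementCounter() >= kHotThreshold) {  // kHotThreshold is
  //     RequestJitCompilation(method);                    // assumed, not part
  //   }                                                   // of this header
  //
  // A wrapped or imprecise counter merely delays the next trigger; it never
  // breaks correctness, which is why lost increments are acceptable.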

  const uint8_t* GetQuickenedInfo(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the non-overloaded method 'm'.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the overloaded method 'm'.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
  // Does not use read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor,
                                                     PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size);

  // Visit the individual members of an ArtMethod.  Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_code_item_offset_, "dex_code_item_offset_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this,
            &ptr_sized_fields_.dex_cache_resolved_methods_,
            "ptr_sized_fields_.dex_cache_resolved_methods_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this flag needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked. Such as when setting
  // verifier flags and single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable.
  uint16_t method_index_;

  // The hotness we measure for this method. Managed by the interpreter. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    mirror::MethodDexCacheType* dex_cache_resolved_methods_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function,
    // or the profiling data for non-native methods, or an ImtConflictTable, or the
    // single-implementation of an abstract/interface method.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // If `lookup_in_resolved_boot_classes` is true, look up any of the
  // method's annotations' classes in the bootstrap class loader's
  // resolved types; otherwise, resolve them as a side effect.
  bool IsAnnotatedWith(jclass klass, uint32_t visibility, bool lookup_in_resolved_boot_classes);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }
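
  // Worked example (assuming the 4-byte compressed-reference GcRoot above):
  // declaring_class_, access_flags_, dex_code_item_offset_ and
  // dex_method_index_ take 16 bytes, method_index_ and hotness_count_ 4 more,
  // so the unrounded offset is 20; RoundUp then yields 20 for PointerSize::k32
  // and 24 for PointerSize::k64, matching the "fake padding field" note above.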

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }
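
  // Together these helpers let the runtime and tools read or write a method
  // laid out for a target whose pointer width differs from the host's (e.g.
  // a 64-bit dex2oat producing a 32-bit boot image): the value is stored in a
  // slot of the target's pointer width at a target-computed byte offset, and
  // dchecked_integral_cast catches any pointer that cannot be narrowed.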

  template <ReadBarrierOption kReadBarrierOption> void GetAccessFlagsDCheck();

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
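
// A minimal sketch of a MethodCallback implementation, assuming a hypothetical
// LoggingCallback that simply forwards the original implementation unchanged
// (a real callback, e.g. a JVMTI agent, could install its own stub instead):
//
//   class LoggingCallback : public MethodCallback {
//    public:
//     void RegisterNativeMethod(ArtMethod* method,
//                               const void* original_implementation,
//                               /*out*/void** new_implementation)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       LOG(INFO) << "Binding " << ArtMethod::PrettyMethod(method);
//       *new_implementation = const_cast<void*>(original_implementation);
//     }
//   };
//
// RegisterNative() above reports the final entry point after callbacks run,
// which is why its result may differ from the native_method argument.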

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_