/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include "base/bit_utils.h"
#include "base/casts.h"
#include "dex_file.h"
#include "gc_root.h"
#include "invoke_type.h"
#include "method_reference.h"
#include "modifiers.h"
#include "mirror/object.h"
#include "read_barrier_option.h"
#include "stack.h"
#include "utils.h"

namespace art {

union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class IfTable;
class PointerArray;
}  // namespace mirror

// Table to resolve IMT conflicts at runtime. The table is attached to
// the jni entrypoint of IMT conflict ArtMethods.
// The table contains a list of pairs of { interface_method, implementation_method }
// with the last entry being null to make an assembly implementation of a lookup
// faster.
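// For illustration, with 64-bit pointers and a single conflict entry, the raw
// storage written by SetMethod (slot = index * kMethodCount + kind) would be:
//   data64_[0] = interface_method       (entry 0, kMethodInterface)
//   data64_[1] = implementation_method  (entry 0, kMethodImplementation)
//   data64_[2] = nullptr                (end marker)
//   data64_[3] = nullptr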
class ImtConflictTable {
  enum MethodIndex {
    kMethodInterface,
    kMethodImplementation,
    kMethodCount,  // Number of elements in enum.
  };

 public:
  // Build a new table copying `other` and adding the new entry formed of
  // the pair { `interface_method`, `implementation_method` }
  ImtConflictTable(ImtConflictTable* other,
                   ArtMethod* interface_method,
                   ArtMethod* implementation_method,
                   size_t pointer_size) {
    const size_t count = other->NumEntries(pointer_size);
    for (size_t i = 0; i < count; ++i) {
      SetInterfaceMethod(i, pointer_size, other->GetInterfaceMethod(i, pointer_size));
      SetImplementationMethod(i, pointer_size, other->GetImplementationMethod(i, pointer_size));
    }
    SetInterfaceMethod(count, pointer_size, interface_method);
    SetImplementationMethod(count, pointer_size, implementation_method);
    // Add the null marker.
    SetInterfaceMethod(count + 1, pointer_size, nullptr);
    SetImplementationMethod(count + 1, pointer_size, nullptr);
  }
  // num_entries excludes the null end marker.
  ImtConflictTable(size_t num_entries, size_t pointer_size) {
    SetInterfaceMethod(num_entries, pointer_size, nullptr);
    SetImplementationMethod(num_entries, pointer_size, nullptr);
  }

  // Set an entry at an index.
  void SetInterfaceMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    SetMethod(index * kMethodCount + kMethodInterface, pointer_size, method);
  }

  void SetImplementationMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    SetMethod(index * kMethodCount + kMethodImplementation, pointer_size, method);
  }

  ArtMethod* GetInterfaceMethod(size_t index, size_t pointer_size) const {
    return GetMethod(index * kMethodCount + kMethodInterface, pointer_size);
  }

  ArtMethod* GetImplementationMethod(size_t index, size_t pointer_size) const {
    return GetMethod(index * kMethodCount + kMethodImplementation, pointer_size);
  }

  // Return true if two conflict tables are the same.
  bool Equals(ImtConflictTable* other, size_t pointer_size) const {
    size_t num = NumEntries(pointer_size);
    if (num != other->NumEntries(pointer_size)) {
      return false;
    }
    for (size_t i = 0; i < num; ++i) {
      if (GetInterfaceMethod(i, pointer_size) != other->GetInterfaceMethod(i, pointer_size) ||
          GetImplementationMethod(i, pointer_size) !=
              other->GetImplementationMethod(i, pointer_size)) {
        return false;
      }
    }
    return true;
  }

  // Visit all of the entries.
  // NO_THREAD_SAFETY_ANALYSIS for calling with held locks. Visitor is passed a pair of ArtMethod*
  // and also returns one. The order is <interface, implementation>.
  template<typename Visitor>
  void Visit(const Visitor& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS {
    uint32_t table_index = 0;
    for (;;) {
      ArtMethod* interface_method = GetInterfaceMethod(table_index, pointer_size);
      if (interface_method == nullptr) {
        break;
      }
      ArtMethod* implementation_method = GetImplementationMethod(table_index, pointer_size);
      auto input = std::make_pair(interface_method, implementation_method);
      std::pair<ArtMethod*, ArtMethod*> updated = visitor(input);
      if (input.first != updated.first) {
        SetInterfaceMethod(table_index, pointer_size, updated.first);
      }
      if (input.second != updated.second) {
        SetImplementationMethod(table_index, pointer_size, updated.second);
      }
      ++table_index;
    }
  }
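
  // Usage sketch (illustrative only): remapping every method in the table
  // through an updating visitor, e.g. during image relocation. `Remap` is a
  // hypothetical helper, not part of this class.
  //
  //   table->Visit([](const std::pair<ArtMethod*, ArtMethod*>& entry) {
  //     return std::make_pair(Remap(entry.first), Remap(entry.second));
  //   }, sizeof(void*));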

  // Lookup the implementation ArtMethod associated to `interface_method`. Return null
  // if not found.
  ArtMethod* Lookup(ArtMethod* interface_method, size_t pointer_size) const {
    uint32_t table_index = 0;
    for (;;) {
      ArtMethod* current_interface_method = GetInterfaceMethod(table_index, pointer_size);
      if (current_interface_method == nullptr) {
        break;
      }
      if (current_interface_method == interface_method) {
        return GetImplementationMethod(table_index, pointer_size);
      }
      ++table_index;
    }
    return nullptr;
  }
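
  // Usage sketch (illustrative only): a caller resolving an interface call
  // would probe the table and fall back to a slow path on a miss:
  //
  //   ImtConflictTable* table = conflict_method->GetImtConflictTable(sizeof(void*));
  //   ArtMethod* impl = table->Lookup(interface_method, sizeof(void*));
  //   if (impl == nullptr) { /* slow-path resolution */ }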

  // Compute the number of entries in this table.
  size_t NumEntries(size_t pointer_size) const {
    uint32_t table_index = 0;
    while (GetInterfaceMethod(table_index, pointer_size) != nullptr) {
      ++table_index;
    }
    return table_index;
  }

  // Compute the size in bytes taken by this table.
  size_t ComputeSize(size_t pointer_size) const {
    // Add the end marker.
    return ComputeSize(NumEntries(pointer_size), pointer_size);
  }

  // Compute the size in bytes needed for copying the given `table` and add
  // one more entry.
  static size_t ComputeSizeWithOneMoreEntry(ImtConflictTable* table, size_t pointer_size) {
    return table->ComputeSize(pointer_size) + EntrySize(pointer_size);
  }

  // Compute size with a fixed number of entries.
  static size_t ComputeSize(size_t num_entries, size_t pointer_size) {
    return (num_entries + 1) * EntrySize(pointer_size);  // Add one for null terminator.
  }

  static size_t EntrySize(size_t pointer_size) {
    return pointer_size * static_cast<size_t>(kMethodCount);
  }
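
  // Worked example (illustrative): with 64-bit pointers, EntrySize(8) is
  // 8 * kMethodCount = 16 bytes, so a table holding 2 conflict entries takes
  // ComputeSize(2, 8) = (2 + 1) * 16 = 48 bytes, the extra entry being the
  // null terminator.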

 private:
  ArtMethod* GetMethod(size_t index, size_t pointer_size) const {
    if (pointer_size == 8) {
      return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data64_[index]));
    } else {
      DCHECK_EQ(pointer_size, 4u);
      return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data32_[index]));
    }
  }

  void SetMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    if (pointer_size == 8) {
      data64_[index] = dchecked_integral_cast<uint64_t>(reinterpret_cast<uintptr_t>(method));
    } else {
      DCHECK_EQ(pointer_size, 4u);
      data32_[index] = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(method));
    }
  }

  // Array of entries that the assembly stubs will iterate over. Note that this is
  // not fixed size, and we allocate data prior to calling the constructor
  // of ImtConflictTable.
  union {
    uint32_t data32_[0];
    uint64_t data64_[0];
  };
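
  // Allocation sketch (illustrative only): since the entry array is flexible,
  // callers reserve ComputeSize() bytes up front and then construct the table
  // in place, along the lines of:
  //
  //   void* storage = allocator->Alloc(ImtConflictTable::ComputeSize(n, sizeof(void*)));
  //   ImtConflictTable* table = new (storage) ImtConflictTable(n, sizeof(void*));
  //
  // where `allocator` stands in for whatever backing allocation the caller uses.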

  DISALLOW_COPY_AND_ASSIGN(ImtConflictTable);
};

class ArtMethod FINAL {
 public:
  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0) { }

  ArtMethod(ArtMethod* src, size_t image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClass() SHARED_REQUIRES(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
      SHARED_REQUIRES(Locks::mutator_lock_);

  void SetDeclaringClass(mirror::Class* new_declaring_class)
      SHARED_REQUIRES(Locks::mutator_lock_);

  bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
  // a proxy method.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE uint32_t GetAccessFlags();

  void SetAccessFlags(uint32_t new_access_flags) {
    // Not called within a transaction.
    access_flags_ = new_access_flags;
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsCopied() {
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError if one
  // attempts to do so.
  bool IsDefaultConflicting() {
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  bool IsDefault() {
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsNative() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
  }

  bool IsFastNative() {
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool SkipAccessChecks() {
    return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
  }

  void SetSkipAccessChecks() {
    DCHECK(!SkipAccessChecks());
    SetAccessFlags(GetAccessFlags() | kAccSkipAccessChecks);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() {
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_REQUIRES(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() SHARED_REQUIRES(Locks::mutator_lock_);

  uint16_t GetMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() SHARED_REQUIRES(Locks::mutator_lock_);

  size_t GetVtableIndex() SHARED_REQUIRES(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) SHARED_REQUIRES(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static MemberOffset DexMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);

  ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
                                               ArtMethod* new_method,
                                               size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template <bool kWithCheck = true>
  mirror::Class* GetDexCacheResolvedType(uint32_t type_idx, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SetDexCacheResolvedTypes(GcRoot<mirror::Class>* new_dex_cache_types, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasDexCacheResolvedTypes(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ArtMethod* other, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(GcRoot<mirror::Class>* other_cache, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Get the Class* from the type index into this method's dex cache.
  mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) SHARED_REQUIRES(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      SHARED_REQUIRES(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*));
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              sizeof(void*));
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, size_t pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code, pointer_size);
  }

  void RegisterNative(const void* native_method, bool is_fast)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void UnregisterNative() SHARED_REQUIRES(Locks::mutator_lock_);

  static MemberOffset DexCacheResolvedMethodsOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset DexCacheResolvedTypesOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_types_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromJniOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*) * pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(size_t pointer_size) {
    return reinterpret_cast<ProfilingInfo*>(GetEntryPointFromJniPtrSize(pointer_size));
  }

  ImtConflictTable* GetImtConflictTable(size_t pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetEntryPointFromJniPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, size_t pointer_size) {
    SetEntryPointFromJniPtrSize(table, pointer_size);
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetEntryPointFromJniPtrSize(info, sizeof(void*));
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, size_t pointer_size) {
    SetEntryPointFromJniPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    return EntryPointFromJniOffset(sizeof(void*));
  }

  void* GetEntryPointFromJni() {
    return GetEntryPointFromJniPtrSize(sizeof(void*));
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size) {
    return GetNativePointer<void*>(EntryPointFromJniOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, sizeof(void*));
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size) {
    SetNativePointer(EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore does not adhere to the normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod();

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  MethodReference ToMethodReference() SHARED_REQUIRES(Locks::mutator_lock_) {
    return MethodReference(GetDexFile(), GetDexMethodIndex());
  }

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // has_no_move_exception indicates whether the found catch block is responsible for clearing the
  // exception or whether a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetShorty() SHARED_REQUIRES(Locks::mutator_lock_) {
    uint32_t unused_length;
    return GetShorty(&unused_length);
  }

  const char* GetShorty(uint32_t* out_length) SHARED_REQUIRES(Locks::mutator_lock_);

  const Signature GetSignature() SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::String* GetNameAsString(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(uint16_t type_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() SHARED_REQUIRES(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension due to GetClassFromTypeIdx calling ResolveType; this has caused a
  // large number of bugs at call sites.
  mirror::Class* GetReturnType(bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::DexCache* GetDexCache() SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(size_t pointer_size) {
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * pointer_size;
  }
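
  // Worked example (illustrative, assuming the non-pointer fields above occupy
  // 20 bytes): Size(4) = RoundUp(20, 4) + 4 * 4 = 36, while
  // Size(8) = RoundUp(20, 8) + 4 * 8 = 56, since PtrSizedFields holds four
  // pointer-sized members.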

  // Alignment of an instance of this native class.
  static size_t Alignment(size_t pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return pointer_size;
  }

  void CopyFrom(ArtMethod* src, size_t image_pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE GcRoot<mirror::Class>* GetDexCacheResolvedTypes(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Note, hotness_count_ updates are non-atomic but it doesn't need to be precise.  Also,
  // given that the counter is only 16 bits wide we can expect wrap-around in some
  // situations.  Consumers of hotness_count_ must be able to deal with that.
  uint16_t IncrementCounter() {
    return ++hotness_count_;
  }

  void ClearCounter() {
    hotness_count_ = 0;
  }

  void SetCounter(int16_t hotness_count) {
    hotness_count_ = hotness_count;
  }

  uint16_t GetCounter() const {
    return hotness_count_;
  }

  const uint8_t* GetQuickenedInfo() SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() SHARED_REQUIRES(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
  // Does not use read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, size_t pointer_size);

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable.
  uint16_t method_index_;

  // The hotness we measure for this method. Managed by the interpreter. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  // PACKED(4) is necessary for the correctness of
  // RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size).
  struct PACKED(4) PtrSizedFields {
    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    ArtMethod** dex_cache_resolved_methods_;

    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    GcRoot<mirror::Class>* dex_cache_resolved_types_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function,
    // or the profiling data for non-native methods, or an ImtConflictTable.
    void* entry_point_from_jni_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  static size_t PtrSizedFieldsOffset(size_t pointer_size) {
    // Round up to pointer size for padding field.
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size);
  }

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, size_t pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == sizeof(uint32_t)) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, size_t pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == sizeof(uint32_t)) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }
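
  // Note (illustrative): when a 64-bit host writes a 32-bit target image,
  // SetNativePointer stores only the low 32 bits and dchecked_integral_cast
  // debug-checks that the narrowing loses no bits.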

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_