/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>
#include <limits>

#include <android-base/logging.h>
#include <jni.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/macros.h"
#include "base/runtime_debug.h"
#include "dex/dex_file_structs.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "gc_root.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art {

class CodeItemDataAccessor;
class CodeItemDebugInfoAccessor;
class CodeItemInstructionAccessor;
class DexFile;
template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class ShadowFrame;
class Signature;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;

template <typename T> struct NativeDexCachePair;
using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;
}  // namespace mirror

class ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() const {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency, so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() const {
    return IsConstructor() && IsStatic();
  }

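  // Illustrative sketch, not part of the API: each Is*() predicate above is a
  // plain bit test against one relaxed load of access_flags_. For example,
  // IsClassInitializer() amounts to:
  //
  //   uint32_t flags = method->GetAccessFlags();
  //   bool is_clinit = (flags & kAccConstructor) != 0 && (flags & kAccStatic) != 0;
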
  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    constexpr uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsIntrinsic() const {
    return (GetAccessFlags() & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() const {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not continuous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
  }

  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsCopied() const {
    // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
    // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
    static_assert((kAccCopied & kAccIntrinsicBits) != 0,
                  "kAccCopied deliberately overlaps intrinsic bits");
    const bool copied = (GetAccessFlags() & (kAccIntrinsic | kAccCopied)) == kAccCopied;
    // (IsMiranda() || IsDefaultConflicting()) implies copied.
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() const {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check `kAccIntrinsic` too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
    return (GetAccessFlags() & kMask) == kValue;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError
  // if one attempts to do so.
  bool IsDefaultConflicting() const {
    // Default conflict methods are marked as copied, abstract and default.
    // We need to check `kAccIntrinsic` too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
    return (GetAccessFlags() & kMask) == kValue;
  }

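  // Illustrative sketch: the intrinsic ordinal is packed into the contiguous
  // kAccIntrinsicBits field of access_flags_, so for a method with kAccIntrinsic
  // set, recovering it is a mask-and-shift, mirroring GetIntrinsic() above:
  //
  //   uint32_t shift = CTZ(kAccIntrinsicBits);  // lowest bit of the ordinal field
  //   uint32_t ordinal = (method->GetAccessFlags() & kAccIntrinsicBits) >> shift;
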
  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() const {
    // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
    // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
    DCHECK_EQ((GetAccessFlags() & kAccAbstract) == 0, !IsDefaultConflicting() && !IsAbstract());
    return (GetAccessFlags() & kAccAbstract) == 0;
  }

  bool IsPreCompiled() const {
    // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
    // Intrinsics should be compiled in the primary boot image, not pre-compiled by the JIT.
    static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
    static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
    static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
    return (GetAccessFlags() & kMask) == kValue;
  }

  void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsInvokable());
    DCHECK(IsCompilable());
    AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  bool IsCompilable() const {
    if (IsIntrinsic()) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    if (IsPreCompiled()) {
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    ClearAccessFlags(kAccCompileDontBother);
  }

  void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    AddAccessFlags(kAccCompileDontBother);
  }

  // This is set by the class linker.
  bool IsDefault() const {
    static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  bool IsObsolete() const {
    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccObsoleteMethod);
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() const {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() const {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

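  // Illustrative: a plain non-zero test on kAccCriticalNative would also match
  // non-native methods, where that bit value means something else. Requiring
  // equality with the full mask accepts only methods that are both native and
  // annotated:
  //
  //   constexpr uint32_t mask = kAccCriticalNative | kAccNative;
  //   bool is_critical_native = (method->GetAccessFlags() & mask) == mask;
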
  bool IsAbstract() const {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
    return (GetAccessFlags() & kAccAbstract) != 0 && !IsDefaultConflicting();
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsVarargs() const {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool UseFastInterpreterToInterpreterInvoke() const {
    // The bit is applicable only if the method is not intrinsic.
    constexpr uint32_t mask = kAccFastInterpreterToInterpreterInvoke | kAccIntrinsic;
    return (GetAccessFlags() & mask) == kAccFastInterpreterToInterpreterInvoke;
  }

  void SetFastInterpreterToInterpreterInvokeFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic());
    AddAccessFlags(kAccFastInterpreterToInterpreterInvoke);
  }

  void ClearFastInterpreterToInterpreterInvokeFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsIntrinsic()) {
      ClearAccessFlags(kAccFastInterpreterToInterpreterInvoke);
    }
  }

  bool SkipAccessChecks() const {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }

  void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  bool PreviouslyWarm() const {
    // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
    constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
    return (GetAccessFlags() & mask) != 0u;
  }

  void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

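  // A note on the recurring idiom above: when a flag's bit value is reused with
  // a different meaning for some method kinds (intrinsic ordinal bits, or
  // native-only flags), "flag F is set" must be tested as equality under a
  // widened mask rather than as a plain non-zero check:
  //
  //   (GetAccessFlags() & (F | kAccIntrinsic)) == F   // false for intrinsics
  //
  // as in UseFastInterpreterToInterpreterInvoke() and SkipAccessChecks() above.
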
  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() const {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccMustCountLocks);
  }

  void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccMustCountLocks);
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  bool HasNterpEntryPointFastPathFlag() const {
    constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
    return (GetAccessFlags() & mask) == kAccNterpEntryPointFastPathFlag;
  }

  void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    AddAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  static constexpr MemberOffset ImtIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const char* shorty);

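  // Worked example: in a shorty, index 0 is the return type, and 'J' (long) and
  // 'D' (double) each occupy two 32-bit registers while everything else takes
  // one. So for shorty "VJI", i.e. void f(long, int), NumArgRegisters returns
  // 2 + 1 = 3. The implicit `this` of an instance method is not part of the
  // shorty and is accounted for separately by callers.
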
  ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() const {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE
  const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
    // We might want to invoke compiled code, so don't use the fast path.
    ClearFastInterpreterToInterpreterInvokeFlag();
  }

  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }

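  // Worked example of the offset computation above: OFFSETOF_MEMBER is evaluated
  // with the compiler's own pointer size, so dividing by sizeof(void*) turns it
  // into a field ordinal (data_ is 0, the quick entrypoint is 1) before rescaling
  // by the image pointer size. That is what keeps these offsets correct when a
  // 64-bit host compiles a 32-bit image:
  //
  //   EntryPointFromQuickCompiledCodeOffset(PointerSize::k32)
  //       == PtrSizedFieldsOffset(PointerSize::k32) + 1 * 4
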
  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  ALWAYS_INLINE bool HasSingleImplementationFlag() const {
    return (GetAccessFlags() & kAccSingleImplementation) != 0;
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does
  // not get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    // A non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    SetDataPtrSize(method, pointer_size);
  }

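  // Sketch of the intended use (class-hierarchy-based devirtualization; the
  // caller code here is hypothetical): for an abstract method whose single
  // implementor has been recorded in data_, the target is one flag test and one
  // pointer load away:
  //
  //   if (method->HasSingleImplementationFlag()) {
  //     ArtMethod* target = method->GetSingleImplementation(kRuntimePointerSize);
  //     // ... invoke target directly instead of dispatching virtually.
  //   }
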
  void* GetEntryPointFromJni() const {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // The resolution method also has a JNI entrypoint for direct calls from
    // compiled code to the JNI dlsym lookup stub for @CriticalNative.
    DCHECK(IsNative() || IsRuntimeMethod());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
  // conventions for a method of managed code. Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() const {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
    return !IsRuntimeMethod() && !IsNative() && !IsProxyMethod() && !IsAbstract();
  }

  void SetCodeItem(const dex::CodeItem* code_item) REQUIRES_SHARED(Locks::mutator_lock_);

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

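  // Sketch of the exception-delivery use (caller code and variable names are
  // hypothetical, grounded only in the declaration above): a return value of
  // dex::kDexNoIndex means this method has no handler and unwinding continues
  // into the caller.
  //
  //   bool has_no_move_exception = false;
  //   uint32_t handler_dex_pc =
  //       method->FindCatchBlock(h_exception_class, throw_dex_pc, &has_no_move_exception);
  //   if (handler_dex_pc == dex::kDexNoIndex) {
  //     // Unwind this frame.
  //   }
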
  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx calling
  // ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }

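  // Worked example, assuming the field layout at the bottom of this class and a
  // 4-byte compressed GcRoot: the fixed fields occupy 4 (declaring_class_) +
  // 4 (access_flags_) + 4 (dex_method_index_) + 2 (method_index_) +
  // 2 (hotness_count_/imt_index_) = 16 bytes, and PtrSizedFields holds two
  // pointers, so
  //   Size(PointerSize::k64) == 16 + 2 * 8 == 32
  //   Size(PointerSize::k32) == 16 + 2 * 4 == 24
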
  // Alignment of an instance of this native class.
  static size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetCounter(uint16_t hotness_count);

  ALWAYS_INLINE uint16_t GetCounter();

  ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
    return std::numeric_limits<decltype(hotness_count_)>::max();
  }

  ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method; returns null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the JNI native function name for the non-overloaded method 'm'.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the overloaded method 'm'.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

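  // Worked example (standard JNI name mangling): for a.b.C.f(int),
  // JniShortName() returns "Java_a_b_C_f" and JniLongName() returns
  // "Java_a_b_C_f__I", the short and overloaded forms that dlsym-based lookup
  // searches for when the method was not registered via RegisterNatives.
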
  // Visit the individual members of an ArtMethod. Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class
  // members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".

  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this flag needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked. Such as when setting
  // verifier flags and single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods
  // the ifTable.
  uint16_t method_index_;

  union {
    // Non-abstract methods: The hotness we measure for this method. Not atomic,
    // as we allow missing increments: if the method is hot, we will see it eventually.
    uint16_t hotness_count_;
    // Abstract methods: IMT index.
    uint16_t imt_index_;
  };

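  // A note on the union (reasoning grounded in the field comments above): the
  // two interpretations never coexist, since an abstract method is never
  // interpreted or JIT-compiled and so has no hotness to track, while a concrete
  // method never needs its own IMT slot. GetCounter()/SetCounter() and
  // GetImtIndex() are the type-correct views of this storage.
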
  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //     or a function to resolve the JNI function,
    //   - resolution method: pointer to a function to resolve the method and
    //     the JNI function for @CriticalNative.
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - other methods: during AOT the code item offset, at runtime a pointer
    //     to the code item.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer, which may cause bridging
    // into the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            (((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0)));
  }

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic() || !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
    access_flags_.fetch_or(flag, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic() || !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    access_flags_.fetch_and(~flag, std::memory_order_relaxed);
  }

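  // Illustrative: fetch_or/fetch_and make flag updates free of lost updates even
  // with concurrent writers touching different bits, which a plain
  // load-modify-store through SetAccessFlags() would not be. E.g. two threads
  // concurrently executing
  //
  //   AddAccessFlags(kAccPreviouslyWarm);        // thread A
  //   AddAccessFlags(kAccSingleImplementation);  // thread B
  //
  // always leave both bits set. Relaxed ordering suffices because, per the
  // comment above, readers do not order other memory accesses against these bits.
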
  // Used by GetName and GetNameView to share common code.
  const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_