/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
#define ART_RUNTIME_MIRROR_ART_METHOD_H_

#include "dex_file.h"
#include "gc_root.h"
#include "invoke_type.h"
#include "modifiers.h"
#include "object.h"
#include "object_callbacks.h"
#include "quick/quick_method_frame_info.h"
#include "read_barrier_option.h"

namespace art {

struct ArtMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
class MethodHelper;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {

typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
                                         const DexFile::CodeItem* code_item,
                                         ShadowFrame* shadow_frame, JValue* result);

// C++ mirror of java.lang.reflect.ArtMethod.
class MANAGED ArtMethod FINAL : public Object {
 public:
  // Size of java.lang.reflect.ArtMethod.class.
  static uint32_t ClassSize();

  // Size of an instance of java.lang.reflect.ArtMethod not including its value array.
  static constexpr uint32_t InstanceSize() {
    return sizeof(ArtMethod);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Class* GetDeclaringClass() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetDeclaringClass(Class *new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetAccessFlags(uint32_t new_access_flags) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_), new_access_flags);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccStatic) != 0;
  }
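
  // Illustrative usage sketch (not part of this class): obtaining an ArtMethod from a
  // reflection object and querying its access flags. Assumes a hypothetical caller that
  // already holds the mutator lock through a ScopedObjectAccess named "soa" and has a
  // jobject "jlr_method" wrapping a java.lang.reflect.Method or Constructor:
  //
  //   mirror::ArtMethod* method = mirror::ArtMethod::FromReflectedMethod(soa, jlr_method);
  //   if (method->IsStatic() && !method->IsPublic()) {
  //     // e.g. reject or specially handle the method.
  //   }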

  // Returns true if the method is a constructor.
  bool IsConstructor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer.
  bool IsClassInitializer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsFastNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsPreverified() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return (GetAccessFlags() & kAccPreverified) != 0;
  }

  void SetPreverified() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(!IsPreverified());
    SetAccessFlags(GetAccessFlags() | kAccPreverified);
  }

  bool IsPortableCompiled() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return kUsePortableCompiler && ((GetAccessFlags() & kAccPortableCompiled) != 0);
  }

  void SetIsPortableCompiled() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(!IsPortableCompiled());
    SetAccessFlags(GetAccessFlags() | kAccPortableCompiled);
  }

  void ClearIsPortableCompiled() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(IsPortableCompiled());
    SetAccessFlags(GetAccessFlags() & ~kAccPortableCompiled);
  }

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  size_t GetVtableIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }
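
  // Illustrative sketch of how the method index above is typically consumed during virtual
  // dispatch (assumptions: "receiver_class" is the receiver's mirror::Class, the mutator
  // lock is held, and Class exposes its vtable as an ObjectArray<ArtMethod>; all names here
  // are for illustration only, not definitions from this file):
  //
  //   ArtMethod* target = receiver_class->GetVTable()->Get(method->GetVtableIndex());
  //   // "target" is the implementation an invoke-virtual on this receiver would select.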

  void SetMethodIndex(uint16_t new_method_index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_), new_method_index);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_code_item_offset_));
  }

  void SetCodeItemOffset(uint32_t new_code_off) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_code_item_offset_), new_code_off);
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);
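
  // Worked example for the register count (illustrative; the exact accounting lives in the
  // implementation, not in this header): argument registers are counted from the shorty,
  // with 'J' (long) and 'D' (double) taking two 32-bit registers and every other argument
  // type taking one. For a shorty of "VLJI" (void return; reference, long and int
  // arguments) the arguments therefore need 1 + 2 + 1 = 4 registers.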

  uint32_t GetDexMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Not called within a transaction.
    SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_), new_idx);
  }

  ObjectArray<String>* GetDexCacheStrings() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
  }

  ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ObjectArray<ArtMethod>* other_cache)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <bool kWithCheck = true>
  Class* GetDexCacheResolvedType(uint32_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasDexCacheResolvedTypes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ObjectArray<Class>* other_cache)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  EntryPointFromInterpreter* GetEntryPointFromInterpreter()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtr<EntryPointFromInterpreter*, kVerifyFlags>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_));
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtr<false, true, kVerifyFlags>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_),
        entry_point_from_interpreter);
  }

#if defined(ART_USE_PORTABLE_COMPILER)
  static MemberOffset EntryPointFromPortableCompiledCodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, entry_point_from_portable_compiled_code_));
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  const void* GetEntryPointFromPortableCompiledCode() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtr<const void*, kVerifyFlags>(
        EntryPointFromPortableCompiledCodeOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromPortableCompiledCode(const void* entry_point_from_portable_compiled_code)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtr<false, true, kVerifyFlags>(
        EntryPointFromPortableCompiledCodeOffset(), entry_point_from_portable_compiled_code);
  }
#endif

  static MemberOffset EntryPointFromQuickCompiledCodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, entry_point_from_quick_compiled_code_));
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  const void* GetEntryPointFromQuickCompiledCode() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtr<const void*, kVerifyFlags>(EntryPointFromQuickCompiledCodeOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtr<false, true, kVerifyFlags>(
        EntryPointFromQuickCompiledCodeOffset(), entry_point_from_quick_compiled_code);
  }

  uint32_t GetCodeSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinQuickCode(uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromQuickCompiledCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point past-the-end of the code
     * in the case that the last instruction is a call that isn't expected to
     * return. Thus, we check <= code + GetCodeSize().
     *
     * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
     */
    return code <= pc && pc <= code + GetCodeSize();
  }
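
  // Worked example of the bounds check above (numbers are made up for illustration): on a
  // Thumb-2 target where the compiled code starts at 0x4000 with size 0x80, the stored
  // entry point would be 0x4001 (low bit set to indicate the Thumb state), and any return
  // PC in [0x4001, 0x4081] is accepted, including the past-the-end address that follows a
  // call which never returns. EntryPointToCodePointer() below strips that low bit again
  // when the actual start of the code is needed.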

  void AssertPcIsWithinQuickCode(uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

#if defined(ART_USE_PORTABLE_COMPILER)
  uint32_t GetPortableOatCodeOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetPortableOatCodeOffset(uint32_t code_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
#endif
  uint32_t GetQuickOatCodeOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void SetQuickOatCodeOffset(uint32_t code_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static const void* EntryPointToCodePointer(const void* entry_point) ALWAYS_INLINE {
    uintptr_t code = reinterpret_cast<uintptr_t>(entry_point);
    code &= ~0x1;  // TODO: Make this Thumb2 specific.
    return reinterpret_cast<const void*>(code);
  }

  // Actual entry point pointer to compiled oat code or nullptr.
  const void* GetQuickOatEntryPoint() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Actual pointer to compiled oat code or nullptr.
  const void* GetQuickOatCodePointer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
  const uint8_t* GetMappingTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  const uint8_t* GetMappingTable(const void* code_pointer)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
  const uint8_t* GetVmapTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  const uint8_t* GetVmapTable(const void* code_pointer)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const uint8_t* GetNativeGcMap() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_));
  }
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetNativeGcMap(const uint8_t* data) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SetFieldPtr<false, true, kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), data);
  }
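
  // Relationship sketch between GetQuickOatEntryPoint() and GetQuickOatCodePointer() above
  // (illustrative, not normative): the entry point may carry an instruction-set tag such as
  // the Thumb bit, while the code pointer is the untagged start of the compiled code,
  // conceptually
  //
  //   GetQuickOatCodePointer() == EntryPointToCodePointer(GetQuickOatEntryPoint())
  //
  // which is why the mapping/vmap table accessors also accept an explicit code pointer.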

  // When building the oat we need a convenient place to stuff the offset of the native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  uint32_t GetOatNativeGcMapOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <bool kCheckFrameSize = true>
  uint32_t GetFrameSizeInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t result = GetQuickFrameInfo().FrameSizeInBytes();
    if (kCheckFrameSize) {
      DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    }
    return result;
  }

  QuickMethodFrameInfo GetQuickFrameInfo() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  QuickMethodFrameInfo GetQuickFrameInfo(const void* code_pointer)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  size_t GetReturnPcOffsetInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetReturnPcOffsetInBytes(GetFrameSizeInBytes());
  }

  size_t GetReturnPcOffsetInBytes(uint32_t frame_size_in_bytes)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_EQ(frame_size_in_bytes, GetFrameSizeInBytes());
    return frame_size_in_bytes - kPointerSize;
  }

  size_t GetHandleScopeOffsetInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return kPointerSize;
  }

  void RegisterNative(Thread* self, const void* native_method, bool is_fast)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_jni_);
  }

  const void* GetNativeMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetFieldPtr<const void*>(NativeMethodOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetNativeMethod(const void*) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
  // conventions for a method of managed code. Returns false for Proxy methods.
  bool IsRuntimeMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsResolutionMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImtConflictMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uintptr_t NativePcOffset(const uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  uintptr_t NativePcOffset(const uintptr_t pc, const void* quick_entry_point)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc, bool abort_on_failure = true)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
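
  // Illustrative round trip between native and dex PCs (hypothetical stack-walk caller;
  // assumes "return_pc" was read from this method's quick frame and the mutator lock is
  // held):
  //
  //   uint32_t dex_pc = method->ToDexPc(return_pc);       // e.g. for line number lookup
  //   uintptr_t native_pc = method->ToNativePc(dex_pc);    // e.g. to resume at a catch handler
  //   int32_t line = method->GetLineNumFromDexPC(dex_pc);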

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  static uint32_t FindCatchBlock(Handle<ArtMethod> h_this, Handle<Class> exception_type,
                                 uint32_t dex_pc, bool* has_no_move_exception)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClass(Class* java_lang_reflect_ArtMethod);

  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void ResetClass();

  static void VisitRoots(RootCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetShorty() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t unused_length;
    return GetShorty(&unused_length);
  }

  const char* GetShorty(uint32_t* out_length) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const Signature GetSignature() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(uint16_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
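
  // Illustrative note on the shorty format used by GetShorty() and NumArgRegisters()
  // (general dex convention; the example method is hypothetical): the shorty lists the
  // return type first, then one character per argument, with every reference type
  // collapsed to 'L'. A method with signature (ILjava/lang/Object;D)Z therefore has the
  // shorty "ZILD".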

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  HeapReference<Class> declaring_class_;

  // Shortcuts to declaring_class_->dex_cache_ member for fast compiled code access.
  HeapReference<ObjectArray<ArtMethod>> dex_cache_resolved_methods_;

  // Shortcuts to declaring_class_->dex_cache_ member for fast compiled code access.
  HeapReference<ObjectArray<Class>> dex_cache_resolved_types_;

  // Shortcuts to declaring_class_->dex_cache_ member for fast compiled code access.
  HeapReference<ObjectArray<String>> dex_cache_strings_;

  // Method dispatch from the interpreter invokes this pointer which may cause a bridge into
  // compiled code.
  uint64_t entry_point_from_interpreter_;

  // Pointer to JNI function registered to this method, or a function to resolve the JNI function.
  uint64_t entry_point_from_jni_;

  // Method dispatch from portable compiled code invokes this pointer which may cause bridging into
  // quick compiled code or the interpreter.
#if defined(ART_USE_PORTABLE_COMPILER)
  uint64_t entry_point_from_portable_compiled_code_;
#endif

  // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
  // portable compiled code or the interpreter.
  uint64_t entry_point_from_quick_compiled_code_;

  // Pointer to a data structure created by the compiler and used by the garbage collector to
  // determine which registers hold live references to objects within the heap. Keyed by native PC
  // offsets for the quick compiler and dex PCs for the portable.
  uint64_t gc_map_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable.
  uint32_t method_index_;

  static GcRoot<Class> java_lang_reflect_ArtMethod_;

 private:
  ObjectArray<ArtMethod>* GetDexCacheResolvedMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  friend struct art::ArtMethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_ART_METHOD_H_