/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_

#include "array.h"
#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/locks.h"
#include "dex/dex_file_types.h"
#include "gc_root.h"  // Note: must not use -inl here to avoid circular dependency.
#include "object.h"
#include "object_array.h"

namespace art {

namespace linker {
class ImageWriter;
}  // namespace linker

class ArtField;
class ArtMethod;
struct DexCacheOffsets;
class DexFile;
union JValue;
class LinearAlloc;
class ReflectiveValueVisitor;
class Thread;

namespace mirror {

class CallSite;
class Class;
class ClassLoader;
class MethodType;
class String;

template <typename T> struct PACKED(8) DexCachePair {
  GcRoot<T> object;
  uint32_t index;
  // The array is initially [ {0,0}, {0,0}, {0,0} ... ].
  // We maintain the invariant that once a dex cache entry is populated,
  // the pointer is always non-0.
  // Any given entry would thus be:
  //     {non-0, non-0} OR {0,0}
  //
  // It is then generally sufficient to check whether the lookup index matches
  // the stored index (for a lookup index > 0), because if they match the
  // pointer is also non-null.
  //
  // The 0th entry is a special case: its value is either {0,0} (initial state)
  // or {non-0, 0}, which indicates that a valid object is stored at that slot
  // for a dex section id of 0.
  //
  // As an optimization, we want to avoid branching on the object pointer since
  // it's always non-null if the id check succeeds (except for the 0th id).
  // Set the initial state for the 0th entry to {0,1}, which is guaranteed to
  // fail the lookup id == stored id check.
  DexCachePair(ObjPtr<T> object, uint32_t index);
  DexCachePair() : index(0) {}
  DexCachePair(const DexCachePair<T>&) = default;
  DexCachePair& operator=(const DexCachePair<T>&) = default;

  static void Initialize(std::atomic<DexCachePair<T>>* dex_cache);

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_);
};
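// Illustrative sketch of the invariant documented in DexCachePair above (an
// assumption added for exposition; the actual lookup code lives elsewhere in
// the runtime). A reader of a slot only needs to compare indices, never the
// pointer:
//
//   DexCachePair<Class> pair = slots[slot].load(std::memory_order_relaxed);
//   Class* klass = pair.GetObjectForIndex(type_idx);  // Null unless pair.index == type_idx.
//
// Because an empty slot 0 holds {0,1} instead of {0,0}, a lookup for index 0
// against an empty cache fails the index comparison rather than returning a
// null GcRoot that would then need its own null check.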
template <typename T> struct PACKED(2 * __SIZEOF_POINTER__) NativeDexCachePair {
  T* object;
  size_t index;
  // This is similar to DexCachePair except that we're storing a native pointer
  // instead of a GC root. See DexCachePair for the details.
  NativeDexCachePair(T* object, uint32_t index)
      : object(object),
        index(index) {}
  NativeDexCachePair() : object(nullptr), index(0u) {}
  NativeDexCachePair(const NativeDexCachePair<T>&) = default;
  NativeDexCachePair& operator=(const NativeDexCachePair<T>&) = default;

  static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache);

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(object != nullptr);
    return object;
  }
};

using TypeDexCachePair = DexCachePair<Class>;
using TypeDexCacheType = std::atomic<TypeDexCachePair>;

using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;

using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;

// C++ mirror of java.lang.DexCache.
class MANAGED DexCache final : public Object {
 public:
  MIRROR_CLASS("Ljava/lang/DexCache;");

  // Size of java.lang.DexCache.class.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
                "Type dex cache size is not a power of 2.");

  // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheStringCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
                "String dex cache size is not a power of 2.");

  // Size of field dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheFieldCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
                "Field dex cache size is not a power of 2.");

  // Size of method dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheMethodCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodCacheSize),
                "Method dex cache size is not a power of 2.");

  // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
  // to hold.
  static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                "MethodType dex cache size is not a power of 2.");
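  // Illustrative note on the power-of-two requirement above (exposition only;
  // the slot computation itself is an assumption about the -inl/entrypoint
  // code): with a power-of-two capacity, the modulo reduces to a bit mask,
  //
  //   // string_idx below is the raw uint32_t value of a dex::StringIndex.
  //   uint32_t slot = string_idx & (kDexCacheStringCacheSize - 1u);
  //
  // so, for example, string indices 5 and 1029 both map to slot 5 and are told
  // apart by the index stored in the slot's DexCachePair.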
  static constexpr size_t StaticTypeSize() {
    return kDexCacheTypeCacheSize;
  }

  static constexpr size_t StaticStringSize() {
    return kDexCacheStringCacheSize;
  }

  static constexpr size_t StaticArtFieldSize() {
    return kDexCacheFieldCacheSize;
  }

  static constexpr size_t StaticMethodSize() {
    return kDexCacheMethodCacheSize;
  }

  static constexpr size_t StaticMethodTypeSize() {
    return kDexCacheMethodTypeCacheSize;
  }

  // Size of an instance of java.lang.DexCache not including referenced values.
  static constexpr uint32_t InstanceSize() {
    return sizeof(DexCache);
  }

  void Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::dex_lock_);

  // Zero all array references.
  // WARNING: This does not free the memory since it is in LinearAlloc.
  void ResetNativeArrays() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset StringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
  }

  static constexpr MemberOffset PreResolvedStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, preresolved_strings_);
  }

  static constexpr MemberOffset ResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
  }

  static constexpr MemberOffset ResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
  }

  static constexpr MemberOffset ResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
  }

  static constexpr MemberOffset ResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
  }

  static constexpr MemberOffset ResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
  }

  static constexpr MemberOffset NumStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
  }

  static constexpr MemberOffset NumPreResolvedStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_preresolved_strings_);
  }

  static constexpr MemberOffset NumResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
  }

  static constexpr MemberOffset NumResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
  }

  static constexpr MemberOffset NumResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
  }

  static constexpr MemberOffset NumResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
  }
  static constexpr MemberOffset NumResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
  }

  static constexpr size_t PreResolvedStringsAlignment() {
    return alignof(GcRoot<mirror::String>);
  }

  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear the string cached for |string_idx|. Used to undo string intern
  // transactions so that the string is not kept live.
  void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx, ArtMethod* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field)
      REQUIRES_SHARED(Locks::mutator_lock_);

  MethodType* GetResolvedMethodType(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear the method type cached for |proto_idx|. Used to undo method type
  // resolution in aborted transactions so that the method type is not kept live.
  void ClearMethodType(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  // Attempts to bind |call_site_idx| to the call site |resolved|. The caller
  // must use the return value in place of |resolved|, because multiple threads
  // can invoke the bootstrap method concurrently, each producing a call site,
  // and all method handle invocations on the call site must use a single
  // agreed-upon value.
  ObjPtr<CallSite> SetResolvedCallSite(uint32_t call_site_idx, ObjPtr<CallSite> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;
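  // Illustrative call pattern implied by the comment above (a sketch for
  // exposition, not a prescribed sequence): a caller racing to bind a call
  // site must adopt whichever value actually got installed.
  //
  //   ObjPtr<CallSite> candidate = /* result of running the bootstrap method */;
  //   ObjPtr<CallSite> winner = dex_cache->SetResolvedCallSite(call_site_idx, candidate);
  //   // Use `winner` from here on; it may be a call site installed by another thread.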
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset());
  }

  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(StringsOffset(), strings);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset());
  }

  void SetResolvedTypes(TypeDexCacheType* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
  }

  MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset());
  }

  void SetResolvedMethods(MethodDexCacheType* resolved_methods)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
  }

  FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset());
  }

  void SetResolvedFields(FieldDexCacheType* resolved_fields)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  MethodTypeDexCacheType* GetResolvedMethodTypes()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<MethodTypeDexCacheType*, kVerifyFlags>(ResolvedMethodTypesOffset());
  }

  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  GcRoot<CallSite>* GetResolvedCallSites()
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<GcRoot<CallSite>*, kVerifyFlags>(ResolvedCallSitesOffset());
  }

  void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumStringsOffset());
  }
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumPreResolvedStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumPreResolvedStringsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedTypesOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedMethodsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedFieldsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedMethodTypesOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedCallSitesOffset());
  }

  const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
  }

  void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
  }

  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename T>
  static NativeDexCachePair<T> GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
                                             size_t idx);

  template <typename T>
  static void SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
                            size_t idx,
                            NativeDexCachePair<T> pair);
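  // Illustrative sketch of how GetNativePair combines with the slot-index
  // helpers declared below (an assumption about the corresponding -inl
  // implementation, added for exposition): a resolved-field lookup presumably
  // hashes the dex index to a slot and lets the pair's own index check decide
  // hit or miss.
  //
  //   NativeDexCachePair<ArtField> pair =
  //       GetNativePair(GetResolvedFields(), FieldSlotIndex(field_idx));
  //   ArtField* field = pair.GetObjectForIndex(field_idx);  // nullptr on a miss.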
  static size_t PreResolvedStringsSize(size_t num_strings) {
    return sizeof(GcRoot<mirror::String>) * num_strings;
  }

  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_);

  void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Allocate a new array in linear alloc and save it in the given fields.
  template<typename T, size_t kMaxCacheSize>
  T* AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit instance fields of the dex cache as well as its associated arrays.
  template <bool kVisitNativeRoots,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
  void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);

  HeapReference<ClassLoader> class_loader_;
  HeapReference<String> location_;

  uint64_t dex_file_;               // const DexFile*
  uint64_t preresolved_strings_;    // GcRoot<mirror::String>* array with
                                    // num_preresolved_strings_ elements.
  uint64_t resolved_call_sites_;    // GcRoot<CallSite>* array with num_resolved_call_sites_
                                    // elements.
  uint64_t resolved_fields_;        // std::atomic<FieldDexCachePair>*, array with
                                    // num_resolved_fields_ elements.
  uint64_t resolved_method_types_;  // std::atomic<MethodTypeDexCachePair>* array with
                                    // num_resolved_method_types_ elements.
  uint64_t resolved_methods_;       // std::atomic<MethodDexCachePair>*, array with
                                    // num_resolved_methods_ elements.
  uint64_t resolved_types_;         // TypeDexCacheType*, array with num_resolved_types_ elements.
  uint64_t strings_;                // std::atomic<StringDexCachePair>*, array with num_strings_
                                    // elements.

  uint32_t num_preresolved_strings_;    // Number of elements in the preresolved_strings_ array.
  uint32_t num_resolved_call_sites_;    // Number of elements in the resolved_call_sites_ array.
  uint32_t num_resolved_fields_;        // Number of elements in the resolved_fields_ array.
  uint32_t num_resolved_method_types_;  // Number of elements in the resolved_method_types_ array.
  uint32_t num_resolved_methods_;       // Number of elements in the resolved_methods_ array.
  uint32_t num_resolved_types_;         // Number of elements in the resolved_types_ array.
  uint32_t num_strings_;                // Number of elements in the strings_ array.

  friend struct art::DexCacheOffsets;  // for verifying offset information
  friend class linker::ImageWriter;
  friend class Object;  // For VisitReferences
  DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_H_