/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_

#include "dex_cache.h"

#include <atomic>

#include <android-base/logging.h>

#include "art_field.h"
#include "art_method.h"
#include "base/atomic_pair.h"
#include "base/casts.h"
#include "base/enums.h"
#include "class_linker.h"
#include "dex/dex_file.h"
#include "gc_root-inl.h"
#include "linear_alloc.h"
#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "obj_ptr.h"
#include "object-inl.h"
#include "runtime.h"
#include "write_barrier-inl.h"

namespace art {
namespace mirror {

template<typename DexCachePair>
static void InitializeArray(std::atomic<DexCachePair>* array) {
  DexCachePair::Initialize(array);
}

template<typename T>
static void InitializeArray(GcRoot<T>*) {
  // No special initialization is needed.
}

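// Lazily allocates the backing array for one of the dex caches, capped at
// kMaxCacheSize entries, in the class loader's LinearAlloc. Racing threads
// are serialized by dex_cache_lock_; the loser reuses the winner's array.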
template<typename T, size_t kMaxCacheSize>
T* DexCache::AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num) {
  num = std::min<size_t>(num, kMaxCacheSize);
  if (num == 0) {
    return nullptr;
  }
  mirror::DexCache* dex_cache = this;
  if (kUseReadBarrier && Thread::Current()->GetIsGcMarking()) {
    // Several code paths use DexCache without read-barrier for performance.
    // We have to check the "to-space" object here to avoid allocating twice.
    dex_cache = reinterpret_cast<DexCache*>(ReadBarrier::Mark(dex_cache));
  }
  Thread* self = Thread::Current();
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  LinearAlloc* alloc = linker->GetOrCreateAllocatorForClassLoader(GetClassLoader());
  MutexLock mu(self, *Locks::dex_cache_lock_);  // Avoid allocation by multiple threads.
  T* array = dex_cache->GetFieldPtr64<T*>(obj_offset);
  if (array != nullptr) {
    DCHECK(alloc->Contains(array));
    return array;  // Other thread just allocated the array.
  }
  array = reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16)));
  InitializeArray(array);  // Ensure other threads see the array initialized.
  dex_cache->SetField32Volatile<false, false>(num_offset, num);
  dex_cache->SetField64Volatile<false, false>(obj_offset, reinterpret_cast64<uint64_t>(array));
  return array;
}

template <typename T>
inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index)
    : object(object), index(index) {}

template <typename T>
inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
  DexCachePair<T> first_elem;
  first_elem.object = GcRoot<T>(nullptr);
  first_elem.index = InvalidIndexForSlot(0);
  dex_cache[0].store(first_elem, std::memory_order_relaxed);
}

template <typename T>
inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) {
  if (idx != index) {
    return nullptr;
  }
  DCHECK(!object.IsNull());
  return object.Read();
}

template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache) {
  NativeDexCachePair<T> first_elem;
  first_elem.object = nullptr;
  first_elem.index = InvalidIndexForSlot(0);
  DexCache::SetNativePair(dex_cache, 0, first_elem);
}

inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
  const uint32_t vtable_entries = Object::kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

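// The XSlotIndex helpers below all implement the same direct-mapped cache
// scheme: an id is hashed to a slot by taking it modulo the fixed cache size,
// and the id stored in the slot's pair disambiguates collisions
// (GetObjectForIndex returns null on a mismatch).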
inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
  DCHECK_LT(slot_idx, NumStrings());
  return slot_idx;
}

inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  StringDexCacheType* strings = GetStrings();
  if (UNLIKELY(strings == nullptr)) {
    return nullptr;
  }
  return strings[StringSlotIndex(string_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}

inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  StringDexCacheType* strings = GetStrings();
  if (UNLIKELY(strings == nullptr)) {
    strings = AllocArray<StringDexCacheType, kDexCacheStringCacheSize>(
        StringsOffset(), NumStringsOffset(), GetDexFile()->NumStringIds());
  }
  strings[StringSlotIndex(string_idx)].store(
      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = StringSlotIndex(string_idx);
  StringDexCacheType* strings = GetStrings();
  if (UNLIKELY(strings == nullptr)) {
    return;
  }
  StringDexCacheType* slot = &strings[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
  DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
  const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedTypes());
  return slot_idx;
}

inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  // It is theorized that a load acquire is not required since obtaining the resolved class will
  // always have an address dependency or a lock.
  TypeDexCacheType* resolved_types = GetResolvedTypes();
  if (UNLIKELY(resolved_types == nullptr)) {
    return nullptr;
  }
  return resolved_types[TypeSlotIndex(type_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}

inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
  DCHECK(resolved != nullptr);
  DCHECK(resolved->IsResolved()) << resolved->GetStatus();
  TypeDexCacheType* resolved_types = GetResolvedTypes();
  if (UNLIKELY(resolved_types == nullptr)) {
    resolved_types = AllocArray<TypeDexCacheType, kDexCacheTypeCacheSize>(
        ResolvedTypesOffset(), NumResolvedTypesOffset(), GetDexFile()->NumTypeIds());
  }
  // TODO: Add default transaction support.
  // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
  // class but not necessarily seeing the loaded members like the static fields array.
  // See b/32075261.
  resolved_types[TypeSlotIndex(type_idx)].store(
      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  TypeDexCacheType* resolved_types = GetResolvedTypes();
  if (UNLIKELY(resolved_types == nullptr)) {
    return;
  }
  uint32_t slot_idx = TypeSlotIndex(type_idx);
  TypeDexCacheType* slot = &resolved_types[slot_idx];
  // This is racy but should only be called from the single-threaded ImageWriter and tests.
  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(proto_idx.index_, GetDexFile()->NumProtoIds());
  const uint32_t slot_idx = proto_idx.index_ % kDexCacheMethodTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethodTypes());
  return slot_idx;
}

inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
  MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
  if (UNLIKELY(methods == nullptr)) {
    return nullptr;
  }
  return methods[MethodTypeSlotIndex(proto_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
}

inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
  if (UNLIKELY(methods == nullptr)) {
    methods = AllocArray<MethodTypeDexCacheType, kDexCacheMethodTypeCacheSize>(
        ResolvedMethodTypesOffset(), NumResolvedMethodTypesOffset(), GetDexFile()->NumProtoIds());
  }
  methods[MethodTypeSlotIndex(proto_idx)].store(
      MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveMethodType(this, proto_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearMethodType(dex::ProtoIndex proto_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = MethodTypeSlotIndex(proto_idx);
  MethodTypeDexCacheType* slot = &GetResolvedMethodTypes()[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == proto_idx.index_) {
    // Note: the invalid marker must be computed for the slot, not the proto
    // index, to match ClearString and ClearResolvedType above.
    MethodTypeDexCachePair cleared(nullptr,
                                   MethodTypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

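// Unlike the direct-mapped caches above, resolved call sites get one GcRoot
// slot per call site index, and the first resolution wins (see the
// compare-and-set in SetResolvedCallSite below).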
inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
  GcRoot<CallSite>* call_sites = GetResolvedCallSites();
  if (UNLIKELY(call_sites == nullptr)) {
    return nullptr;
  }
  GcRoot<mirror::CallSite>& target = call_sites[call_site_idx];
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  return ref.load(std::memory_order_seq_cst).Read();
}

inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
                                                      ObjPtr<CallSite> call_site) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());

  GcRoot<mirror::CallSite> null_call_site(nullptr);
  GcRoot<mirror::CallSite> candidate(call_site);
  GcRoot<CallSite>* call_sites = GetResolvedCallSites();
  if (UNLIKELY(call_sites == nullptr)) {
    call_sites = AllocArray<GcRoot<CallSite>, std::numeric_limits<size_t>::max()>(
        ResolvedCallSitesOffset(), NumResolvedCallSitesOffset(), GetDexFile()->NumCallSiteIds());
  }
  GcRoot<mirror::CallSite>& target = call_sites[call_site_idx];

  // The first assignment for a given call site wins.
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  if (ref.CompareAndSetStrongSequentiallyConsistent(null_call_site, candidate)) {
    // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
    WriteBarrier::ForEveryFieldWrite(this);
    return call_site;
  } else {
    return target.Read();
  }
}

inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) {
  DCHECK_LT(field_idx, GetDexFile()->NumFieldIds());
  const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize;
  DCHECK_LT(slot_idx, NumResolvedFields());
  return slot_idx;
}

inline ArtField* DexCache::GetResolvedField(uint32_t field_idx) {
  FieldDexCacheType* fields = GetResolvedFields();
  if (UNLIKELY(fields == nullptr)) {
    return nullptr;
  }
  auto pair = GetNativePair(fields, FieldSlotIndex(field_idx));
  return pair.GetObjectForIndex(field_idx);
}

inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field) {
  DCHECK(field != nullptr);
  FieldDexCachePair pair(field, field_idx);
  FieldDexCacheType* fields = GetResolvedFields();
  if (UNLIKELY(fields == nullptr)) {
    fields = AllocArray<FieldDexCacheType, kDexCacheFieldCacheSize>(
        ResolvedFieldsOffset(), NumResolvedFieldsOffset(), GetDexFile()->NumFieldIds());
  }
  SetNativePair(fields, FieldSlotIndex(field_idx), pair);
}

inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
  DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
  const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethods());
  return slot_idx;
}

inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) {
  MethodDexCacheType* methods = GetResolvedMethods();
  if (UNLIKELY(methods == nullptr)) {
    return nullptr;
  }
  auto pair = GetNativePair(methods, MethodSlotIndex(method_idx));
  return pair.GetObjectForIndex(method_idx);
}

inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method) {
  DCHECK(method != nullptr);
  MethodDexCachePair pair(method, method_idx);
  MethodDexCacheType* methods = GetResolvedMethods();
  if (UNLIKELY(methods == nullptr)) {
    methods = AllocArray<MethodDexCacheType, kDexCacheMethodCacheSize>(
        ResolvedMethodsOffset(), NumResolvedMethodsOffset(), GetDexFile()->NumMethodIds());
  }
  SetNativePair(methods, MethodSlotIndex(method_idx), pair);
}

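// Native pairs pack the pointer and the dex index into a single AtomicPair so
// that both words are read and written together; the acquire/release ordering
// guarantees that a reader observing the index also observes the matching
// pointer.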
template <typename T>
NativeDexCachePair<T> DexCache::GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
                                              size_t idx) {
  auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
  AtomicPair<uintptr_t> value = AtomicPairLoadAcquire(&array[idx]);
  return NativeDexCachePair<T>(reinterpret_cast<T*>(value.first), value.second);
}

template <typename T>
void DexCache::SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
                             size_t idx,
                             NativeDexCachePair<T> pair) {
  auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(pair_array);
  AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(pair.object), pair.index);
  AtomicPairStoreRelease(&array[idx], v);
}

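// Visits the GC roots in a DexCachePair array and writes back any reference
// the visitor updated, so that the slot points at the moved object.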
template <typename T,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  // Check both the data pointer and count since the array might be initialized
  // concurrently on another thread, and we might observe just one of the values.
  for (size_t i = 0; pairs != nullptr && i < num_pairs; ++i) {
    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
    // NOTE: We need the "template" keyword here to avoid a compilation
    // failure. GcRoot<T> is a template argument-dependent type and we need to
    // tell the compiler to treat "Read" as a template rather than a field or
    // function. Otherwise, on encountering the "<" token, the compiler would
    // treat "Read" as a field.
    T* const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      pairs[i].store(source, std::memory_order_relaxed);
    }
  }
}

template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
  if (kVisitNativeRoots) {
    VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
        GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);

    VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
        GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);

    VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
        GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);

    GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
    size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>();
    for (size_t i = 0; resolved_call_sites != nullptr && i != num_call_sites; ++i) {
      visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
    }
  }
}

template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline ObjPtr<String> DexCache::GetLocation() {
  return GetFieldObject<String, kVerifyFlags, kReadBarrierOption>(
      OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_