/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_

#include "dex_cache.h"

#include <android-base/logging.h>

#include "art_field.h"
#include "art_method.h"
#include "base/atomic_pair.h"
#include "base/casts.h"
#include "base/enums.h"
#include "class_linker.h"
#include "dex/dex_file.h"
#include "gc_root-inl.h"
#include "linear_alloc-inl.h"
#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "obj_ptr.h"
#include "object-inl.h"
#include "runtime.h"
#include "write_barrier-inl.h"

#include <atomic>

namespace art {
namespace mirror {

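// Helpers for DexCache::AllocArray() below: arrays of DexCachePair/NativeDexCachePair need
// their first slot initialized to the "invalid index" sentinel before they are published,
// while plain arrays of native pointers need no initialization.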
template<typename DexCachePair>
static void InitializeArray(std::atomic<DexCachePair>* array) {
  DexCachePair::Initialize(array);
}

template<typename T>
static void InitializeArray(T*) {
  // Nothing to do.
}

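// Lazily allocates a native array for this dex cache in the appropriate LinearAlloc and
// publishes it at `obj_offset`. The startup linear alloc is used while the runtime is
// starting up (when available), otherwise the class loader's allocator.
// `Locks::dex_cache_lock_` serializes racing allocations: if another thread already
// published an array, that array is returned instead of allocating a second one.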
template<typename T>
T* DexCache::AllocArray(MemberOffset obj_offset, size_t num, LinearAllocKind kind, bool startup) {
  Thread* self = Thread::Current();
  mirror::DexCache* dex_cache = this;
  if (gUseReadBarrier && self->GetIsGcMarking()) {
    // Several code paths use DexCache without read-barrier for performance.
    // We have to check the "to-space" object here to avoid allocating twice.
    dex_cache = reinterpret_cast<DexCache*>(ReadBarrier::Mark(this));
  }
  // DON'T USE 'this' from now on.
  Runtime* runtime = Runtime::Current();
  // Note: in the 1002-notify-startup test, the startup linear alloc can become null
  // concurrently, even if the runtime is marked at startup. Therefore we should only
  // fetch it once here.
  LinearAlloc* startup_linear_alloc = runtime->GetStartupLinearAlloc();
  LinearAlloc* alloc = (startup && startup_linear_alloc != nullptr)
      ? startup_linear_alloc
      : runtime->GetClassLinker()->GetOrCreateAllocatorForClassLoader(GetClassLoader());
  MutexLock mu(self, *Locks::dex_cache_lock_);  // Avoid allocation by multiple threads.
  T* array = dex_cache->GetFieldPtr64<T*>(obj_offset);
  if (array != nullptr) {
    DCHECK(alloc->Contains(array));
    return array;  // Other thread just allocated the array.
  }
  array = reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16), kind));
  InitializeArray(array);  // Ensure other threads see the array initialized.
  dex_cache->SetField64Volatile<false, false>(obj_offset, reinterpret_cast64<uint64_t>(array));
  return array;
}

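// A DexCachePair associates a GC root with the dex file index it was resolved for.
// GetObjectForIndex() returns the cached object only when the stored index matches the
// requested one, and nullptr otherwise.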
template <typename T>
inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index)
    : object(object), index(index) {}

template <typename T>
inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) {
  if (idx != index) {
    return nullptr;
  }
  DCHECK(!object.IsNull());
  return object.Read();
}

template <typename T>
inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
  DexCachePair<T> first_elem;
  first_elem.object = GcRoot<T>(nullptr);
  first_elem.index = InvalidIndexForSlot(0);
  dex_cache[0].store(first_elem, std::memory_order_relaxed);
}

template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache) {
  NativeDexCachePair<T> first_elem;
  first_elem.object = nullptr;
  first_elem.index = InvalidIndexForSlot(0);

  auto* array = reinterpret_cast<std::atomic<AtomicPair<uintptr_t>>*>(dex_cache);
  AtomicPair<uintptr_t> v(reinterpret_cast<size_t>(first_elem.object), first_elem.index);
  AtomicPairStoreRelease(&array[0], v);
}

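// GcRootArray entries are read and written as individual GC roots with relaxed memory order.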
template <typename T>
inline void GcRootArray<T>::Set(uint32_t index, T* value) {
  GcRoot<T> root(value);
  entries_[index].store(root, std::memory_order_relaxed);
}

template <typename T>
inline T* GcRootArray<T>::Get(uint32_t index) {
  return entries_[index].load(std::memory_order_relaxed).Read();
}

inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
  const uint32_t vtable_entries = Object::kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

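// Resolved strings are plain lookups into the string entry array. The setter additionally
// records the resolution when an AOT compilation transaction is active (so it can be rolled
// back) and emits a write barrier so the GC re-scans this dex cache.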
inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  return GetStringsEntry(string_idx.index_);
}

inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  SetStringsEntry(string_idx.index_, resolved.Ptr());
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

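// The Clear* helpers below are only used by the AOT compiler. They reset both the
// GcRootArray entry and the corresponding slot in the pair-based cache, when those arrays
// have been allocated, so the entry reads as unresolved again.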
inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  auto* array = GetStringsArray();
  if (array != nullptr) {
    array->Set(string_idx.index_, nullptr);
  }
  auto* strings = GetStrings();
  if (UNLIKELY(strings == nullptr)) {
    return;
  }
  strings->Clear(string_idx.index_);
}

inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  return GetResolvedTypesEntry(type_idx.index_);
}

inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  auto* array = GetResolvedTypesArray();
  if (array != nullptr) {
    array->Set(type_idx.index_, nullptr);
  }
  auto* resolved_types = GetResolvedTypes();
  if (UNLIKELY(resolved_types == nullptr)) {
    return;
  }
  resolved_types->Clear(type_idx.index_);
}

inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
  return GetResolvedMethodTypesEntry(proto_idx.index_);
}

inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  SetResolvedMethodTypesEntry(proto_idx.index_, resolved);

  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveMethodType(this, proto_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearMethodType(dex::ProtoIndex proto_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  auto* array = GetResolvedMethodTypesArray();
  if (array != nullptr) {
    array->Set(proto_idx.index_, nullptr);
  }
  auto* methods = GetResolvedMethodTypes();
  if (methods == nullptr) {
    return;
  }
  methods->Clear(proto_idx.index_);
}

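// Call sites may be resolved concurrently; the first call site stored for a given index
// wins. SetResolvedCallSite() implements this with a strong compare-and-set against a null
// root and returns whichever call site ended up in the slot.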
inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
  GcRootArray<CallSite>* call_sites = GetResolvedCallSites();
  if (UNLIKELY(call_sites == nullptr)) {
    return nullptr;
  }
  Atomic<GcRoot<mirror::CallSite>>* target = call_sites->GetGcRoot(call_site_idx);
  return target->load(std::memory_order_seq_cst).Read();
}

inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
                                                      ObjPtr<CallSite> call_site) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());

  GcRoot<mirror::CallSite> null_call_site(nullptr);
  GcRoot<mirror::CallSite> candidate(call_site);
  GcRootArray<CallSite>* call_sites = GetResolvedCallSites();
  if (UNLIKELY(call_sites == nullptr)) {
    call_sites = AllocateResolvedCallSites();
  }
  Atomic<GcRoot<mirror::CallSite>>* target = call_sites->GetGcRoot(call_site_idx);

  // The first assignment for a given call site wins.
  if (target->CompareAndSetStrongSequentiallyConsistent(null_call_site, candidate)) {
    // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
    WriteBarrier::ForEveryFieldWrite(this);
    return call_site;
  } else {
    return target->load(std::memory_order_relaxed).Read();
  }
}

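// Resolved fields and methods are native pointers (ArtField* / ArtMethod*), so these
// accessors are plain forwarders to the underlying arrays and need no write barrier.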
inline ArtField* DexCache::GetResolvedField(uint32_t field_idx) {
  return GetResolvedFieldsEntry(field_idx);
}

inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field) {
  SetResolvedFieldsEntry(field_idx, field);
}

inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) {
  return GetResolvedMethodsEntry(method_idx);
}

inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method) {
  SetResolvedMethodsEntry(method_idx, method);
}

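// Visits the GC roots held in an array of DexCachePair entries. If the visitor (e.g. a
// moving GC) updates a root, the whole pair is written back so the slot keeps a consistent
// object/index combination.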
template <ReadBarrierOption kReadBarrierOption,
          typename Visitor,
          typename T>
inline void VisitDexCachePairs(T* array,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  // Check both the data pointer and count since the array might be initialized
  // concurrently on another thread, and we might observe just one of the two values.
  for (size_t i = 0; array != nullptr && i < num_pairs; ++i) {
    auto source = array->GetPair(i);
    // NOTE: We need the "template" keyword here to avoid a compilation
    // failure. GcRoot<T> is a template argument-dependent type and we need to
    // tell the compiler to treat "Read" as a template rather than a field or
    // function. Otherwise, on encountering the "<" token, the compiler would
    // treat "Read" as a field.
    auto const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      array->SetPair(i, source);
    }
  }
}

template <typename Visitor>
void DexCache::VisitDexCachePairRoots(Visitor& visitor,
                                      DexCachePair<Object>* pairs_begin,
                                      DexCachePair<Object>* pairs_end) {
  for (; pairs_begin < pairs_end; pairs_begin++) {
    visitor.VisitRootIfNonNull(pairs_begin->object.AddressWithoutBarrier());
  }
}

template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
  if (kVisitNativeRoots) {
    VisitNativeRoots<kVerifyFlags, kReadBarrierOption>(visitor);
  }
}

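// Visits the native GC roots reachable from this dex cache: the pair-based string, type
// and method-type caches, the resolved call sites, and the GcRootArrays for types, strings
// and method types. The GcRootArrays can be reset during app startup, so thread suspension
// is disallowed while they are being walked.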
template <VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitNativeRoots(const Visitor& visitor) {
  VisitDexCachePairs<kReadBarrierOption, Visitor>(
      GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);

  VisitDexCachePairs<kReadBarrierOption, Visitor>(
      GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);

  VisitDexCachePairs<kReadBarrierOption, Visitor>(
      GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);

  GcRootArray<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
  size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>();
  for (size_t i = 0; resolved_call_sites != nullptr && i != num_call_sites; ++i) {
    visitor.VisitRootIfNonNull(resolved_call_sites->GetGcRootAddress(i)->AddressWithoutBarrier());
  }

  // Dex cache arrays can be reset and cleared during app startup. Assert we do not get
  // suspended to ensure the arrays are not deallocated.
  ScopedAssertNoThreadSuspension soants("dex caches");
  GcRootArray<mirror::Class>* resolved_types = GetResolvedTypesArray<kVerifyFlags>();
  size_t num_resolved_types = NumResolvedTypesArray<kVerifyFlags>();
  for (size_t i = 0; resolved_types != nullptr && i != num_resolved_types; ++i) {
    visitor.VisitRootIfNonNull(resolved_types->GetGcRootAddress(i)->AddressWithoutBarrier());
  }

  GcRootArray<mirror::String>* resolved_strings = GetStringsArray<kVerifyFlags>();
  size_t num_resolved_strings = NumStringsArray<kVerifyFlags>();
  for (size_t i = 0; resolved_strings != nullptr && i != num_resolved_strings; ++i) {
    visitor.VisitRootIfNonNull(resolved_strings->GetGcRootAddress(i)->AddressWithoutBarrier());
  }

  GcRootArray<mirror::MethodType>* resolved_method_types =
      GetResolvedMethodTypesArray<kVerifyFlags>();
  size_t num_resolved_method_types = NumResolvedMethodTypesArray<kVerifyFlags>();
  for (size_t i = 0; resolved_method_types != nullptr && i != num_resolved_method_types; ++i) {
    visitor.VisitRootIfNonNull(resolved_method_types->GetGcRootAddress(i)->AddressWithoutBarrier());
  }
}

template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline ObjPtr<String> DexCache::GetLocation() {
  return GetFieldObject<String, kVerifyFlags, kReadBarrierOption>(
      OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_