/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex_cache-inl.h"

#include "art_method-inl.h"
#include "class_linker.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/heap.h"
#include "linear_alloc.h"
#include "oat_file.h"
#include "object-inl.h"
#include "object.h"
#include "object_array-inl.h"
#include "reflective_value_visitor.h"
#include "runtime.h"
#include "runtime_globals.h"
#include "string.h"
#include "thread.h"
#include "write_barrier.h"

namespace art {
namespace mirror {

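// Allocates a 16-byte-aligned native array of `num` elements of type T from the
// given LinearAlloc, rounding the byte size up to a multiple of 16 via RoundUp.
// Returns nullptr when `num` is zero so callers can skip empty caches entirely.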
template<typename T>
static T* AllocArray(Thread* self, LinearAlloc* alloc, size_t num) {
  if (num == 0) {
    return nullptr;
  }
  return reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16)));
}

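// Allocates and installs the native dex cache arrays (strings, types, methods,
// fields, method types and call sites) for `dex_file` out of `linear_alloc`.
// Must only be called on a freshly created DexCache whose native fields are
// still null, as enforced by the DCHECKs below.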
void DexCache::InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc) {
  DCHECK(GetDexFile() == nullptr);
  DCHECK(GetStrings() == nullptr);
  DCHECK(GetResolvedTypes() == nullptr);
  DCHECK(GetResolvedMethods() == nullptr);
  DCHECK(GetResolvedFields() == nullptr);
  DCHECK(GetResolvedMethodTypes() == nullptr);
  DCHECK(GetResolvedCallSites() == nullptr);

  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  Thread* self = Thread::Current();

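  // Except for call sites, the caches are capped at fixed sizes, so large dex
  // files share slots; the index stored alongside each entry is what
  // distinguishes a hit from a colliding entry. Call sites always get one
  // slot per CallSiteId.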
  size_t num_strings = std::min<size_t>(kDexCacheStringCacheSize, dex_file->NumStringIds());
  size_t num_types = std::min<size_t>(kDexCacheTypeCacheSize, dex_file->NumTypeIds());
  size_t num_fields = std::min<size_t>(kDexCacheFieldCacheSize, dex_file->NumFieldIds());
  size_t num_methods = std::min<size_t>(kDexCacheMethodCacheSize, dex_file->NumMethodIds());
  size_t num_method_types = std::min<size_t>(kDexCacheMethodTypeCacheSize, dex_file->NumProtoIds());
  size_t num_call_sites = dex_file->NumCallSiteIds();  // Full size.

  static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8.");
  StringDexCacheType* strings =
      AllocArray<StringDexCacheType>(self, linear_alloc, num_strings);
  TypeDexCacheType* types =
      AllocArray<TypeDexCacheType>(self, linear_alloc, num_types);
  MethodDexCacheType* methods =
      AllocArray<MethodDexCacheType>(self, linear_alloc, num_methods);
  FieldDexCacheType* fields =
      AllocArray<FieldDexCacheType>(self, linear_alloc, num_fields);
  MethodTypeDexCacheType* method_types =
      AllocArray<MethodTypeDexCacheType>(self, linear_alloc, num_method_types);
  GcRoot<mirror::CallSite>* call_sites =
      AllocArray<GcRoot<CallSite>>(self, linear_alloc, num_call_sites);

  DCHECK_ALIGNED(types, alignof(StringDexCacheType)) <<
                 "Expected StringsOffset() to align to StringDexCacheType.";
  DCHECK_ALIGNED(strings, alignof(StringDexCacheType)) <<
                 "Expected strings to align to StringDexCacheType.";
  static_assert(alignof(StringDexCacheType) == 8u,
                "Expected StringDexCacheType to have align of 8.");
  if (kIsDebugBuild) {
    // Consistency check to make sure all the dex cache arrays are empty. b/28992179
    for (size_t i = 0; i < num_strings; ++i) {
      CHECK_EQ(strings[i].load(std::memory_order_relaxed).index, 0u);
      CHECK(strings[i].load(std::memory_order_relaxed).object.IsNull());
    }
    for (size_t i = 0; i < num_types; ++i) {
      CHECK_EQ(types[i].load(std::memory_order_relaxed).index, 0u);
      CHECK(types[i].load(std::memory_order_relaxed).object.IsNull());
    }
    for (size_t i = 0; i < num_methods; ++i) {
      CHECK_EQ(GetNativePair(methods, i).index, 0u);
      CHECK(GetNativePair(methods, i).object == nullptr);
    }
    for (size_t i = 0; i < num_fields; ++i) {
      CHECK_EQ(GetNativePair(fields, i).index, 0u);
      CHECK(GetNativePair(fields, i).object == nullptr);
    }
    for (size_t i = 0; i < num_method_types; ++i) {
      CHECK_EQ(method_types[i].load(std::memory_order_relaxed).index, 0u);
      CHECK(method_types[i].load(std::memory_order_relaxed).object.IsNull());
    }
    for (size_t i = 0; i < dex_file->NumCallSiteIds(); ++i) {
      CHECK(call_sites[i].IsNull());
    }
  }
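  // Initialize() stamps slot 0 of each non-empty pair array with its
  // invalid-index sentinel, so that a zero-filled slot 0 is not mistaken for
  // "index 0 resolved to null" (see DexCachePair in dex_cache.h).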
  if (strings != nullptr) {
    mirror::StringDexCachePair::Initialize(strings);
  }
  if (types != nullptr) {
    mirror::TypeDexCachePair::Initialize(types);
  }
  if (fields != nullptr) {
    mirror::FieldDexCachePair::Initialize(fields);
  }
  if (methods != nullptr) {
    mirror::MethodDexCachePair::Initialize(methods);
  }
  if (method_types != nullptr) {
    mirror::MethodTypeDexCachePair::Initialize(method_types);
  }
  SetDexFile(dex_file);
  SetNativeArrays(strings,
                  num_strings,
                  types,
                  num_types,
                  methods,
                  num_methods,
                  fields,
                  num_fields,
                  method_types,
                  num_method_types,
                  call_sites,
                  num_call_sites);
}

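// Clears the dex file pointer and detaches all native arrays. The backing
// memory stays owned by its LinearAlloc and is not freed here.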
void DexCache::ResetNativeFields() {
  SetDexFile(nullptr);
  SetNativeArrays(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0);
}

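// Lets `visitor` inspect and possibly replace every resolved ArtField and
// ArtMethod in the cache. Entries the visitor nulls out are reset to the
// invalid-index sentinel for their slot; if anything was updated, a write
// barrier is emitted for this object.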
void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) {
  bool wrote = false;
  for (size_t i = 0; i < NumResolvedFields(); i++) {
    auto pair(GetNativePair(GetResolvedFields(), i));
    if (pair.index == FieldDexCachePair::InvalidIndexForSlot(i)) {
      continue;
    }
    ArtField* new_val = visitor->VisitField(
        pair.object, DexCacheSourceInfo(kSourceDexCacheResolvedField, pair.index, this));
    if (UNLIKELY(new_val != pair.object)) {
      if (new_val == nullptr) {
        pair = FieldDexCachePair(nullptr, FieldDexCachePair::InvalidIndexForSlot(i));
      } else {
        pair.object = new_val;
      }
      SetNativePair(GetResolvedFields(), i, pair);
      wrote = true;
    }
  }
  for (size_t i = 0; i < NumResolvedMethods(); i++) {
    auto pair(GetNativePair(GetResolvedMethods(), i));
    if (pair.index == MethodDexCachePair::InvalidIndexForSlot(i)) {
      continue;
    }
    ArtMethod* new_val = visitor->VisitMethod(
        pair.object, DexCacheSourceInfo(kSourceDexCacheResolvedMethod, pair.index, this));
    if (UNLIKELY(new_val != pair.object)) {
      if (new_val == nullptr) {
        pair = MethodDexCachePair(nullptr, MethodDexCachePair::InvalidIndexForSlot(i));
      } else {
        pair.object = new_val;
      }
      SetNativePair(GetResolvedMethods(), i, pair);
      wrote = true;
    }
  }
  if (wrote) {
    WriteBarrier::ForEveryFieldWrite(this);
  }
}

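// Allocates the pre-resolved strings array, sized to the full NumStringIds()
// of the dex file (unlike the capped caches above). Returns false if the
// LinearAlloc allocation fails, and true otherwise, including when the dex
// file has no strings at all.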
bool DexCache::AddPreResolvedStringsArray() {
  DCHECK_EQ(NumPreResolvedStrings(), 0u);
  Thread* const self = Thread::Current();
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  const size_t num_strings = GetDexFile()->NumStringIds();
  if (num_strings != 0) {
    GcRoot<mirror::String>* strings =
        linear_alloc->AllocArray<GcRoot<mirror::String>>(self, num_strings);
    if (strings == nullptr) {
      // Failed to allocate pre-resolved string array (probably due to address fragmentation), bail.
      return false;
    }
    SetField32<false>(NumPreResolvedStringsOffset(), num_strings);

    CHECK(strings != nullptr);
    SetPreResolvedStrings(strings);
    for (size_t i = 0; i < GetDexFile()->NumStringIds(); ++i) {
      CHECK(GetPreResolvedStrings()[i].Read() == nullptr);
    }
  }
  return true;
}

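// Installs the native arrays and their element counts on this DexCache. Each
// count must be non-zero exactly when the matching pointer is non-null.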
void DexCache::SetNativeArrays(StringDexCacheType* strings,
                               uint32_t num_strings,
                               TypeDexCacheType* resolved_types,
                               uint32_t num_resolved_types,
                               MethodDexCacheType* resolved_methods,
                               uint32_t num_resolved_methods,
                               FieldDexCacheType* resolved_fields,
                               uint32_t num_resolved_fields,
                               MethodTypeDexCacheType* resolved_method_types,
                               uint32_t num_resolved_method_types,
                               GcRoot<CallSite>* resolved_call_sites,
                               uint32_t num_resolved_call_sites) {
  CHECK_EQ(num_strings != 0u, strings != nullptr);
  CHECK_EQ(num_resolved_types != 0u, resolved_types != nullptr);
  CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr);
  CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr);
  CHECK_EQ(num_resolved_method_types != 0u, resolved_method_types != nullptr);
  CHECK_EQ(num_resolved_call_sites != 0u, resolved_call_sites != nullptr);
  SetStrings(strings);
  SetResolvedTypes(resolved_types);
  SetResolvedMethods(resolved_methods);
  SetResolvedFields(resolved_fields);
  SetResolvedMethodTypes(resolved_method_types);
  SetResolvedCallSites(resolved_call_sites);
  SetField32<false>(NumStringsOffset(), num_strings);
  SetField32<false>(NumResolvedTypesOffset(), num_resolved_types);
  SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods);
  SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields);
  SetField32<false>(NumResolvedMethodTypesOffset(), num_resolved_method_types);
  SetField32<false>(NumResolvedCallSitesOffset(), num_resolved_call_sites);
}

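// Stores the dex file's location string (typically the path of the .dex/.apk
// it was loaded from) in the managed location_ field.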
void DexCache::SetLocation(ObjPtr<mirror::String> location) {
  SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_), location);
}

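// Records the owning class loader in the managed class_loader_ field.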
void DexCache::SetClassLoader(ObjPtr<ClassLoader> class_loader) {
  SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, class_loader_), class_loader);
}

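// Fallback for targets without inline 16-byte atomics (everything except
// arm64 and x86-64 here): the wide conversion-pair accesses are serialized
// through a process-wide mutex so the 128-bit loads and stores stay atomic
// with respect to each other.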
#if !defined(__aarch64__) && !defined(__x86_64__)
static pthread_mutex_t dex_cache_slow_atomic_mutex = PTHREAD_MUTEX_INITIALIZER;

DexCache::ConversionPair64 DexCache::AtomicLoadRelaxed16B(std::atomic<ConversionPair64>* target) {
  pthread_mutex_lock(&dex_cache_slow_atomic_mutex);
  DexCache::ConversionPair64 value = *reinterpret_cast<ConversionPair64*>(target);
  pthread_mutex_unlock(&dex_cache_slow_atomic_mutex);
  return value;
}

void DexCache::AtomicStoreRelease16B(std::atomic<ConversionPair64>* target,
                                     ConversionPair64 value) {
  pthread_mutex_lock(&dex_cache_slow_atomic_mutex);
  *reinterpret_cast<ConversionPair64*>(target) = value;
  pthread_mutex_unlock(&dex_cache_slow_atomic_mutex);
}
#endif

}  // namespace mirror
}  // namespace art