• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
17 #ifndef ART_RUNTIME_CLASS_TABLE_H_
18 #define ART_RUNTIME_CLASS_TABLE_H_
19 
20 #include <string>
21 #include <utility>
22 #include <vector>
23 
24 #include "base/allocator.h"
25 #include "base/hash_set.h"
26 #include "base/macros.h"
27 #include "base/mutex.h"
28 #include "gc_root.h"
29 #include "obj_ptr.h"
30 
31 namespace art {
32 
33 class OatFile;
34 
// Forward declarations to avoid pulling in linker and mirror headers.
namespace linker {
class ImageWriter;
class OatWriter;
}  // namespace linker

namespace mirror {
class Class;
class ClassLoader;
class Object;
}  // namespace mirror
48 
49 // Each loader has a ClassTable
50 class ClassTable {
51  public:
52   class TableSlot {
53    public:
TableSlot()54     TableSlot() : data_(0u) {}
55 
TableSlot(const TableSlot & copy)56     TableSlot(const TableSlot& copy) : data_(copy.data_.load(std::memory_order_relaxed)) {}
57 
58     explicit TableSlot(ObjPtr<mirror::Class> klass);
59 
60     TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);
61 
62     TableSlot& operator=(const TableSlot& copy) {
63       data_.store(copy.data_.load(std::memory_order_relaxed), std::memory_order_relaxed);
64       return *this;
65     }
66 
67     bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_);
68 
Hash()69     uint32_t Hash() const {
70       return MaskHash(data_.load(std::memory_order_relaxed));
71     }
72 
MaskHash(uint32_t hash)73     static uint32_t MaskHash(uint32_t hash) {
74       return hash & kHashMask;
75     }
76 
MaskedHashEquals(uint32_t other)77     bool MaskedHashEquals(uint32_t other) const {
78       return MaskHash(other) == Hash();
79     }
80 
81     template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
82     ObjPtr<mirror::Class> Read() const REQUIRES_SHARED(Locks::mutator_lock_);
83 
84     // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
85     template<typename Visitor>
86     void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;
87 
88    private:
89     // Extract a raw pointer from an address.
90     static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
91         REQUIRES_SHARED(Locks::mutator_lock_);
92 
93     static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
94         REQUIRES_SHARED(Locks::mutator_lock_);
95 
96     // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
97     mutable Atomic<uint32_t> data_;
98     static constexpr uint32_t kHashMask = kObjectAlignment - 1;
99   };
100 
101   using DescriptorHashPair = std::pair<const char*, uint32_t>;
102 
103   class ClassDescriptorHash {
104    public:
105     // uint32_t for cross compilation.
106     uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
107     // uint32_t for cross compilation.
108     uint32_t operator()(const DescriptorHashPair& pair) const NO_THREAD_SAFETY_ANALYSIS;
109   };
110 
111   class ClassDescriptorEquals {
112    public:
113     // Same class loader and descriptor.
114     bool operator()(const TableSlot& a, const TableSlot& b) const
115         NO_THREAD_SAFETY_ANALYSIS;
116     // Same descriptor.
117     bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
118         NO_THREAD_SAFETY_ANALYSIS;
119   };
120 
121   class TableSlotEmptyFn {
122    public:
MakeEmpty(TableSlot & item)123     void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
124       item = TableSlot();
125       DCHECK(IsEmpty(item));
126     }
IsEmpty(const TableSlot & item)127     bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
128       return item.IsNull();
129     }
130   };
131 
132   // Hash set that hashes class descriptor, and compares descriptors and class loaders. Results
133   // should be compared for a matching class descriptor and class loader.
134   using ClassSet = HashSet<TableSlot,
135                            TableSlotEmptyFn,
136                            ClassDescriptorHash,
137                            ClassDescriptorEquals,
138                            TrackingAllocator<TableSlot, kAllocatorTagClassTable>>;
139 
140   ClassTable();
141 
142   // Freeze the current class tables by allocating a new table and never updating or modifying the
143   // existing table. This helps prevents dirty pages after caused by inserting after zygote fork.
144   void FreezeSnapshot()
145       REQUIRES(!lock_)
146       REQUIRES_SHARED(Locks::mutator_lock_);
147 
148   // Returns the number of classes in previous snapshots defined by `defining_loader`.
149   size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
150       REQUIRES(!lock_)
151       REQUIRES_SHARED(Locks::mutator_lock_);
152 
153   // Returns all off the classes in the lastest snapshot defined by `defining_loader`.
154   size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
155       REQUIRES(!lock_)
156       REQUIRES_SHARED(Locks::mutator_lock_);
157 
158   // Returns the number of classes in previous snapshots no matter the defining loader.
159   size_t NumReferencedZygoteClasses() const
160       REQUIRES(!lock_)
161       REQUIRES_SHARED(Locks::mutator_lock_);
162 
163   // Returns all off the classes in the lastest snapshot no matter the defining loader.
164   size_t NumReferencedNonZygoteClasses() const
165       REQUIRES(!lock_)
166       REQUIRES_SHARED(Locks::mutator_lock_);
167 
168   // Update a class in the table with the new class. Returns the existing class which was replaced.
169   ObjPtr<mirror::Class> UpdateClass(const char* descriptor,
170                                     ObjPtr<mirror::Class> new_klass,
171                                     size_t hash)
172       REQUIRES(!lock_)
173       REQUIRES_SHARED(Locks::mutator_lock_);
174 
175   // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
176   template<class Visitor>
177   void VisitRoots(Visitor& visitor)
178       NO_THREAD_SAFETY_ANALYSIS
179       REQUIRES(!lock_)
180       REQUIRES_SHARED(Locks::mutator_lock_);
181 
182   template<class Visitor>
183   void VisitRoots(const Visitor& visitor)
184       NO_THREAD_SAFETY_ANALYSIS
185       REQUIRES(!lock_)
186       REQUIRES_SHARED(Locks::mutator_lock_);
187 
188   // Stops visit if the visitor returns false.
189   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
190   bool Visit(Visitor& visitor)
191       REQUIRES(!lock_)
192       REQUIRES_SHARED(Locks::mutator_lock_);
193   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
194   bool Visit(const Visitor& visitor)
195       REQUIRES(!lock_)
196       REQUIRES_SHARED(Locks::mutator_lock_);
197 
198   // Return the first class that matches the descriptor. Returns null if there are none.
199   ObjPtr<mirror::Class> Lookup(const char* descriptor, size_t hash)
200       REQUIRES(!lock_)
201       REQUIRES_SHARED(Locks::mutator_lock_);
202 
203   // Return the first class that matches the descriptor of klass. Returns null if there are none.
204   // Used for tests and debug-build checks.
205   ObjPtr<mirror::Class> LookupByDescriptor(ObjPtr<mirror::Class> klass)
206       REQUIRES(!lock_)
207       REQUIRES_SHARED(Locks::mutator_lock_);
208 
209   void Insert(ObjPtr<mirror::Class> klass)
210       REQUIRES(!lock_)
211       REQUIRES_SHARED(Locks::mutator_lock_);
212 
213   void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
214       REQUIRES(!lock_)
215       REQUIRES_SHARED(Locks::mutator_lock_);
216 
217   // Return true if we inserted the strong root, false if it already exists.
218   bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
219       REQUIRES(!lock_)
220       REQUIRES_SHARED(Locks::mutator_lock_);
221 
222   // Return true if we inserted the oat file, false if it already exists.
223   bool InsertOatFile(const OatFile* oat_file)
224       REQUIRES(!lock_)
225       REQUIRES_SHARED(Locks::mutator_lock_);
226 
227   // Read a table from ptr and put it at the front of the class set.
228   size_t ReadFromMemory(uint8_t* ptr)
229       REQUIRES(!lock_)
230       REQUIRES_SHARED(Locks::mutator_lock_);
231 
232   // Add a class set to the front of classes.
233   void AddClassSet(ClassSet&& set)
234       REQUIRES(!lock_)
235       REQUIRES_SHARED(Locks::mutator_lock_);
236 
237   // Clear strong roots (other than classes themselves).
238   void ClearStrongRoots()
239       REQUIRES(!lock_)
240       REQUIRES_SHARED(Locks::mutator_lock_);
241 
242   // Filter strong roots (other than classes themselves).
243   template <typename Filter>
244   void RemoveStrongRoots(const Filter& filter)
245       REQUIRES(!lock_)
246       REQUIRES_SHARED(Locks::mutator_lock_);
247 
GetLock()248   ReaderWriterMutex& GetLock() {
249     return lock_;
250   }
251 
252  private:
253   size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
254                                     const ClassSet& set) const
255       REQUIRES(lock_)
256       REQUIRES_SHARED(Locks::mutator_lock_);
257 
258   // Return true if we inserted the oat file, false if it already exists.
259   bool InsertOatFileLocked(const OatFile* oat_file)
260       REQUIRES(lock_)
261       REQUIRES_SHARED(Locks::mutator_lock_);
262 
263   // Lock to guard inserting and removing.
264   mutable ReaderWriterMutex lock_;
265   // We have a vector to help prevent dirty pages after the zygote forks by calling FreezeSnapshot.
266   std::vector<ClassSet> classes_ GUARDED_BY(lock_);
267   // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
268   // loader which may not be owned by the class loader must be held strongly live. Also dex caches
269   // are held live to prevent them being unloading once they have classes in them.
270   std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
271   // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
272   std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);
273 
274   friend class linker::ImageWriter;  // for InsertWithoutLocks.
275 };
276 
277 }  // namespace art
278 
279 #endif  // ART_RUNTIME_CLASS_TABLE_H_
280