/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_CLASS_TABLE_H_
#define ART_RUNTIME_CLASS_TABLE_H_

#include <string>
#include <utility>
#include <vector>

#include "base/gc_visited_arena_pool.h"
#include "base/hash_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "obj_ptr.h"

namespace art HIDDEN {

class OatFile;

namespace linker {
class ImageWriter;
class OatWriter;
}  // namespace linker

namespace mirror {
class Class;
class ClassLoader;
class Object;
}  // namespace mirror

// Each class loader has a ClassTable.
class ClassTable {
 public:
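  // A slot in the class set: packs the class pointer GcRoot and the low bits of the class
  // descriptor hash into a single 32-bit word (see `data_` below).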
  class TableSlot {
   public:
    TableSlot() : data_(0u) {}

    TableSlot(const TableSlot& copy) : data_(copy.data_.load(std::memory_order_relaxed)) {}

    explicit TableSlot(ObjPtr<mirror::Class> klass);

    TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);
    TableSlot(uint32_t ptr, uint32_t descriptor_hash);

    TableSlot& operator=(const TableSlot& copy) {
      data_.store(copy.data_.load(std::memory_order_relaxed), std::memory_order_relaxed);
      return *this;
    }

    uint32_t Data() const {
      return data_.load(std::memory_order_relaxed);
    }

    bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_);

    uint32_t Hash() const {
      return MaskHash(data_.load(std::memory_order_relaxed));
    }

    uint32_t NonHashData() const {
      return RemoveHash(Data());
    }

    static uint32_t RemoveHash(uint32_t hash) {
      return hash & ~kHashMask;
    }

    static uint32_t MaskHash(uint32_t hash) {
      return hash & kHashMask;
    }

    bool MaskedHashEquals(uint32_t other) const {
      return MaskHash(other) == Hash();
    }

    template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
    ObjPtr<mirror::Class> Read() const REQUIRES_SHARED(Locks::mutator_lock_);

    // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
    template<typename Visitor>
    void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;

    template<typename Visitor>
    class ClassAndRootVisitor;

   private:
    // Extract a raw pointer from an address.
    static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
        REQUIRES_SHARED(Locks::mutator_lock_);

    static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
        REQUIRES_SHARED(Locks::mutator_lock_);

    // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
    mutable Atomic<uint32_t> data_;
    static constexpr uint32_t kHashMask = kObjectAlignment - 1;
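    // For example, with 8-byte object alignment (kObjectAlignment == 8, assumed here only for
    // illustration), kHashMask is 0x7: the low 3 bits of `data_` hold MaskHash(descriptor_hash)
    // and the remaining bits hold the kObjectAlignment-aligned class pointer.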
  };

  using DescriptorHashPair = std::pair<const char*, uint32_t>;

  class ClassDescriptorHash {
   public:
    // uint32_t for cross compilation.
    uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
    // uint32_t for cross compilation.
    uint32_t operator()(const DescriptorHashPair& pair) const NO_THREAD_SAFETY_ANALYSIS;
  };

  class ClassDescriptorEquals {
   public:
    // Same class loader and descriptor.
    bool operator()(const TableSlot& a, const TableSlot& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // Same descriptor.
    bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
        NO_THREAD_SAFETY_ANALYSIS;
  };

  class TableSlotEmptyFn {
   public:
    void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      item = TableSlot();
      DCHECK(IsEmpty(item));
    }
    bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      return item.IsNull();
    }
  };

  // Hash set that hashes on the class descriptor and compares entries by descriptor and class
  // loader. Results of descriptor-only lookups should still be checked for a matching class
  // descriptor and class loader.
  using ClassSet = HashSet<TableSlot,
                           TableSlotEmptyFn,
                           ClassDescriptorHash,
                           ClassDescriptorEquals,
                           GcRootArenaAllocator<TableSlot, kAllocatorTagClassTable>>;

  EXPORT ClassTable();

  // Freeze the current class tables by allocating a new table and never updating or modifying the
  // existing table. This helps prevent dirty pages caused by inserting new classes after the
  // zygote fork.
  void FreezeSnapshot()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots defined by `defining_loader`.
  size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot defined by `defining_loader`.
  size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots regardless of the defining loader.
  EXPORT size_t NumReferencedZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot regardless of the defining loader.
  size_t NumReferencedNonZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of class-sets in the class table.
  size_t Size() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update a class in the table with the new class. Returns the existing class which was replaced.
  ObjPtr<mirror::Class> UpdateClass(ObjPtr<mirror::Class> new_klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
  template <class Visitor>
  void VisitRoots(Visitor& visitor, bool skip_classes = false) NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  template <class Visitor>
  void VisitRoots(const Visitor& visitor, bool skip_classes = false) NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class Visitor>
  void VisitClassesAndRoots(Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit classes in those class-sets that satisfy `cond`.
  template <class Condition, class Visitor>
  void VisitClassesIfConditionMet(Condition& cond, Visitor& visitor) REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Stops the visit if the visitor returns false.
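  //
  // Illustrative sketch (not from this header): the visitor is any callable taking an
  // ObjPtr<mirror::Class> and returning bool, e.g.
  //
  //   class_table->Visit([](ObjPtr<mirror::Class> klass)
  //                          REQUIRES_SHARED(Locks::mutator_lock_) {
  //     LOG(INFO) << klass->PrettyDescriptor();  // PrettyDescriptor() assumed for illustration.
  //     return true;  // Keep visiting; returning false stops the visit early.
  //   });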
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  bool Visit(Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  bool Visit(const Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor. Returns null if there are none.
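  //
  // Illustrative sketch (not from this header): `hash` is the hash of `descriptor`, assumed here
  // to be computed with something like ComputeModifiedUtf8Hash(), e.g.
  //
  //   const char* descriptor = "Ljava/lang/Object;";
  //   ObjPtr<mirror::Class> klass =
  //       class_table->Lookup(descriptor, ComputeModifiedUtf8Hash(descriptor));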
  ObjPtr<mirror::Class> Lookup(const char* descriptor, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor of klass. Returns null if there are none.
  // Used for tests and debug-build checks.
  ObjPtr<mirror::Class> LookupByDescriptor(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

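  // Insert `klass` into the table. For a pre-computed descriptor hash, see InsertWithHash() below.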
  void Insert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

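  // Insert `klass` using the given pre-computed descriptor hash.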
  void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the strong root, false if it already exists.
  bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFile(const OatFile* oat_file)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read a table from `ptr` and put it at the front of the class sets.
  EXPORT size_t ReadFromMemory(uint8_t* ptr)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Add a class set to the front of classes.
  void AddClassSet(ClassSet&& set)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear strong roots (other than classes themselves).
  void ClearStrongRoots()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Filter strong roots (other than classes themselves).
  template <typename Filter>
  void RemoveStrongRoots(const Filter& filter)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

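  // Return the lock guarding this table (see `lock_` below).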
  ReaderWriterMutex& GetLock() {
    return lock_;
  }

 private:
  size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
                                    const ClassSet& set) const
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFileLocked(const OatFile* oat_file)
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lock to guard inserting and removing.
  mutable ReaderWriterMutex lock_;
  // We keep a vector of class sets so that FreezeSnapshot can leave earlier sets untouched, which
  // helps prevent dirty pages after the zygote forks.
  std::vector<ClassSet> classes_ GUARDED_BY(lock_);
  // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
  // loader, which may not be owned by the class loader, must be held strongly live. Dex caches
  // are also held live to prevent them from being unloaded once they have classes in them.
  std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
  // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
  std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);

  friend class linker::ImageWriter;  // for InsertWithoutLocks.
};

}  // namespace art

#endif  // ART_RUNTIME_CLASS_TABLE_H_