/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_CLASS_TABLE_H_
#define ART_RUNTIME_CLASS_TABLE_H_

#include <string>
#include <utility>
#include <vector>

#include "base/allocator.h"
#include "base/hash_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "obj_ptr.h"

namespace art {

class OatFile;

namespace mirror {
  class Class;
  class ClassLoader;
  class Object;
}  // namespace mirror

// Each loader has a ClassTable
class ClassTable {
 public:
  class TableSlot {
   public:
    TableSlot() : data_(0u) {}

    TableSlot(const TableSlot& copy) : data_(copy.data_.LoadRelaxed()) {}

    explicit TableSlot(ObjPtr<mirror::Class> klass);

    TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);

    TableSlot& operator=(const TableSlot& copy) {
      data_.StoreRelaxed(copy.data_.LoadRelaxed());
      return *this;
    }

    bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_) {
      return Read<kWithoutReadBarrier>() == nullptr;
    }

    uint32_t Hash() const {
      return MaskHash(data_.LoadRelaxed());
    }

    static uint32_t MaskHash(uint32_t hash) {
      return hash & kHashMask;
    }

    bool MaskedHashEquals(uint32_t other) const {
      return MaskHash(other) == Hash();
    }

    static uint32_t HashDescriptor(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_);

    template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
    mirror::Class* Read() const REQUIRES_SHARED(Locks::mutator_lock_);

    // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
    template<typename Visitor>
    void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;

   private:
    // Extract a raw pointer from an address.
    static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
        REQUIRES_SHARED(Locks::mutator_lock_);

    static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
        REQUIRES_SHARED(Locks::mutator_lock_);

    // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
    mutable Atomic<uint32_t> data_;
    static const uint32_t kHashMask = kObjectAlignment - 1;
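    // Illustrative sketch of the packing (the authoritative logic lives in Encode() and
    // ExtractPtr()): classes are kObjectAlignment-aligned, so the low bits of the pointer
    // are always zero and can carry the masked descriptor hash instead, roughly:
    //   data_ = Encode(klass, MaskHash(descriptor_hash));  // pointer bits | masked hash bits
    //   klass = ExtractPtr(data_);                         // pointer with the kHashMask bits cleared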
  };

  using DescriptorHashPair = std::pair<const char*, uint32_t>;

  class ClassDescriptorHashEquals {
   public:
    // uint32_t for cross compilation.
    uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
    // Same class loader and descriptor.
    bool operator()(const TableSlot& a, const TableSlot& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // Same descriptor.
    bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // uint32_t for cross compilation.
    uint32_t operator()(const DescriptorHashPair& pair) const NO_THREAD_SAFETY_ANALYSIS;
  };

  class TableSlotEmptyFn {
   public:
    void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      item = TableSlot();
      DCHECK(IsEmpty(item));
    }
    bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      return item.IsNull();
    }
  };

  // Hash set that hashes class descriptor, and compares descriptors and class loaders. Results
  // should be compared for a matching class descriptor and class loader.
  typedef HashSet<TableSlot,
                  TableSlotEmptyFn,
                  ClassDescriptorHashEquals,
                  ClassDescriptorHashEquals,
                  TrackingAllocator<TableSlot, kAllocatorTagClassTable>> ClassSet;

  ClassTable();

  // Used by image writer for checking.
  bool Contains(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Freeze the current class tables by allocating a new table and never updating or modifying the
  // existing table. This helps prevent dirty pages caused by inserting after the zygote fork.
  void FreezeSnapshot()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots defined by `defining_loader`.
  size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot defined by `defining_loader`.
  size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots no matter the defining loader.
  size_t NumReferencedZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot no matter the defining loader.
  size_t NumReferencedNonZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update a class in the table with the new class. Returns the existing class which was replaced.
  mirror::Class* UpdateClass(const char* descriptor, mirror::Class* new_klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
  template<class Visitor>
  void VisitRoots(Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class Visitor>
  void VisitRoots(const Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Stops visiting if the visitor returns false.
  template <typename Visitor>
  bool Visit(Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <typename Visitor>
  bool Visit(const Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
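  // Illustrative sketch only; the lambda, its parameter type, and IsTargetClass below are
  // assumptions rather than something this header defines. Visiting stops as soon as the
  // visitor returns false:
  //   bool completed = table->Visit([&](ObjPtr<mirror::Class> klass) {
  //     return !IsTargetClass(klass);  // hypothetical predicate; returning false stops the walk
  //   });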

  // Return the first class that matches the descriptor. Returns null if there are none.
  mirror::Class* Lookup(const char* descriptor, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor of klass. Returns null if there are none.
  mirror::Class* LookupByDescriptor(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Try to insert a class and return the inserted class if successful. If another class
  // with the same descriptor is already in the table, return the existing entry.
  ObjPtr<mirror::Class> TryInsert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
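  // Illustrative usage sketch; `table`, `descriptor`, `hash`, and `klass` below are
  // assumptions, not part of this header. Callers typically look up by descriptor and hash
  // first and fall back to TryInsert, which resolves races by returning the existing entry:
  //   mirror::Class* existing = table->Lookup(descriptor, hash);
  //   if (existing == nullptr) {
  //     ObjPtr<mirror::Class> winner = table->TryInsert(klass);  // existing entry if we lost a race
  //   }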

  void Insert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the class was found and removed, false otherwise.
  bool Remove(const char* descriptor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the strong root, false if it already exists.
  bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFile(const OatFile* oat_file)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Combines all of the tables into one class set.
  size_t WriteToMemory(uint8_t* ptr) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read a table from ptr and put it at the front of the class set.
  size_t ReadFromMemory(uint8_t* ptr)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
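  // Illustrative round trip; treating a null `ptr` as a size-only query is an assumption
  // inferred from the size_t return value, not something this header guarantees:
  //   size_t size = table->WriteToMemory(nullptr);  // assumed: measure without writing
  //   table->WriteToMemory(buffer);                 // serialize the combined class set
  //   other_table->ReadFromMemory(buffer);          // put it at the front of the other table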

  // Add a class set to the front of classes.
  void AddClassSet(ClassSet&& set)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear strong roots (other than classes themselves).
  void ClearStrongRoots()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Filter strong roots (other than classes themselves).
  template <typename Filter>
  void RemoveStrongRoots(const Filter& filter)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ReaderWriterMutex& GetLock() {
    return lock_;
  }

 private:
  // Only copies classes.
  void CopyWithoutLocks(const ClassTable& source_table) NO_THREAD_SAFETY_ANALYSIS;
  void InsertWithoutLocks(ObjPtr<mirror::Class> klass) NO_THREAD_SAFETY_ANALYSIS;

  size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
                                    const ClassSet& set) const
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFileLocked(const OatFile* oat_file)
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lock to guard inserting and removing.
  mutable ReaderWriterMutex lock_;
  // We have a vector to help prevent dirty pages after the zygote forks by calling FreezeSnapshot.
  std::vector<ClassSet> classes_ GUARDED_BY(lock_);
  // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
  // loader which may not be owned by the class loader must be held strongly live. Also, dex caches
  // are held live to prevent them from being unloaded once they have classes in them.
  std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
  // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
  std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);

  friend class ImageWriter;  // for InsertWithoutLocks.
};

}  // namespace art

#endif  // ART_RUNTIME_CLASS_TABLE_H_