• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_CLASS_TABLE_H_
18 #define ART_RUNTIME_CLASS_TABLE_H_
19 
20 #include <string>
21 #include <utility>
22 #include <vector>
23 
24 #include "base/allocator.h"
25 #include "base/hash_set.h"
26 #include "base/macros.h"
27 #include "base/mutex.h"
28 #include "gc_root.h"
29 #include "obj_ptr.h"
30 
31 namespace art {
32 
33 class OatFile;
34 
// Forward declarations for linker writers; ImageWriter is befriended by
// ClassTable for InsertWithoutLocks. Merged into a single namespace block
// instead of reopening the namespace twice.
namespace linker {
class ImageWriter;
class OatWriter;
}  // namespace linker
42 
// Forward declarations of managed-heap types. This header refers to them only
// through ObjPtr<> and GcRoot<>, so full definitions are not required here.
namespace mirror {
class Class;
class ClassLoader;
class Object;
}  // namespace mirror
48 
49 // Each loader has a ClassTable
50 class ClassTable {
51  public:
52   class TableSlot {
53    public:
TableSlot()54     TableSlot() : data_(0u) {}
55 
TableSlot(const TableSlot & copy)56     TableSlot(const TableSlot& copy) : data_(copy.data_.load(std::memory_order_relaxed)) {}
57 
58     explicit TableSlot(ObjPtr<mirror::Class> klass);
59 
60     TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);
61 
62     TableSlot& operator=(const TableSlot& copy) {
63       data_.store(copy.data_.load(std::memory_order_relaxed), std::memory_order_relaxed);
64       return *this;
65     }
66 
IsNull()67     bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_) {
68       return Read<kWithoutReadBarrier>() == nullptr;
69     }
70 
Hash()71     uint32_t Hash() const {
72       return MaskHash(data_.load(std::memory_order_relaxed));
73     }
74 
MaskHash(uint32_t hash)75     static uint32_t MaskHash(uint32_t hash) {
76       return hash & kHashMask;
77     }
78 
MaskedHashEquals(uint32_t other)79     bool MaskedHashEquals(uint32_t other) const {
80       return MaskHash(other) == Hash();
81     }
82 
83     static uint32_t HashDescriptor(ObjPtr<mirror::Class> klass)
84         REQUIRES_SHARED(Locks::mutator_lock_);
85 
86     template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
87     mirror::Class* Read() const REQUIRES_SHARED(Locks::mutator_lock_);
88 
89     // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
90     template<typename Visitor>
91     void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;
92 
93    private:
94     // Extract a raw pointer from an address.
95     static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
96         REQUIRES_SHARED(Locks::mutator_lock_);
97 
98     static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
99         REQUIRES_SHARED(Locks::mutator_lock_);
100 
101     // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
102     mutable Atomic<uint32_t> data_;
103     static const uint32_t kHashMask = kObjectAlignment - 1;
104   };
105 
106   using DescriptorHashPair = std::pair<const char*, uint32_t>;
107 
108   class ClassDescriptorHashEquals {
109    public:
110     // uint32_t for cross compilation.
111     uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
112     // Same class loader and descriptor.
113     bool operator()(const TableSlot& a, const TableSlot& b) const
114         NO_THREAD_SAFETY_ANALYSIS;
115     // Same descriptor.
116     bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
117         NO_THREAD_SAFETY_ANALYSIS;
118     // uint32_t for cross compilation.
119     uint32_t operator()(const DescriptorHashPair& pair) const NO_THREAD_SAFETY_ANALYSIS;
120   };
121 
122   class TableSlotEmptyFn {
123    public:
MakeEmpty(TableSlot & item)124     void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
125       item = TableSlot();
126       DCHECK(IsEmpty(item));
127     }
IsEmpty(const TableSlot & item)128     bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
129       return item.IsNull();
130     }
131   };
132 
133   // Hash set that hashes class descriptor, and compares descriptors and class loaders. Results
134   // should be compared for a matching class descriptor and class loader.
135   typedef HashSet<TableSlot,
136                   TableSlotEmptyFn,
137                   ClassDescriptorHashEquals,
138                   ClassDescriptorHashEquals,
139                   TrackingAllocator<TableSlot, kAllocatorTagClassTable>> ClassSet;
140 
141   ClassTable();
142 
143   // Used by image writer for checking.
144   bool Contains(ObjPtr<mirror::Class> klass)
145       REQUIRES(!lock_)
146       REQUIRES_SHARED(Locks::mutator_lock_);
147 
148   // Freeze the current class tables by allocating a new table and never updating or modifying the
149   // existing table. This helps prevents dirty pages after caused by inserting after zygote fork.
150   void FreezeSnapshot()
151       REQUIRES(!lock_)
152       REQUIRES_SHARED(Locks::mutator_lock_);
153 
154   // Returns the number of classes in previous snapshots defined by `defining_loader`.
155   size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
156       REQUIRES(!lock_)
157       REQUIRES_SHARED(Locks::mutator_lock_);
158 
159   // Returns all off the classes in the lastest snapshot defined by `defining_loader`.
160   size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
161       REQUIRES(!lock_)
162       REQUIRES_SHARED(Locks::mutator_lock_);
163 
164   // Returns the number of classes in previous snapshots no matter the defining loader.
165   size_t NumReferencedZygoteClasses() const
166       REQUIRES(!lock_)
167       REQUIRES_SHARED(Locks::mutator_lock_);
168 
169   // Returns all off the classes in the lastest snapshot no matter the defining loader.
170   size_t NumReferencedNonZygoteClasses() const
171       REQUIRES(!lock_)
172       REQUIRES_SHARED(Locks::mutator_lock_);
173 
174   // Update a class in the table with the new class. Returns the existing class which was replaced.
175   mirror::Class* UpdateClass(const char* descriptor, mirror::Class* new_klass, size_t hash)
176       REQUIRES(!lock_)
177       REQUIRES_SHARED(Locks::mutator_lock_);
178 
179   // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
180   template<class Visitor>
181   void VisitRoots(Visitor& visitor)
182       NO_THREAD_SAFETY_ANALYSIS
183       REQUIRES(!lock_)
184       REQUIRES_SHARED(Locks::mutator_lock_);
185 
186   template<class Visitor>
187   void VisitRoots(const Visitor& visitor)
188       NO_THREAD_SAFETY_ANALYSIS
189       REQUIRES(!lock_)
190       REQUIRES_SHARED(Locks::mutator_lock_);
191 
192   // Stops visit if the visitor returns false.
193   template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
194   bool Visit(Visitor& visitor)
195       REQUIRES(!lock_)
196       REQUIRES_SHARED(Locks::mutator_lock_);
197   template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
198   bool Visit(const Visitor& visitor)
199       REQUIRES(!lock_)
200       REQUIRES_SHARED(Locks::mutator_lock_);
201 
202   // Return the first class that matches the descriptor. Returns null if there are none.
203   mirror::Class* Lookup(const char* descriptor, size_t hash)
204       REQUIRES(!lock_)
205       REQUIRES_SHARED(Locks::mutator_lock_);
206 
207   // Return the first class that matches the descriptor of klass. Returns null if there are none.
208   mirror::Class* LookupByDescriptor(ObjPtr<mirror::Class> klass)
209       REQUIRES(!lock_)
210       REQUIRES_SHARED(Locks::mutator_lock_);
211 
212   // Try to insert a class and return the inserted class if successful. If another class
213   // with the same descriptor is already in the table, return the existing entry.
214   ObjPtr<mirror::Class> TryInsert(ObjPtr<mirror::Class> klass)
215       REQUIRES(!lock_)
216       REQUIRES_SHARED(Locks::mutator_lock_);
217 
218   void Insert(ObjPtr<mirror::Class> klass)
219       REQUIRES(!lock_)
220       REQUIRES_SHARED(Locks::mutator_lock_);
221 
222   void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
223       REQUIRES(!lock_)
224       REQUIRES_SHARED(Locks::mutator_lock_);
225 
226   // Returns true if the class was found and removed, false otherwise.
227   bool Remove(const char* descriptor)
228       REQUIRES(!lock_)
229       REQUIRES_SHARED(Locks::mutator_lock_);
230 
231   // Return true if we inserted the strong root, false if it already exists.
232   bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
233       REQUIRES(!lock_)
234       REQUIRES_SHARED(Locks::mutator_lock_);
235 
236   // Return true if we inserted the oat file, false if it already exists.
237   bool InsertOatFile(const OatFile* oat_file)
238       REQUIRES(!lock_)
239       REQUIRES_SHARED(Locks::mutator_lock_);
240 
241   // Combines all of the tables into one class set.
242   size_t WriteToMemory(uint8_t* ptr) const
243       REQUIRES(!lock_)
244       REQUIRES_SHARED(Locks::mutator_lock_);
245 
246   // Read a table from ptr and put it at the front of the class set.
247   size_t ReadFromMemory(uint8_t* ptr)
248       REQUIRES(!lock_)
249       REQUIRES_SHARED(Locks::mutator_lock_);
250 
251   // Add a class set to the front of classes.
252   void AddClassSet(ClassSet&& set)
253       REQUIRES(!lock_)
254       REQUIRES_SHARED(Locks::mutator_lock_);
255 
256   // Clear strong roots (other than classes themselves).
257   void ClearStrongRoots()
258       REQUIRES(!lock_)
259       REQUIRES_SHARED(Locks::mutator_lock_);
260 
261   // Filter strong roots (other than classes themselves).
262   template <typename Filter>
263   void RemoveStrongRoots(const Filter& filter)
264       REQUIRES(!lock_)
265       REQUIRES_SHARED(Locks::mutator_lock_);
266 
GetLock()267   ReaderWriterMutex& GetLock() {
268     return lock_;
269   }
270 
271  private:
272   // Only copies classes.
273   void CopyWithoutLocks(const ClassTable& source_table) NO_THREAD_SAFETY_ANALYSIS;
274   void InsertWithoutLocks(ObjPtr<mirror::Class> klass) NO_THREAD_SAFETY_ANALYSIS;
275 
276   size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
277                                     const ClassSet& set) const
278       REQUIRES(lock_)
279       REQUIRES_SHARED(Locks::mutator_lock_);
280 
281   // Return true if we inserted the oat file, false if it already exists.
282   bool InsertOatFileLocked(const OatFile* oat_file)
283       REQUIRES(lock_)
284       REQUIRES_SHARED(Locks::mutator_lock_);
285 
286   // Lock to guard inserting and removing.
287   mutable ReaderWriterMutex lock_;
288   // We have a vector to help prevent dirty pages after the zygote forks by calling FreezeSnapshot.
289   std::vector<ClassSet> classes_ GUARDED_BY(lock_);
290   // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
291   // loader which may not be owned by the class loader must be held strongly live. Also dex caches
292   // are held live to prevent them being unloading once they have classes in them.
293   std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
294   // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
295   std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);
296 
297   friend class linker::ImageWriter;  // for InsertWithoutLocks.
298 };
299 
300 }  // namespace art
301 
302 #endif  // ART_RUNTIME_CLASS_TABLE_H_
303