1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_GC_ROOT_H_
18 #define ART_RUNTIME_GC_ROOT_H_
19 
#include <cstddef>  // For size_t.
#include <cstdint>  // For uint32_t.
#include <ostream>  // For std::ostream (RootInfo::Describe).
#include <string>   // For RootInfo::ToString.

#include "base/locks.h"       // For Locks::mutator_lock_.
#include "base/macros.h"
#include "mirror/object_reference.h"
#include "read_barrier_option.h"
24 
25 namespace art {
26 class ArtField;
27 class ArtMethod;
28 template<class MirrorType> class ObjPtr;
29 
30 namespace mirror {
31 class Object;
32 }  // namespace mirror
33 
34 template <size_t kBufferSize>
35 class BufferedRootVisitor;
36 
// Default number of roots buffered by BufferedRootVisitor before a flush.
// Dependent on pointer size so that we don't have frames that are too big on 64 bit.
static constexpr size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
39 
// Classifies where a GC root is held (JNI global/local, Java frame, interned string, ...).
// Primarily consumed by hprof heap dumps — see RootInfo below.
enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal,
  kRootJNILocal,
  kRootJavaFrame,
  kRootNativeStack,
  kRootStickyClass,
  kRootThreadBlock,
  kRootMonitorUsed,
  kRootThreadObject,
  kRootInternedString,
  kRootFinalizing,  // used for HPROF's conversion to HprofHeapTag
  kRootDebugger,
  kRootReferenceCleanup,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal,
  kRootJNIMonitor,
};
// Pretty-printer for RootType; definition lives elsewhere.
std::ostream& operator<<(std::ostream& os, RootType root_type);
58 
59 // Only used by hprof. thread_id_ and type_ are only used by hprof.
60 class RootInfo {
61  public:
62   // Thread id 0 is for non thread roots.
63   explicit RootInfo(RootType type, uint32_t thread_id = 0)
type_(type)64      : type_(type), thread_id_(thread_id) {
65   }
66   RootInfo(const RootInfo&) = default;
~RootInfo()67   virtual ~RootInfo() {
68   }
GetType()69   RootType GetType() const {
70     return type_;
71   }
GetThreadId()72   uint32_t GetThreadId() const {
73     return thread_id_;
74   }
Describe(std::ostream & os)75   virtual void Describe(std::ostream& os) const {
76     os << "Type=" << type_ << " thread_id=" << thread_id_;
77   }
78   std::string ToString() const;
79 
80  private:
81   const RootType type_;
82   const uint32_t thread_id_;
83 };
84 
// Streams a RootInfo through its virtual Describe(), so overriding subclasses
// also print their extra metadata.
inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
  root_info.Describe(os);
  return os;
}
89 
// Not all combinations of flags are valid. You may not visit all roots as well as the new roots
// (no logical reason to do this). You also may not start logging new roots and stop logging new
// roots (also no logical reason to do this).
//
// The precise flag ensures that more metadata is supplied. An example is vreg data for compiled
// method frames.
enum VisitRootFlags : uint8_t {
  kVisitRootFlagAllRoots = (1 << 0),               // Visit all roots.
  kVisitRootFlagNewRoots = (1 << 1),               // Visit only roots logged as new.
  kVisitRootFlagStartLoggingNewRoots = (1 << 2),   // Begin logging newly added roots.
  kVisitRootFlagStopLoggingNewRoots = (1 << 3),    // Stop logging newly added roots.
  kVisitRootFlagClearRootLog = (1 << 4),           // Discard the accumulated root log.
  kVisitRootFlagClassLoader = (1 << 5),
  // There is no (1 << 6).
  kVisitRootFlagPrecise = (1 << 7),                // Supply extra metadata (e.g. vreg data).
};
106 
// Abstract interface for visiting GC roots. Implementations override the two batched
// VisitRoots() methods; the non-virtual single-root helpers below forward to them.
class RootVisitor {
 public:
  virtual ~RootVisitor() { }

  // Single root version, not overridable.
  ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    VisitRoots(&root, 1, info);
  }

  // Single root version, not overridable. Null roots are skipped.
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (*root != nullptr) {
      VisitRoot(root, info);
    }
  }

  // Visits `count` roots given as an array of Object* slots; implementations may update the
  // slots in place (see SingleRootVisitor below, which explicitly does not).
  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Visits `count` roots stored as compressed references.
  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
132 
133 // Only visits roots one at a time, doesn't handle updating roots. Used when performance isn't
134 // critical.
135 class SingleRootVisitor : public RootVisitor {
136  private:
VisitRoots(mirror::Object *** roots,size_t count,const RootInfo & info)137   void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) override
138       REQUIRES_SHARED(Locks::mutator_lock_) {
139     for (size_t i = 0; i < count; ++i) {
140       VisitRoot(*roots[i], info);
141     }
142   }
143 
VisitRoots(mirror::CompressedReference<mirror::Object> ** roots,size_t count,const RootInfo & info)144   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
145                           const RootInfo& info) override
146       REQUIRES_SHARED(Locks::mutator_lock_) {
147     for (size_t i = 0; i < count; ++i) {
148       VisitRoot(roots[i]->AsMirrorPtr(), info);
149     }
150   }
151 
152   virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
153 };
154 
155 class GcRootSource {
156  public:
GcRootSource()157   GcRootSource()
158       : field_(nullptr), method_(nullptr) {
159   }
GcRootSource(ArtField * field)160   explicit GcRootSource(ArtField* field)
161       : field_(field), method_(nullptr) {
162   }
GcRootSource(ArtMethod * method)163   explicit GcRootSource(ArtMethod* method)
164       : field_(nullptr), method_(method) {
165   }
GetArtField()166   ArtField* GetArtField() const {
167     return field_;
168   }
GetArtMethod()169   ArtMethod* GetArtMethod() const {
170     return method_;
171   }
HasArtField()172   bool HasArtField() const {
173     return field_ != nullptr;
174   }
HasArtMethod()175   bool HasArtMethod() const {
176     return method_ != nullptr;
177   }
178 
179  private:
180   ArtField* const field_;
181   ArtMethod* const method_;
182 
183   DISALLOW_COPY_AND_ASSIGN(GcRootSource);
184 };
185 
// A GC-visible root holding a reference to a MirrorType object. Reads go through Read()
// (with an optional read barrier); root visitors reach the slot via VisitRoot() /
// AddressWithoutBarrier().
template<class MirrorType>
class GcRoot {
 public:
  // Reads the root's object, by default through a read barrier. gc_root_source, if non-null,
  // identifies where the root is being read from. Defined out of line.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visits this root (which must be non-null) with the given visitor. The visitor receives the
  // address of root_, so it may update the slot in place.
  void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNull());
    mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
    visitor->VisitRoots(roots, 1u, info);
    DCHECK(!IsNull());
  }

  // As VisitRoot(), but a null root is silently skipped.
  void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsNull()) {
      VisitRoot(visitor, info);
    }
  }

  // Raw address of the underlying reference slot, with no read barrier applied.
  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
    return &root_;
  }

  ALWAYS_INLINE bool IsNull() const {
    // It's safe to null-check it without a read barrier.
    return root_.IsNull();
  }

  ALWAYS_INLINE GcRoot() : GcRoot(nullptr) {}
  ALWAYS_INLINE GcRoot(std::nullptr_t) : root_() {
    DCHECK(IsNull());
  }
  // The remaining constructors are declared here and defined out of line.
  explicit ALWAYS_INLINE GcRoot(mirror::CompressedReference<mirror::Object> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
  // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
  // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
  // Mutable so visitors can update the slot through the const VisitRoot() above.
  mutable mirror::CompressedReference<mirror::Object> root_;

  template <size_t kBufferSize> friend class BufferedRootVisitor;
};
236 
237 // Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
238 // only for CompressedReferences since these are more common than the Object** roots which are only
239 // for thread local roots.
240 template <size_t kBufferSize>
241 class BufferedRootVisitor {
242  public:
BufferedRootVisitor(RootVisitor * visitor,const RootInfo & root_info)243   BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
244       : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
245   }
246 
~BufferedRootVisitor()247   ~BufferedRootVisitor() {
248     Flush();
249   }
250 
251   template <class MirrorType>
VisitRootIfNonNull(GcRoot<MirrorType> & root)252   ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
253       REQUIRES_SHARED(Locks::mutator_lock_) {
254     if (!root.IsNull()) {
255       VisitRoot(root);
256     }
257   }
258 
259   template <class MirrorType>
VisitRootIfNonNull(mirror::CompressedReference<MirrorType> * root)260   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
261       REQUIRES_SHARED(Locks::mutator_lock_) {
262     if (!root->IsNull()) {
263       VisitRoot(root);
264     }
265   }
266 
267   template <class MirrorType>
VisitRoot(GcRoot<MirrorType> & root)268   void VisitRoot(GcRoot<MirrorType>& root) REQUIRES_SHARED(Locks::mutator_lock_) {
269     VisitRoot(root.AddressWithoutBarrier());
270   }
271 
272   template <class MirrorType>
VisitRoot(mirror::CompressedReference<MirrorType> * root)273   void VisitRoot(mirror::CompressedReference<MirrorType>* root)
274       REQUIRES_SHARED(Locks::mutator_lock_) {
275     if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
276       Flush();
277     }
278     roots_[buffer_pos_++] = root;
279   }
280 
Flush()281   void Flush() REQUIRES_SHARED(Locks::mutator_lock_) {
282     visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
283     buffer_pos_ = 0;
284   }
285 
286  private:
287   RootVisitor* const visitor_;
288   RootInfo root_info_;
289   mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
290   size_t buffer_pos_;
291 };
292 
293 class UnbufferedRootVisitor {
294  public:
UnbufferedRootVisitor(RootVisitor * visitor,const RootInfo & root_info)295   UnbufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
296       : visitor_(visitor), root_info_(root_info) {}
297 
298   template <class MirrorType>
VisitRootIfNonNull(GcRoot<MirrorType> & root)299   ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) const
300       REQUIRES_SHARED(Locks::mutator_lock_) {
301     if (!root.IsNull()) {
302       VisitRoot(root);
303     }
304   }
305 
306   template <class MirrorType>
VisitRootIfNonNull(mirror::CompressedReference<MirrorType> * root)307   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) const
308       REQUIRES_SHARED(Locks::mutator_lock_) {
309     if (!root->IsNull()) {
310       VisitRoot(root);
311     }
312   }
313 
314   template <class MirrorType>
VisitRoot(GcRoot<MirrorType> & root)315   void VisitRoot(GcRoot<MirrorType>& root) const REQUIRES_SHARED(Locks::mutator_lock_) {
316     VisitRoot(root.AddressWithoutBarrier());
317   }
318 
319   template <class MirrorType>
VisitRoot(mirror::CompressedReference<MirrorType> * root)320   void VisitRoot(mirror::CompressedReference<MirrorType>* root) const
321       REQUIRES_SHARED(Locks::mutator_lock_) {
322     visitor_->VisitRoots(&root, 1, root_info_);
323   }
324 
325  private:
326   RootVisitor* const visitor_;
327   RootInfo root_info_;
328 };
329 
330 }  // namespace art
331 
332 #endif  // ART_RUNTIME_GC_ROOT_H_
333