// NOTE: code-search viewer navigation chrome ("Home / Line# / Scopes / Navigate /
// Raw / Download") was accidentally captured here and has been removed.
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_GC_ROOT_H_
18 #define ART_RUNTIME_GC_ROOT_H_
19 
20 #include "base/locks.h"       // For Locks::mutator_lock_.
21 #include "base/macros.h"
22 #include "mirror/object_reference.h"
23 #include "read_barrier_option.h"
24 
25 namespace art {
26 class ArtField;
27 class ArtMethod;
28 template<class MirrorType> class ObjPtr;
29 
30 namespace mirror {
31 class Object;
32 }  // namespace mirror
33 
34 template <size_t kBufferSize>
35 class BufferedRootVisitor;
36 
// Default buffer capacity for BufferedRootVisitor. Scaled by pointer size so
// that stack frames holding a visitor do not get too big on 64-bit targets.
// constexpr (not just const) guarantees a compile-time constant, which is
// required where this is used as a template argument.
static constexpr size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
39 
// Classification of a GC root: which runtime subsystem holds the reference.
// Primarily consumed by hprof when emitting heap dumps.
enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal = 1,
  kRootJNILocal = 2,
  kRootJavaFrame = 3,
  kRootNativeStack = 4,
  kRootStickyClass = 5,
  kRootThreadBlock = 6,
  kRootMonitorUsed = 7,
  kRootThreadObject = 8,
  kRootInternedString = 9,
  kRootFinalizing = 10,  // used for HPROF's conversion to HprofHeapTag
  kRootDebugger = 11,
  kRootReferenceCleanup = 12,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal = 13,
  kRootJNIMonitor = 14,
};
std::ostream& operator<<(std::ostream& os, RootType root_type);
58 
59 // Only used by hprof. thread_id_ and type_ are only used by hprof.
60 class RootInfo {
61  public:
62   // Thread id 0 is for non thread roots.
63   explicit RootInfo(RootType type, uint32_t thread_id = 0)
type_(type)64      : type_(type), thread_id_(thread_id) {
65   }
66   RootInfo(const RootInfo&) = default;
~RootInfo()67   virtual ~RootInfo() {
68   }
GetType()69   RootType GetType() const {
70     return type_;
71   }
GetThreadId()72   uint32_t GetThreadId() const {
73     return thread_id_;
74   }
Describe(std::ostream & os)75   virtual void Describe(std::ostream& os) const {
76     os << "Type=" << type_ << " thread_id=" << thread_id_;
77   }
78   std::string ToString() const;
79 
80  private:
81   const RootType type_;
82   const uint32_t thread_id_;
83 };
84 
85 inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
86   root_info.Describe(os);
87   return os;
88 }
89 
// Not every combination of these flags is valid: visiting all roots together
// with only-new roots makes no sense, nor does simultaneously starting and
// stopping the logging of new roots.
//
// kVisitRootFlagPrecise requests extra metadata, for example vreg data for
// compiled method frames.
enum VisitRootFlags : uint8_t {
  kVisitRootFlagAllRoots = 0x01,
  kVisitRootFlagNewRoots = 0x02,
  kVisitRootFlagStartLoggingNewRoots = 0x04,
  kVisitRootFlagStopLoggingNewRoots = 0x08,
  kVisitRootFlagClearRootLog = 0x10,
  kVisitRootFlagClassLoader = 0x20,
  // 0x40 is intentionally unused.
  kVisitRootFlagPrecise = 0x80,
};
106 
107 class RootVisitor {
108  public:
~RootVisitor()109   virtual ~RootVisitor() { }
110 
111   // Single root version, not overridable.
VisitRoot(mirror::Object ** root,const RootInfo & info)112   ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
113       REQUIRES_SHARED(Locks::mutator_lock_) {
114     VisitRoots(&root, 1, info);
115   }
116 
117   // Single root version, not overridable.
VisitRootIfNonNull(mirror::Object ** root,const RootInfo & info)118   ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
119       REQUIRES_SHARED(Locks::mutator_lock_) {
120     if (*root != nullptr) {
121       VisitRoot(root, info);
122     }
123   }
124 
125   virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
126       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
127 
128   virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
129                           const RootInfo& info)
130       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
131 };
132 
133 // Only visits roots one at a time, doesn't handle updating roots. Used when performance isn't
134 // critical.
135 class SingleRootVisitor : public RootVisitor {
136  private:
VisitRoots(mirror::Object *** roots,size_t count,const RootInfo & info)137   void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) override
138       REQUIRES_SHARED(Locks::mutator_lock_) {
139     for (size_t i = 0; i < count; ++i) {
140       VisitRoot(*roots[i], info);
141     }
142   }
143 
VisitRoots(mirror::CompressedReference<mirror::Object> ** roots,size_t count,const RootInfo & info)144   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
145                           const RootInfo& info) override
146       REQUIRES_SHARED(Locks::mutator_lock_) {
147     for (size_t i = 0; i < count; ++i) {
148       VisitRoot(roots[i]->AsMirrorPtr(), info);
149     }
150   }
151 
152   virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
153 };
154 
155 class GcRootSource {
156  public:
GcRootSource()157   GcRootSource()
158       : field_(nullptr), method_(nullptr) {
159   }
GcRootSource(ArtField * field)160   explicit GcRootSource(ArtField* field)
161       : field_(field), method_(nullptr) {
162   }
GcRootSource(ArtMethod * method)163   explicit GcRootSource(ArtMethod* method)
164       : field_(nullptr), method_(method) {
165   }
GetArtField()166   ArtField* GetArtField() const {
167     return field_;
168   }
GetArtMethod()169   ArtMethod* GetArtMethod() const {
170     return method_;
171   }
HasArtField()172   bool HasArtField() const {
173     return field_ != nullptr;
174   }
HasArtMethod()175   bool HasArtMethod() const {
176     return method_ != nullptr;
177   }
178 
179  private:
180   ArtField* const field_;
181   ArtMethod* const method_;
182 
183   DISALLOW_COPY_AND_ASSIGN(GcRootSource);
184 };
185 
186 template<class MirrorType>
187 class GcRoot {
188  public:
189   template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
190   ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
191       REQUIRES_SHARED(Locks::mutator_lock_);
192 
VisitRoot(RootVisitor * visitor,const RootInfo & info)193   void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
194       REQUIRES_SHARED(Locks::mutator_lock_) {
195     DCHECK(!IsNull());
196     mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
197     visitor->VisitRoots(roots, 1u, info);
198     DCHECK(!IsNull());
199   }
200 
VisitRootIfNonNull(RootVisitor * visitor,const RootInfo & info)201   void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
202       REQUIRES_SHARED(Locks::mutator_lock_) {
203     if (!IsNull()) {
204       VisitRoot(visitor, info);
205     }
206   }
207 
AddressWithoutBarrier()208   ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
209     return &root_;
210   }
211 
IsNull()212   ALWAYS_INLINE bool IsNull() const {
213     // It's safe to null-check it without a read barrier.
214     return root_.IsNull();
215   }
216 
GcRoot()217   ALWAYS_INLINE GcRoot() {}
218   explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
219       REQUIRES_SHARED(Locks::mutator_lock_);
220   explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
221       REQUIRES_SHARED(Locks::mutator_lock_);
222 
223  private:
224   // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
225   // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
226   // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
227   mutable mirror::CompressedReference<mirror::Object> root_;
228 
229   template <size_t kBufferSize> friend class BufferedRootVisitor;
230 };
231 
232 // Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
233 // only for CompressedReferences since these are more common than the Object** roots which are only
234 // for thread local roots.
235 template <size_t kBufferSize>
236 class BufferedRootVisitor {
237  public:
BufferedRootVisitor(RootVisitor * visitor,const RootInfo & root_info)238   BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
239       : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
240   }
241 
~BufferedRootVisitor()242   ~BufferedRootVisitor() {
243     Flush();
244   }
245 
246   template <class MirrorType>
VisitRootIfNonNull(GcRoot<MirrorType> & root)247   ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
248       REQUIRES_SHARED(Locks::mutator_lock_) {
249     if (!root.IsNull()) {
250       VisitRoot(root);
251     }
252   }
253 
254   template <class MirrorType>
VisitRootIfNonNull(mirror::CompressedReference<MirrorType> * root)255   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
256       REQUIRES_SHARED(Locks::mutator_lock_) {
257     if (!root->IsNull()) {
258       VisitRoot(root);
259     }
260   }
261 
262   template <class MirrorType>
VisitRoot(GcRoot<MirrorType> & root)263   void VisitRoot(GcRoot<MirrorType>& root) REQUIRES_SHARED(Locks::mutator_lock_) {
264     VisitRoot(root.AddressWithoutBarrier());
265   }
266 
267   template <class MirrorType>
VisitRoot(mirror::CompressedReference<MirrorType> * root)268   void VisitRoot(mirror::CompressedReference<MirrorType>* root)
269       REQUIRES_SHARED(Locks::mutator_lock_) {
270     if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
271       Flush();
272     }
273     roots_[buffer_pos_++] = root;
274   }
275 
Flush()276   void Flush() REQUIRES_SHARED(Locks::mutator_lock_) {
277     visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
278     buffer_pos_ = 0;
279   }
280 
281  private:
282   RootVisitor* const visitor_;
283   RootInfo root_info_;
284   mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
285   size_t buffer_pos_;
286 };
287 
288 class UnbufferedRootVisitor {
289  public:
UnbufferedRootVisitor(RootVisitor * visitor,const RootInfo & root_info)290   UnbufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
291       : visitor_(visitor), root_info_(root_info) {}
292 
293   template <class MirrorType>
VisitRootIfNonNull(GcRoot<MirrorType> & root)294   ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) const
295       REQUIRES_SHARED(Locks::mutator_lock_) {
296     if (!root.IsNull()) {
297       VisitRoot(root);
298     }
299   }
300 
301   template <class MirrorType>
VisitRootIfNonNull(mirror::CompressedReference<MirrorType> * root)302   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) const
303       REQUIRES_SHARED(Locks::mutator_lock_) {
304     if (!root->IsNull()) {
305       VisitRoot(root);
306     }
307   }
308 
309   template <class MirrorType>
VisitRoot(GcRoot<MirrorType> & root)310   void VisitRoot(GcRoot<MirrorType>& root) const REQUIRES_SHARED(Locks::mutator_lock_) {
311     VisitRoot(root.AddressWithoutBarrier());
312   }
313 
314   template <class MirrorType>
VisitRoot(mirror::CompressedReference<MirrorType> * root)315   void VisitRoot(mirror::CompressedReference<MirrorType>* root) const
316       REQUIRES_SHARED(Locks::mutator_lock_) {
317     visitor_->VisitRoots(&root, 1, root_info_);
318   }
319 
320  private:
321   RootVisitor* const visitor_;
322   RootInfo root_info_;
323 };
324 
325 }  // namespace art
326 
327 #endif  // ART_RUNTIME_GC_ROOT_H_
328