/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include <android-base/logging.h>

#include "base/enums.h"
#include "base/locks.h"
#include "base/macros.h"
#include "stack_reference.h"

namespace art {

template<class T> class Handle;
class HandleScope;
template<class T> class HandleWrapper;
template<class T> class HandleWrapperObjPtr;
template<class T> class MutableHandle;
template<class MirrorType> class ObjPtr;
class Thread;
class VariableSizedHandleScope;

namespace mirror {
class Object;
}  // namespace mirror

// Basic handle scope, tracked by a list. May be variable sized.
class PACKED(4) BaseHandleScope {
 public:
  bool IsVariableSized() const {
    return number_of_references_ == kNumReferencesVariableSized;
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename Visitor>
  ALWAYS_INLINE void VisitHandles(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Link to previous BaseHandleScope or null.
  BaseHandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
  ALWAYS_INLINE HandleScope* AsHandleScope();
  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
  ALWAYS_INLINE const HandleScope* AsHandleScope() const;

 protected:
  BaseHandleScope(BaseHandleScope* link, uint32_t num_references)
      : link_(link),
        number_of_references_(num_references) {}

  // Variable sized constructor.
  explicit BaseHandleScope(BaseHandleScope* link)
      : link_(link),
        number_of_references_(kNumReferencesVariableSized) {}

  static constexpr int32_t kNumReferencesVariableSized = -1;

  // Linked list of handle scopes. The root is held by a Thread.
  BaseHandleScope* const link_;

  // Number of handlerized references. -1 for variable sized handle scopes.
  const int32_t number_of_references_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseHandleScope);
};

// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that those handles (and the objects they refer to) are visible to the GC as roots.
// It is most common to stack allocate HandleScopes using StackHandleScope.
class PACKED(4) HandleScope : public BaseHandleScope {
 public:
  ~HandleScope() {}

  // We have versions with and without explicit pointer size of the following. The first two are
  // used at runtime, so OFFSETOF_MEMBER computes the right offsets automatically. The last one
  // takes the pointer size explicitly so that at compile time we can cross-compile correctly.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(PointerSize pointer_size, uint32_t num_references);

  ALWAYS_INLINE ObjPtr<mirror::Object> GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, ObjPtr<mirror::Object> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of the reference storage within the handle scope, used by generated code.
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return NumberOfReferencesOffset(pointer_size) + sizeof(number_of_references_);
  }

  // Placement new creation.
  static HandleScope* Create(void* storage, BaseHandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const {
    DCHECK_GE(number_of_references_, 0);
    return static_cast<uint32_t>(number_of_references_);
  }

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename Visitor>
  ALWAYS_INLINE void VisitHandles(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) : HandleScope(nullptr, number_of_references) {}

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(BaseHandleScope* link, uint32_t num_references)
      : BaseHandleScope(link, num_references) {}

  // Storage for references.
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// Fixed size handle scope that is not necessarily linked in the thread.
template<size_t kNumReferences>
class PACKED(4) FixedSizeHandleScope : public HandleScope {
 public:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapperObjPtr<T> NewHandleWrapper(ObjPtr<T>* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  ALWAYS_INLINE MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, ObjPtr<mirror::Object> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  size_t RemainingSlots() const {
    return kNumReferences - pos_;
  }

 private:
  explicit ALWAYS_INLINE FixedSizeHandleScope(BaseHandleScope* link,
                                              ObjPtr<mirror::Object> fill_value = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ~FixedSizeHandleScope() REQUIRES_SHARED(Locks::mutator_lock_) {}

  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // Position at which new handles will be created.
  uint32_t pos_ = 0;

  template<size_t kNumRefs> friend class StackHandleScope;
  friend class VariableSizedHandleScope;
};

// Scoped handle storage of a fixed size that is stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope final : public FixedSizeHandleScope<kNumReferences> {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self,
                                          ObjPtr<mirror::Object> fill_value = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ~StackHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* Self() const {
    return self_;
  }

 private:
  // The thread on whose handle scope list this scope is linked. The stack handle scope will
  // push and pop itself from this thread.
  Thread* const self_;
};
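
// A minimal usage sketch (illustration only, not part of this header): a StackHandleScope is
// created on the stack with the current Thread and a fixed number of slots, and NewHandle() wraps
// raw object pointers so that the GC sees them as roots and can update them if objects move.
// The names `self` and `obj` below are hypothetical placeholders.
//
//   StackHandleScope<1> hs(self);                        // self: the current Thread*.
//   Handle<mirror::Object> handle = hs.NewHandle(obj);   // obj: a raw mirror::Object*.
//   // ... code that may suspend or trigger GC ...
//   // handle.Get() now yields the (possibly moved) object.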

// Utility class to manage a variable sized handle scope by having a list of fixed size handle
// scopes.
// Calls to NewHandle will create a new handle inside the current FixedSizeHandleScope.
// When the current handle scope becomes full, a new one is created and put at the front of the
// list.
class VariableSizedHandleScope : public BaseHandleScope {
 public:
  explicit VariableSizedHandleScope(Thread* const self) REQUIRES_SHARED(Locks::mutator_lock_);
  ~VariableSizedHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> ptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename Visitor>
  ALWAYS_INLINE void VisitHandles(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  static constexpr size_t kLocalScopeSize = 64u;
  static constexpr size_t kSizeOfReferencesPerScope =
      kLocalScopeSize
          - /* BaseHandleScope::link_ */ sizeof(BaseHandleScope*)
          - /* BaseHandleScope::number_of_references_ */ sizeof(int32_t)
          - /* FixedSizeHandleScope<>::pos_ */ sizeof(uint32_t);
  static constexpr size_t kNumReferencesPerScope =
      kSizeOfReferencesPerScope / sizeof(StackReference<mirror::Object>);
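  // For illustration only (assumes 4-byte StackReferences; the pointer size depends on the
  // target, which this header does not fix): with 8-byte pointers this works out to
  // (64 - 8 - 4 - 4) / 4 = 12 references per local scope, and with 4-byte pointers to
  // (64 - 4 - 4 - 4) / 4 = 13.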

  Thread* const self_;

  // Linked list of fixed size handle scopes.
  using LocalScopeType = FixedSizeHandleScope<kNumReferencesPerScope>;
  static_assert(sizeof(LocalScopeType) == kLocalScopeSize, "Unexpected size of LocalScopeType");
  LocalScopeType* current_scope_;
  LocalScopeType first_scope_;

  DISALLOW_COPY_AND_ASSIGN(VariableSizedHandleScope);
};
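
// A minimal usage sketch (illustration only, not part of this header): unlike StackHandleScope,
// the number of handles need not be known up front; NewHandle() may be called repeatedly and the
// scope grows by chaining fixed size scopes internally. `self` and `objects` are hypothetical
// placeholders.
//
//   VariableSizedHandleScope handles(self);              // self: the current Thread*.
//   for (mirror::Object* obj : objects) {
//     MutableHandle<mirror::Object> h = handles.NewHandle(obj);
//     // ... use h across suspend points ...
//   }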

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_