/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include "base/logging.h"
#include "base/macros.h"
#include "handle.h"
#include "stack.h"
#include "verify_object.h"

namespace art {
namespace mirror {
class Object;
}

class Thread;
// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that the handles (and the objects they refer to) are visible to the GC and act
// as GC roots. It is most common to stack allocate HandleScopes using StackHandleScope.
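//
// A minimal usage sketch (illustrative only, not part of this header), assuming a current
// Thread* `self` and a mirror::Class* `klass` obtained elsewhere:
//
//   StackHandleScope<2> hs(self);
//   Handle<mirror::Class> h_class(hs.NewHandle(klass));
//   MutableHandle<mirror::Object> h_obj(hs.NewHandle<mirror::Object>(nullptr));
//   // Both handles stay valid across suspend points: the scope's slots are visited as GC
//   // roots and updated if the referenced objects move.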
class PACKED(4) HandleScope {
 public:
  ~HandleScope() {}

  // Number of references contained within this handle scope.
  uint32_t NumberOfReferences() const {
    return number_of_references_;
  }

  // We have versions with and without explicit pointer size of the following. The first two are
  // used at runtime, so OFFSETOF_MEMBER computes the right offsets automatically. The last one
  // takes the pointer size explicitly so that at compile time we can cross-compile correctly.
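  //
  // For illustration (assuming a 64-bit target, i.e. pointer_size == 8), the offset helpers
  // below imply the following in-memory layout:
  //
  //   offset 0:  HandleScope* link_
  //   offset 8:  uint32_t number_of_references_
  //   offset 12: StackReference<mirror::Object> references_[number_of_references_]
  //
  // so SizeOf(8, n) works out to ReferencesOffset(8) + n * sizeof(StackReference<mirror::Object>).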

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(size_t pointer_size, uint32_t num_references);

  // Link to previous HandleScope or null.
  HandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static size_t LinkOffset(size_t pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static size_t NumberOfReferencesOffset(size_t pointer_size) {
    return pointer_size;
  }

  // Offset of the reference storage within handle scope, used by generated code.
  static size_t ReferencesOffset(size_t pointer_size) {
    return pointer_size + sizeof(number_of_references_);
  }

  // Placement new creation.
  static HandleScope* Create(void* storage, HandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }
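  //
  // A minimal sketch of the placement-new path (illustrative only; `storage`, `outer` and `n`
  // are assumed names, with `storage` pointing at least SizeOf(n) bytes of writable memory):
  //
  //   void* storage = alloca(HandleScope::SizeOf(n));
  //   HandleScope* hs = HandleScope::Create(storage, outer /* or nullptr */, n);
  //
  // The caller owns the storage; the scope itself holds no other resources.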

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(sizeof(void*));
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) :
      link_(nullptr), number_of_references_(number_of_references) {
  }

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  explicit HandleScope(HandleScope* link, uint32_t num_references) :
      link_(link), number_of_references_(num_references) {
  }

  // Linked list of handle scopes. The root is held by a Thread.
  HandleScope* const link_;

  // Number of handlerized references.
  const uint32_t number_of_references_;

  // Storage for references.
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// A wrapper around Object** that restores the pointer in the destructor.
// TODO: Add more functionality.
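//
// A minimal usage sketch (illustrative only), assuming a StackHandleScope `hs` and a
// mirror::Object* local `obj` that the GC may relocate while the thread is suspended:
//
//   {
//     HandleWrapper<mirror::Object> h(hs.NewHandleWrapper(&obj));
//     // ... code that may suspend or allocate (and therefore trigger GC) ...
//   }  // ~HandleWrapper writes the possibly-updated address back into `obj`.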
template<class T>
class HandleWrapper : public MutableHandle<T> {
 public:
  HandleWrapper(T** obj, const MutableHandle<T>& handle)
     : MutableHandle<T>(handle), obj_(obj) {
  }

  HandleWrapper(const HandleWrapper&) = default;

  ~HandleWrapper() {
    *obj_ = MutableHandle<T>::Get();
  }

 private:
  T** const obj_;
};

// Scoped handle storage of a fixed size that is usually stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public HandleScope {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~StackHandleScope();

  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Thread* Self() const {
    return self_;
  }

 private:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // The thread on whose handle scope chain this scope is linked. The stack handle scope
  // pushes and pops itself onto/from this thread.
  Thread* const self_;

  // Position at which new handles will be created.
  size_t pos_;

  template<size_t kNumRefs> friend class StackHandleScope;
};

// Utility class to manage a collection (stack) of StackHandleScope. All the managed
// handle scopes have the same fixed size.
// Calls to NewHandle will create a new handle inside the top StackHandleScope.
// When the current handle scope becomes full, a new one is created and pushed on top of the
// previous one.
//
// NB:
// - it is not safe to intermix use of the *same* StackHandleScopeCollection with other
//   StackHandleScopes.
// - this is an easy way around implementing a full ZoneHandleScope to manage an
//   arbitrary number of handles.
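//
// A minimal usage sketch (illustrative only), assuming a current Thread* `self` and an array
// `objs` of mirror::Object* with `count` elements:
//
//   StackHandleScopeCollection handles(self);
//   for (size_t i = 0; i < count; ++i) {
//     MutableHandle<mirror::Object> h = handles.NewHandle(objs[i]);
//     // ... use h ...
//   }
//
// A fresh internal StackHandleScope is heap-allocated each time the current one fills up,
// and all of them are released when the collection is destroyed.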
class StackHandleScopeCollection {
 public:
  explicit StackHandleScopeCollection(Thread* const self) :
      self_(self),
      current_scope_num_refs_(0) {
  }

  ~StackHandleScopeCollection() {
    while (!scopes_.empty()) {
      delete scopes_.top();
      scopes_.pop();
    }
  }

  template<class T>
  MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (scopes_.empty() || current_scope_num_refs_ >= kNumReferencesPerScope) {
      StackHandleScope<kNumReferencesPerScope>* scope =
          new StackHandleScope<kNumReferencesPerScope>(self_);
      scopes_.push(scope);
      current_scope_num_refs_ = 0;
    }
    current_scope_num_refs_++;
    return scopes_.top()->NewHandle(object);
  }

 private:
  static constexpr size_t kNumReferencesPerScope = 4;

  Thread* const self_;

  std::stack<StackHandleScope<kNumReferencesPerScope>*> scopes_;
  size_t current_scope_num_refs_;

  DISALLOW_COPY_AND_ASSIGN(StackHandleScopeCollection);
};

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_