• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_INL_H_
#define ART_RUNTIME_HANDLE_SCOPE_INL_H_

#include "handle_scope.h"

#include "base/mutex.h"
#include "handle.h"
#include "handle_wrapper.h"
#include "mirror/object_reference-inl.h"
#include "obj_ptr-inl.h"
#include "thread-current-inl.h"
#include "verify_object.h"

namespace art {

32 template<size_t kNumReferences>
FixedSizeHandleScope(BaseHandleScope * link,ObjPtr<mirror::Object> fill_value)33 inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScope* link,
34                                                                   ObjPtr<mirror::Object> fill_value)
35     : HandleScope(link, kNumReferences) {
36   if (kDebugLocking) {
37     Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
38   }
39   static_assert(kNumReferences >= 1, "FixedSizeHandleScope must contain at least 1 reference");
40   DCHECK_EQ(&storage_[0], GetReferences());  // TODO: Figure out how to use a compile assert.
41   for (size_t i = 0; i < kNumReferences; ++i) {
42     SetReference(i, fill_value);
43   }
44 }
45 
46 template<size_t kNumReferences>
StackHandleScope(Thread * self,ObjPtr<mirror::Object> fill_value)47 inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self,
48                                                           ObjPtr<mirror::Object> fill_value)
49     : FixedSizeHandleScope<kNumReferences>(self->GetTopHandleScope(), fill_value),
50       self_(self) {
51   DCHECK_EQ(self, Thread::Current());
52   if (kDebugLocking) {
53     Locks::mutator_lock_->AssertSharedHeld(self_);
54   }
55   self_->PushHandleScope(this);
56 }
57 
58 template<size_t kNumReferences>
~StackHandleScope()59 inline StackHandleScope<kNumReferences>::~StackHandleScope() {
60   if (kDebugLocking) {
61     Locks::mutator_lock_->AssertSharedHeld(self_);
62   }
63   BaseHandleScope* top_handle_scope = self_->PopHandleScope();
64   DCHECK_EQ(top_handle_scope, this);
65 }
66 
SizeOf(uint32_t num_references)67 inline size_t HandleScope::SizeOf(uint32_t num_references) {
68   size_t header_size = sizeof(HandleScope);
69   size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
70   return header_size + data_size;
71 }
72 
SizeOf(PointerSize pointer_size,uint32_t num_references)73 inline size_t HandleScope::SizeOf(PointerSize pointer_size, uint32_t num_references) {
74   // Assume that the layout is packed.
75   size_t header_size = ReferencesOffset(pointer_size);
76   size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
77   return header_size + data_size;
78 }
79 
GetReference(size_t i)80 inline ObjPtr<mirror::Object> HandleScope::GetReference(size_t i) const {
81   DCHECK_LT(i, NumberOfReferences());
82   if (kDebugLocking) {
83     Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
84   }
85   return GetReferences()[i].AsMirrorPtr();
86 }
87 
GetHandle(size_t i)88 inline Handle<mirror::Object> HandleScope::GetHandle(size_t i) {
89   DCHECK_LT(i, NumberOfReferences());
90   return Handle<mirror::Object>(&GetReferences()[i]);
91 }
92 
GetMutableHandle(size_t i)93 inline MutableHandle<mirror::Object> HandleScope::GetMutableHandle(size_t i) {
94   DCHECK_LT(i, NumberOfReferences());
95   return MutableHandle<mirror::Object>(&GetReferences()[i]);
96 }
97 
SetReference(size_t i,ObjPtr<mirror::Object> object)98 inline void HandleScope::SetReference(size_t i, ObjPtr<mirror::Object> object) {
99   if (kDebugLocking) {
100     Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
101   }
102   DCHECK_LT(i, NumberOfReferences());
103   GetReferences()[i].Assign(object);
104 }
105 
Contains(StackReference<mirror::Object> * handle_scope_entry)106 inline bool HandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
107   // A HandleScope should always contain something. One created by the
108   // jni_compiler should have a jobject/jclass as a native method is
109   // passed in a this pointer or a class
110   DCHECK_GT(NumberOfReferences(), 0U);
111   return &GetReferences()[0] <= handle_scope_entry &&
112       handle_scope_entry <= &GetReferences()[number_of_references_ - 1];
113 }
114 
115 template <typename Visitor>
VisitRoots(Visitor & visitor)116 inline void HandleScope::VisitRoots(Visitor& visitor) {
117   for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
118     // GetReference returns a pointer to the stack reference within the handle scope. If this
119     // needs to be updated, it will be done by the root visitor.
120     visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
121   }
122 }
123 
124 template <typename Visitor>
VisitHandles(Visitor & visitor)125 inline void HandleScope::VisitHandles(Visitor& visitor) {
126   for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
127     if (GetHandle(i) != nullptr) {
128       visitor.Visit(GetHandle(i));
129     }
130   }
131 }
132 
133 template<size_t kNumReferences> template<class T>
NewHandle(T * object)134 inline MutableHandle<T> FixedSizeHandleScope<kNumReferences>::NewHandle(T* object) {
135   return NewHandle(ObjPtr<T>(object));
136 }
137 
138 template<size_t kNumReferences> template<class MirrorType>
NewHandle(ObjPtr<MirrorType> object)139 inline MutableHandle<MirrorType> FixedSizeHandleScope<kNumReferences>::NewHandle(
140     ObjPtr<MirrorType> object) {
141   SetReference(pos_, object);
142   MutableHandle<MirrorType> h(GetHandle<MirrorType>(pos_));
143   ++pos_;
144   return h;
145 }
146 
147 template<size_t kNumReferences> template<class T>
NewHandleWrapper(T ** object)148 inline HandleWrapper<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(T** object) {
149   return HandleWrapper<T>(object, NewHandle(*object));
150 }
151 
152 template<size_t kNumReferences> template<class T>
NewHandleWrapper(ObjPtr<T> * object)153 inline HandleWrapperObjPtr<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(
154     ObjPtr<T>* object) {
155   return HandleWrapperObjPtr<T>(object, NewHandle(*object));
156 }
157 
158 template<size_t kNumReferences>
SetReference(size_t i,ObjPtr<mirror::Object> object)159 inline void FixedSizeHandleScope<kNumReferences>::SetReference(size_t i,
160                                                                ObjPtr<mirror::Object> object) {
161   if (kDebugLocking) {
162     Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
163   }
164   DCHECK_LT(i, kNumReferences);
165   VerifyObject(object);
166   GetReferences()[i].Assign(object);
167 }
168 
169 // Number of references contained within this handle scope.
NumberOfReferences()170 inline uint32_t BaseHandleScope::NumberOfReferences() const {
171   return LIKELY(!IsVariableSized())
172       ? AsHandleScope()->NumberOfReferences()
173       : AsVariableSized()->NumberOfReferences();
174 }
175 
Contains(StackReference<mirror::Object> * handle_scope_entry)176 inline bool BaseHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
177   return LIKELY(!IsVariableSized())
178       ? AsHandleScope()->Contains(handle_scope_entry)
179       : AsVariableSized()->Contains(handle_scope_entry);
180 }
181 
182 template <typename Visitor>
VisitRoots(Visitor & visitor)183 inline void BaseHandleScope::VisitRoots(Visitor& visitor) {
184   if (LIKELY(!IsVariableSized())) {
185     AsHandleScope()->VisitRoots(visitor);
186   } else {
187     AsVariableSized()->VisitRoots(visitor);
188   }
189 }
190 
191 template <typename Visitor>
VisitHandles(Visitor & visitor)192 inline void BaseHandleScope::VisitHandles(Visitor& visitor) {
193   if (LIKELY(!IsVariableSized())) {
194     AsHandleScope()->VisitHandles(visitor);
195   } else {
196     AsVariableSized()->VisitHandles(visitor);
197   }
198 }
199 
AsVariableSized()200 inline VariableSizedHandleScope* BaseHandleScope::AsVariableSized() {
201   DCHECK(IsVariableSized());
202   return down_cast<VariableSizedHandleScope*>(this);
203 }
204 
AsHandleScope()205 inline HandleScope* BaseHandleScope::AsHandleScope() {
206   DCHECK(!IsVariableSized());
207   return down_cast<HandleScope*>(this);
208 }
209 
AsVariableSized()210 inline const VariableSizedHandleScope* BaseHandleScope::AsVariableSized() const {
211   DCHECK(IsVariableSized());
212   return down_cast<const VariableSizedHandleScope*>(this);
213 }
214 
AsHandleScope()215 inline const HandleScope* BaseHandleScope::AsHandleScope() const {
216   DCHECK(!IsVariableSized());
217   return down_cast<const HandleScope*>(this);
218 }
219 
220 template<class T>
NewHandle(T * object)221 inline MutableHandle<T> VariableSizedHandleScope::NewHandle(T* object) {
222   return NewHandle(ObjPtr<T>(object));
223 }
224 
225 template<class MirrorType>
NewHandle(ObjPtr<MirrorType> ptr)226 inline MutableHandle<MirrorType> VariableSizedHandleScope::NewHandle(ObjPtr<MirrorType> ptr) {
227   if (current_scope_->RemainingSlots() == 0) {
228     current_scope_ = new LocalScopeType(current_scope_);
229   }
230   return current_scope_->NewHandle(ptr);
231 }
232 
VariableSizedHandleScope(Thread * const self)233 inline VariableSizedHandleScope::VariableSizedHandleScope(Thread* const self)
234     : BaseHandleScope(self->GetTopHandleScope()),
235       self_(self),
236       current_scope_(&first_scope_),
237       first_scope_(/*link=*/ nullptr) {
238   DCHECK_EQ(self, Thread::Current());
239   if (kDebugLocking) {
240     Locks::mutator_lock_->AssertSharedHeld(self_);
241   }
242   self_->PushHandleScope(this);
243 }
244 
~VariableSizedHandleScope()245 inline VariableSizedHandleScope::~VariableSizedHandleScope() {
246   if (kDebugLocking) {
247     Locks::mutator_lock_->AssertSharedHeld(self_);
248   }
249   BaseHandleScope* top_handle_scope = self_->PopHandleScope();
250   DCHECK_EQ(top_handle_scope, this);
251   // Don't delete first_scope_ since it is not heap allocated.
252   while (current_scope_ != &first_scope_) {
253     LocalScopeType* next = down_cast<LocalScopeType*>(current_scope_->GetLink());
254     delete current_scope_;
255     current_scope_ = next;
256   }
257 }
258 
NumberOfReferences()259 inline uint32_t VariableSizedHandleScope::NumberOfReferences() const {
260   uint32_t sum = 0;
261   const LocalScopeType* cur = current_scope_;
262   while (cur != nullptr) {
263     sum += cur->NumberOfReferences();
264     cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
265   }
266   return sum;
267 }
268 
Contains(StackReference<mirror::Object> * handle_scope_entry)269 inline bool VariableSizedHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry)
270     const {
271   const LocalScopeType* cur = current_scope_;
272   while (cur != nullptr) {
273     if (cur->Contains(handle_scope_entry)) {
274       return true;
275     }
276     cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
277   }
278   return false;
279 }
280 
281 template <typename Visitor>
VisitRoots(Visitor & visitor)282 inline void VariableSizedHandleScope::VisitRoots(Visitor& visitor) {
283   LocalScopeType* cur = current_scope_;
284   while (cur != nullptr) {
285     cur->VisitRoots(visitor);
286     cur = reinterpret_cast<LocalScopeType*>(cur->GetLink());
287   }
288 }
289 
290 template <typename Visitor>
VisitHandles(Visitor & visitor)291 inline void VariableSizedHandleScope::VisitHandles(Visitor& visitor) {
292   LocalScopeType* cur = current_scope_;
293   while (cur != nullptr) {
294     cur->VisitHandles(visitor);
295     cur = reinterpret_cast<LocalScopeType*>(cur->GetLink());
296   }
297 }
298 
}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_INL_H_