/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_INL_H_
#define ART_RUNTIME_HANDLE_SCOPE_INL_H_

#include "handle_scope.h"

#include "base/mutex.h"
#include "handle.h"
#include "handle_wrapper.h"
#include "mirror/object_reference-inl.h"
#include "obj_ptr-inl.h"
#include "thread-current-inl.h"
#include "verify_object.h"

namespace art {

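// On construction, every slot in the scope is filled with |fill_value| and, when kDebugLocking
// is enabled, the shared mutator lock is asserted to be held.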
template<size_t kNumReferences>
inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScope* link,
                                                                  ObjPtr<mirror::Object> fill_value)
    : HandleScope(link, kNumReferences) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  static_assert(kNumReferences >= 1, "FixedSizeHandleScope must contain at least 1 reference");
  DCHECK_EQ(&storage_[0], GetReferences());  // TODO: Figure out how to use a compile assert.
  for (size_t i = 0; i < kNumReferences; ++i) {
    SetReference(i, fill_value);
  }
}

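// A StackHandleScope registers itself as the owning thread's top handle scope on construction.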
template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self,
                                                          ObjPtr<mirror::Object> fill_value)
    : FixedSizeHandleScope<kNumReferences>(self->GetTopHandleScope(), fill_value),
      self_(self) {
  DCHECK_EQ(self, Thread::Current());
  self_->PushHandleScope(this);
}

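// On destruction the scope pops itself off the thread's handle scope chain and checks that it
// was indeed the top scope.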
template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::~StackHandleScope() {
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
}

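// Size in bytes of a handle scope with |num_references| slots: the header followed by a packed
// array of StackReference<mirror::Object>.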
inline size_t HandleScope::SizeOf(uint32_t num_references) {
  size_t header_size = sizeof(HandleScope);
  size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
  return header_size + data_size;
}

inline size_t HandleScope::SizeOf(PointerSize pointer_size, uint32_t num_references) {
  // Assume that the layout is packed.
  size_t header_size = ReferencesOffset(pointer_size);
  size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
  return header_size + data_size;
}

inline ObjPtr<mirror::Object> HandleScope::GetReference(size_t i) const {
  DCHECK_LT(i, NumberOfReferences());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  return GetReferences()[i].AsMirrorPtr();
}

inline Handle<mirror::Object> HandleScope::GetHandle(size_t i) {
  DCHECK_LT(i, NumberOfReferences());
  return Handle<mirror::Object>(&GetReferences()[i]);
}

inline MutableHandle<mirror::Object> HandleScope::GetMutableHandle(size_t i) {
  DCHECK_LT(i, NumberOfReferences());
  return MutableHandle<mirror::Object>(&GetReferences()[i]);
}

inline void HandleScope::SetReference(size_t i, ObjPtr<mirror::Object> object) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  DCHECK_LT(i, NumberOfReferences());
  GetReferences()[i].Assign(object);
}

inline bool HandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
  // A HandleScope should always contain something: one created by the
  // jni_compiler holds at least a jobject/jclass, since a native method is
  // passed either a this pointer or a class.
  DCHECK_GT(NumberOfReferences(), 0U);
  return &GetReferences()[0] <= handle_scope_entry &&
      handle_scope_entry <= &GetReferences()[number_of_references_ - 1];
}

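// Report each slot in this scope to the root visitor; if a reference needs to be updated
// (e.g. by a moving GC), the visitor does so in place.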
template <typename Visitor>
inline void HandleScope::VisitRoots(Visitor& visitor) {
  for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
    // GetReference returns a pointer to the stack reference within the handle scope. If this
    // needs to be updated, it will be done by the root visitor.
    visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
  }
}

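// NewHandle stores |object| in the next free slot (pos_) and returns a MutableHandle that
// points at that slot.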
template<size_t kNumReferences> template<class T>
inline MutableHandle<T> FixedSizeHandleScope<kNumReferences>::NewHandle(T* object) {
  return NewHandle(ObjPtr<T>(object));
}

template<size_t kNumReferences> template<class MirrorType>
inline MutableHandle<MirrorType> FixedSizeHandleScope<kNumReferences>::NewHandle(
    ObjPtr<MirrorType> object) {
  SetReference(pos_, object);
  MutableHandle<MirrorType> h(GetHandle<MirrorType>(pos_));
  ++pos_;
  return h;
}

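// NewHandleWrapper additionally remembers the address of the caller's variable; the wrapper
// (see handle_wrapper.h) writes the possibly updated reference back to it on destruction.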
template<size_t kNumReferences> template<class T>
inline HandleWrapper<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(T** object) {
  return HandleWrapper<T>(object, NewHandle(*object));
}

template<size_t kNumReferences> template<class T>
inline HandleWrapperObjPtr<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(
    ObjPtr<T>* object) {
  return HandleWrapperObjPtr<T>(object, NewHandle(*object));
}

template<size_t kNumReferences>
inline void FixedSizeHandleScope<kNumReferences>::SetReference(size_t i,
                                                               ObjPtr<mirror::Object> object) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  DCHECK_LT(i, kNumReferences);
  VerifyObject(object);
  GetReferences()[i].Assign(object);
}

// Number of references contained within this handle scope.
inline uint32_t BaseHandleScope::NumberOfReferences() const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->NumberOfReferences()
      : AsVariableSized()->NumberOfReferences();
}

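// Contains() and VisitRoots() dispatch the same way: to the fixed-size HandleScope
// implementation in the common case, or to the variable-sized one otherwise.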
inline bool BaseHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->Contains(handle_scope_entry)
      : AsVariableSized()->Contains(handle_scope_entry);
}

template <typename Visitor>
inline void BaseHandleScope::VisitRoots(Visitor& visitor) {
  if (LIKELY(!IsVariableSized())) {
    AsHandleScope()->VisitRoots(visitor);
  } else {
    AsVariableSized()->VisitRoots(visitor);
  }
}

inline VariableSizedHandleScope* BaseHandleScope::AsVariableSized() {
  DCHECK(IsVariableSized());
  return down_cast<VariableSizedHandleScope*>(this);
}

inline HandleScope* BaseHandleScope::AsHandleScope() {
  DCHECK(!IsVariableSized());
  return down_cast<HandleScope*>(this);
}

inline const VariableSizedHandleScope* BaseHandleScope::AsVariableSized() const {
  DCHECK(IsVariableSized());
  return down_cast<const VariableSizedHandleScope*>(this);
}

inline const HandleScope* BaseHandleScope::AsHandleScope() const {
  DCHECK(!IsVariableSized());
  return down_cast<const HandleScope*>(this);
}

template<class T>
inline MutableHandle<T> VariableSizedHandleScope::NewHandle(T* object) {
  return NewHandle(ObjPtr<T>(object));
}

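// When the current internal scope has no free slots left, chain a new heap-allocated
// LocalScopeType in front of it and allocate the handle there.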
template<class MirrorType>
inline MutableHandle<MirrorType> VariableSizedHandleScope::NewHandle(ObjPtr<MirrorType> ptr) {
  if (current_scope_->RemainingSlots() == 0) {
    current_scope_ = new LocalScopeType(current_scope_);
  }
  return current_scope_->NewHandle(ptr);
}

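// A VariableSizedHandleScope starts out using its embedded first_scope_ and pushes itself onto
// the owning thread's handle scope chain.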
inline VariableSizedHandleScope::VariableSizedHandleScope(Thread* const self)
    : BaseHandleScope(self->GetTopHandleScope()),
      self_(self),
      current_scope_(&first_scope_),
      first_scope_(/*link=*/ nullptr) {
  self_->PushHandleScope(this);
}

inline VariableSizedHandleScope::~VariableSizedHandleScope() {
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
  // Don't delete first_scope_ since it is not heap allocated.
  while (current_scope_ != &first_scope_) {
    LocalScopeType* next = down_cast<LocalScopeType*>(current_scope_->GetLink());
    delete current_scope_;
    current_scope_ = next;
  }
}

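// Sum the reference counts of all internal scopes in the chain.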
inline uint32_t VariableSizedHandleScope::NumberOfReferences() const {
  uint32_t sum = 0;
  const LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    sum += cur->NumberOfReferences();
    cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
  }
  return sum;
}

inline bool VariableSizedHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry)
    const {
  const LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    if (cur->Contains(handle_scope_entry)) {
      return true;
    }
    cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
  }
  return false;
}

template <typename Visitor>
inline void VariableSizedHandleScope::VisitRoots(Visitor& visitor) {
  LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    cur->VisitRoots(visitor);
    cur = reinterpret_cast<LocalScopeType*>(cur->GetLink());
  }
}

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_INL_H_