/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_INL_H_
#define ART_RUNTIME_HANDLE_SCOPE_INL_H_

#include "handle_scope.h"

#include "base/mutex.h"
#include "handle.h"
#include "handle_wrapper.h"
#include "mirror/object_reference-inl.h"
#include "obj_ptr-inl.h"
#include "thread-current-inl.h"
#include "verify_object.h"

namespace art {

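// Constructs a fixed-size scope chained to link and initializes every slot to fill_value.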
template<size_t kNumReferences>
inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScope* link,
                                                                  ObjPtr<mirror::Object> fill_value)
    : HandleScope(link, kNumReferences) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  static_assert(kNumReferences >= 1, "FixedSizeHandleScope must contain at least 1 reference");
  DCHECK_EQ(&storage_[0], GetReferences());  // TODO: Figure out how to use a compile assert.
  for (size_t i = 0; i < kNumReferences; ++i) {
    SetReference(i, fill_value);
  }
}

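// A StackHandleScope pushes itself onto self's handle-scope chain on construction and pops itself
// in the destructor, so instances must be destroyed in LIFO order, i.e. used as stack locals.
// Illustrative sketch only (obj stands for any ObjPtr<mirror::Object> already in scope):
//
//   StackHandleScope<1> hs(Thread::Current());
//   Handle<mirror::Object> handle = hs.NewHandle(obj);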
template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self,
                                                          ObjPtr<mirror::Object> fill_value)
    : FixedSizeHandleScope<kNumReferences>(self->GetTopHandleScope(), fill_value),
      self_(self) {
  DCHECK_EQ(self, Thread::Current());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  self_->PushHandleScope(this);
}

template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::~StackHandleScope() {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
}

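// Byte size of a HandleScope with num_references slots: the header followed directly by the
// array of stack references.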
inline size_t HandleScope::SizeOf(uint32_t num_references) {
  size_t header_size = sizeof(HandleScope);
  size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
  return header_size + data_size;
}

inline size_t HandleScope::SizeOf(PointerSize pointer_size, uint32_t num_references) {
  // Assume that the layout is packed.
  size_t header_size = ReferencesOffset(pointer_size);
  size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
  return header_size + data_size;
}

inline ObjPtr<mirror::Object> HandleScope::GetReference(size_t i) const {
  DCHECK_LT(i, NumberOfReferences());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  return GetReferences()[i].AsMirrorPtr();
}

inline Handle<mirror::Object> HandleScope::GetHandle(size_t i) {
  DCHECK_LT(i, NumberOfReferences());
  return Handle<mirror::Object>(&GetReferences()[i]);
}

inline MutableHandle<mirror::Object> HandleScope::GetMutableHandle(size_t i) {
  DCHECK_LT(i, NumberOfReferences());
  return MutableHandle<mirror::Object>(&GetReferences()[i]);
}

inline void HandleScope::SetReference(size_t i, ObjPtr<mirror::Object> object) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  DCHECK_LT(i, NumberOfReferences());
  GetReferences()[i].Assign(object);
}

inline bool HandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
  // A HandleScope should always contain something. One created by the jni_compiler should have a
  // jobject/jclass, since a native method is always passed either a this pointer or its class.
  DCHECK_GT(NumberOfReferences(), 0U);
  return &GetReferences()[0] <= handle_scope_entry &&
      handle_scope_entry <= &GetReferences()[number_of_references_ - 1];
}

template <typename Visitor>
inline void HandleScope::VisitRoots(Visitor& visitor) {
  for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
    // GetReference returns a pointer to the stack reference within the handle scope. If this
    // needs to be updated, it will be done by the root visitor.
    visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
  }
}

template<size_t kNumReferences> template<class T>
inline MutableHandle<T> FixedSizeHandleScope<kNumReferences>::NewHandle(T* object) {
  return NewHandle(ObjPtr<T>(object));
}

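// Claims the next free slot for object and returns a handle to that slot. Slots are handed out
// in order and are never reused during the lifetime of the scope.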
template<size_t kNumReferences> template<class MirrorType>
inline MutableHandle<MirrorType> FixedSizeHandleScope<kNumReferences>::NewHandle(
    ObjPtr<MirrorType> object) {
  SetReference(pos_, object);
  MutableHandle<MirrorType> h(GetHandle<MirrorType>(pos_));
  ++pos_;
  return h;
}

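// The wrappers below expose a caller-owned pointer as a handle; when the wrapper goes out of
// scope it writes the handle's (possibly updated) reference back through that pointer, see
// handle_wrapper.h.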
template<size_t kNumReferences> template<class T>
inline HandleWrapper<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(T** object) {
  return HandleWrapper<T>(object, NewHandle(*object));
}

template<size_t kNumReferences> template<class T>
inline HandleWrapperObjPtr<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(
    ObjPtr<T>* object) {
  return HandleWrapperObjPtr<T>(object, NewHandle(*object));
}

template<size_t kNumReferences>
inline void FixedSizeHandleScope<kNumReferences>::SetReference(size_t i,
                                                               ObjPtr<mirror::Object> object) {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  DCHECK_LT(i, kNumReferences);
  VerifyObject(object);
  GetReferences()[i].Assign(object);
}

// Number of references contained within this handle scope.
inline uint32_t BaseHandleScope::NumberOfReferences() const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->NumberOfReferences()
      : AsVariableSized()->NumberOfReferences();
}

inline bool BaseHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
  return LIKELY(!IsVariableSized())
      ? AsHandleScope()->Contains(handle_scope_entry)
      : AsVariableSized()->Contains(handle_scope_entry);
}

template <typename Visitor>
inline void BaseHandleScope::VisitRoots(Visitor& visitor) {
  if (LIKELY(!IsVariableSized())) {
    AsHandleScope()->VisitRoots(visitor);
  } else {
    AsVariableSized()->VisitRoots(visitor);
  }
}

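// Checked down-casts between the two concrete scope kinds; the DCHECKs guard against calling the
// accessor that does not match the scope's actual type.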
inline VariableSizedHandleScope* BaseHandleScope::AsVariableSized() {
  DCHECK(IsVariableSized());
  return down_cast<VariableSizedHandleScope*>(this);
}

inline HandleScope* BaseHandleScope::AsHandleScope() {
  DCHECK(!IsVariableSized());
  return down_cast<HandleScope*>(this);
}

inline const VariableSizedHandleScope* BaseHandleScope::AsVariableSized() const {
  DCHECK(IsVariableSized());
  return down_cast<const VariableSizedHandleScope*>(this);
}

inline const HandleScope* BaseHandleScope::AsHandleScope() const {
  DCHECK(!IsVariableSized());
  return down_cast<const HandleScope*>(this);
}

template<class T>
inline MutableHandle<T> VariableSizedHandleScope::NewHandle(T* object) {
  return NewHandle(ObjPtr<T>(object));
}

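// Grows the scope by chaining a new heap-allocated fixed-size local scope once the current one
// has no remaining slots.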
template<class MirrorType>
inline MutableHandle<MirrorType> VariableSizedHandleScope::NewHandle(ObjPtr<MirrorType> ptr) {
  if (current_scope_->RemainingSlots() == 0) {
    current_scope_ = new LocalScopeType(current_scope_);
  }
  return current_scope_->NewHandle(ptr);
}

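// Like StackHandleScope, a VariableSizedHandleScope registers itself with the thread, but it has
// no fixed capacity: the first local scope lives inside this object and further local scopes are
// heap-allocated on demand (see NewHandle above).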
inline VariableSizedHandleScope::VariableSizedHandleScope(Thread* const self)
    : BaseHandleScope(self->GetTopHandleScope()),
      self_(self),
      current_scope_(&first_scope_),
      first_scope_(/*link=*/ nullptr) {
  DCHECK_EQ(self, Thread::Current());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  self_->PushHandleScope(this);
}

inline VariableSizedHandleScope::~VariableSizedHandleScope() {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
  // Don't delete first_scope_ since it is not heap allocated.
  while (current_scope_ != &first_scope_) {
    LocalScopeType* next = down_cast<LocalScopeType*>(current_scope_->GetLink());
    delete current_scope_;
    current_scope_ = next;
  }
}

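// The queries below walk the whole chain of local scopes, so their cost is linear in the number
// of chained scopes.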
inline uint32_t VariableSizedHandleScope::NumberOfReferences() const {
  uint32_t sum = 0;
  const LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    sum += cur->NumberOfReferences();
    cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
  }
  return sum;
}

inline bool VariableSizedHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry)
    const {
  const LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    if (cur->Contains(handle_scope_entry)) {
      return true;
    }
    cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
  }
  return false;
}

template <typename Visitor>
inline void VariableSizedHandleScope::VisitRoots(Visitor& visitor) {
  LocalScopeType* cur = current_scope_;
  while (cur != nullptr) {
    cur->VisitRoots(visitor);
    cur = reinterpret_cast<LocalScopeType*>(cur->GetLink());
  }
}

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_INL_H_