/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter_common.h"

#include <limits>

#include "mirror/string-inl.h"

namespace art {
namespace interpreter {

// Hand select a number of methods to be run in a not yet started runtime without using JNI.
static void UnstartedRuntimeJni(Thread* self, ArtMethod* method,
                                Object* receiver, uint32_t* args, JValue* result)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  std::string name(PrettyMethod(method));
  if (name == "java.lang.Object dalvik.system.VMRuntime.newUnpaddedArray(java.lang.Class, int)") {
    int32_t length = args[1];
    DCHECK_GE(length, 0);
    mirror::Class* element_class = reinterpret_cast<Object*>(args[0])->AsClass();
    Runtime* runtime = Runtime::Current();
    mirror::Class* array_class = runtime->GetClassLinker()->FindArrayClass(self, &element_class);
    DCHECK(array_class != nullptr);
    gc::AllocatorType allocator = runtime->GetHeap()->GetCurrentAllocator();
    result->SetL(mirror::Array::Alloc<true>(self, array_class, length,
                                            array_class->GetComponentSize(), allocator, true));
  } else if (name == "java.lang.ClassLoader dalvik.system.VMStack.getCallingClassLoader()") {
    result->SetL(NULL);
  } else if (name == "java.lang.Class dalvik.system.VMStack.getStackClass2()") {
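    // Walk up three stack frames and return the declaring class of that caller.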
    NthCallerVisitor visitor(self, 3);
    visitor.WalkStack();
    result->SetL(visitor.caller->GetDeclaringClass());
  } else if (name == "double java.lang.Math.log(double)") {
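    // The 64-bit double argument is passed as two 32-bit arg slots, low word first;
    // reassemble the raw bits before converting.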
    JValue value;
    value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]);
    result->SetD(log(value.GetD()));
  } else if (name == "java.lang.String java.lang.Class.getNameNative()") {
    StackHandleScope<1> hs(self);
    result->SetL(mirror::Class::ComputeName(hs.NewHandle(receiver->AsClass())));
  } else if (name == "int java.lang.Float.floatToRawIntBits(float)") {
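    // For the float<->int bit-conversion intrinsics the raw 32 bits are passed through
    // unchanged: JValue stores primitives in a union, so the bits stored with SetI read
    // back correctly as either an int or a float.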
    result->SetI(args[0]);
  } else if (name == "float java.lang.Float.intBitsToFloat(int)") {
    result->SetI(args[0]);
  } else if (name == "double java.lang.Math.exp(double)") {
    JValue value;
    value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]);
    result->SetD(exp(value.GetD()));
  } else if (name == "java.lang.Object java.lang.Object.internalClone()") {
    result->SetL(receiver->Clone(self));
  } else if (name == "void java.lang.Object.notifyAll()") {
    receiver->NotifyAll(self);
  } else if (name == "int java.lang.String.compareTo(java.lang.String)") {
    String* rhs = reinterpret_cast<Object*>(args[0])->AsString();
    CHECK(rhs != NULL);
    result->SetI(receiver->AsString()->CompareTo(rhs));
  } else if (name == "java.lang.String java.lang.String.intern()") {
    result->SetL(receiver->AsString()->Intern());
  } else if (name == "int java.lang.String.fastIndexOf(int, int)") {
    result->SetI(receiver->AsString()->FastIndexOf(args[0], args[1]));
  } else if (name == "java.lang.Object java.lang.reflect.Array.createMultiArray(java.lang.Class, int[])") {
    StackHandleScope<2> hs(self);
    auto h_class(hs.NewHandle(reinterpret_cast<mirror::Class*>(args[0])->AsClass()));
    auto h_dimensions(hs.NewHandle(reinterpret_cast<mirror::IntArray*>(args[1])->AsIntArray()));
    result->SetL(Array::CreateMultiArray(self, h_class, h_dimensions));
  } else if (name == "java.lang.Object java.lang.Throwable.nativeFillInStackTrace()") {
    ScopedObjectAccessUnchecked soa(self);
    if (Runtime::Current()->IsActiveTransaction()) {
      result->SetL(soa.Decode<Object*>(self->CreateInternalStackTrace<true>(soa)));
    } else {
      result->SetL(soa.Decode<Object*>(self->CreateInternalStackTrace<false>(soa)));
    }
  } else if (name == "int java.lang.System.identityHashCode(java.lang.Object)") {
    mirror::Object* obj = reinterpret_cast<Object*>(args[0]);
    result->SetI((obj != nullptr) ? obj->IdentityHashCode() : 0);
  } else if (name == "boolean java.nio.ByteOrder.isLittleEndian()") {
    result->SetZ(JNI_TRUE);
  } else if (name == "boolean sun.misc.Unsafe.compareAndSwapInt(java.lang.Object, long, int, int)") {
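    // args[0] is the target object; the 64-bit field offset is split across args[1]
    // (low word) and args[2] (high word), followed by the expected and new values.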
    Object* obj = reinterpret_cast<Object*>(args[0]);
    jlong offset = (static_cast<uint64_t>(args[2]) << 32) | args[1];
    jint expectedValue = args[3];
    jint newValue = args[4];
    bool success;
    if (Runtime::Current()->IsActiveTransaction()) {
      success = obj->CasFieldStrongSequentiallyConsistent32<true>(MemberOffset(offset),
                                                                  expectedValue, newValue);
    } else {
      success = obj->CasFieldStrongSequentiallyConsistent32<false>(MemberOffset(offset),
                                                                   expectedValue, newValue);
    }
    result->SetZ(success ? JNI_TRUE : JNI_FALSE);
  } else if (name == "void sun.misc.Unsafe.putObject(java.lang.Object, long, java.lang.Object)") {
    Object* obj = reinterpret_cast<Object*>(args[0]);
    jlong offset = (static_cast<uint64_t>(args[2]) << 32) | args[1];
    Object* newValue = reinterpret_cast<Object*>(args[3]);
    if (Runtime::Current()->IsActiveTransaction()) {
      obj->SetFieldObject<true>(MemberOffset(offset), newValue);
    } else {
      obj->SetFieldObject<false>(MemberOffset(offset), newValue);
    }
  } else if (name == "int sun.misc.Unsafe.getArrayBaseOffsetForComponentType(java.lang.Class)") {
    mirror::Class* component = reinterpret_cast<Object*>(args[0])->AsClass();
    Primitive::Type primitive_type = component->GetPrimitiveType();
    result->SetI(mirror::Array::DataOffset(Primitive::ComponentSize(primitive_type)).Int32Value());
  } else if (name == "int sun.misc.Unsafe.getArrayIndexScaleForComponentType(java.lang.Class)") {
    mirror::Class* component = reinterpret_cast<Object*>(args[0])->AsClass();
    Primitive::Type primitive_type = component->GetPrimitiveType();
    result->SetI(Primitive::ComponentSize(primitive_type));
  } else if (Runtime::Current()->IsActiveTransaction()) {
    AbortTransaction(self, "Attempt to invoke native method in non-started runtime: %s",
                     name.c_str());
  } else {
    LOG(FATAL) << "Calling native method " << PrettyMethod(method) << " in an unstarted "
        "non-transactional runtime";
  }
}

static void InterpreterJni(Thread* self, ArtMethod* method, const StringPiece& shorty,
                           Object* receiver, uint32_t* args, JValue* result)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI
  //       compiler; it should be removed and JNI-compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
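    // Dispatch on the method's shorty: cast the registered native entry point to a
    // function pointer with the matching JNI signature, marshal the arguments into JNI
    // types, and make the call with the thread switched to the native state.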
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      typedef jboolean (fntype)(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      typedef jbyte (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      typedef jint (fntype)(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "IIZ") {
      typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      typedef jint (fntype)(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << PrettyMethod(method)
                 << " shorty: " << shorty;
    }
  } else {
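    // Instance methods: same shorty-based dispatch, but the receiver is passed as a
    // jobject in place of the declaring class.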
    if (shorty == "L") {
      typedef jobject (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "V") {
      typedef void (fntype)(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      typedef jobject (fntype)(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(reinterpret_cast<Object*>(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<Object*>(jresult));
    } else if (shorty == "III") {
      typedef jint (fntype)(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(method->GetNativeMethod()));
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << PrettyMethod(method)
                 << " shorty: " << shorty;
    }
  }
}

enum InterpreterImplKind {
  kSwitchImpl,            // Switch-based interpreter implementation.
  kComputedGotoImplKind   // Computed-goto-based interpreter implementation.
};

#if !defined(__clang__)
static constexpr InterpreterImplKind kInterpreterImplKind = kComputedGotoImplKind;
#else
// Clang 3.4 fails to build the goto interpreter implementation.
static constexpr InterpreterImplKind kInterpreterImplKind = kSwitchImpl;
template<bool do_access_check, bool transaction_active>
JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
                       ShadowFrame& shadow_frame, JValue result_register) {
  LOG(FATAL) << "UNREACHABLE";
  exit(0);
}
// Explicit definitions of ExecuteGotoImpl.
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, false>(Thread* self, MethodHelper& mh,
                                    const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, false>(Thread* self, MethodHelper& mh,
                                     const DexFile::CodeItem* code_item,
                                     ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<true, true>(Thread* self, MethodHelper& mh,
                                   const DexFile::CodeItem* code_item,
                                   ShadowFrame& shadow_frame, JValue result_register);
template<> SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
JValue ExecuteGotoImpl<false, true>(Thread* self, MethodHelper& mh,
                                    const DexFile::CodeItem* code_item,
                                    ShadowFrame& shadow_frame, JValue result_register);
#endif

static JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
                      ShadowFrame& shadow_frame, JValue result_register)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

static inline JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
                             ShadowFrame& shadow_frame, JValue result_register) {
  DCHECK(shadow_frame.GetMethod() == mh.GetMethod() ||
         shadow_frame.GetMethod()->GetDeclaringClass()->IsProxyClass());
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());
  shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

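  // Select among the four interpreter specializations: access checks are only needed for
  // methods that were not preverified, and the transactional variants are used while a
  // transaction is active (e.g. during compile-time class initialization).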
  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  if (LIKELY(shadow_frame.GetMethod()->IsPreverified())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kSwitchImpl) {
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, mh, code_item, shadow_frame, result_register);
      } else {
        return ExecuteSwitchImpl<false, false>(self, mh, code_item, shadow_frame, result_register);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<false, true>(self, mh, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<false, false>(self, mh, code_item, shadow_frame, result_register);
      }
    }
  } else {
    // Enter the "with access check" interpreter.
    if (kInterpreterImplKind == kSwitchImpl) {
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, mh, code_item, shadow_frame, result_register);
      } else {
        return ExecuteSwitchImpl<true, false>(self, mh, code_item, shadow_frame, result_register);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kComputedGotoImplKind);
      if (transaction_active) {
        return ExecuteGotoImpl<true, true>(self, mh, code_item, shadow_frame, result_register);
      } else {
        return ExecuteGotoImpl<true, false>(self, mh, code_item, shadow_frame, result_register);
      }
    }
  }
}

void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver,
                                uint32_t* args, JValue* result) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  uint16_t num_regs;
  uint16_t num_ins;
  if (code_item != NULL) {
    num_regs = code_item->registers_size_;
    num_ins = code_item->ins_size_;
  } else if (method->IsAbstract()) {
    self->EndAssertNoThreadSuspension(old_cause);
    ThrowAbstractMethodError(method);
    return;
  } else {
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
  ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, last_shadow_frame, method, 0, memory));
  self->PushShadowFrame(shadow_frame);

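  // Copy the incoming arguments into the shadow frame. Per the Dex calling convention the
  // arguments occupy the last num_ins registers, so copying starts at num_regs - num_ins.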
  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != NULL);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
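  // shorty[0] is the return type, so parameter types start at shorty[1]. Long and double
  // arguments occupy two 32-bit arg slots and two vregs.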
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        Object* o = reinterpret_cast<StackReference<Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(method));
    JValue r = Execute(self, mh, code_item, *shadow_frame, JValue());
    if (result != NULL) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntimeJni(self, method, receiver, args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

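// Executes the chain of shadow frames built during deoptimization, from the innermost frame
// outwards, threading each frame's result into its caller as the incoming result value.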
void EnterInterpreterFromDeoptimize(Thread* self, ShadowFrame* shadow_frame, JValue* ret_val)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  while (shadow_frame != NULL) {
    self->SetTopOfShadowStack(shadow_frame);
    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(shadow_frame->GetMethod()));
    const DexFile::CodeItem* code_item = mh.GetMethod()->GetCodeItem();
    value = Execute(self, mh, code_item, *shadow_frame, value);
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    delete old_frame;
  }
  ret_val->SetJ(value.GetJ());
}

JValue EnterInterpreterFromStub(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
                                ShadowFrame& shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  return Execute(self, mh, code_item, shadow_frame, JValue());
}

extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
                                                  const DexFile::CodeItem* code_item,
                                                  ShadowFrame* shadow_frame, JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    mirror::Class* declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      HandleWrapper<Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          h_declaring_class, true, true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!method->IsNative())) {
    result->SetJ(Execute(self, mh, code_item, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    Object* receiver = method->IsStatic() ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    UnstartedRuntimeJni(self, method, receiver, args, result);
  }

  self->PopShadowFrame();
}

}  // namespace interpreter
}  // namespace art