1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter.h"
18
19 #include <limits>
20
21 #include "common_throws.h"
22 #include "interpreter_common.h"
23 #include "interpreter_mterp_impl.h"
24 #include "interpreter_switch_impl.h"
25 #include "jit/jit.h"
26 #include "jit/jit_code_cache.h"
27 #include "jvalue-inl.h"
28 #include "mirror/string-inl.h"
29 #include "mterp/mterp.h"
30 #include "nativehelper/ScopedLocalRef.h"
31 #include "scoped_thread_state_change-inl.h"
32 #include "stack.h"
33 #include "thread-inl.h"
34 #include "unstarted_runtime.h"
35
36 namespace art {
37 namespace interpreter {
38
// Reinterprets a 32-bit interpreter argument slot as a managed object pointer.
// Callers must only use this for slots whose shorty type is 'L' (reference);
// the raw bits are assumed to be a valid mirror::Object* — TODO confirm this
// holds for all callers on 64-bit targets.
ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return ObjPtr<mirror::Object>(reinterpret_cast<mirror::Object*>(arg));
}
43
InterpreterJni(Thread * self,ArtMethod * method,const StringPiece & shorty,ObjPtr<mirror::Object> receiver,uint32_t * args,JValue * result)44 static void InterpreterJni(Thread* self,
45 ArtMethod* method,
46 const StringPiece& shorty,
47 ObjPtr<mirror::Object> receiver,
48 uint32_t* args,
49 JValue* result)
50 REQUIRES_SHARED(Locks::mutator_lock_) {
51 // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler,
52 // it should be removed and JNI compiled stubs used instead.
53 ScopedObjectAccessUnchecked soa(self);
54 if (method->IsStatic()) {
55 if (shorty == "L") {
56 typedef jobject (fntype)(JNIEnv*, jclass);
57 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
58 ScopedLocalRef<jclass> klass(soa.Env(),
59 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
60 jobject jresult;
61 {
62 ScopedThreadStateChange tsc(self, kNative);
63 jresult = fn(soa.Env(), klass.get());
64 }
65 result->SetL(soa.Decode<mirror::Object>(jresult));
66 } else if (shorty == "V") {
67 typedef void (fntype)(JNIEnv*, jclass);
68 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
69 ScopedLocalRef<jclass> klass(soa.Env(),
70 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
71 ScopedThreadStateChange tsc(self, kNative);
72 fn(soa.Env(), klass.get());
73 } else if (shorty == "Z") {
74 typedef jboolean (fntype)(JNIEnv*, jclass);
75 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
76 ScopedLocalRef<jclass> klass(soa.Env(),
77 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
78 ScopedThreadStateChange tsc(self, kNative);
79 result->SetZ(fn(soa.Env(), klass.get()));
80 } else if (shorty == "BI") {
81 typedef jbyte (fntype)(JNIEnv*, jclass, jint);
82 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
83 ScopedLocalRef<jclass> klass(soa.Env(),
84 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
85 ScopedThreadStateChange tsc(self, kNative);
86 result->SetB(fn(soa.Env(), klass.get(), args[0]));
87 } else if (shorty == "II") {
88 typedef jint (fntype)(JNIEnv*, jclass, jint);
89 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
90 ScopedLocalRef<jclass> klass(soa.Env(),
91 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
92 ScopedThreadStateChange tsc(self, kNative);
93 result->SetI(fn(soa.Env(), klass.get(), args[0]));
94 } else if (shorty == "LL") {
95 typedef jobject (fntype)(JNIEnv*, jclass, jobject);
96 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
97 ScopedLocalRef<jclass> klass(soa.Env(),
98 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
99 ScopedLocalRef<jobject> arg0(soa.Env(),
100 soa.AddLocalReference<jobject>(ObjArg(args[0])));
101 jobject jresult;
102 {
103 ScopedThreadStateChange tsc(self, kNative);
104 jresult = fn(soa.Env(), klass.get(), arg0.get());
105 }
106 result->SetL(soa.Decode<mirror::Object>(jresult));
107 } else if (shorty == "IIZ") {
108 typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean);
109 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
110 ScopedLocalRef<jclass> klass(soa.Env(),
111 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
112 ScopedThreadStateChange tsc(self, kNative);
113 result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
114 } else if (shorty == "ILI") {
115 typedef jint (fntype)(JNIEnv*, jclass, jobject, jint);
116 fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
117 method->GetEntryPointFromJni()));
118 ScopedLocalRef<jclass> klass(soa.Env(),
119 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
120 ScopedLocalRef<jobject> arg0(soa.Env(),
121 soa.AddLocalReference<jobject>(ObjArg(args[0])));
122 ScopedThreadStateChange tsc(self, kNative);
123 result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
124 } else if (shorty == "SIZ") {
125 typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean);
126 fntype* const fn =
127 reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
128 ScopedLocalRef<jclass> klass(soa.Env(),
129 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
130 ScopedThreadStateChange tsc(self, kNative);
131 result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
132 } else if (shorty == "VIZ") {
133 typedef void (fntype)(JNIEnv*, jclass, jint, jboolean);
134 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
135 ScopedLocalRef<jclass> klass(soa.Env(),
136 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
137 ScopedThreadStateChange tsc(self, kNative);
138 fn(soa.Env(), klass.get(), args[0], args[1]);
139 } else if (shorty == "ZLL") {
140 typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject);
141 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
142 ScopedLocalRef<jclass> klass(soa.Env(),
143 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
144 ScopedLocalRef<jobject> arg0(soa.Env(),
145 soa.AddLocalReference<jobject>(ObjArg(args[0])));
146 ScopedLocalRef<jobject> arg1(soa.Env(),
147 soa.AddLocalReference<jobject>(ObjArg(args[1])));
148 ScopedThreadStateChange tsc(self, kNative);
149 result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
150 } else if (shorty == "ZILL") {
151 typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject);
152 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
153 ScopedLocalRef<jclass> klass(soa.Env(),
154 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
155 ScopedLocalRef<jobject> arg1(soa.Env(),
156 soa.AddLocalReference<jobject>(ObjArg(args[1])));
157 ScopedLocalRef<jobject> arg2(soa.Env(),
158 soa.AddLocalReference<jobject>(ObjArg(args[2])));
159 ScopedThreadStateChange tsc(self, kNative);
160 result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
161 } else if (shorty == "VILII") {
162 typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint);
163 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
164 ScopedLocalRef<jclass> klass(soa.Env(),
165 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
166 ScopedLocalRef<jobject> arg1(soa.Env(),
167 soa.AddLocalReference<jobject>(ObjArg(args[1])));
168 ScopedThreadStateChange tsc(self, kNative);
169 fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
170 } else if (shorty == "VLILII") {
171 typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
172 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
173 ScopedLocalRef<jclass> klass(soa.Env(),
174 soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
175 ScopedLocalRef<jobject> arg0(soa.Env(),
176 soa.AddLocalReference<jobject>(ObjArg(args[0])));
177 ScopedLocalRef<jobject> arg2(soa.Env(),
178 soa.AddLocalReference<jobject>(ObjArg(args[2])));
179 ScopedThreadStateChange tsc(self, kNative);
180 fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
181 } else {
182 LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod()
183 << " shorty: " << shorty;
184 }
185 } else {
186 if (shorty == "L") {
187 typedef jobject (fntype)(JNIEnv*, jobject);
188 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
189 ScopedLocalRef<jobject> rcvr(soa.Env(),
190 soa.AddLocalReference<jobject>(receiver));
191 jobject jresult;
192 {
193 ScopedThreadStateChange tsc(self, kNative);
194 jresult = fn(soa.Env(), rcvr.get());
195 }
196 result->SetL(soa.Decode<mirror::Object>(jresult));
197 } else if (shorty == "V") {
198 typedef void (fntype)(JNIEnv*, jobject);
199 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
200 ScopedLocalRef<jobject> rcvr(soa.Env(),
201 soa.AddLocalReference<jobject>(receiver));
202 ScopedThreadStateChange tsc(self, kNative);
203 fn(soa.Env(), rcvr.get());
204 } else if (shorty == "LL") {
205 typedef jobject (fntype)(JNIEnv*, jobject, jobject);
206 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
207 ScopedLocalRef<jobject> rcvr(soa.Env(),
208 soa.AddLocalReference<jobject>(receiver));
209 ScopedLocalRef<jobject> arg0(soa.Env(),
210 soa.AddLocalReference<jobject>(ObjArg(args[0])));
211 jobject jresult;
212 {
213 ScopedThreadStateChange tsc(self, kNative);
214 jresult = fn(soa.Env(), rcvr.get(), arg0.get());
215 }
216 result->SetL(soa.Decode<mirror::Object>(jresult));
217 ScopedThreadStateChange tsc(self, kNative);
218 } else if (shorty == "III") {
219 typedef jint (fntype)(JNIEnv*, jobject, jint, jint);
220 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
221 ScopedLocalRef<jobject> rcvr(soa.Env(),
222 soa.AddLocalReference<jobject>(receiver));
223 ScopedThreadStateChange tsc(self, kNative);
224 result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
225 } else {
226 LOG(FATAL) << "Do something with native method: " << method->PrettyMethod()
227 << " shorty: " << shorty;
228 }
229 }
230 }
231
// Selects which interpreter implementation Execute() dispatches to.
enum InterpreterImplKind {
  kSwitchImplKind,  // Switch-based interpreter implementation.
  kMterpImplKind    // Assembly interpreter
};

// Mterp (the assembly interpreter) is the compile-time default; Execute()
// still falls back to the switch interpreter for transactions, unstarted
// runtimes, and whenever MterpShouldSwitchInterpreters() says so.
static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind;
238
// Interprets the method of |shadow_frame| from its current dex pc and returns
// the method's result. A dex pc of 0 means a fresh invocation (not a
// deoptimization resume), in which case method-entry instrumentation is
// reported and, unless |stay_in_interpreter| is set, JIT-compiled code may be
// invoked instead of interpreting. |result_register| carries the pending
// result value across Mterp/switch-interpreter re-entries.
static inline JValue Execute(
    Thread* self,
    const DexFile::CodeItem* code_item,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());
  if (LIKELY(shadow_frame.GetDexPC() == 0)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      self->AssertNoPendingException();
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    ArtMethod *method = shadow_frame.GetMethod();

    // Report method entry; if a listener threw, report the unwind and bail out
    // before executing any of the method's code.
    if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
      instrumentation->MethodEnterEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
                                        method, 0);
      if (UNLIKELY(self->IsExceptionPending())) {
        instrumentation->MethodUnwindEvent(self,
                                           shadow_frame.GetThisObject(code_item->ins_size_),
                                           method,
                                           0);
        return JValue();
      }
    }

    // Prefer JIT-compiled code when available, unless the caller explicitly
    // asked to stay in the interpreter (e.g. for debugging).
    if (!stay_in_interpreter) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          // Calculate the offset of the first input reg. The input registers are in the high regs.
          // It's ok to access the code item here since JIT code will have been touched by the
          // interpreter and compiler already.
          uint16_t arg_offset = code_item->registers_size_ - code_item->ins_size_;
          ArtInterpreterToCompiledCodeBridge(self, nullptr, &shadow_frame, arg_offset, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }
  }

  shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  ArtMethod* method = shadow_frame.GetMethod();
  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());

  // Template parameters of ExecuteSwitchImpl are <do_access_check, transaction_active>.
  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  if (LIKELY(method->SkipAccessChecks())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      if (transaction_active) {
        // No Mterp variant - just use the switch interpreter.
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register,
                                              false);
      } else if (UNLIKELY(!Runtime::Current()->IsStarted())) {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                               false);
      } else {
        // Run Mterp; on any instruction it cannot handle, single-step that
        // instruction with the switch interpreter and re-enter Mterp.
        while (true) {
          // Mterp does not support all instrumentation/debugging.
          if (MterpShouldSwitchInterpreters() != 0) {
            return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                                   false);
          }
          bool returned = ExecuteMterpImpl(self, code_item, &shadow_frame, &result_register);
          if (returned) {
            return result_register;
          } else {
            // Mterp didn't like that instruction. Single-step it with the reference interpreter.
            result_register = ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame,
                                                              result_register, true);
            if (shadow_frame.GetDexPC() == DexFile::kDexNoIndex) {
              // Single-stepped a return or an exception not handled locally. Return to caller.
              return result_register;
            }
          }
        }
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind);
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register,
                                              false);
      } else {
        return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register,
                                               false);
      }
    }
  } else {
    // Enter the "with access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      // No access check variants for Mterp. Just use the switch version.
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register,
                                              false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind);
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register,
                                              false);
      }
    }
  }
}
361
// Invokes |method| through the interpreter from runtime code (e.g.
// reflection). Builds a shadow frame, marshals |receiver| and the packed
// 32-bit |args| words into vregs (longs/doubles occupy two slots, low word
// first), ensures a static method's class is initialized, then interprets the
// method — or, for native methods, dispatches to UnstartedRuntime/
// InterpreterJni. On failure an exception is left pending and |result| is not
// written. When |stay_in_interpreter| is true, Execute() will not transfer to
// JIT-compiled code.
void EnterInterpreterFromInvoke(Thread* self,
                                ArtMethod* method,
                                ObjPtr<mirror::Object> receiver,
                                uint32_t* args,
                                JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  // Bail out with a StackOverflowError if there isn't enough stack left.
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  // This can happen if we are in forced interpreter mode and an obsolete method is called using
  // reflection.
  if (UNLIKELY(method->IsObsolete())) {
    ThrowInternalError("Attempting to invoke obsolete version of '%s'.",
                       method->PrettyMethod().c_str());
    return;
  }

  // Suspension is forbidden until the raw |args| have been copied into the
  // shadow frame: they may hold references a moving GC would not update.
  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  uint16_t num_regs;
  uint16_t num_ins;
  if (code_item != nullptr) {
    num_regs = code_item->registers_size_;
    num_ins = code_item->ins_size_;
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError();
    return;
  } else {
    // Native methods have no code item; size the frame from the shorty.
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  self->PushShadowFrame(shadow_frame);

  // Input registers live in the high vregs; copy receiver + args there.
  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver.Ptr());
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
  // shorty[0] is the return type, so argument types start at shorty_pos + 1.
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        ObjPtr<mirror::Object> o =
            reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o.Ptr());
        break;
      }
      case 'J': case 'D': {
        // Wide values span two arg words: low word first, then high word.
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, code_item, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // references pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}
469
IsStringInit(const Instruction * instr,ArtMethod * caller)470 static bool IsStringInit(const Instruction* instr, ArtMethod* caller)
471 REQUIRES_SHARED(Locks::mutator_lock_) {
472 if (instr->Opcode() == Instruction::INVOKE_DIRECT ||
473 instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) {
474 // Instead of calling ResolveMethod() which has suspend point and can trigger
475 // GC, look up the callee method symbolically.
476 uint16_t callee_method_idx = (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
477 instr->VRegB_3rc() : instr->VRegB_35c();
478 const DexFile* dex_file = caller->GetDexFile();
479 const DexFile::MethodId& method_id = dex_file->GetMethodId(callee_method_idx);
480 const char* class_name = dex_file->StringByTypeIdx(method_id.class_idx_);
481 const char* method_name = dex_file->GetMethodName(method_id);
482 // Compare method's class name and method name against string init.
483 // It's ok since it's not allowed to create your own java/lang/String.
484 // TODO: verify that assumption.
485 if ((strcmp(class_name, "Ljava/lang/String;") == 0) &&
486 (strcmp(method_name, "<init>") == 0)) {
487 return true;
488 }
489 }
490 return false;
491 }
492
GetReceiverRegisterForStringInit(const Instruction * instr)493 static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
494 DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
495 instr->Opcode() == Instruction::INVOKE_DIRECT);
496 return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
497 instr->VRegC_3rc() : instr->VRegC_35c();
498 }
499
// Resumes interpretation of a chain of deoptimized shadow frames, walking the
// linked frames from |shadow_frame| outwards and deleting each after it runs.
// |ret_val| supplies the last known result on entry (e.g. the value produced
// by compiled code before deoptimizing) and receives the final result on
// exit. |from_code| indicates the first frame deoptimized from a point where
// the current instruction has NOT yet fully executed, so its dex pc must not
// be advanced.
void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    bool from_code,
                                    JValue* ret_val)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // Are we executing the first shadow frame?
  bool first = true;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    const DexFile::CodeItem* code_item = shadow_frame->GetMethod()->GetCodeItem();
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
      // the instrumentation. To prevent from reporting it a second time, we simply pass a
      // null Instrumentation*.
      const instrumentation::Instrumentation* const instrumentation =
          first ? nullptr : Runtime::Current()->GetInstrumentation();
      uint32_t found_dex_pc = FindNextInstructionFollowingException(self, *shadow_frame, dex_pc,
                                                                    instrumentation);
      new_dex_pc = found_dex_pc;  // the dex pc of a matching catch handler
                                  // or DexFile::kDexNoIndex if there is none.
    } else if (!from_code) {
      // For the debugger and full deoptimization stack, we must go past the invoke
      // instruction, as it already executed.
      // TODO: should be tested more once b/17586779 is fixed.
      const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
      if (instr->IsInvoke()) {
        if (IsStringInit(instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling string constructor in the original dex code doesn't generate a result value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // It's possible to deoptimize at a NEW_INSTANCE dex instruciton that's for a
        // java string, which is turned into a call into StringFactory.newEmptyString();
        // Move the StringFactory.newEmptyString() result into the destination register.
        DCHECK(value.GetL()->IsString());
        shadow_frame->SetVRegReference(instr->VRegA_21c(), value.GetL());
        // new-instance doesn't generate a result value.
        value.SetJ(0);
        // Skip the dex instruction since we essentially come back from an invocation.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
        if (kIsDebugBuild) {
          ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
          // This is a suspend point. But it's ok since value has been set into shadow_frame.
          ObjPtr<mirror::Class> klass = class_linker->ResolveType(
              dex::TypeIndex(instr->VRegB_21c()), shadow_frame->GetMethod());
          DCHECK(klass->IsStringClass());
        }
      } else {
        CHECK(false) << "Unexpected instruction opcode " << instr->Opcode()
                     << " at dex_pc " << dex_pc
                     << " of method: " << ArtMethod::PrettyMethod(shadow_frame->GetMethod(), false);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
    }
    // kDexNoIndex means an unhandled exception: skip execution and let it
    // propagate to the next (outer) frame on the following iteration.
    if (new_dex_pc != DexFile::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self, code_item, *shadow_frame, value);
    }
    // Unlink and free the frame we just finished before moving outwards.
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Following deoptimizations of shadow frames must pass the invoke instruction.
    from_code = false;
    first = false;
  }
  ret_val->SetJ(value.GetJ());
}
585
EnterInterpreterFromEntryPoint(Thread * self,const DexFile::CodeItem * code_item,ShadowFrame * shadow_frame)586 JValue EnterInterpreterFromEntryPoint(Thread* self, const DexFile::CodeItem* code_item,
587 ShadowFrame* shadow_frame) {
588 DCHECK_EQ(self, Thread::Current());
589 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
590 if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
591 ThrowStackOverflowError(self);
592 return JValue();
593 }
594
595 jit::Jit* jit = Runtime::Current()->GetJit();
596 if (jit != nullptr) {
597 jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
598 }
599 return Execute(self, code_item, *shadow_frame, JValue());
600 }
601
// Bridge used when interpreted code invokes another method that must also be
// interpreted. Pushes |shadow_frame| (already populated by the caller),
// ensures static methods' classes are initialized, runs the method via
// Execute() — or UnstartedRuntime::Jni for native methods before the runtime
// has started — and writes the raw result bits into |result|. Pops the frame
// before returning, including on the initialization-failure path.
void ArtInterpreterToInterpreterBridge(Thread* self,
                                       const DexFile::CodeItem* code_item,
                                       ShadowFrame* shadow_frame,
                                       JValue* result) {
  // Bail out with a StackOverflowError if there isn't enough stack left.
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  const bool is_static = method->IsStatic();
  if (is_static) {
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      // The wrapper keeps |declaring_class| updated if initialization moves it (GC).
      HandleWrapperObjPtr<mirror::Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          self, h_declaring_class, true, true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, code_item, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result);
  }

  self->PopShadowFrame();
}
644
// Forwards to the Mterp consistency check that validates the constants baked
// into the hand-written assembly against the C++ definitions.
void CheckInterpreterAsmConstants() {
  CheckMterpAsmConstants();
}
648
// Initializes the Mterp-related thread-local state for |self|.
void InitInterpreterTls(Thread* self) {
  InitMterpTls(self);
}
652
653 } // namespace interpreter
654 } // namespace art
655