/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>
#include <string_view>

#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex/dex_file_types.h"
#include "interpreter_common.h"
#include "interpreter_switch_impl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "mirror/string-inl.h"
#include "nativehelper/scoped_local_ref.h"
#include "scoped_thread_state_change-inl.h"
#include "shadow_frame-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "unstarted_runtime.h"

namespace art {
namespace interpreter {

ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return reinterpret_cast<mirror::Object*>(arg);
}

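// Dispatches a native call by matching the method's shorty (return type character
// followed by one character per parameter) against a hand-picked set of signatures.
// For example, shorty "ILI" on a static method corresponds to
//
//   jint fn(JNIEnv*, jclass, jobject, jint);
//
// since the declaring class is passed as the implicit jclass (instance methods get the
// receiver as an implicit jobject instead). Shorties not listed below abort via
// LOG(FATAL).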
static void InterpreterJni(Thread* self,
                           ArtMethod* method,
                           std::string_view shorty,
                           ObjPtr<mirror::Object> receiver,
                           uint32_t* args,
                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI
  // compiler; it should be removed and JNI-compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      using fntype = jboolean(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      using fntype = jbyte(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      using fntype = jint(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "IIZ") {
      using fntype = jint(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      using fntype = jint(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      using fntype = jshort(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      using fntype = void(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      using fntype = jboolean(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      using fntype = jboolean(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      using fntype = void(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      using fntype = void(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod()
                 << " shorty: " << shorty;
    }
  } else {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "III") {
      using fntype = jint(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << method->PrettyMethod()
                 << " shorty: " << shorty;
    }
  }
}

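// Picks the switch-interpreter instantiation: whether the runtime has an active
// transaction (used, e.g., for class initialization during compilation) is a template
// parameter of ExecuteSwitchImpl, so both variants exist and the check below selects
// one per invocation.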
NO_STACK_PROTECTOR
static JValue ExecuteSwitch(Thread* self,
                            const CodeItemDataAccessor& accessor,
                            ShadowFrame& shadow_frame,
                            JValue result_register,
                            bool interpret_one_instruction) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (Runtime::Current()->IsActiveTransaction()) {
    return ExecuteSwitchImpl<true>(
        self, accessor, shadow_frame, result_register, interpret_one_instruction);
  } else {
    return ExecuteSwitchImpl<false>(
        self, accessor, shadow_frame, result_register, interpret_one_instruction);
  }
}

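// Central interpreter entry: may hand execution to JIT-compiled code, reports
// method-entry instrumentation events (unless re-entering via deoptimization), and
// otherwise runs the switch interpreter over the shadow frame.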
NO_STACK_PROTECTOR
static inline JValue Execute(
    Thread* self,
    const CodeItemDataAccessor& accessor,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false,
    bool from_deoptimize = false) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());

  // We cache the result of NeedsDexPcEvents in the shadow frame so that we don't need to
  // call NeedsDexPcEvents on every instruction, for better performance. NeedsDexPcEvents
  // is only updated asynchronously in a SuspendAll scope, and any existing shadow frames
  // are updated with the new value, so it is safe to cache it here.
  shadow_frame.SetNotifyDexPcMoveEvents(
      Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(shadow_frame.GetMethod(), self));

  if (LIKELY(!from_deoptimize)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      CHECK_EQ(shadow_frame.GetDexPC(), 0u);
      self->AssertNoPendingException();
    }
    ArtMethod* method = shadow_frame.GetMethod();

    // If we can continue in JIT and have JITed code available, execute the JITed code.
    if (!stay_in_interpreter && !self->IsForceInterpreter() && !shadow_frame.GetForcePopFrame()) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          // Calculate the offset of the first input reg. The input registers are in the high regs.
          // It's ok to access the code item here since JIT code will have been touched by the
          // interpreter and compiler already.
          uint16_t arg_offset = accessor.RegistersSize() - accessor.InsSize();
          ArtInterpreterToCompiledCodeBridge(self, nullptr, &shadow_frame, arg_offset, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }

    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    if (UNLIKELY(instrumentation->HasMethodEntryListeners() || shadow_frame.GetForcePopFrame())) {
      instrumentation->MethodEnterEvent(self, method);
      if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
        // The caller will retry this invoke or ignore the result. Just return immediately without
        // any value.
        DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
        JValue ret = JValue();
        PerformNonStandardReturn(self,
                                 shadow_frame,
                                 ret,
                                 instrumentation,
                                 accessor.InsSize(),
                                 /* unlock_monitors= */ false);
        return ret;
      }
      if (UNLIKELY(self->IsExceptionPending())) {
        instrumentation->MethodUnwindEvent(self, method, 0);
        JValue ret = JValue();
        if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
          DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
          PerformNonStandardReturn(self,
                                   shadow_frame,
                                   ret,
                                   instrumentation,
                                   accessor.InsSize(),
                                   /* unlock_monitors= */ false);
        }
        return ret;
      }
    }
  }

  ArtMethod* method = shadow_frame.GetMethod();

  DCheckStaticState(self, method);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  DCHECK_IMPLIES(method->SkipAccessChecks(), !method->MustCountLocks());

  VLOG(interpreter) << "Interpreting " << method->PrettyMethod();

  return ExecuteSwitch(
      self, accessor, shadow_frame, result_register, /*interpret_one_instruction=*/ false);
}

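// Entry point for invoking a method through the interpreter from runtime code (e.g.
// reflection): marshals the caller-provided args into a freshly allocated shadow frame
// and interprets the code item, bridging to JNI for native methods.
//
// A minimal, hypothetical caller sketch (names invented for illustration): for a static
// method with shorty "II" (int f(int)), one might write:
//
//   JValue result;
//   uint32_t args[] = { 42u };  // one 32-bit slot per argument; wide args take two
//   EnterInterpreterFromInvoke(self, method, /* receiver= */ nullptr, args, &result,
//                              /* stay_in_interpreter= */ true);
//   int32_t ret = result.GetI();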
void EnterInterpreterFromInvoke(Thread* self,
                                ArtMethod* method,
                                ObjPtr<mirror::Object> receiver,
                                uint32_t* args,
                                JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  // This can happen if we are in forced interpreter mode and an obsolete method is called using
  // reflection.
  if (UNLIKELY(method->IsObsolete())) {
    ThrowInternalError("Attempting to invoke obsolete version of '%s'.",
                       method->PrettyMethod().c_str());
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  CodeItemDataAccessor accessor(method->DexInstructionData());
  uint16_t num_regs;
  uint16_t num_ins;
  if (accessor.HasCodeItem()) {
    num_regs = accessor.RegistersSize();
    num_ins = accessor.InsSize();
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError(receiver);
    return;
  } else {
    DCHECK(method->IsNative()) << method->PrettyMethod();
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up a shadow frame whose number of reference slots matches its number of vregs.
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
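  // shorty[0] is the return type, so parameter types start at shorty[1]; hence the
  // shorty_pos + 1 indexing below while args is walked in parallel.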
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        ObjPtr<mirror::Object> o =
            reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
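      // Wide (long/double) arguments occupy two consecutive 32-bit arg slots, low word
      // first, and fill a vreg pair; recombine the halves into one 64-bit value.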
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  if (!EnsureInitialized(self, shadow_frame)) {
    return;
  }
  self->PushShadowFrame(shadow_frame);
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, accessor, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame, since the input ones could hold stale
    // object references due to a moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

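// Returns the vreg holding the string-init receiver: vregC, the first argument register
// of the invoke-direct (or invoke-direct/range) instruction.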
static uint16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
  DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
         instr->Opcode() == Instruction::INVOKE_DIRECT);
  return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
      instr->VRegC_3rc() : instr->VRegC_35c();
}

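// Re-enters the interpreter on a chain of deoptimized shadow frames. For each frame we
// decide where to resume: the handler pc if an exception is pending, the same pc when
// the instruction must be re-executed (class-init splits, suspend checks, new-instance),
// or the next pc when the instruction already completed in compiled code (invokes,
// monitor-enter/exit). The result of each frame is threaded into the next via 'value'.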
void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    JValue* ret_val,
                                    bool from_code,
                                    DeoptimizationMethodType deopt_method_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // How many frames we have executed.
  size_t frame_cnt = 0;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      DCHECK(self->GetException() != Thread::GetDeoptimizationException());
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception throw
      // event to the instrumentation. Skip throw listeners for the first frame. The deopt check
      // should happen after the throw listener is called, as the throw listener can trigger a
      // deoptimization.
      new_dex_pc = MoveToExceptionHandler(self,
                                          *shadow_frame,
                                          /* skip_listeners= */ false,
                                          /* skip_throw_listener= */ frame_cnt == 0) ?
          shadow_frame->GetDexPC() :
          dex::kDexNoIndex;
    } else if (!from_code) {
      // Deoptimization is not called from code directly.
      const Instruction* instr = &accessor.InstructionAt(dex_pc);
      if (deopt_method_type == DeoptimizationMethodType::kKeepDexPc ||
          shadow_frame->GetForceRetryInstruction()) {
        DCHECK(frame_cnt == 0 || shadow_frame->GetForceRetryInstruction())
            << "frame_cnt: " << frame_cnt
            << " force-retry: " << shadow_frame->GetForceRetryInstruction();
        // Need to re-execute the dex instruction.
        // (1) An invocation might be split into class initialization and invoke.
        //     In this case, the invoke should not be skipped.
        // (2) A suspend check should also execute the dex instruction at the
        //     corresponding dex pc.
        // If the ForceRetryInstruction bit is set, this must be the second frame (the first being
        // the one that is being popped).
        DCHECK_EQ(new_dex_pc, dex_pc);
        shadow_frame->SetForceRetryInstruction(false);
      } else if (instr->Opcode() == Instruction::MONITOR_ENTER ||
                 instr->Opcode() == Instruction::MONITOR_EXIT) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // A non-idempotent dex instruction should not be re-executed.
        // On the other hand, if a MONITOR_ENTER is at the dex_pc of a suspend
        // check, that MONITOR_ENTER should be executed. That case is handled
        // above.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->IsInvoke()) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        if (IsStringInit(*instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling the string constructor in the original dex code doesn't generate a result
          // value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // A NEW_INSTANCE is simply re-executed, including
        // "new-instance String" which is compiled into a call into
        // StringFactory.newEmptyString().
        DCHECK_EQ(new_dex_pc, dex_pc);
      } else {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // By default, we re-execute the dex instruction: since it is not an invoke, we
        // don't have to decode it in order to move a result into the right vreg. All slow
        // paths have been audited to be idempotent except monitor-enter/exit and
        // invocation stubs.
        // TODO: move result and advance dex pc. That also requires that we
        // can tell the return type of a runtime method, possibly by decoding
        // the dex instruction at the caller.
        DCHECK_EQ(new_dex_pc, dex_pc);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
      DCHECK_EQ(frame_cnt, 0u);
      DCHECK_EQ(new_dex_pc, dex_pc);
    }
    if (new_dex_pc != dex::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self,
                      accessor,
                      *shadow_frame,
                      value,
                      /* stay_in_interpreter= */ true,
                      /* from_deoptimize= */ true);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Subsequent deoptimized shadow frames must be at an invocation point, so advance the
    // dex pc past the invoke instruction.
    from_code = false;
    deopt_method_type = DeoptimizationMethodType::kDefault;
    frame_cnt++;
  }
  ret_val->SetJ(value.GetJ());
}

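// Called when execution returns to the interpreter from compiled code, e.g. through the
// quick-to-interpreter bridge; lets the JIT observe the transition before interpreting.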
NO_STACK_PROTECTOR
JValue EnterInterpreterFromEntryPoint(Thread* self, const CodeItemDataAccessor& accessor,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, accessor, *shadow_frame, JValue());
}

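// Interpreter-to-interpreter call path: pushes the callee's shadow frame and executes it
// without a round trip through compiled-code stubs. Native callees are only expected
// here before the runtime has started (testing and image writing) and are routed to
// UnstartedRuntime.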
NO_STACK_PROTECTOR
void ArtInterpreterToInterpreterBridge(Thread* self,
                                       const CodeItemDataAccessor& accessor,
                                       ShadowFrame* shadow_frame,
                                       JValue* result) {
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, accessor, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    bool is_static = shadow_frame->GetMethod()->IsStatic();
    ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result);
  }

  self->PopShadowFrame();
}

void CheckInterpreterAsmConstants() {
  CheckNterpAsmConstants();
}

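// Returns whether the caller's shadow frame (linked, or found by walking the stack when
// this frame was entered from compiled code) has the force-retry-instruction bit set,
// i.e. the previous frame will re-execute its current instruction once this frame pops.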
bool PrevFrameWillRetry(Thread* self, const ShadowFrame& frame) {
  ShadowFrame* prev_frame = frame.GetLink();
  if (prev_frame == nullptr) {
    NthCallerVisitor vis(self, 1, false);
    vis.WalkStack();
    prev_frame = vis.GetCurrentShadowFrame();
    if (prev_frame == nullptr) {
      prev_frame = self->FindDebuggerShadowFrame(vis.GetFrameId());
    }
  }
  return prev_frame != nullptr && prev_frame->GetForceRetryInstruction();
}

}  // namespace interpreter
}  // namespace art