1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_RUNTIME_INTERPRETER_INTERPRETER_SWITCH_IMPL_INL_H_
18 #define ART_RUNTIME_INTERPRETER_INTERPRETER_SWITCH_IMPL_INL_H_
19
20 #include "interpreter_switch_impl.h"
21
22 #include "base/enums.h"
23 #include "base/globals.h"
24 #include "base/memory_tool.h"
25 #include "base/quasi_atomic.h"
26 #include "dex/dex_file_types.h"
27 #include "dex/dex_instruction_list.h"
28 #include "experimental_flags.h"
29 #include "handle_scope.h"
30 #include "interpreter_common.h"
31 #include "interpreter/shadow_frame.h"
32 #include "jit/jit-inl.h"
33 #include "jvalue-inl.h"
34 #include "mirror/string-alloc-inl.h"
35 #include "mirror/throwable.h"
36 #include "nth_caller_visitor.h"
37 #include "safe_math.h"
38 #include "shadow_frame-inl.h"
39 #include "thread.h"
40 #include "verifier/method_verifier.h"
41
42 namespace art {
43 namespace interpreter {
44
45 // Short-lived helper class which executes single DEX bytecode. It is inlined by compiler.
46 //
47 // The function names must match the names from dex_instruction_list.h and have no arguments.
48 //
49 // Any relevant execution information is stored in the fields - it should be kept to minimum.
50 //
// Helper methods may return a boolean value - in which case 'false' always means
// "stop executing the current opcode" (which does not necessarily exit the interpreter loop).
53 //
54 template<bool do_access_check, bool transaction_active>
55 class InstructionHandler {
56 public:
  // Unlocks every monitor still held by |shadow_frame| when the frame is being
  // force-popped. With lock counting in effect the held monitors are taken
  // from the frame's LockCountData; otherwise the method verifier is asked
  // which dex registers hold locks at the current dex pc.
  template <bool kMonitorCounting>
  static NO_INLINE void UnlockHeldMonitors(Thread* self, ShadowFrame* shadow_frame)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(shadow_frame->GetForcePopFrame());
    // Unlock all monitors.
    if (kMonitorCounting && shadow_frame->GetMethod()->MustCountLocks()) {
      // Get the monitors from the shadow-frame monitor-count data.
      shadow_frame->GetLockCountData().VisitMonitors(
        [&](mirror::Object** obj) REQUIRES_SHARED(Locks::mutator_lock_) {
          // Since we don't use the 'obj' pointer after the DoMonitorExit everything should be fine
          // WRT suspension.
          DoMonitorExit<do_assignability_check>(self, shadow_frame, *obj);
        });
    } else {
      // No per-frame lock counts: recompute held locks from the verifier.
      std::vector<verifier::MethodVerifier::DexLockInfo> locks;
      verifier::MethodVerifier::FindLocksAtDexPc(shadow_frame->GetMethod(),
                                                 shadow_frame->GetDexPC(),
                                                 &locks,
                                                 Runtime::Current()->GetTargetSdkVersion());
      for (const auto& reg : locks) {
        if (UNLIKELY(reg.dex_registers.empty())) {
          LOG(ERROR) << "Unable to determine reference locked by "
                     << shadow_frame->GetMethod()->PrettyMethod() << " at pc "
                     << shadow_frame->GetDexPC();
        } else {
          DoMonitorExit<do_assignability_check>(
              self, shadow_frame, shadow_frame->GetVRegReference(*reg.dex_registers.begin()));
        }
      }
    }
  }
88
  // Checks for a forced frame pop (a non-standard exit, e.g. requested by a
  // debugger/jvmti agent). When set: unlocks all held monitors, runs the
  // structured-locking exit check, sends method-exit/frame-pop events if any
  // listener needs them, stores an empty result and requests interpreter-loop
  // exit. Returns false when the current opcode must stop executing.
  ALWAYS_INLINE WARN_UNUSED bool CheckForceReturn()
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
      DCHECK(PrevFrameWillRetry(self, shadow_frame))
          << "Pop frame forced without previous frame ready to retry instruction!";
      DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
      UnlockHeldMonitors<do_assignability_check>(self, &shadow_frame);
      DoMonitorCheckOnExit<do_assignability_check>(self, &shadow_frame);
      if (UNLIKELY(NeedsMethodExitEvent(instrumentation))) {
        SendMethodExitEvents(self,
                             instrumentation,
                             shadow_frame,
                             shadow_frame.GetThisObject(Accessor().InsSize()),
                             shadow_frame.GetMethod(),
                             inst->GetDexPc(Insns()),
                             JValue());
      }
      ctx->result = JValue(); /* Handled in caller. */
      exit_interpreter_loop = true;
      return false;
    }
    return true;
  }
112
  // Slow path for a pending exception (NO_INLINE to keep the hot loop small).
  // Reports through |instr| and moves |inst| to the matching catch handler in
  // this frame if one exists; otherwise runs the structured-locking exit
  // check, stores an empty result and requests interpreter-loop exit.
  // Always returns false: the current opcode must not continue executing.
  NO_INLINE WARN_UNUSED bool HandlePendingExceptionWithInstrumentationImpl(
      const instrumentation::Instrumentation* instr)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(self->IsExceptionPending());
    self->AllowThreadSuspension();
    if (!CheckForceReturn()) {
      return false;
    }
    if (!MoveToExceptionHandler(self, shadow_frame, instr)) {
      /* Structured locking is to be enforced for abnormal termination, too. */
      DoMonitorCheckOnExit<do_assignability_check>(self, &shadow_frame);
      if (ctx->interpret_one_instruction) {
        /* Signal mterp to return to caller */
        shadow_frame.SetDexPC(dex::kDexNoIndex);
      }
      ctx->result = JValue(); /* Handled in caller. */
      exit_interpreter_loop = true;
      return false;  // Return to caller.
    }
    if (!CheckForceReturn()) {
      return false;
    }
    // MoveToExceptionHandler updated the frame's dex pc; reposition |inst|.
    int32_t displacement =
        static_cast<int32_t>(shadow_frame.GetDexPC()) - static_cast<int32_t>(dex_pc);
    inst = inst->RelativeAt(displacement);
    return false;  // Stop executing this opcode and continue in the exception handler.
  }
140
  // Forwards the call to the NO_INLINE HandlePendingExceptionWithInstrumentationImpl.
  // Always returns false (the Impl never lets the current opcode continue).
  ALWAYS_INLINE WARN_UNUSED bool HandlePendingExceptionWithInstrumentation(
      const instrumentation::Instrumentation* instr)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // We need to help the compiler a bit to make the NO_INLINE call efficient.
    //  * All handler fields should be in registers, so we do not want to take the object
    //    address (for 'this' argument). Make a copy of the handler just for the slow path.
    //  * The modifiable fields should also be in registers, so we don't want to store their
    //    address even in the handler copy. Make a copy of them just for the call as well.
    const Instruction* inst_copy = inst;
    bool exit_loop_copy = exit_interpreter_loop;
    InstructionHandler<do_access_check, transaction_active> handler_copy(
        ctx, instrumentation, self, shadow_frame, dex_pc, inst_copy, inst_data, exit_loop_copy);
    bool result = handler_copy.HandlePendingExceptionWithInstrumentationImpl(instr);
    // Copy the mutated state back from the stack copy.
    inst = inst_copy;
    exit_interpreter_loop = exit_loop_copy;
    return result;
  }
159
  // Handles a pending exception using the frame's current instrumentation.
  // Always returns false (the current opcode must stop executing).
  ALWAYS_INLINE WARN_UNUSED bool HandlePendingException()
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return HandlePendingExceptionWithInstrumentation(instrumentation);
  }
164
  // Post-invoke bookkeeping shared by the invoke opcodes.
  //  * If a retry of this instruction was requested, clears the retry flag
  //    (and any exception the aborted invoke raised) and leaves |inst|
  //    unchanged so the same invoke re-executes on the next iteration.
  //  * Otherwise dispatches a pending exception, or advances to |next_inst|.
  // Returns false when the current opcode must stop executing.
  ALWAYS_INLINE WARN_UNUSED bool PossiblyHandlePendingExceptionOnInvokeImpl(
      bool is_exception_pending,
      const Instruction* next_inst)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(shadow_frame.GetForceRetryInstruction())) {
      /* Don't need to do anything except clear the flag and exception. We leave the */
      /* instruction the same so it will be re-executed on the next go-around.       */
      DCHECK(inst->IsInvoke());
      shadow_frame.SetForceRetryInstruction(false);
      if (UNLIKELY(is_exception_pending)) {
        DCHECK(self->IsExceptionPending());
        if (kIsDebugBuild) {
          LOG(WARNING) << "Suppressing exception for instruction-retry: "
                       << self->GetException()->Dump();
        }
        self->ClearException();
      }
    } else if (UNLIKELY(is_exception_pending)) {
      /* Should have succeeded. */
      DCHECK(!shadow_frame.GetForceRetryInstruction());
      if (!HandlePendingException()) {
        return false;
      }
    } else {
      inst = next_inst;
    }
    return true;
  }
193
  // Dispatches a pending exception (if any), otherwise advances |inst| to
  // |next_inst|. Unlike the invoke variant, an instruction retry must not be
  // requested here. Returns false when the current opcode must stop executing.
  ALWAYS_INLINE WARN_UNUSED bool PossiblyHandlePendingException(
      bool is_exception_pending,
      const Instruction* next_inst)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    /* Should only be on invoke instructions. */
    DCHECK(!shadow_frame.GetForceRetryInstruction());
    if (UNLIKELY(is_exception_pending)) {
      if (!HandlePendingException()) {
        return false;
      }
    } else {
      inst = next_inst;
    }
    return true;
  }
209
  // Enforces structured locking on method exit; if the check fails, dispatches
  // the resulting pending exception. Returns false when the current opcode
  // must stop executing.
  ALWAYS_INLINE WARN_UNUSED bool HandleMonitorChecks()
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!DoMonitorCheckOnExit<do_assignability_check>(self, &shadow_frame)) {
      if (!HandlePendingException()) {
        return false;
      }
    }
    return true;
  }
219
  // Code to run before each dex instruction: handles forced returns and, when
  // dex-pc listeners are installed, reports the DexPcMovedEvent (preserving
  // the invoke result register across the event when the next instruction is
  // move-result-object). Returns false when the current opcode must stop
  // executing.
  ALWAYS_INLINE WARN_UNUSED bool Preamble()
      REQUIRES_SHARED(Locks::mutator_lock_) {
    /* We need to put this before & after the instrumentation to avoid having to put in a */
    /* post-script macro.                                                                 */
    if (!CheckForceReturn()) {
      return false;
    }
    if (UNLIKELY(instrumentation->HasDexPcListeners())) {
      uint8_t opcode = inst->Opcode(inst_data);
      bool is_move_result_object = (opcode == Instruction::MOVE_RESULT_OBJECT);
      // Only the result register of a pending move-result-object needs to
      // survive the event (it holds a GC root the event could move).
      JValue* save_ref = is_move_result_object ? &ctx->result_register : nullptr;
      if (UNLIKELY(!DoDexPcMoveEvent(self,
                                     Accessor(),
                                     shadow_frame,
                                     dex_pc,
                                     instrumentation,
                                     save_ref))) {
        if (!HandlePendingException()) {
          return false;
        }
      }
      if (!CheckForceReturn()) {
        return false;
      }
    }
    return true;
  }
248
  // Reports a taken branch of |offset| code units to any branch listeners and
  // gives the JIT a chance to perform on-stack replacement. If OSR executed
  // the remainder of the method, stores its result and requests
  // interpreter-loop exit (returns false).
  ALWAYS_INLINE WARN_UNUSED bool BranchInstrumentation(int32_t offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(instrumentation->HasBranchListeners())) {
      instrumentation->Branch(self, shadow_frame.GetMethod(), dex_pc, offset);
    }
    JValue result;
    if (jit::Jit::MaybeDoOnStackReplacement(self,
                                            shadow_frame.GetMethod(),
                                            dex_pc,
                                            offset,
                                            &result)) {
      if (ctx->interpret_one_instruction) {
        /* OSR has completed execution of the method.  Signal mterp to return to caller */
        shadow_frame.SetDexPC(dex::kDexNoIndex);
      }
      ctx->result = result;
      exit_interpreter_loop = true;
      return false;
    }
    return true;
  }
270
HotnessUpdate()271 ALWAYS_INLINE void HotnessUpdate()
272 REQUIRES_SHARED(Locks::mutator_lock_) {
273 jit::Jit* jit = Runtime::Current()->GetJit();
274 if (jit != nullptr) {
275 jit->AddSamples(self, shadow_frame.GetMethod(), 1, /*with_backedges=*/ true);
276 }
277 }
278
HandleAsyncException()279 ALWAYS_INLINE WARN_UNUSED bool HandleAsyncException()
280 REQUIRES_SHARED(Locks::mutator_lock_) {
281 if (UNLIKELY(self->ObserveAsyncException())) {
282 if (!HandlePendingException()) {
283 return false;
284 }
285 }
286 return true;
287 }
288
  // On a backward branch: bumps the method's JIT hotness count and offers a
  // suspend point at the loop header.
  ALWAYS_INLINE void HandleBackwardBranch(int32_t offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsBackwardBranch(offset)) {
      HotnessUpdate();
      /* Record new dex pc early to have consistent suspend point at loop header. */
      shadow_frame.SetDexPC(inst->GetDexPc(Insns()));
      self->AllowThreadSuspension();
    }
  }
298
  // Unlike most other events the DexPcMovedEvent can be sent when there is a pending exception (if
  // the next instruction is MOVE_EXCEPTION). This means it needs to be handled carefully to be able
  // to detect exceptions thrown by the DexPcMovedEvent itself. These exceptions could be thrown by
  // jvmti-agents while handling breakpoint or single step events. We had to move this into its own
  // function because it was making ExecuteSwitchImpl have too large a stack.
  //
  // Returns false iff the event itself threw a new exception (which then
  // replaces any previously pending one). |save_ref|, when non-null, is a GC
  // root kept alive (and updated) across the event.
  NO_INLINE static bool DoDexPcMoveEvent(Thread* self,
                                         const CodeItemDataAccessor& accessor,
                                         const ShadowFrame& shadow_frame,
                                         uint32_t dex_pc,
                                         const instrumentation::Instrumentation* instrumentation,
                                         JValue* save_ref)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(instrumentation->HasDexPcListeners());
    StackHandleScope<2> hs(self);
    // Stash the pending exception so the event runs with a clean slate.
    Handle<mirror::Throwable> thr(hs.NewHandle(self->GetException()));
    mirror::Object* null_obj = nullptr;
    HandleWrapper<mirror::Object> h(
        hs.NewHandleWrapper(LIKELY(save_ref == nullptr) ? &null_obj : save_ref->GetGCRoot()));
    self->ClearException();
    instrumentation->DexPcMovedEvent(self,
                                     shadow_frame.GetThisObject(accessor.InsSize()),
                                     shadow_frame.GetMethod(),
                                     dex_pc);
    if (UNLIKELY(self->IsExceptionPending())) {
      // We got a new exception in the dex-pc-moved event.
      // We just let this exception replace the old one.
      // TODO It would be good to add the old exception to the
      // suppressed exceptions of the new one if possible.
      return false;
    } else {
      // Restore the exception we stashed before the event, if any.
      if (UNLIKELY(!thr.IsNull())) {
        self->SetException(thr.Get());
      }
      return true;
    }
  }
335
NeedsMethodExitEvent(const instrumentation::Instrumentation * ins)336 static bool NeedsMethodExitEvent(const instrumentation::Instrumentation* ins)
337 REQUIRES_SHARED(Locks::mutator_lock_) {
338 return ins->HasMethodExitListeners() || ins->HasWatchedFramePopListeners();
339 }
340
  // Sends the normal method exit event.
  // Returns true if the events succeeded and false if there is a pending exception.
  // Note: method-exit is suppressed for force-popped frames; the frame-popped
  // event is still delivered.
  NO_INLINE static bool SendMethodExitEvents(
      Thread* self,
      const instrumentation::Instrumentation* instrumentation,
      const ShadowFrame& frame,
      ObjPtr<mirror::Object> thiz,
      ArtMethod* method,
      uint32_t dex_pc,
      const JValue& result)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    bool had_event = false;
    // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
    if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetForcePopFrame())) {
      had_event = true;
      instrumentation->MethodExitEvent(self, thiz.Ptr(), method, dex_pc, result);
    }
    if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
      had_event = true;
      instrumentation->WatchedFramePopped(self, frame);
    }
    // Only inspect the exception state if an event actually ran.
    if (UNLIKELY(had_event)) {
      return !self->IsExceptionPending();
    } else {
      return true;
    }
  }
368
// Helper macros used by the opcode handlers below. Each expands to an early
// 'return' from the (void) handler when execution of the current opcode must
// stop (pending exception dispatched, or interpreter-loop exit requested).

#define BRANCH_INSTRUMENTATION(offset) \
  if (!BranchInstrumentation(offset)) { \
    return; \
  }

#define HANDLE_PENDING_EXCEPTION() \
  if (!HandlePendingException()) { \
    return; \
  }

#define POSSIBLY_HANDLE_PENDING_EXCEPTION(is_exception_pending, next_function) \
  if (!PossiblyHandlePendingException(is_exception_pending, inst->next_function())) { \
    return; \
  }

// Invoke-polymorphic instructions are 4 code units wide.
#define POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE_POLYMORPHIC(is_exception_pending) \
  if (!PossiblyHandlePendingExceptionOnInvokeImpl(is_exception_pending, inst->Next_4xx())) { \
    return; \
  }

// Regular invoke instructions are 3 code units wide.
#define POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(is_exception_pending) \
  if (!PossiblyHandlePendingExceptionOnInvokeImpl(is_exception_pending, inst->Next_3xx())) { \
    return; \
  }
393
  // NOP: no operation; just advances to the next instruction.
  ALWAYS_INLINE void NOP() REQUIRES_SHARED(Locks::mutator_lock_) {
    inst = inst->Next_1xx();
  }
397
MOVE()398 ALWAYS_INLINE void MOVE() REQUIRES_SHARED(Locks::mutator_lock_) {
399 shadow_frame.SetVReg(inst->VRegA_12x(inst_data),
400 shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
401 inst = inst->Next_1xx();
402 }
403
MOVE_FROM16()404 ALWAYS_INLINE void MOVE_FROM16() REQUIRES_SHARED(Locks::mutator_lock_) {
405 shadow_frame.SetVReg(inst->VRegA_22x(inst_data),
406 shadow_frame.GetVReg(inst->VRegB_22x()));
407 inst = inst->Next_2xx();
408 }
409
MOVE_16()410 ALWAYS_INLINE void MOVE_16() REQUIRES_SHARED(Locks::mutator_lock_) {
411 shadow_frame.SetVReg(inst->VRegA_32x(),
412 shadow_frame.GetVReg(inst->VRegB_32x()));
413 inst = inst->Next_3xx();
414 }
415
MOVE_WIDE()416 ALWAYS_INLINE void MOVE_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
417 shadow_frame.SetVRegLong(inst->VRegA_12x(inst_data),
418 shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
419 inst = inst->Next_1xx();
420 }
421
MOVE_WIDE_FROM16()422 ALWAYS_INLINE void MOVE_WIDE_FROM16() REQUIRES_SHARED(Locks::mutator_lock_) {
423 shadow_frame.SetVRegLong(inst->VRegA_22x(inst_data),
424 shadow_frame.GetVRegLong(inst->VRegB_22x()));
425 inst = inst->Next_2xx();
426 }
427
MOVE_WIDE_16()428 ALWAYS_INLINE void MOVE_WIDE_16() REQUIRES_SHARED(Locks::mutator_lock_) {
429 shadow_frame.SetVRegLong(inst->VRegA_32x(),
430 shadow_frame.GetVRegLong(inst->VRegB_32x()));
431 inst = inst->Next_3xx();
432 }
433
MOVE_OBJECT()434 ALWAYS_INLINE void MOVE_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
435 shadow_frame.SetVRegReference(inst->VRegA_12x(inst_data),
436 shadow_frame.GetVRegReference(inst->VRegB_12x(inst_data)));
437 inst = inst->Next_1xx();
438 }
439
MOVE_OBJECT_FROM16()440 ALWAYS_INLINE void MOVE_OBJECT_FROM16() REQUIRES_SHARED(Locks::mutator_lock_) {
441 shadow_frame.SetVRegReference(inst->VRegA_22x(inst_data),
442 shadow_frame.GetVRegReference(inst->VRegB_22x()));
443 inst = inst->Next_2xx();
444 }
445
MOVE_OBJECT_16()446 ALWAYS_INLINE void MOVE_OBJECT_16() REQUIRES_SHARED(Locks::mutator_lock_) {
447 shadow_frame.SetVRegReference(inst->VRegA_32x(),
448 shadow_frame.GetVRegReference(inst->VRegB_32x()));
449 inst = inst->Next_3xx();
450 }
451
MOVE_RESULT()452 ALWAYS_INLINE void MOVE_RESULT() REQUIRES_SHARED(Locks::mutator_lock_) {
453 shadow_frame.SetVReg(inst->VRegA_11x(inst_data), ResultRegister()->GetI());
454 inst = inst->Next_1xx();
455 }
456
MOVE_RESULT_WIDE()457 ALWAYS_INLINE void MOVE_RESULT_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
458 shadow_frame.SetVRegLong(inst->VRegA_11x(inst_data), ResultRegister()->GetJ());
459 inst = inst->Next_1xx();
460 }
461
MOVE_RESULT_OBJECT()462 ALWAYS_INLINE void MOVE_RESULT_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
463 shadow_frame.SetVRegReference(inst->VRegA_11x(inst_data), ResultRegister()->GetL());
464 inst = inst->Next_1xx();
465 }
466
  // MOVE_EXCEPTION vAA: stores the pending exception into vAA, then clears it
  // from the thread. The store happens first so the reference stays rooted.
  ALWAYS_INLINE void MOVE_EXCEPTION() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Throwable> exception = self->GetException();
    DCHECK(exception != nullptr) << "No pending exception on MOVE_EXCEPTION instruction";
    shadow_frame.SetVRegReference(inst->VRegA_11x(inst_data), exception);
    self->ClearException();
    inst = inst->Next_1xx();
  }
474
  // RETURN_VOID_NO_BARRIER: returns from a void method without the
  // constructor memory fence. Checks structured locking, sends method-exit /
  // frame-pop events, then stores an empty result and exits the loop.
  ALWAYS_INLINE void RETURN_VOID_NO_BARRIER() REQUIRES_SHARED(Locks::mutator_lock_) {
    JValue result;
    self->AllowThreadSuspension();
    if (!HandleMonitorChecks()) {
      return;
    }
    if (UNLIKELY(NeedsMethodExitEvent(instrumentation) &&
                 !SendMethodExitEvents(self,
                                       instrumentation,
                                       shadow_frame,
                                       shadow_frame.GetThisObject(Accessor().InsSize()),
                                       shadow_frame.GetMethod(),
                                       inst->GetDexPc(Insns()),
                                       result))) {
      // An exit event threw; dispatch without re-reporting through instrumentation.
      if (!HandlePendingExceptionWithInstrumentation(nullptr)) {
        return;
      }
    }
    if (ctx->interpret_one_instruction) {
      /* Signal mterp to return to caller */
      shadow_frame.SetDexPC(dex::kDexNoIndex);
    }
    ctx->result = result;
    exit_interpreter_loop = true;
  }
500
  // RETURN_VOID: like RETURN_VOID_NO_BARRIER but first issues
  // ThreadFenceForConstructor() so stores made by a constructor are visible
  // before the object can be published.
  ALWAYS_INLINE void RETURN_VOID() REQUIRES_SHARED(Locks::mutator_lock_) {
    QuasiAtomic::ThreadFenceForConstructor();
    JValue result;
    self->AllowThreadSuspension();
    if (!HandleMonitorChecks()) {
      return;
    }
    if (UNLIKELY(NeedsMethodExitEvent(instrumentation) &&
                 !SendMethodExitEvents(self,
                                       instrumentation,
                                       shadow_frame,
                                       shadow_frame.GetThisObject(Accessor().InsSize()),
                                       shadow_frame.GetMethod(),
                                       inst->GetDexPc(Insns()),
                                       result))) {
      // An exit event threw; dispatch without re-reporting through instrumentation.
      if (!HandlePendingExceptionWithInstrumentation(nullptr)) {
        return;
      }
    }
    if (ctx->interpret_one_instruction) {
      /* Signal mterp to return to caller */
      shadow_frame.SetDexPC(dex::kDexNoIndex);
    }
    ctx->result = result;
    exit_interpreter_loop = true;
  }
527
  // RETURN vAA: returns a 32-bit value. The result's high word is zeroed
  // first so the JValue is fully defined.
  ALWAYS_INLINE void RETURN() REQUIRES_SHARED(Locks::mutator_lock_) {
    JValue result;
    result.SetJ(0);
    result.SetI(shadow_frame.GetVReg(inst->VRegA_11x(inst_data)));
    self->AllowThreadSuspension();
    if (!HandleMonitorChecks()) {
      return;
    }
    if (UNLIKELY(NeedsMethodExitEvent(instrumentation) &&
                 !SendMethodExitEvents(self,
                                       instrumentation,
                                       shadow_frame,
                                       shadow_frame.GetThisObject(Accessor().InsSize()),
                                       shadow_frame.GetMethod(),
                                       inst->GetDexPc(Insns()),
                                       result))) {
      // An exit event threw; dispatch without re-reporting through instrumentation.
      if (!HandlePendingExceptionWithInstrumentation(nullptr)) {
        return;
      }
    }
    if (ctx->interpret_one_instruction) {
      /* Signal mterp to return to caller */
      shadow_frame.SetDexPC(dex::kDexNoIndex);
    }
    ctx->result = result;
    exit_interpreter_loop = true;
  }
555
  // RETURN_WIDE vAA: returns a 64-bit value from register pair vAA/vAA+1.
  ALWAYS_INLINE void RETURN_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
    JValue result;
    result.SetJ(shadow_frame.GetVRegLong(inst->VRegA_11x(inst_data)));
    self->AllowThreadSuspension();
    if (!HandleMonitorChecks()) {
      return;
    }
    if (UNLIKELY(NeedsMethodExitEvent(instrumentation) &&
                 !SendMethodExitEvents(self,
                                       instrumentation,
                                       shadow_frame,
                                       shadow_frame.GetThisObject(Accessor().InsSize()),
                                       shadow_frame.GetMethod(),
                                       inst->GetDexPc(Insns()),
                                       result))) {
      // An exit event threw; dispatch without re-reporting through instrumentation.
      if (!HandlePendingExceptionWithInstrumentation(nullptr)) {
        return;
      }
    }
    if (ctx->interpret_one_instruction) {
      /* Signal mterp to return to caller */
      shadow_frame.SetDexPC(dex::kDexNoIndex);
    }
    ctx->result = result;
    exit_interpreter_loop = true;
  }
582
  // RETURN_OBJECT vAA: returns an object reference. When assignability
  // checking is enabled, verifies the reference is an instance of the
  // method's declared return type (InternalError otherwise). The reference is
  // re-read from the vreg after any point that can trigger GC, since a moving
  // collector may have relocated the object.
  ALWAYS_INLINE void RETURN_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    JValue result;
    self->AllowThreadSuspension();
    if (!HandleMonitorChecks()) {
      return;
    }
    const size_t ref_idx = inst->VRegA_11x(inst_data);
    ObjPtr<mirror::Object> obj_result = shadow_frame.GetVRegReference(ref_idx);
    if (do_assignability_check && obj_result != nullptr) {
      ObjPtr<mirror::Class> return_type = shadow_frame.GetMethod()->ResolveReturnType();
      // Re-load since it might have moved.
      obj_result = shadow_frame.GetVRegReference(ref_idx);
      if (return_type == nullptr) {
        // Return the pending exception.
        HANDLE_PENDING_EXCEPTION();
      }
      if (!obj_result->VerifierInstanceOf(return_type)) {
        // This should never happen.
        std::string temp1, temp2;
        self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                 "Returning '%s' that is not instance of return type '%s'",
                                 obj_result->GetClass()->GetDescriptor(&temp1),
                                 return_type->GetDescriptor(&temp2));
        HANDLE_PENDING_EXCEPTION();
      }
    }
    result.SetL(obj_result);
    if (UNLIKELY(NeedsMethodExitEvent(instrumentation) &&
                 !SendMethodExitEvents(self,
                                       instrumentation,
                                       shadow_frame,
                                       shadow_frame.GetThisObject(Accessor().InsSize()),
                                       shadow_frame.GetMethod(),
                                       inst->GetDexPc(Insns()),
                                       result))) {
      // An exit event threw; dispatch without re-reporting through instrumentation.
      if (!HandlePendingExceptionWithInstrumentation(nullptr)) {
        return;
      }
    }
    // Re-load since it might have moved during the MethodExitEvent.
    result.SetL(shadow_frame.GetVRegReference(ref_idx));
    if (ctx->interpret_one_instruction) {
      /* Signal mterp to return to caller */
      shadow_frame.SetDexPC(dex::kDexNoIndex);
    }
    ctx->result = result;
    exit_interpreter_loop = true;
  }
631
CONST_4()632 ALWAYS_INLINE void CONST_4() REQUIRES_SHARED(Locks::mutator_lock_) {
633 uint4_t dst = inst->VRegA_11n(inst_data);
634 int4_t val = inst->VRegB_11n(inst_data);
635 shadow_frame.SetVReg(dst, val);
636 if (val == 0) {
637 shadow_frame.SetVRegReference(dst, nullptr);
638 }
639 inst = inst->Next_1xx();
640 }
641
CONST_16()642 ALWAYS_INLINE void CONST_16() REQUIRES_SHARED(Locks::mutator_lock_) {
643 uint8_t dst = inst->VRegA_21s(inst_data);
644 int16_t val = inst->VRegB_21s();
645 shadow_frame.SetVReg(dst, val);
646 if (val == 0) {
647 shadow_frame.SetVRegReference(dst, nullptr);
648 }
649 inst = inst->Next_2xx();
650 }
651
CONST()652 ALWAYS_INLINE void CONST() REQUIRES_SHARED(Locks::mutator_lock_) {
653 uint8_t dst = inst->VRegA_31i(inst_data);
654 int32_t val = inst->VRegB_31i();
655 shadow_frame.SetVReg(dst, val);
656 if (val == 0) {
657 shadow_frame.SetVRegReference(dst, nullptr);
658 }
659 inst = inst->Next_3xx();
660 }
661
CONST_HIGH16()662 ALWAYS_INLINE void CONST_HIGH16() REQUIRES_SHARED(Locks::mutator_lock_) {
663 uint8_t dst = inst->VRegA_21h(inst_data);
664 int32_t val = static_cast<int32_t>(inst->VRegB_21h() << 16);
665 shadow_frame.SetVReg(dst, val);
666 if (val == 0) {
667 shadow_frame.SetVRegReference(dst, nullptr);
668 }
669 inst = inst->Next_2xx();
670 }
671
CONST_WIDE_16()672 ALWAYS_INLINE void CONST_WIDE_16() REQUIRES_SHARED(Locks::mutator_lock_) {
673 shadow_frame.SetVRegLong(inst->VRegA_21s(inst_data), inst->VRegB_21s());
674 inst = inst->Next_2xx();
675 }
676
CONST_WIDE_32()677 ALWAYS_INLINE void CONST_WIDE_32() REQUIRES_SHARED(Locks::mutator_lock_) {
678 shadow_frame.SetVRegLong(inst->VRegA_31i(inst_data), inst->VRegB_31i());
679 inst = inst->Next_3xx();
680 }
681
CONST_WIDE()682 ALWAYS_INLINE void CONST_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
683 shadow_frame.SetVRegLong(inst->VRegA_51l(inst_data), inst->VRegB_51l());
684 inst = inst->Next_51l();
685 }
686
CONST_WIDE_HIGH16()687 ALWAYS_INLINE void CONST_WIDE_HIGH16() REQUIRES_SHARED(Locks::mutator_lock_) {
688 shadow_frame.SetVRegLong(inst->VRegA_21h(inst_data),
689 static_cast<uint64_t>(inst->VRegB_21h()) << 48);
690 inst = inst->Next_2xx();
691 }
692
  // CONST_STRING vAA, string@BBBB: resolves the string index and stores the
  // reference in vAA; dispatches the pending exception on resolution failure.
  ALWAYS_INLINE void CONST_STRING() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::String> s = ResolveString(self,
                                             shadow_frame,
                                             dex::StringIndex(inst->VRegB_21c()));
    if (UNLIKELY(s == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVRegReference(inst->VRegA_21c(inst_data), s);
      inst = inst->Next_2xx();
    }
  }
704
  // CONST_STRING_JUMBO vAA, string@BBBBBBBB: like CONST_STRING but with a
  // 32-bit string index (3-code-unit encoding).
  ALWAYS_INLINE void CONST_STRING_JUMBO() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::String> s = ResolveString(self,
                                             shadow_frame,
                                             dex::StringIndex(inst->VRegB_31c()));
    if (UNLIKELY(s == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVRegReference(inst->VRegA_31c(inst_data), s);
      inst = inst->Next_3xx();
    }
  }
716
  // CONST_CLASS vAA, type@BBBB: resolves (and verifies/clinit-checks) the
  // type and stores the Class reference in vAA; dispatches the pending
  // exception on failure.
  ALWAYS_INLINE void CONST_CLASS() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(inst->VRegB_21c()),
                                                     shadow_frame.GetMethod(),
                                                     self,
                                                     false,
                                                     do_access_check);
    if (UNLIKELY(c == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVRegReference(inst->VRegA_21c(inst_data), c);
      inst = inst->Next_2xx();
    }
  }
730
  // CONST_METHOD_HANDLE vAA, method_handle@BBBB: resolves the method handle
  // via the class linker and stores it in vAA; dispatches the pending
  // exception on failure.
  ALWAYS_INLINE void CONST_METHOD_HANDLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassLinker* cl = Runtime::Current()->GetClassLinker();
    ObjPtr<mirror::MethodHandle> mh = cl->ResolveMethodHandle(self,
                                                              inst->VRegB_21c(),
                                                              shadow_frame.GetMethod());
    if (UNLIKELY(mh == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVRegReference(inst->VRegA_21c(inst_data), mh);
      inst = inst->Next_2xx();
    }
  }
743
  // CONST_METHOD_TYPE vAA, proto@BBBB: resolves the method type via the class
  // linker and stores it in vAA; dispatches the pending exception on failure.
  ALWAYS_INLINE void CONST_METHOD_TYPE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassLinker* cl = Runtime::Current()->GetClassLinker();
    ObjPtr<mirror::MethodType> mt = cl->ResolveMethodType(self,
                                                          dex::ProtoIndex(inst->VRegB_21c()),
                                                          shadow_frame.GetMethod());
    if (UNLIKELY(mt == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVRegReference(inst->VRegA_21c(inst_data), mt);
      inst = inst->Next_2xx();
    }
  }
756
  // MONITOR_ENTER vAA: acquires the monitor of the object in vAA. Observes a
  // thread-requested async exception first; throws NullPointerException for a
  // null reference.
  ALWAYS_INLINE void MONITOR_ENTER() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!HandleAsyncException()) {
      return;
    }
    ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegA_11x(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    } else {
      DoMonitorEnter<do_assignability_check>(self, &shadow_frame, obj);
      POSSIBLY_HANDLE_PENDING_EXCEPTION(self->IsExceptionPending(), Next_1xx);
    }
  }
770
  // MONITOR_EXIT vAA: releases the monitor of the object in vAA. Observes a
  // thread-requested async exception first; throws NullPointerException for a
  // null reference.
  ALWAYS_INLINE void MONITOR_EXIT() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!HandleAsyncException()) {
      return;
    }
    ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegA_11x(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    } else {
      DoMonitorExit<do_assignability_check>(self, &shadow_frame, obj);
      POSSIBLY_HANDLE_PENDING_EXCEPTION(self->IsExceptionPending(), Next_1xx);
    }
  }
784
  // CHECK_CAST vAA, type@BBBB: throws ClassCastException if the non-null
  // reference in vAA is not an instance of the resolved type. Null always
  // passes the check.
  ALWAYS_INLINE void CHECK_CAST() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(inst->VRegB_21c()),
                                                     shadow_frame.GetMethod(),
                                                     self,
                                                     false,
                                                     do_access_check);
    if (UNLIKELY(c == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegA_21c(inst_data));
      if (UNLIKELY(obj != nullptr && !obj->InstanceOf(c))) {
        ThrowClassCastException(c, obj->GetClass());
        HANDLE_PENDING_EXCEPTION();
      } else {
        inst = inst->Next_2xx();
      }
    }
  }
803
  // INSTANCE_OF vA, vB, type@CCCC: stores 1 in vA if the reference in vB is a
  // non-null instance of the resolved type, otherwise 0.
  ALWAYS_INLINE void INSTANCE_OF() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(inst->VRegC_22c()),
                                                     shadow_frame.GetMethod(),
                                                     self,
                                                     false,
                                                     do_access_check);
    if (UNLIKELY(c == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
      shadow_frame.SetVReg(inst->VRegA_22c(inst_data),
                           (obj != nullptr && obj->InstanceOf(c)) ? 1 : 0);
      inst = inst->Next_2xx();
    }
  }
819
  // ARRAY_LENGTH vA, vB: stores the length of the array in vB into vA; throws
  // NullPointerException for a null array reference.
  ALWAYS_INLINE void ARRAY_LENGTH() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> array = shadow_frame.GetVRegReference(inst->VRegB_12x(inst_data));
    if (UNLIKELY(array == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVReg(inst->VRegA_12x(inst_data), array->AsArray()->GetLength());
      inst = inst->Next_1xx();
    }
  }
830
  // NEW_INSTANCE vAA, type@BBBB: resolves (and initializes if needed) the
  // class and allocates an instance, storing it in vAA. Strings use a
  // dedicated empty-string allocation path; finalizable objects are rejected
  // inside a transaction.
  ALWAYS_INLINE void NEW_INSTANCE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> obj = nullptr;
    ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(inst->VRegB_21c()),
                                                     shadow_frame.GetMethod(),
                                                     self,
                                                     false,
                                                     do_access_check);
    if (LIKELY(c != nullptr)) {
      if (UNLIKELY(c->IsStringClass())) {
        gc::AllocatorType allocator_type = Runtime::Current()->GetHeap()->GetCurrentAllocator();
        obj = mirror::String::AllocEmptyString<true>(self, allocator_type);
      } else {
        obj = AllocObjectFromCode<true>(
            c.Ptr(),
            self,
            Runtime::Current()->GetHeap()->GetCurrentAllocator());
      }
    }
    if (UNLIKELY(obj == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      obj->GetClass()->AssertInitializedOrInitializingInThread(self);
      // Don't allow finalizable objects to be allocated during a transaction since these can't
      // be finalized without a started runtime.
      if (transaction_active && obj->GetClass()->IsFinalizable()) {
        AbortTransactionF(self, "Allocating finalizable object in transaction: %s",
                          obj->PrettyTypeOf().c_str());
        HANDLE_PENDING_EXCEPTION();
      }
      shadow_frame.SetVRegReference(inst->VRegA_21c(inst_data), obj);
      inst = inst->Next_2xx();
    }
  }
864
  // new-array vA, vB, type@CCCC: allocates an array of the resolved type with
  // length taken from vB and stores the reference in vA. A negative length or
  // a failed resolution/allocation leaves an exception pending.
  ALWAYS_INLINE void NEW_ARRAY() REQUIRES_SHARED(Locks::mutator_lock_) {
    int32_t length = shadow_frame.GetVReg(inst->VRegB_22c(inst_data));
    ObjPtr<mirror::Object> obj = AllocArrayFromCode<do_access_check, true>(
        dex::TypeIndex(inst->VRegC_22c()),
        length,
        shadow_frame.GetMethod(),
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator());
    if (UNLIKELY(obj == nullptr)) {
      HANDLE_PENDING_EXCEPTION();
    } else {
      shadow_frame.SetVRegReference(inst->VRegA_22c(inst_data), obj);
      inst = inst->Next_2xx();
    }
  }
880
  // filled-new-array {vC..vG}, type@BBBB: allocates an array initialized from
  // the listed registers; the result lands in the interpreter result register.
  ALWAYS_INLINE void FILLED_NEW_ARRAY() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success =
        DoFilledNewArray<false, do_access_check, transaction_active>(inst, shadow_frame, self,
                                                                     ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_3xx);
  }
887
  // filled-new-array/range {vCCCC..vNNNN}, type@BBBB: range variant of
  // filled-new-array (first template argument <true> selects range mode).
  ALWAYS_INLINE void FILLED_NEW_ARRAY_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success =
        DoFilledNewArray<true, do_access_check, transaction_active>(inst, shadow_frame,
                                                                    self, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_3xx);
  }
894
  // fill-array-data vA, +BBBBBBBB: bulk-initializes the array in vA from an
  // inline array-data payload located at a code-unit offset from this
  // instruction.
  ALWAYS_INLINE void FILL_ARRAY_DATA() REQUIRES_SHARED(Locks::mutator_lock_) {
    // The payload offset (VRegB_31t) is in 16-bit code units relative to the
    // current instruction.
    const uint16_t* payload_addr = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
    const Instruction::ArrayDataPayload* payload =
        reinterpret_cast<const Instruction::ArrayDataPayload*>(payload_addr);
    ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegA_31t(inst_data));
    bool success = FillArrayData(obj, payload);
    if (!success) {
      // NOTE(review): this relies on HANDLE_PENDING_EXCEPTION() leaving the
      // opcode; otherwise obj->AsArray() below could dereference null -- confirm
      // against the macro definition.
      HANDLE_PENDING_EXCEPTION();
    }
    if (transaction_active) {
      // Transactions must be able to roll back the element writes.
      RecordArrayElementsInTransaction(obj->AsArray(), payload->element_count);
    }
    inst = inst->Next_3xx();
  }
909
  // throw vAA: throws the exception object in vAA. Throwing null raises NPE;
  // a non-Throwable object (only checked when do_assignability_check is set --
  // presumably an alias of the class's access-check flag, confirm at the class
  // declaration) raises InternalError.
  ALWAYS_INLINE void THROW() REQUIRES_SHARED(Locks::mutator_lock_) {
    // Give any pending async (thread-requested) exception priority.
    if (!HandleAsyncException()) {
      return;
    }
    ObjPtr<mirror::Object> exception =
        shadow_frame.GetVRegReference(inst->VRegA_11x(inst_data));
    if (UNLIKELY(exception == nullptr)) {
      ThrowNullPointerException("throw with null exception");
    } else if (do_assignability_check && !exception->GetClass()->IsThrowableClass()) {
      // This should never happen.
      std::string temp;
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Throwing '%s' that is not instance of Throwable",
                               exception->GetClass()->GetDescriptor(&temp));
    } else {
      self->SetException(exception->AsThrowable());
    }
    // All three paths above leave an exception pending on the thread.
    HANDLE_PENDING_EXCEPTION();
  }
929
  // goto +AA: unconditional branch with an 8-bit signed code-unit offset.
  ALWAYS_INLINE void GOTO() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!HandleAsyncException()) {
      return;
    }
    int8_t offset = inst->VRegA_10t(inst_data);
    BRANCH_INSTRUMENTATION(offset);
    inst = inst->RelativeAt(offset);
    // Backward branches are safepoints / OSR opportunities.
    HandleBackwardBranch(offset);
  }
939
  // goto/16 +AAAA: unconditional branch with a 16-bit signed code-unit offset.
  ALWAYS_INLINE void GOTO_16() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!HandleAsyncException()) {
      return;
    }
    int16_t offset = inst->VRegA_20t();
    BRANCH_INSTRUMENTATION(offset);
    inst = inst->RelativeAt(offset);
    HandleBackwardBranch(offset);
  }
949
  // goto/32 +AAAAAAAA: unconditional branch with a 32-bit signed code-unit
  // offset (the only goto form that can encode an offset of 0).
  ALWAYS_INLINE void GOTO_32() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!HandleAsyncException()) {
      return;
    }
    int32_t offset = inst->VRegA_30t();
    BRANCH_INSTRUMENTATION(offset);
    inst = inst->RelativeAt(offset);
    HandleBackwardBranch(offset);
  }
959
  // packed-switch vAA, +BBBBBBBB: table switch on vAA; DoPackedSwitch returns
  // the relative branch offset (fall-through offset when no case matches).
  ALWAYS_INLINE void PACKED_SWITCH() REQUIRES_SHARED(Locks::mutator_lock_) {
    int32_t offset = DoPackedSwitch(inst, shadow_frame, inst_data);
    BRANCH_INSTRUMENTATION(offset);
    inst = inst->RelativeAt(offset);
    HandleBackwardBranch(offset);
  }
966
  // sparse-switch vAA, +BBBBBBBB: key/target-pair switch on vAA; DoSparseSwitch
  // returns the relative branch offset (fall-through offset when no key matches).
  ALWAYS_INLINE void SPARSE_SWITCH() REQUIRES_SHARED(Locks::mutator_lock_) {
    int32_t offset = DoSparseSwitch(inst, shadow_frame, inst_data);
    BRANCH_INSTRUMENTATION(offset);
    inst = inst->RelativeAt(offset);
    HandleBackwardBranch(offset);
  }
973
974 #pragma clang diagnostic push
975 #pragma clang diagnostic ignored "-Wfloat-equal"
976
977
CMPL_FLOAT()978 ALWAYS_INLINE void CMPL_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
979 float val1 = shadow_frame.GetVRegFloat(inst->VRegB_23x());
980 float val2 = shadow_frame.GetVRegFloat(inst->VRegC_23x());
981 int32_t result;
982 if (val1 > val2) {
983 result = 1;
984 } else if (val1 == val2) {
985 result = 0;
986 } else {
987 result = -1;
988 }
989 shadow_frame.SetVReg(inst->VRegA_23x(inst_data), result);
990 inst = inst->Next_2xx();
991 }
992
CMPG_FLOAT()993 ALWAYS_INLINE void CMPG_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
994 float val1 = shadow_frame.GetVRegFloat(inst->VRegB_23x());
995 float val2 = shadow_frame.GetVRegFloat(inst->VRegC_23x());
996 int32_t result;
997 if (val1 < val2) {
998 result = -1;
999 } else if (val1 == val2) {
1000 result = 0;
1001 } else {
1002 result = 1;
1003 }
1004 shadow_frame.SetVReg(inst->VRegA_23x(inst_data), result);
1005 inst = inst->Next_2xx();
1006 }
1007
CMPL_DOUBLE()1008 ALWAYS_INLINE void CMPL_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
1009 double val1 = shadow_frame.GetVRegDouble(inst->VRegB_23x());
1010 double val2 = shadow_frame.GetVRegDouble(inst->VRegC_23x());
1011 int32_t result;
1012 if (val1 > val2) {
1013 result = 1;
1014 } else if (val1 == val2) {
1015 result = 0;
1016 } else {
1017 result = -1;
1018 }
1019 shadow_frame.SetVReg(inst->VRegA_23x(inst_data), result);
1020 inst = inst->Next_2xx();
1021 }
1022
1023
CMPG_DOUBLE()1024 ALWAYS_INLINE void CMPG_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
1025 double val1 = shadow_frame.GetVRegDouble(inst->VRegB_23x());
1026 double val2 = shadow_frame.GetVRegDouble(inst->VRegC_23x());
1027 int32_t result;
1028 if (val1 < val2) {
1029 result = -1;
1030 } else if (val1 == val2) {
1031 result = 0;
1032 } else {
1033 result = 1;
1034 }
1035 shadow_frame.SetVReg(inst->VRegA_23x(inst_data), result);
1036 inst = inst->Next_2xx();
1037 }
1038
1039 #pragma clang diagnostic pop
1040
1041
CMP_LONG()1042 ALWAYS_INLINE void CMP_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
1043 int64_t val1 = shadow_frame.GetVRegLong(inst->VRegB_23x());
1044 int64_t val2 = shadow_frame.GetVRegLong(inst->VRegC_23x());
1045 int32_t result;
1046 if (val1 > val2) {
1047 result = 1;
1048 } else if (val1 == val2) {
1049 result = 0;
1050 } else {
1051 result = -1;
1052 }
1053 shadow_frame.SetVReg(inst->VRegA_23x(inst_data), result);
1054 inst = inst->Next_2xx();
1055 }
1056
  // if-eq vA, vB, +CCCC: branches when the two registers hold equal values.
  ALWAYS_INLINE void IF_EQ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_22t(inst_data)) ==
        shadow_frame.GetVReg(inst->VRegB_22t(inst_data))) {
      int16_t offset = inst->VRegC_22t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 22t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1069
  // if-ne vA, vB, +CCCC: branches when the two registers differ.
  ALWAYS_INLINE void IF_NE() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_22t(inst_data)) !=
        shadow_frame.GetVReg(inst->VRegB_22t(inst_data))) {
      int16_t offset = inst->VRegC_22t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 22t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1082
  // if-lt vA, vB, +CCCC: branches when vA < vB (signed).
  ALWAYS_INLINE void IF_LT() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_22t(inst_data)) <
        shadow_frame.GetVReg(inst->VRegB_22t(inst_data))) {
      int16_t offset = inst->VRegC_22t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 22t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1095
  // if-ge vA, vB, +CCCC: branches when vA >= vB (signed).
  ALWAYS_INLINE void IF_GE() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_22t(inst_data)) >=
        shadow_frame.GetVReg(inst->VRegB_22t(inst_data))) {
      int16_t offset = inst->VRegC_22t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 22t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1108
  // if-gt vA, vB, +CCCC: branches when vA > vB (signed).
  ALWAYS_INLINE void IF_GT() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_22t(inst_data)) >
        shadow_frame.GetVReg(inst->VRegB_22t(inst_data))) {
      int16_t offset = inst->VRegC_22t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 22t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1121
  // if-le vA, vB, +CCCC: branches when vA <= vB (signed).
  ALWAYS_INLINE void IF_LE() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_22t(inst_data)) <=
        shadow_frame.GetVReg(inst->VRegB_22t(inst_data))) {
      int16_t offset = inst->VRegC_22t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 22t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1134
  // if-eqz vAA, +BBBB: branches when vAA == 0.
  ALWAYS_INLINE void IF_EQZ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_21t(inst_data)) == 0) {
      int16_t offset = inst->VRegB_21t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 21t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1146
  // if-nez vAA, +BBBB: branches when vAA != 0.
  ALWAYS_INLINE void IF_NEZ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_21t(inst_data)) != 0) {
      int16_t offset = inst->VRegB_21t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 21t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1158
  // if-ltz vAA, +BBBB: branches when vAA < 0 (signed).
  ALWAYS_INLINE void IF_LTZ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_21t(inst_data)) < 0) {
      int16_t offset = inst->VRegB_21t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 21t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1170
  // if-gez vAA, +BBBB: branches when vAA >= 0 (signed).
  ALWAYS_INLINE void IF_GEZ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_21t(inst_data)) >= 0) {
      int16_t offset = inst->VRegB_21t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 21t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1182
  // if-gtz vAA, +BBBB: branches when vAA > 0 (signed).
  ALWAYS_INLINE void IF_GTZ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_21t(inst_data)) > 0) {
      int16_t offset = inst->VRegB_21t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 21t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1194
  // if-lez vAA, +BBBB: branches when vAA <= 0 (signed).
  ALWAYS_INLINE void IF_LEZ() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (shadow_frame.GetVReg(inst->VRegA_21t(inst_data)) <= 0) {
      int16_t offset = inst->VRegB_21t();
      BRANCH_INSTRUMENTATION(offset);
      inst = inst->RelativeAt(offset);
      HandleBackwardBranch(offset);
    } else {
      // Fall-through is 2 code units: the size of a 21t instruction.
      BRANCH_INSTRUMENTATION(2);
      inst = inst->Next_2xx();
    }
  }
1206
  // aget-boolean vAA, vBB, vCC: vAA = vBB[vCC] for a boolean[]. Null array
  // throws NPE; CheckIsValidIndex raises the bounds exception on failure.
  ALWAYS_INLINE void AGET_BOOLEAN() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      // NOTE: relies on HANDLE_PENDING_EXCEPTION() leaving this opcode so the
      // dereference below is not reached with a null 'a'.
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::BooleanArray> array = a->AsBooleanArray();
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVReg(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1222
  // aget-byte vAA, vBB, vCC: vAA = vBB[vCC] for a byte[]. Null array throws
  // NPE; CheckIsValidIndex raises the bounds exception on failure.
  ALWAYS_INLINE void AGET_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::ByteArray> array = a->AsByteArray();
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVReg(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1238
  // aget-char vAA, vBB, vCC: vAA = vBB[vCC] for a char[]. Null array throws
  // NPE; CheckIsValidIndex raises the bounds exception on failure.
  ALWAYS_INLINE void AGET_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::CharArray> array = a->AsCharArray();
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVReg(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1254
  // aget-short vAA, vBB, vCC: vAA = vBB[vCC] for a short[]. Null array throws
  // NPE; CheckIsValidIndex raises the bounds exception on failure.
  ALWAYS_INLINE void AGET_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::ShortArray> array = a->AsShortArray();
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVReg(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1270
  // aget vAA, vBB, vCC: 32-bit element load. int[] and float[] share the same
  // element size, so both are read through an IntArray view (DownCast).
  ALWAYS_INLINE void AGET() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    DCHECK(a->IsIntArray() || a->IsFloatArray()) << a->PrettyTypeOf();
    ObjPtr<mirror::IntArray> array = ObjPtr<mirror::IntArray>::DownCast(a);
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVReg(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1287
  // aget-wide vAA, vBB, vCC: 64-bit element load. long[] and double[] share
  // the same element size, so both are read through a LongArray view.
  ALWAYS_INLINE void AGET_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    DCHECK(a->IsLongArray() || a->IsDoubleArray()) << a->PrettyTypeOf();
    ObjPtr<mirror::LongArray> array = ObjPtr<mirror::LongArray>::DownCast(a);
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1304
  // aget-object vAA, vBB, vCC: reference element load from an Object[].
  ALWAYS_INLINE void AGET_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::ObjectArray<mirror::Object>> array = a->AsObjectArray<mirror::Object>();
    if (array->CheckIsValidIndex(index)) {
      shadow_frame.SetVRegReference(inst->VRegA_23x(inst_data), array->GetWithoutChecks(index));
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1320
  // aput-boolean vAA, vBB, vCC: vBB[vCC] = vAA for a boolean[]. The store is
  // templated on transaction_active so transactional runs record the write.
  ALWAYS_INLINE void APUT_BOOLEAN() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    uint8_t val = shadow_frame.GetVReg(inst->VRegA_23x(inst_data));
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::BooleanArray> array = a->AsBooleanArray();
    if (array->CheckIsValidIndex(index)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1337
  // aput-byte vAA, vBB, vCC: vBB[vCC] = vAA for a byte[].
  ALWAYS_INLINE void APUT_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int8_t val = shadow_frame.GetVReg(inst->VRegA_23x(inst_data));
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::ByteArray> array = a->AsByteArray();
    if (array->CheckIsValidIndex(index)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1354
  // aput-char vAA, vBB, vCC: vBB[vCC] = vAA for a char[].
  ALWAYS_INLINE void APUT_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    uint16_t val = shadow_frame.GetVReg(inst->VRegA_23x(inst_data));
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::CharArray> array = a->AsCharArray();
    if (array->CheckIsValidIndex(index)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1371
  // aput-short vAA, vBB, vCC: vBB[vCC] = vAA for a short[].
  ALWAYS_INLINE void APUT_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int16_t val = shadow_frame.GetVReg(inst->VRegA_23x(inst_data));
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::ShortArray> array = a->AsShortArray();
    if (array->CheckIsValidIndex(index)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1388
  // aput vAA, vBB, vCC: 32-bit element store. int[] and float[] share the same
  // element size, so both are written through an IntArray view (DownCast).
  ALWAYS_INLINE void APUT() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t val = shadow_frame.GetVReg(inst->VRegA_23x(inst_data));
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    DCHECK(a->IsIntArray() || a->IsFloatArray()) << a->PrettyTypeOf();
    ObjPtr<mirror::IntArray> array = ObjPtr<mirror::IntArray>::DownCast(a);
    if (array->CheckIsValidIndex(index)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1406
  // aput-wide vAA, vBB, vCC: 64-bit element store. long[] and double[] share
  // the same element size, so both are written through a LongArray view.
  ALWAYS_INLINE void APUT_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int64_t val = shadow_frame.GetVRegLong(inst->VRegA_23x(inst_data));
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    DCHECK(a->IsLongArray() || a->IsDoubleArray()) << a->PrettyTypeOf();
    ObjPtr<mirror::LongArray> array = ObjPtr<mirror::LongArray>::DownCast(a);
    if (array->CheckIsValidIndex(index)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1424
  // aput-object vAA, vBB, vCC: reference store with both a bounds check and a
  // runtime assignability check (ArrayStoreException path via CheckAssignable).
  ALWAYS_INLINE void APUT_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Object> a = shadow_frame.GetVRegReference(inst->VRegB_23x());
    if (UNLIKELY(a == nullptr)) {
      ThrowNullPointerExceptionFromInterpreter();
      HANDLE_PENDING_EXCEPTION();
    }
    int32_t index = shadow_frame.GetVReg(inst->VRegC_23x());
    ObjPtr<mirror::Object> val = shadow_frame.GetVRegReference(inst->VRegA_23x(inst_data));
    ObjPtr<mirror::ObjectArray<mirror::Object>> array = a->AsObjectArray<mirror::Object>();
    if (array->CheckIsValidIndex(index) && array->CheckAssignable(val)) {
      array->SetWithoutChecks<transaction_active>(index, val);
      inst = inst->Next_2xx();
    } else {
      HANDLE_PENDING_EXCEPTION();
    }
  }
1441
  // iget-boolean vA, vB, field@CCCC: loads a boolean instance field into vA.
  ALWAYS_INLINE void IGET_BOOLEAN() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstancePrimitiveRead, Primitive::kPrimBoolean, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1447
  // iget-byte vA, vB, field@CCCC: loads a byte instance field into vA.
  ALWAYS_INLINE void IGET_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstancePrimitiveRead, Primitive::kPrimByte, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1453
  // iget-char vA, vB, field@CCCC: loads a char instance field into vA.
  ALWAYS_INLINE void IGET_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstancePrimitiveRead, Primitive::kPrimChar, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1459
  // iget-short vA, vB, field@CCCC: loads a short instance field into vA.
  ALWAYS_INLINE void IGET_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstancePrimitiveRead, Primitive::kPrimShort, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1465
  // iget vA, vB, field@CCCC: loads a 32-bit instance field into vA.
  ALWAYS_INLINE void IGET() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstancePrimitiveRead, Primitive::kPrimInt, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1471
  // iget-wide vA, vB, field@CCCC: loads a 64-bit instance field into vA/vA+1.
  ALWAYS_INLINE void IGET_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstancePrimitiveRead, Primitive::kPrimLong, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1477
  // iget-object vA, vB, field@CCCC: loads a reference instance field into vA.
  ALWAYS_INLINE void IGET_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<InstanceObjectRead, Primitive::kPrimNot, do_access_check>(
        self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1483
  // iget-quick: quickened 32-bit field load using a precomputed field offset.
  ALWAYS_INLINE void IGET_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimInt>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1488
  // iget-wide-quick: quickened 64-bit field load.
  ALWAYS_INLINE void IGET_WIDE_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimLong>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1493
  // iget-object-quick: quickened reference field load.
  ALWAYS_INLINE void IGET_OBJECT_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimNot>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1498
  // iget-boolean-quick: quickened boolean field load.
  ALWAYS_INLINE void IGET_BOOLEAN_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimBoolean>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1503
  // iget-byte-quick: quickened byte field load.
  ALWAYS_INLINE void IGET_BYTE_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimByte>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1508
  // iget-char-quick: quickened char field load.
  ALWAYS_INLINE void IGET_CHAR_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimChar>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1513
  // iget-short-quick: quickened short field load.
  ALWAYS_INLINE void IGET_SHORT_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIGetQuick<Primitive::kPrimShort>(shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1518
  // sget-boolean vAA, field@BBBB: loads a static boolean field into vAA.
  ALWAYS_INLINE void SGET_BOOLEAN() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticPrimitiveRead, Primitive::kPrimBoolean, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1524
  // sget-byte vAA, field@BBBB: loads a static byte field into vAA.
  ALWAYS_INLINE void SGET_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticPrimitiveRead, Primitive::kPrimByte, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1530
  // sget-char vAA, field@BBBB: loads a static char field into vAA.
  ALWAYS_INLINE void SGET_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticPrimitiveRead, Primitive::kPrimChar, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1536
  // sget-short vAA, field@BBBB: loads a static short field into vAA.
  ALWAYS_INLINE void SGET_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticPrimitiveRead, Primitive::kPrimShort, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1542
  // sget vAA, field@BBBB: loads a 32-bit static field into vAA.
  ALWAYS_INLINE void SGET() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticPrimitiveRead, Primitive::kPrimInt, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1548
  // sget-wide vAA, field@BBBB: loads a 64-bit static field into vAA/vAA+1.
  ALWAYS_INLINE void SGET_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticPrimitiveRead, Primitive::kPrimLong, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1554
  // sget-object vAA, field@BBBB: loads a reference static field into vAA.
  ALWAYS_INLINE void SGET_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldGet<StaticObjectRead, Primitive::kPrimNot, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1560
  // iput-boolean vA, vB, field@CCCC: stores vA into a boolean instance field.
  ALWAYS_INLINE void IPUT_BOOLEAN() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstancePrimitiveWrite, Primitive::kPrimBoolean, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1566
  // iput-byte vA, vB, field@CCCC: stores vA into a byte instance field.
  ALWAYS_INLINE void IPUT_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstancePrimitiveWrite, Primitive::kPrimByte, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1572
  // iput-char vA, vB, field@CCCC: stores vA into a char instance field.
  ALWAYS_INLINE void IPUT_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstancePrimitiveWrite, Primitive::kPrimChar, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1578
  // iput-short vA, vB, field@CCCC: stores vA into a short instance field.
  ALWAYS_INLINE void IPUT_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstancePrimitiveWrite, Primitive::kPrimShort, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1584
  // iput vA, vB, field@CCCC: stores vA into a 32-bit instance field.
  ALWAYS_INLINE void IPUT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstancePrimitiveWrite, Primitive::kPrimInt, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1590
  // iput-wide vA, vB, field@CCCC: stores vA/vA+1 into a 64-bit instance field.
  ALWAYS_INLINE void IPUT_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstancePrimitiveWrite, Primitive::kPrimLong, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1596
  // iput-object vA, vB, field@CCCC: stores vA into a reference instance field.
  ALWAYS_INLINE void IPUT_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<InstanceObjectWrite, Primitive::kPrimNot, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1602
  // iput-quick: quickened 32-bit field store using a precomputed field offset.
  ALWAYS_INLINE void IPUT_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimInt, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1608
  // iput-boolean-quick: quickened boolean field store.
  ALWAYS_INLINE void IPUT_BOOLEAN_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimBoolean, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1614
  // iput-byte-quick: quickened byte field store.
  ALWAYS_INLINE void IPUT_BYTE_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimByte, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1620
  // iput-char-quick: quickened char field store.
  ALWAYS_INLINE void IPUT_CHAR_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimChar, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1626
  // iput-short-quick: quickened short field store.
  ALWAYS_INLINE void IPUT_SHORT_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimShort, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1632
  // iput-wide-quick: quickened 64-bit field store.
  ALWAYS_INLINE void IPUT_WIDE_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimLong, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1638
  // iput-object-quick: quickened reference field store.
  ALWAYS_INLINE void IPUT_OBJECT_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIPutQuick<Primitive::kPrimNot, transaction_active>(
        shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1644
  // sput-boolean vAA, field@BBBB: stores vAA into a static boolean field.
  ALWAYS_INLINE void SPUT_BOOLEAN() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoFieldPut<StaticPrimitiveWrite, Primitive::kPrimBoolean, do_access_check,
        transaction_active>(self, shadow_frame, inst, inst_data);
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }
1650
SPUT_BYTE()1651 ALWAYS_INLINE void SPUT_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
1652 bool success = DoFieldPut<StaticPrimitiveWrite, Primitive::kPrimByte, do_access_check,
1653 transaction_active>(self, shadow_frame, inst, inst_data);
1654 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
1655 }
1656
SPUT_CHAR()1657 ALWAYS_INLINE void SPUT_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
1658 bool success = DoFieldPut<StaticPrimitiveWrite, Primitive::kPrimChar, do_access_check,
1659 transaction_active>(self, shadow_frame, inst, inst_data);
1660 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
1661 }
1662
SPUT_SHORT()1663 ALWAYS_INLINE void SPUT_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
1664 bool success = DoFieldPut<StaticPrimitiveWrite, Primitive::kPrimShort, do_access_check,
1665 transaction_active>(self, shadow_frame, inst, inst_data);
1666 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
1667 }
1668
SPUT()1669 ALWAYS_INLINE void SPUT() REQUIRES_SHARED(Locks::mutator_lock_) {
1670 bool success = DoFieldPut<StaticPrimitiveWrite, Primitive::kPrimInt, do_access_check,
1671 transaction_active>(self, shadow_frame, inst, inst_data);
1672 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
1673 }
1674
SPUT_WIDE()1675 ALWAYS_INLINE void SPUT_WIDE() REQUIRES_SHARED(Locks::mutator_lock_) {
1676 bool success = DoFieldPut<StaticPrimitiveWrite, Primitive::kPrimLong, do_access_check,
1677 transaction_active>(self, shadow_frame, inst, inst_data);
1678 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
1679 }
1680
SPUT_OBJECT()1681 ALWAYS_INLINE void SPUT_OBJECT() REQUIRES_SHARED(Locks::mutator_lock_) {
1682 bool success = DoFieldPut<StaticObjectWrite, Primitive::kPrimNot, do_access_check,
1683 transaction_active>(self, shadow_frame, inst, inst_data);
1684 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
1685 }
1686
  // --- Standard method invocations ---
  // Each handler forwards to DoInvoke<kind, is_range, do_access_check,
  // is_mterp=false>. 'kind' selects the dispatch (virtual / super / direct /
  // interface / static); is_range distinguishes the *-range encodings. The
  // callee's return value is written into ResultRegister(). Failure (false)
  // routes through the invoke-specific pending-exception macro.

  ALWAYS_INLINE void INVOKE_VIRTUAL() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kVirtual, false, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_VIRTUAL_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kVirtual, true, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_SUPER() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kSuper, false, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_SUPER_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kSuper, true, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_DIRECT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kDirect, false, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_DIRECT_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kDirect, true, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_INTERFACE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kInterface, false, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_INTERFACE_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kInterface, true, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_STATIC() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kStatic, false, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_STATIC_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kStatic, true, do_access_check, /*is_mterp=*/ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }
1746
  // Quickened virtual invokes: same DoInvoke path as INVOKE_VIRTUAL[_RANGE]
  // but with is_quick=true, meaning the instruction encodes a pre-resolved
  // vtable index rather than a method index (TODO confirm against DoInvoke).

  ALWAYS_INLINE void INVOKE_VIRTUAL_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kVirtual, false, do_access_check, /*is_mterp=*/ false,
        /*is_quick=*/ true>(self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_VIRTUAL_RANGE_QUICK() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoInvoke<kVirtual, true, do_access_check, /*is_mterp=*/ false,
        /*is_quick=*/ true>(self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }
1758
  // --- Method-handle invocations (invoke-polymorphic / invoke-custom) ---
  // These opcodes are only valid when the runtime has method handles enabled,
  // hence the DCHECK. Results go to ResultRegister(). Note the polymorphic
  // variants use a dedicated pending-exception macro distinct from the one
  // used by regular invokes and invoke-custom.

  ALWAYS_INLINE void INVOKE_POLYMORPHIC() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
    bool success = DoInvokePolymorphic</* is_range= */ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE_POLYMORPHIC(!success);
  }

  ALWAYS_INLINE void INVOKE_POLYMORPHIC_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
    bool success = DoInvokePolymorphic</* is_range= */ true>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE_POLYMORPHIC(!success);
  }

  // invoke-custom: call-site resolution via a bootstrap method (delegated
  // entirely to DoInvokeCustom).
  ALWAYS_INLINE void INVOKE_CUSTOM() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
    bool success = DoInvokeCustom</* is_range= */ false>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }

  ALWAYS_INLINE void INVOKE_CUSTOM_RANGE() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
    bool success = DoInvokeCustom</* is_range= */ true>(
        self, shadow_frame, inst, inst_data, ResultRegister());
    POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE(!success);
  }
1786
  // --- Unary arithmetic (12x format: vA = op vB) ---
  // Each handler reads register B, applies the unary operator, writes
  // register A, and advances by one code unit (Next_1xx). These cannot throw.

  // neg-int: vA = -vB.
  ALWAYS_INLINE void NEG_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(
        inst->VRegA_12x(inst_data), -shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // not-int: vA = ~vB.
  ALWAYS_INLINE void NOT_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(
        inst->VRegA_12x(inst_data), ~shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // neg-long: vA = -vB (64-bit).
  ALWAYS_INLINE void NEG_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(
        inst->VRegA_12x(inst_data), -shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // not-long: vA = ~vB (64-bit).
  ALWAYS_INLINE void NOT_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(
        inst->VRegA_12x(inst_data), ~shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // neg-float: vA = -vB.
  ALWAYS_INLINE void NEG_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(
        inst->VRegA_12x(inst_data), -shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // neg-double: vA = -vB.
  ALWAYS_INLINE void NEG_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(
        inst->VRegA_12x(inst_data), -shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }
1822
  // --- Primitive conversions (12x format: vA = (T) vB) ---
  // Widening/narrowing conversions rely on the implicit C++ conversion between
  // the Get/Set accessor types; float/double -> integral goes through
  // art_float_to_integral, which owns the NaN/out-of-range handling required
  // by the DEX spec (TODO confirm against its definition in safe_math.h /
  // interpreter helpers). All advance by Next_1xx and cannot throw.

  ALWAYS_INLINE void INT_TO_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_12x(inst_data),
                             shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void INT_TO_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_12x(inst_data),
                              shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void INT_TO_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_12x(inst_data),
                               shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // long-to-int: implicit truncation to the low 32 bits.
  ALWAYS_INLINE void LONG_TO_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_12x(inst_data),
                         shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void LONG_TO_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_12x(inst_data),
                              shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void LONG_TO_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_12x(inst_data),
                               shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // float-to-int: NaN/overflow handling delegated to art_float_to_integral.
  ALWAYS_INLINE void FLOAT_TO_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    float val = shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data));
    int32_t result = art_float_to_integral<int32_t, float>(val);
    shadow_frame.SetVReg(inst->VRegA_12x(inst_data), result);
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void FLOAT_TO_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    float val = shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data));
    int64_t result = art_float_to_integral<int64_t, float>(val);
    shadow_frame.SetVRegLong(inst->VRegA_12x(inst_data), result);
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void FLOAT_TO_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_12x(inst_data),
                               shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DOUBLE_TO_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    double val = shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data));
    int32_t result = art_float_to_integral<int32_t, double>(val);
    shadow_frame.SetVReg(inst->VRegA_12x(inst_data), result);
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DOUBLE_TO_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    double val = shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data));
    int64_t result = art_float_to_integral<int64_t, double>(val);
    shadow_frame.SetVRegLong(inst->VRegA_12x(inst_data), result);
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DOUBLE_TO_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_12x(inst_data),
                              shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  // int-to-byte: sign-extending truncation via int8_t.
  ALWAYS_INLINE void INT_TO_BYTE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_12x(inst_data), static_cast<int8_t>(
        shadow_frame.GetVReg(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  // int-to-char: zero-extending truncation via uint16_t (char is unsigned).
  ALWAYS_INLINE void INT_TO_CHAR() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_12x(inst_data), static_cast<uint16_t>(
        shadow_frame.GetVReg(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  // int-to-short: sign-extending truncation via int16_t.
  ALWAYS_INLINE void INT_TO_SHORT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_12x(inst_data), static_cast<int16_t>(
        shadow_frame.GetVReg(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }
1916
  // --- 32-bit integer binops (23x format: vAA = vBB op vCC) ---
  // SafeAdd/SafeSub/SafeMul (safe_math.h) perform the arithmetic without
  // relying on C++ signed-overflow behavior. Division/remainder go through
  // DoIntDivide/DoIntRemainder, which can fail with a pending exception
  // (presumably divide-by-zero — TODO confirm); the others cannot throw and
  // advance directly by Next_2xx. Shift counts are masked to 5 bits (& 0x1f)
  // as the DEX spec requires.

  ALWAYS_INLINE void ADD_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         SafeAdd(shadow_frame.GetVReg(inst->VRegB_23x()),
                                 shadow_frame.GetVReg(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void SUB_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         SafeSub(shadow_frame.GetVReg(inst->VRegB_23x()),
                                 shadow_frame.GetVReg(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void MUL_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         SafeMul(shadow_frame.GetVReg(inst->VRegB_23x()),
                                 shadow_frame.GetVReg(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void DIV_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIntDivide(shadow_frame, inst->VRegA_23x(inst_data),
                               shadow_frame.GetVReg(inst->VRegB_23x()),
                               shadow_frame.GetVReg(inst->VRegC_23x()));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }

  ALWAYS_INLINE void REM_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    bool success = DoIntRemainder(shadow_frame, inst->VRegA_23x(inst_data),
                                  shadow_frame.GetVReg(inst->VRegB_23x()),
                                  shadow_frame.GetVReg(inst->VRegC_23x()));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
  }

  ALWAYS_INLINE void SHL_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         shadow_frame.GetVReg(inst->VRegB_23x()) <<
                         (shadow_frame.GetVReg(inst->VRegC_23x()) & 0x1f));
    inst = inst->Next_2xx();
  }

  // shr-int: arithmetic (sign-propagating) right shift on the signed vreg value.
  ALWAYS_INLINE void SHR_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         shadow_frame.GetVReg(inst->VRegB_23x()) >>
                         (shadow_frame.GetVReg(inst->VRegC_23x()) & 0x1f));
    inst = inst->Next_2xx();
  }

  // ushr-int: logical right shift, forced by the cast to uint32_t.
  ALWAYS_INLINE void USHR_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         static_cast<uint32_t>(shadow_frame.GetVReg(inst->VRegB_23x())) >>
                         (shadow_frame.GetVReg(inst->VRegC_23x()) & 0x1f));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void AND_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         shadow_frame.GetVReg(inst->VRegB_23x()) &
                         shadow_frame.GetVReg(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void OR_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         shadow_frame.GetVReg(inst->VRegB_23x()) |
                         shadow_frame.GetVReg(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void XOR_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVReg(inst->VRegA_23x(inst_data),
                         shadow_frame.GetVReg(inst->VRegB_23x()) ^
                         shadow_frame.GetVReg(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }
1993
  // --- 64-bit integer binops (23x format: vAA = vBB op vCC) ---
  // Same shape as the int variants, using the VRegLong accessors. Note that
  // DoLongDivide/DoLongRemainder return void, so the exception check here
  // reads self->IsExceptionPending() instead of a success flag. Shift counts
  // are masked to 6 bits (& 0x3f) and come from a 32-bit vreg.

  ALWAYS_INLINE void ADD_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             SafeAdd(shadow_frame.GetVRegLong(inst->VRegB_23x()),
                                     shadow_frame.GetVRegLong(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void SUB_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             SafeSub(shadow_frame.GetVRegLong(inst->VRegB_23x()),
                                     shadow_frame.GetVRegLong(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void MUL_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             SafeMul(shadow_frame.GetVRegLong(inst->VRegB_23x()),
                                     shadow_frame.GetVRegLong(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void DIV_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    DoLongDivide(shadow_frame, inst->VRegA_23x(inst_data),
                 shadow_frame.GetVRegLong(inst->VRegB_23x()),
                 shadow_frame.GetVRegLong(inst->VRegC_23x()));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(self->IsExceptionPending(), Next_2xx);
  }

  ALWAYS_INLINE void REM_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    DoLongRemainder(shadow_frame, inst->VRegA_23x(inst_data),
                    shadow_frame.GetVRegLong(inst->VRegB_23x()),
                    shadow_frame.GetVRegLong(inst->VRegC_23x()));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(self->IsExceptionPending(), Next_2xx);
  }

  ALWAYS_INLINE void AND_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             shadow_frame.GetVRegLong(inst->VRegB_23x()) &
                             shadow_frame.GetVRegLong(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void OR_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             shadow_frame.GetVRegLong(inst->VRegB_23x()) |
                             shadow_frame.GetVRegLong(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void XOR_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             shadow_frame.GetVRegLong(inst->VRegB_23x()) ^
                             shadow_frame.GetVRegLong(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void SHL_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             shadow_frame.GetVRegLong(inst->VRegB_23x()) <<
                             (shadow_frame.GetVReg(inst->VRegC_23x()) & 0x3f));
    inst = inst->Next_2xx();
  }

  // shr-long: arithmetic right shift on the signed 64-bit value.
  ALWAYS_INLINE void SHR_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             shadow_frame.GetVRegLong(inst->VRegB_23x()) >>
                             (shadow_frame.GetVReg(inst->VRegC_23x()) & 0x3f));
    inst = inst->Next_2xx();
  }

  // ushr-long: logical right shift, forced by the cast to uint64_t.
  ALWAYS_INLINE void USHR_LONG() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegLong(inst->VRegA_23x(inst_data),
                             static_cast<uint64_t>(shadow_frame.GetVRegLong(inst->VRegB_23x())) >>
                             (shadow_frame.GetVReg(inst->VRegC_23x()) & 0x3f));
    inst = inst->Next_2xx();
  }
2070
  // --- Floating-point binops (23x format: vAA = vBB op vCC) ---
  // IEEE-754 semantics come straight from the C++ operators; rem-float /
  // rem-double use fmodf/fmod. None of these throw (division by zero yields
  // inf/NaN per IEEE rules), so all advance unconditionally via Next_2xx.

  ALWAYS_INLINE void ADD_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_23x(inst_data),
                              shadow_frame.GetVRegFloat(inst->VRegB_23x()) +
                              shadow_frame.GetVRegFloat(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void SUB_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_23x(inst_data),
                              shadow_frame.GetVRegFloat(inst->VRegB_23x()) -
                              shadow_frame.GetVRegFloat(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void MUL_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_23x(inst_data),
                              shadow_frame.GetVRegFloat(inst->VRegB_23x()) *
                              shadow_frame.GetVRegFloat(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void DIV_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_23x(inst_data),
                              shadow_frame.GetVRegFloat(inst->VRegB_23x()) /
                              shadow_frame.GetVRegFloat(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void REM_FLOAT() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegFloat(inst->VRegA_23x(inst_data),
                              fmodf(shadow_frame.GetVRegFloat(inst->VRegB_23x()),
                                    shadow_frame.GetVRegFloat(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void ADD_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_23x(inst_data),
                               shadow_frame.GetVRegDouble(inst->VRegB_23x()) +
                               shadow_frame.GetVRegDouble(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void SUB_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_23x(inst_data),
                               shadow_frame.GetVRegDouble(inst->VRegB_23x()) -
                               shadow_frame.GetVRegDouble(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void MUL_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_23x(inst_data),
                               shadow_frame.GetVRegDouble(inst->VRegB_23x()) *
                               shadow_frame.GetVRegDouble(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void DIV_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_23x(inst_data),
                               shadow_frame.GetVRegDouble(inst->VRegB_23x()) /
                               shadow_frame.GetVRegDouble(inst->VRegC_23x()));
    inst = inst->Next_2xx();
  }

  ALWAYS_INLINE void REM_DOUBLE() REQUIRES_SHARED(Locks::mutator_lock_) {
    shadow_frame.SetVRegDouble(inst->VRegA_23x(inst_data),
                               fmod(shadow_frame.GetVRegDouble(inst->VRegB_23x()),
                                    shadow_frame.GetVRegDouble(inst->VRegC_23x())));
    inst = inst->Next_2xx();
  }
2140
  // --- 32-bit integer 2addr binops (12x format: vA = vA op vB) ---
  // vA is both destination and first operand, so its index is read once into
  // vregA. Otherwise identical to the 23x int binops: Safe* arithmetic,
  // DoIntDivide/DoIntRemainder for the throwing ops, 5-bit shift masks, and
  // a 1-code-unit advance (Next_1xx).

  ALWAYS_INLINE void ADD_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA, SafeAdd(shadow_frame.GetVReg(vregA),
                                        shadow_frame.GetVReg(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void SUB_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         SafeSub(shadow_frame.GetVReg(vregA),
                                 shadow_frame.GetVReg(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void MUL_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         SafeMul(shadow_frame.GetVReg(vregA),
                                 shadow_frame.GetVReg(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DIV_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    bool success = DoIntDivide(shadow_frame, vregA, shadow_frame.GetVReg(vregA),
                               shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_1xx);
  }

  ALWAYS_INLINE void REM_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    bool success = DoIntRemainder(shadow_frame, vregA, shadow_frame.GetVReg(vregA),
                                  shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_1xx);
  }

  ALWAYS_INLINE void SHL_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         shadow_frame.GetVReg(vregA) <<
                         (shadow_frame.GetVReg(inst->VRegB_12x(inst_data)) & 0x1f));
    inst = inst->Next_1xx();
  }

  // shr-int/2addr: arithmetic (sign-propagating) right shift.
  ALWAYS_INLINE void SHR_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         shadow_frame.GetVReg(vregA) >>
                         (shadow_frame.GetVReg(inst->VRegB_12x(inst_data)) & 0x1f));
    inst = inst->Next_1xx();
  }

  // ushr-int/2addr: logical right shift via the uint32_t cast.
  ALWAYS_INLINE void USHR_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         static_cast<uint32_t>(shadow_frame.GetVReg(vregA)) >>
                         (shadow_frame.GetVReg(inst->VRegB_12x(inst_data)) & 0x1f));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void AND_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         shadow_frame.GetVReg(vregA) &
                         shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void OR_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         shadow_frame.GetVReg(vregA) |
                         shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void XOR_INT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVReg(vregA,
                         shadow_frame.GetVReg(vregA) ^
                         shadow_frame.GetVReg(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }
2225
  // --- 64-bit integer 2addr binops (12x format: vA = vA op vB) ---
  // Mirrors the 23x long binops with vA as both source and destination.
  // DoLongDivide/DoLongRemainder signal failure solely through the thread's
  // pending exception, hence the self->IsExceptionPending() check. Shift
  // counts are 6-bit masked and read from a 32-bit vreg.

  ALWAYS_INLINE void ADD_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             SafeAdd(shadow_frame.GetVRegLong(vregA),
                                     shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void SUB_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             SafeSub(shadow_frame.GetVRegLong(vregA),
                                     shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void MUL_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             SafeMul(shadow_frame.GetVRegLong(vregA),
                                     shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DIV_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    DoLongDivide(shadow_frame, vregA, shadow_frame.GetVRegLong(vregA),
                 shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(self->IsExceptionPending(), Next_1xx);
  }

  ALWAYS_INLINE void REM_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    DoLongRemainder(shadow_frame, vregA, shadow_frame.GetVRegLong(vregA),
                    shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    POSSIBLY_HANDLE_PENDING_EXCEPTION(self->IsExceptionPending(), Next_1xx);
  }

  ALWAYS_INLINE void AND_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             shadow_frame.GetVRegLong(vregA) &
                             shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void OR_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             shadow_frame.GetVRegLong(vregA) |
                             shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void XOR_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             shadow_frame.GetVRegLong(vregA) ^
                             shadow_frame.GetVRegLong(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void SHL_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             shadow_frame.GetVRegLong(vregA) <<
                             (shadow_frame.GetVReg(inst->VRegB_12x(inst_data)) & 0x3f));
    inst = inst->Next_1xx();
  }

  // shr-long/2addr: arithmetic right shift on the signed 64-bit value.
  ALWAYS_INLINE void SHR_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             shadow_frame.GetVRegLong(vregA) >>
                             (shadow_frame.GetVReg(inst->VRegB_12x(inst_data)) & 0x3f));
    inst = inst->Next_1xx();
  }

  // ushr-long/2addr: logical right shift via the uint64_t cast.
  ALWAYS_INLINE void USHR_LONG_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegLong(vregA,
                             static_cast<uint64_t>(shadow_frame.GetVRegLong(vregA)) >>
                             (shadow_frame.GetVReg(inst->VRegB_12x(inst_data)) & 0x3f));
    inst = inst->Next_1xx();
  }
2311
  // --- Floating-point 2addr binops (12x format: vA = vA op vB) ---
  // Same IEEE-754 semantics as the 23x float/double binops (fmodf/fmod for
  // the remainders); vA doubles as destination and first operand, and every
  // handler advances by Next_1xx without throwing.

  ALWAYS_INLINE void ADD_FLOAT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegFloat(vregA,
                              shadow_frame.GetVRegFloat(vregA) +
                              shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void SUB_FLOAT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegFloat(vregA,
                              shadow_frame.GetVRegFloat(vregA) -
                              shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void MUL_FLOAT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegFloat(vregA,
                              shadow_frame.GetVRegFloat(vregA) *
                              shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DIV_FLOAT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegFloat(vregA,
                              shadow_frame.GetVRegFloat(vregA) /
                              shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void REM_FLOAT_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegFloat(vregA,
                              fmodf(shadow_frame.GetVRegFloat(vregA),
                                    shadow_frame.GetVRegFloat(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void ADD_DOUBLE_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegDouble(vregA,
                               shadow_frame.GetVRegDouble(vregA) +
                               shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void SUB_DOUBLE_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegDouble(vregA,
                               shadow_frame.GetVRegDouble(vregA) -
                               shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void MUL_DOUBLE_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegDouble(vregA,
                               shadow_frame.GetVRegDouble(vregA) *
                               shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void DIV_DOUBLE_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegDouble(vregA,
                               shadow_frame.GetVRegDouble(vregA) /
                               shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data)));
    inst = inst->Next_1xx();
  }

  ALWAYS_INLINE void REM_DOUBLE_2ADDR() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint4_t vregA = inst->VRegA_12x(inst_data);
    shadow_frame.SetVRegDouble(vregA,
                               fmod(shadow_frame.GetVRegDouble(vregA),
                                    shadow_frame.GetVRegDouble(inst->VRegB_12x(inst_data))));
    inst = inst->Next_1xx();
  }
2391
ADD_INT_LIT16()2392 ALWAYS_INLINE void ADD_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2393 shadow_frame.SetVReg(inst->VRegA_22s(inst_data),
2394 SafeAdd(shadow_frame.GetVReg(inst->VRegB_22s(inst_data)),
2395 inst->VRegC_22s()));
2396 inst = inst->Next_2xx();
2397 }
2398
RSUB_INT()2399 ALWAYS_INLINE void RSUB_INT() REQUIRES_SHARED(Locks::mutator_lock_) {
2400 shadow_frame.SetVReg(inst->VRegA_22s(inst_data),
2401 SafeSub(inst->VRegC_22s(),
2402 shadow_frame.GetVReg(inst->VRegB_22s(inst_data))));
2403 inst = inst->Next_2xx();
2404 }
2405
MUL_INT_LIT16()2406 ALWAYS_INLINE void MUL_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2407 shadow_frame.SetVReg(inst->VRegA_22s(inst_data),
2408 SafeMul(shadow_frame.GetVReg(inst->VRegB_22s(inst_data)),
2409 inst->VRegC_22s()));
2410 inst = inst->Next_2xx();
2411 }
2412
DIV_INT_LIT16()2413 ALWAYS_INLINE void DIV_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2414 bool success = DoIntDivide(shadow_frame, inst->VRegA_22s(inst_data),
2415 shadow_frame.GetVReg(inst->VRegB_22s(inst_data)),
2416 inst->VRegC_22s());
2417 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
2418 }
2419
REM_INT_LIT16()2420 ALWAYS_INLINE void REM_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2421 bool success = DoIntRemainder(shadow_frame, inst->VRegA_22s(inst_data),
2422 shadow_frame.GetVReg(inst->VRegB_22s(inst_data)),
2423 inst->VRegC_22s());
2424 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
2425 }
2426
AND_INT_LIT16()2427 ALWAYS_INLINE void AND_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2428 shadow_frame.SetVReg(inst->VRegA_22s(inst_data),
2429 shadow_frame.GetVReg(inst->VRegB_22s(inst_data)) &
2430 inst->VRegC_22s());
2431 inst = inst->Next_2xx();
2432 }
2433
OR_INT_LIT16()2434 ALWAYS_INLINE void OR_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2435 shadow_frame.SetVReg(inst->VRegA_22s(inst_data),
2436 shadow_frame.GetVReg(inst->VRegB_22s(inst_data)) |
2437 inst->VRegC_22s());
2438 inst = inst->Next_2xx();
2439 }
2440
XOR_INT_LIT16()2441 ALWAYS_INLINE void XOR_INT_LIT16() REQUIRES_SHARED(Locks::mutator_lock_) {
2442 shadow_frame.SetVReg(inst->VRegA_22s(inst_data),
2443 shadow_frame.GetVReg(inst->VRegB_22s(inst_data)) ^
2444 inst->VRegC_22s());
2445 inst = inst->Next_2xx();
2446 }
2447
ADD_INT_LIT8()2448 ALWAYS_INLINE void ADD_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2449 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2450 SafeAdd(shadow_frame.GetVReg(inst->VRegB_22b()), inst->VRegC_22b()));
2451 inst = inst->Next_2xx();
2452 }
2453
RSUB_INT_LIT8()2454 ALWAYS_INLINE void RSUB_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2455 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2456 SafeSub(inst->VRegC_22b(), shadow_frame.GetVReg(inst->VRegB_22b())));
2457 inst = inst->Next_2xx();
2458 }
2459
MUL_INT_LIT8()2460 ALWAYS_INLINE void MUL_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2461 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2462 SafeMul(shadow_frame.GetVReg(inst->VRegB_22b()), inst->VRegC_22b()));
2463 inst = inst->Next_2xx();
2464 }
2465
DIV_INT_LIT8()2466 ALWAYS_INLINE void DIV_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2467 bool success = DoIntDivide(shadow_frame, inst->VRegA_22b(inst_data),
2468 shadow_frame.GetVReg(inst->VRegB_22b()), inst->VRegC_22b());
2469 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
2470 }
2471
REM_INT_LIT8()2472 ALWAYS_INLINE void REM_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2473 bool success = DoIntRemainder(shadow_frame, inst->VRegA_22b(inst_data),
2474 shadow_frame.GetVReg(inst->VRegB_22b()), inst->VRegC_22b());
2475 POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
2476 }
2477
AND_INT_LIT8()2478 ALWAYS_INLINE void AND_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2479 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2480 shadow_frame.GetVReg(inst->VRegB_22b()) &
2481 inst->VRegC_22b());
2482 inst = inst->Next_2xx();
2483 }
2484
OR_INT_LIT8()2485 ALWAYS_INLINE void OR_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2486 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2487 shadow_frame.GetVReg(inst->VRegB_22b()) |
2488 inst->VRegC_22b());
2489 inst = inst->Next_2xx();
2490 }
2491
XOR_INT_LIT8()2492 ALWAYS_INLINE void XOR_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2493 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2494 shadow_frame.GetVReg(inst->VRegB_22b()) ^
2495 inst->VRegC_22b());
2496 inst = inst->Next_2xx();
2497 }
2498
SHL_INT_LIT8()2499 ALWAYS_INLINE void SHL_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2500 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2501 shadow_frame.GetVReg(inst->VRegB_22b()) <<
2502 (inst->VRegC_22b() & 0x1f));
2503 inst = inst->Next_2xx();
2504 }
2505
SHR_INT_LIT8()2506 ALWAYS_INLINE void SHR_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2507 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2508 shadow_frame.GetVReg(inst->VRegB_22b()) >>
2509 (inst->VRegC_22b() & 0x1f));
2510 inst = inst->Next_2xx();
2511 }
2512
USHR_INT_LIT8()2513 ALWAYS_INLINE void USHR_INT_LIT8() REQUIRES_SHARED(Locks::mutator_lock_) {
2514 shadow_frame.SetVReg(inst->VRegA_22b(inst_data),
2515 static_cast<uint32_t>(shadow_frame.GetVReg(inst->VRegB_22b())) >>
2516 (inst->VRegC_22b() & 0x1f));
2517 inst = inst->Next_2xx();
2518 }
2519
  // Handlers for opcodes 0x3e-0x43, which are unassigned in the DEX
  // instruction set. Reaching any of them indicates corrupt or malicious
  // bytecode; each aborts via UnexpectedOpcode.
  ALWAYS_INLINE void UNUSED_3E() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_3F() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_40() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_41() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_42() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_43() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }
2543
  // Handlers for opcodes 0x79-0x7a, which are unassigned in the DEX
  // instruction set; each aborts via UnexpectedOpcode.
  ALWAYS_INLINE void UNUSED_79() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_7A() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }
2551
  // Handlers for opcodes 0xf3-0xf9, which are unassigned in the DEX
  // instruction set; each aborts via UnexpectedOpcode.
  ALWAYS_INLINE void UNUSED_F3() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_F4() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_F5() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_F6() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_F7() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_F8() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }

  ALWAYS_INLINE void UNUSED_F9() REQUIRES_SHARED(Locks::mutator_lock_) {
    UnexpectedOpcode(inst, shadow_frame);
  }
2579
  // Binds a handler instance to the state of the instruction currently being
  // interpreted. Instances are short-lived: one is constructed per executed
  // opcode (see ExecuteSwitchImplCpp) and is expected to be fully inlined.
  //
  // 'inst' and 'exit_interpreter_loop' are taken by reference so handler
  // methods can advance the caller's instruction pointer and signal that the
  // interpreter loop should terminate.
  ALWAYS_INLINE InstructionHandler(SwitchImplContext* ctx,
                                   const instrumentation::Instrumentation* instrumentation,
                                   Thread* self,
                                   ShadowFrame& shadow_frame,
                                   uint16_t dex_pc,
                                   const Instruction*& inst,
                                   uint16_t inst_data,
                                   bool& exit_interpreter_loop)
    : ctx(ctx),
      instrumentation(instrumentation),
      self(self),
      shadow_frame(shadow_frame),
      dex_pc(dex_pc),
      inst(inst),
      inst_data(inst_data),
      exit_interpreter_loop(exit_interpreter_loop) {
  }
2597
 private:
  static constexpr bool do_assignability_check = do_access_check;

  // Convenience accessors into the shared interpreter context.
  const CodeItemDataAccessor& Accessor() { return ctx->accessor; }
  const uint16_t* Insns() { return ctx->accessor.Insns(); }
  JValue* ResultRegister() { return &ctx->result_register; }

  // Per-invocation interpreter state shared with ExecuteSwitchImplCpp.
  SwitchImplContext* const ctx;
  // Runtime instrumentation hooks active for this execution.
  const instrumentation::Instrumentation* const instrumentation;
  // Thread executing this frame.
  Thread* const self;
  // Register file of the method being interpreted.
  ShadowFrame& shadow_frame;
  // Dex pc of the instruction this handler was constructed for.
  uint32_t const dex_pc;
  // Caller's instruction pointer; handlers advance it (Next_1xx/Next_2xx/...).
  const Instruction*& inst;
  // First 16-bit code unit of the instruction (opcode + vA nibble, etc.).
  uint16_t const inst_data;
  // Set by handlers to request that the caller leave the interpreter loop.
  bool& exit_interpreter_loop;
};
2614
2615 #undef BRANCH_INSTRUMENTATION
2616 #undef POSSIBLY_HANDLE_PENDING_EXCEPTION
2617 #undef POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE
2618 #undef POSSIBLY_HANDLE_PENDING_EXCEPTION_ON_INVOKE_POLYMORPHIC
2619 #undef HANDLE_PENDING_EXCEPTION
2620
2621 // TODO On ASAN builds this function gets a huge stack frame. Since normally we run in the mterp
2622 // this shouldn't cause any problems for stack overflow detection. Remove this once b/117341496 is
2623 // fixed.
template<bool do_access_check, bool transaction_active>
ATTRIBUTE_NO_SANITIZE_ADDRESS void ExecuteSwitchImplCpp(SwitchImplContext* ctx) {
  // C++ switch-based interpreter loop: repeatedly fetches the instruction at
  // the current dex pc and dispatches to the matching InstructionHandler
  // method via a switch generated from DEX_INSTRUCTION_LIST. Results and the
  // final dex pc are written back into 'ctx'.
  Thread* self = ctx->self;
  const CodeItemDataAccessor& accessor = ctx->accessor;
  ShadowFrame& shadow_frame = ctx->shadow_frame;
  // The interpreter requires a shadow frame with a reference array; a frame
  // without one cannot be executed here.
  if (UNLIKELY(!shadow_frame.HasReferenceArray())) {
    LOG(FATAL) << "Invalid shadow frame for interpreter use";
    ctx->result = JValue();
    return;
  }
  self->VerifyStack();

  uint32_t dex_pc = shadow_frame.GetDexPC();
  const auto* const instrumentation = Runtime::Current()->GetInstrumentation();
  const uint16_t* const insns = accessor.Insns();
  const Instruction* inst = Instruction::At(insns + dex_pc);
  uint16_t inst_data;

  DCHECK(!shadow_frame.GetForceRetryInstruction())
      << "Entered interpreter from invoke without retry instruction being handled!";

  bool const interpret_one_instruction = ctx->interpret_one_instruction;
  while (true) {
    // Keep the shadow frame's dex pc in sync before executing, so stack walks
    // and instrumentation observe the correct location.
    dex_pc = inst->GetDexPc(insns);
    shadow_frame.SetDexPC(dex_pc);
    TraceExecution(shadow_frame, inst, dex_pc);
    inst_data = inst->Fetch16(0);
    {
      // Preamble() runs per-instruction bookkeeping; a false return means the
      // opcode itself must not execute this iteration (the handler already
      // advanced state, requested exit, or wants a retry).
      bool exit_loop = false;
      InstructionHandler<do_access_check, transaction_active> handler(
          ctx, instrumentation, self, shadow_frame, dex_pc, inst, inst_data, exit_loop);
      if (!handler.Preamble()) {
        if (UNLIKELY(exit_loop)) {
          return;
        }
        if (UNLIKELY(interpret_one_instruction)) {
          break;
        }
        continue;
      }
    }
    // Dispatch: one case per DEX opcode, each constructing a fresh handler
    // and invoking the identically named member function.
    switch (inst->Opcode(inst_data)) {
#define OPCODE_CASE(OPCODE, OPCODE_NAME, pname, f, i, a, e, v)                \
      case OPCODE: {                                                          \
        bool exit_loop = false;                                               \
        InstructionHandler<do_access_check, transaction_active> handler(      \
            ctx, instrumentation, self, shadow_frame, dex_pc, inst, inst_data, exit_loop); \
        handler.OPCODE_NAME();                                                \
        /* TODO: Advance 'inst' here, instead of explicitly in each handler */ \
        if (UNLIKELY(exit_loop)) {                                            \
          return;                                                             \
        }                                                                     \
        break;                                                                \
      }
DEX_INSTRUCTION_LIST(OPCODE_CASE)
#undef OPCODE_CASE
    }
    if (UNLIKELY(interpret_one_instruction)) {
      break;
    }
  }
  // Record where we stopped.
  shadow_frame.SetDexPC(inst->GetDexPc(insns));
  ctx->result = ctx->result_register;
  return;
}  // NOLINT(readability/fn_size)
2690
2691 } // namespace interpreter
2692 } // namespace art
2693
2694 #endif // ART_RUNTIME_INTERPRETER_INTERPRETER_SWITCH_IMPL_INL_H_
2695