/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_
#define ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_

#include "interpreter.h"
#include "interpreter_intrinsics.h"

#include <math.h>

#include <iostream>
#include <sstream>
#include <atomic>

#include "android-base/stringprintf.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/macros.h"
#include "class_linker-inl.h"
#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
#include "mirror/call_site.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/method.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "obj_ptr.h"
#include "stack.h"
#include "thread.h"
#include "unstarted_runtime.h"
#include "well_known_classes.h"

namespace art {
namespace interpreter {

void ThrowNullPointerExceptionFromInterpreter()
    REQUIRES_SHARED(Locks::mutator_lock_);

template <bool kMonitorCounting>
static inline void DoMonitorEnter(Thread* self, ShadowFrame* frame, ObjPtr<mirror::Object> ref)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_ref(hs.NewHandle(ref));
  h_ref->MonitorEnter(self);
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    frame->GetLockCountData().AddMonitor(self, h_ref.Get());
  }
}

template <bool kMonitorCounting>
static inline void DoMonitorExit(Thread* self, ShadowFrame* frame, ObjPtr<mirror::Object> ref)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_ref(hs.NewHandle(ref));
  h_ref->MonitorExit(self);
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    frame->GetLockCountData().RemoveMonitorOrThrow(self, h_ref.Get());
  }
}

template <bool kMonitorCounting>
static inline bool DoMonitorCheckOnExit(Thread* self, ShadowFrame* frame)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    return frame->GetLockCountData().CheckAllMonitorsReleasedOrThrow(self);
  }
  return true;
}

void AbortTransactionF(Thread* self, const char* fmt, ...)
    __attribute__((__format__(__printf__, 2, 3)))
    REQUIRES_SHARED(Locks::mutator_lock_);

void AbortTransactionV(Thread* self, const char* fmt, va_list args)
    REQUIRES_SHARED(Locks::mutator_lock_);

void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Invokes the given method. This is part of the invocation support and is used by DoInvoke,
// DoFastInvoke and DoInvokeVirtualQuick functions.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range, bool do_assignability_check>
bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
            const Instruction* inst, uint16_t inst_data, JValue* result);

// Handles streamlined non-range invoke static, direct and virtual instructions originating in
// mterp. Access checks and instrumentation other than jit profiling are not supported;
// interpreter intrinsics are supported where applicable.
// Returns true on success, otherwise throws an exception and returns false.
template<InvokeType type>
static inline bool DoFastInvoke(Thread* self,
                                ShadowFrame& shadow_frame,
                                const Instruction* inst,
                                uint16_t inst_data,
                                JValue* result) {
  const uint32_t method_idx = inst->VRegB_35c();
  const uint32_t vregC = inst->VRegC_35c();
  ObjPtr<mirror::Object> receiver = (type == kStatic)
      ? nullptr
      : shadow_frame.GetVRegReference(vregC);
  ArtMethod* sf_method = shadow_frame.GetMethod();
  ArtMethod* const called_method = FindMethodFromCode<type, false>(
      method_idx, &receiver, sf_method, self);
  // The shadow frame should already be pushed, so we don't need to update it.
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  } else if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  } else {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      if (type == kVirtual) {
        jit->InvokeVirtualOrInterface(receiver, sf_method, shadow_frame.GetDexPC(), called_method);
      }
      jit->AddSamples(self, sf_method, 1, /*with_backedges*/false);
    }
    if (called_method->IsIntrinsic()) {
      if (MterpHandleIntrinsic(&shadow_frame, called_method, inst, inst_data,
                               shadow_frame.GetResultRegister())) {
        return !self->IsExceptionPending();
      }
    }
    return DoCall<false, false>(called_method, self, shadow_frame, inst, inst_data, result);
  }
}
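
// Example (illustrative sketch only, not a caller that exists in this file): a handler holding
// the current non-range invoke-static instruction might dispatch as
//   JValue result;
//   bool ok = DoFastInvoke<kStatic>(self, shadow_frame, inst, inst_data, &result);
//   // On failure an exception is pending on `self` and `result` holds 0.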

// Handles all invoke-XXX/range instructions except for invoke-polymorphic[/range].
// Returns true on success, otherwise throws an exception and returns false.
template<InvokeType type, bool is_range, bool do_access_check>
static inline bool DoInvoke(Thread* self,
                            ShadowFrame& shadow_frame,
                            const Instruction* inst,
                            uint16_t inst_data,
                            JValue* result) {
  const uint32_t method_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
  const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
  ObjPtr<mirror::Object> receiver = (type == kStatic) ? nullptr : shadow_frame.GetVRegReference(vregC);
  ArtMethod* sf_method = shadow_frame.GetMethod();
  ArtMethod* const called_method = FindMethodFromCode<type, do_access_check>(
      method_idx, &receiver, sf_method, self);
  // The shadow frame should already be pushed, so we don't need to update it.
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  } else if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  } else {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      if (type == kVirtual || type == kInterface) {
        jit->InvokeVirtualOrInterface(receiver, sf_method, shadow_frame.GetDexPC(), called_method);
      }
      jit->AddSamples(self, sf_method, 1, /*with_backedges*/false);
    }
    // TODO: Remove the InvokeVirtualOrInterface instrumentation, as it was only used by the JIT.
    if (type == kVirtual || type == kInterface) {
      instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
      if (UNLIKELY(instrumentation->HasInvokeVirtualOrInterfaceListeners())) {
        instrumentation->InvokeVirtualOrInterface(
            self, receiver.Ptr(), sf_method, shadow_frame.GetDexPC(), called_method);
      }
    }
    return DoCall<is_range, do_access_check>(called_method, self, shadow_frame, inst, inst_data,
                                             result);
  }
}
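
// Example (illustrative sketch only): an interpreter opcode handler for invoke-virtual/range
// would typically instantiate this as
//   bool ok = DoInvoke<kVirtual, /*is_range=*/true, /*do_access_check=*/false>(
//       self, shadow_frame, inst, inst_data, &result_register);
// where `result_register` is the frame's result JValue.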

// Performs a signature polymorphic invoke (invoke-polymorphic/invoke-polymorphic-range).
template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result);

// Performs a custom invoke (invoke-custom/invoke-custom-range).
template<bool is_range>
bool DoInvokeCustom(Thread* self,
                    ShadowFrame& shadow_frame,
                    const Instruction* inst,
                    uint16_t inst_data,
                    JValue* result);

// Handles invoke-virtual-quick and invoke-virtual-quick-range instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range>
static inline bool DoInvokeVirtualQuick(Thread* self, ShadowFrame& shadow_frame,
                                        const Instruction* inst, uint16_t inst_data,
                                        JValue* result) {
  const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
  ObjPtr<mirror::Object> const receiver = shadow_frame.GetVRegReference(vregC);
  if (UNLIKELY(receiver == nullptr)) {
    // We lost the reference to the method index so we cannot get a more
    // precise exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  const uint32_t vtable_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
  // Debug code for b/31357497. To be removed.
  if (kUseReadBarrier) {
    CHECK(receiver->GetClass() != nullptr)
        << "Null class found in object " << receiver << " in region type "
        << Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
            RegionSpace()->GetRegionType(receiver.Ptr());
  }
  CHECK(receiver->GetClass()->ShouldHaveEmbeddedVTable());
  ArtMethod* const called_method = receiver->GetClass()->GetEmbeddedVTableEntry(
      vtable_idx, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  } else if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  } else {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      jit->InvokeVirtualOrInterface(
          receiver, shadow_frame.GetMethod(), shadow_frame.GetDexPC(), called_method);
      jit->AddSamples(self, shadow_frame.GetMethod(), 1, /*with_backedges*/false);
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    // TODO: Remove the InvokeVirtualOrInterface instrumentation, as it was only used by the JIT.
    if (UNLIKELY(instrumentation->HasInvokeVirtualOrInterfaceListeners())) {
      instrumentation->InvokeVirtualOrInterface(
          self, receiver.Ptr(), shadow_frame.GetMethod(), shadow_frame.GetDexPC(), called_method);
    }
    // No need to check since we've been quickened.
    return DoCall<is_range, false>(called_method, self, shadow_frame, inst, inst_data, result);
  }
}
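
// Note: the -quick variants can skip method-index resolution because the dex-to-dex quickening
// pass has already rewritten the instruction's method index into the resolved vtable index,
// which is why VRegB is used directly to index the receiver's embedded vtable above.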

// Handles iget-XXX and sget-XXX instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check>
bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) REQUIRES_SHARED(Locks::mutator_lock_);

// Handles iget-quick, iget-wide-quick and iget-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type>
bool DoIGetQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Handles iput-XXX and sput-XXX instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) REQUIRES_SHARED(Locks::mutator_lock_);

// Handles iput-quick, iput-wide-quick and iput-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type, bool transaction_active>
bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_);


// Handles string resolution for const-string and const-string-jumbo instructions. Also ensures the
// java.lang.String class is initialized.
static inline ObjPtr<mirror::String> ResolveString(Thread* self,
                                                   ShadowFrame& shadow_frame,
                                                   dex::StringIndex string_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> java_lang_string_class = mirror::String::GetJavaLangString();
  if (UNLIKELY(!java_lang_string_class->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(java_lang_string_class));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  ArtMethod* method = shadow_frame.GetMethod();
  ObjPtr<mirror::String> string_ptr = method->GetDexCache()->GetResolvedString(string_idx);
  if (UNLIKELY(string_ptr == nullptr)) {
    StackHandleScope<1> hs(self);
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
    string_ptr = Runtime::Current()->GetClassLinker()->ResolveString(*dex_cache->GetDexFile(),
                                                                     string_idx,
                                                                     dex_cache);
  }
  return string_ptr;
}

// Handles div-int, div-int/2addr, div-int/lit16 and div-int/lit8 instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoIntDivide(ShadowFrame& shadow_frame, size_t result_reg,
                               int32_t dividend, int32_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr int32_t kMinInt = std::numeric_limits<int32_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
  if (UNLIKELY(dividend == kMinInt && divisor == -1)) {
    shadow_frame.SetVReg(result_reg, kMinInt);
  } else {
    shadow_frame.SetVReg(result_reg, dividend / divisor);
  }
  return true;
}
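
// Worked example: Java semantics require Integer.MIN_VALUE / -1 to wrap back to
// Integer.MIN_VALUE rather than trap, while evaluating kMinInt / -1 directly in C++ would be
// undefined behavior (the mathematical result 2^31 is not representable in int32_t), hence the
// explicit special case above.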

// Handles rem-int, rem-int/2addr, rem-int/lit16 and rem-int/lit8 instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoIntRemainder(ShadowFrame& shadow_frame, size_t result_reg,
                                  int32_t dividend, int32_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr int32_t kMinInt = std::numeric_limits<int32_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
  if (UNLIKELY(dividend == kMinInt && divisor == -1)) {
    shadow_frame.SetVReg(result_reg, 0);
  } else {
    shadow_frame.SetVReg(result_reg, dividend % divisor);
  }
  return true;
}
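
// Worked example: Integer.MIN_VALUE % -1 is defined to be 0 in Java, so the special case above
// stores 0 instead of evaluating kMinInt % -1, which would be undefined behavior in C++.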

// Handles div-long and div-long-2addr instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoLongDivide(ShadowFrame& shadow_frame,
                                size_t result_reg,
                                int64_t dividend,
                                int64_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const int64_t kMinLong = std::numeric_limits<int64_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
  if (UNLIKELY(dividend == kMinLong && divisor == -1)) {
    shadow_frame.SetVRegLong(result_reg, kMinLong);
  } else {
    shadow_frame.SetVRegLong(result_reg, dividend / divisor);
  }
  return true;
}

// Handles rem-long and rem-long-2addr instructions.
// Returns true on success, otherwise throws a java.lang.ArithmeticException and returns false.
static inline bool DoLongRemainder(ShadowFrame& shadow_frame,
                                   size_t result_reg,
                                   int64_t dividend,
                                   int64_t divisor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const int64_t kMinLong = std::numeric_limits<int64_t>::min();
  if (UNLIKELY(divisor == 0)) {
    ThrowArithmeticExceptionDivideByZero();
    return false;
  }
  if (UNLIKELY(dividend == kMinLong && divisor == -1)) {
    shadow_frame.SetVRegLong(result_reg, 0);
  } else {
    shadow_frame.SetVRegLong(result_reg, dividend % divisor);
  }
  return true;
}

// Handles filled-new-array and filled-new-array-range instructions.
// Returns true on success, otherwise throws an exception and returns false.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst, const ShadowFrame& shadow_frame,
                      Thread* self, JValue* result);

// Handles packed-switch instruction.
// Returns the branch offset to the next instruction to execute.
static inline int32_t DoPackedSwitch(const Instruction* inst, const ShadowFrame& shadow_frame,
                                     uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(inst->Opcode() == Instruction::PACKED_SWITCH);
  const uint16_t* switch_data = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
  int32_t test_val = shadow_frame.GetVReg(inst->VRegA_31t(inst_data));
  DCHECK_EQ(switch_data[0], static_cast<uint16_t>(Instruction::kPackedSwitchSignature));
  uint16_t size = switch_data[1];
  if (size == 0) {
    // Empty packed switch, move forward by 3 (size of PACKED_SWITCH).
    return 3;
  }
  const int32_t* keys = reinterpret_cast<const int32_t*>(&switch_data[2]);
  DCHECK_ALIGNED(keys, 4);
  int32_t first_key = keys[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&switch_data[4]);
  DCHECK_ALIGNED(targets, 4);
  int32_t index = test_val - first_key;
  if (index >= 0 && index < size) {
    return targets[index];
  } else {
    // No corresponding value: move forward by 3 (size of PACKED_SWITCH).
    return 3;
  }
}
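
// For reference, the packed-switch payload read above is laid out as (in 16-bit code units):
//   ushort ident = 0x0100 (kPackedSwitchSignature)
//   ushort size            // number of entries
//   int    first_key       // lowest case value
//   int    targets[size]   // relative branch targets; targets[0] corresponds to first_key
// which is why first_key is read at &switch_data[2] and the targets array at &switch_data[4].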

// Handles sparse-switch instruction.
// Returns the branch offset to the next instruction to execute.
static inline int32_t DoSparseSwitch(const Instruction* inst, const ShadowFrame& shadow_frame,
                                     uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(inst->Opcode() == Instruction::SPARSE_SWITCH);
  const uint16_t* switch_data = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
  int32_t test_val = shadow_frame.GetVReg(inst->VRegA_31t(inst_data));
  DCHECK_EQ(switch_data[0], static_cast<uint16_t>(Instruction::kSparseSwitchSignature));
  uint16_t size = switch_data[1];
  // Return length of SPARSE_SWITCH if size is 0.
  if (size == 0) {
    return 3;
  }
  const int32_t* keys = reinterpret_cast<const int32_t*>(&switch_data[2]);
  DCHECK_ALIGNED(keys, 4);
  const int32_t* entries = keys + size;
  DCHECK_ALIGNED(entries, 4);
  int lo = 0;
  int hi = size - 1;
  while (lo <= hi) {
    int mid = (lo + hi) / 2;
    int32_t foundVal = keys[mid];
    if (test_val < foundVal) {
      hi = mid - 1;
    } else if (test_val > foundVal) {
      lo = mid + 1;
    } else {
      return entries[mid];
    }
  }
  // No corresponding value: move forward by 3 (size of SPARSE_SWITCH).
  return 3;
}
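
// For reference, the sparse-switch payload read above is laid out as (in 16-bit code units):
//   ushort ident = 0x0200 (kSparseSwitchSignature)
//   ushort size            // number of entries
//   int    keys[size]      // case values, sorted in ascending order
//   int    targets[size]   // relative branch targets, parallel to keys
// The ascending key order is what makes the binary search above valid.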

uint32_t FindNextInstructionFollowingException(Thread* self, ShadowFrame& shadow_frame,
    uint32_t dex_pc, const instrumentation::Instrumentation* instrumentation)
        REQUIRES_SHARED(Locks::mutator_lock_);

NO_RETURN void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame)
  __attribute__((cold))
  REQUIRES_SHARED(Locks::mutator_lock_);

// Set true if you want TraceExecution invocation before each bytecode execution.
constexpr bool kTraceExecutionEnabled = false;

static inline void TraceExecution(const ShadowFrame& shadow_frame, const Instruction* inst,
                                  const uint32_t dex_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kTraceExecutionEnabled) {
#define TRACE_LOG std::cerr
    std::ostringstream oss;
    oss << shadow_frame.GetMethod()->PrettyMethod()
        << android::base::StringPrintf("\n0x%x: ", dex_pc)
        << inst->DumpString(shadow_frame.GetMethod()->GetDexFile()) << "\n";
    for (uint32_t i = 0; i < shadow_frame.NumberOfVRegs(); ++i) {
      uint32_t raw_value = shadow_frame.GetVReg(i);
      ObjPtr<mirror::Object> ref_value = shadow_frame.GetVRegReference(i);
      oss << android::base::StringPrintf(" vreg%u=0x%08X", i, raw_value);
      if (ref_value != nullptr) {
        if (ref_value->GetClass()->IsStringClass() &&
            !ref_value->AsString()->IsValueNull()) {
          oss << "/java.lang.String \"" << ref_value->AsString()->ToModifiedUtf8() << "\"";
        } else {
          oss << "/" << ref_value->PrettyTypeOf();
        }
      }
    }
    TRACE_LOG << oss.str() << "\n";
#undef TRACE_LOG
  }
}

static inline bool IsBackwardBranch(int32_t branch_offset) {
  return branch_offset <= 0;
}

// Copies register 'src_reg' of shadow_frame to register 'dest_reg' of new_shadow_frame.
static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
                                  size_t dest_reg, size_t src_reg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Uint required, so that sign extension does not make this wrong on 64-bit systems.
  uint32_t src_value = shadow_frame.GetVReg(src_reg);
  ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);

  // If both register locations contain the same value, the register probably holds a reference.
  // Note: As an optimization, non-moving collectors leave a stale reference value
  // in the references array even after the original vreg was overwritten to a non-reference.
  if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
    new_shadow_frame->SetVRegReference(dest_reg, o.Ptr());
  } else {
    new_shadow_frame->SetVReg(dest_reg, src_value);
  }
}

// The arg_offset is the offset to the first input register in the frame.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result);

// Set string value created from StringFactory.newStringFromXXX() into all aliases of
// StringFactory.newEmptyString().
void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
                                    uint16_t this_obj_vreg,
                                    JValue result);

// Explicitly instantiate all DoInvoke functions.
#define EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, _is_range, _do_check)                      \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                           \
  bool DoInvoke<_type, _is_range, _do_check>(Thread* self,                                 \
                                             ShadowFrame& shadow_frame,                    \
                                             const Instruction* inst, uint16_t inst_data,  \
                                             JValue* result)

#define EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(_type)       \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, false, false);  \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, false, true);   \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, true, false);   \
  EXPLICIT_DO_INVOKE_TEMPLATE_DECL(_type, true, true);

EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kStatic)      // invoke-static/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kDirect)      // invoke-direct/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kVirtual)     // invoke-virtual/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kSuper)       // invoke-super/range.
EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL(kInterface)   // invoke-interface/range.
#undef EXPLICIT_DO_INVOKE_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_INVOKE_TEMPLATE_DECL

// Explicitly instantiate all DoFastInvoke functions.
#define EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(_type)                     \
  template REQUIRES_SHARED(Locks::mutator_lock_)                         \
  bool DoFastInvoke<_type>(Thread* self,                                 \
                           ShadowFrame& shadow_frame,                    \
                           const Instruction* inst, uint16_t inst_data,  \
                           JValue* result)

EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(kStatic);     // invoke-static
EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(kDirect);     // invoke-direct
EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL(kVirtual);    // invoke-virtual
#undef EXPLICIT_DO_FAST_INVOKE_TEMPLATE_DECL

// Explicitly instantiate all DoInvokeVirtualQuick functions.
#define EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL(_is_range)                    \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                     \
  bool DoInvokeVirtualQuick<_is_range>(Thread* self, ShadowFrame& shadow_frame,      \
                                       const Instruction* inst, uint16_t inst_data,  \
                                       JValue* result)

EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL(false);  // invoke-virtual-quick.
EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL(true);   // invoke-virtual-quick-range.
#undef EXPLICIT_DO_INVOKE_VIRTUAL_QUICK_TEMPLATE_DECL

}  // namespace interpreter
}  // namespace art

#endif  // ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_