1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_
18 #define ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_
19
20 #include "android-base/macros.h"
21 #include "instrumentation.h"
22 #include "interpreter.h"
23 #include "interpreter_intrinsics.h"
24 #include "transaction.h"
25
26 #include <math.h>
27
28 #include <atomic>
29 #include <iostream>
30 #include <sstream>
31
32 #include <android-base/logging.h>
33 #include <android-base/stringprintf.h>
34
35 #include "art_field-inl.h"
36 #include "art_method-inl.h"
37 #include "base/enums.h"
38 #include "base/locks.h"
39 #include "base/logging.h"
40 #include "base/macros.h"
41 #include "class_linker-inl.h"
42 #include "class_root-inl.h"
43 #include "common_dex_operations.h"
44 #include "common_throws.h"
45 #include "dex/dex_file-inl.h"
46 #include "dex/dex_instruction-inl.h"
47 #include "entrypoints/entrypoint_utils-inl.h"
48 #include "handle_scope-inl.h"
49 #include "interpreter_cache-inl.h"
50 #include "interpreter_switch_impl.h"
51 #include "jit/jit-inl.h"
52 #include "mirror/call_site.h"
53 #include "mirror/class-inl.h"
54 #include "mirror/dex_cache.h"
55 #include "mirror/method.h"
56 #include "mirror/method_handles_lookup.h"
57 #include "mirror/object-inl.h"
58 #include "mirror/object_array-inl.h"
59 #include "mirror/string-inl.h"
60 #include "obj_ptr.h"
61 #include "stack.h"
62 #include "thread.h"
63 #include "thread-inl.h"
64 #include "unstarted_runtime.h"
65 #include "verifier/method_verifier.h"
66 #include "well_known_classes.h"
67
68 namespace art {
69 namespace interpreter {
70
// Throws a NullPointerException on behalf of the interpreter, attributing it
// to the currently executing instruction.
void ThrowNullPointerExceptionFromInterpreter()
    REQUIRES_SHARED(Locks::mutator_lock_);
73
// Enters the monitor of `ref` for the method executing in `frame`.
// If an exception becomes pending while acquiring the lock, the monitor is
// released again before returning. When kMonitorCounting is enabled and the
// method must count its locks, the acquired monitor is recorded in the
// frame's lock-count data.
template <bool kMonitorCounting>
static inline void DoMonitorEnter(Thread* self, ShadowFrame* frame, ObjPtr<mirror::Object> ref)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  DCHECK(!ref.IsNull());
  // Handle-ify the reference: MonitorEnter may suspend, and the object could
  // move under a moving collector.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_ref(hs.NewHandle(ref));
  h_ref->MonitorEnter(self);
  DCHECK(self->HoldsLock(h_ref.Get()));
  if (UNLIKELY(self->IsExceptionPending())) {
    // Lock acquisition raised an exception; undo the enter so we do not leak
    // a held monitor.
    bool unlocked = h_ref->MonitorExit(self);
    DCHECK(unlocked);
    return;
  }
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    frame->GetLockCountData().AddMonitor(self, h_ref.Get());
  }
}
92
// Exits the monitor of `ref` for the method executing in `frame`. If the
// method counts its locks, the monitor is also removed from the frame's
// lock-count data (which throws if the monitor was not recorded there).
template <bool kMonitorCounting>
static inline void DoMonitorExit(Thread* self, ShadowFrame* frame, ObjPtr<mirror::Object> ref)
    NO_THREAD_SAFETY_ANALYSIS
    REQUIRES(!Roles::uninterruptible_) {
  // Handle-ify the reference in case MonitorExit suspends and the object moves.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_ref(hs.NewHandle(ref));
  h_ref->MonitorExit(self);
  if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
    frame->GetLockCountData().RemoveMonitorOrThrow(self, h_ref.Get());
  }
}
104
105 template <bool kMonitorCounting>
DoMonitorCheckOnExit(Thread * self,ShadowFrame * frame)106 static inline bool DoMonitorCheckOnExit(Thread* self, ShadowFrame* frame)
107 NO_THREAD_SAFETY_ANALYSIS
108 REQUIRES(!Roles::uninterruptible_) {
109 if (kMonitorCounting && frame->GetMethod()->MustCountLocks()) {
110 return frame->GetLockCountData().CheckAllMonitorsReleasedOrThrow(self);
111 }
112 return true;
113 }
114
// Aborts the active transaction with a printf-style formatted abort message.
void AbortTransactionF(Thread* self, const char* fmt, ...)
    __attribute__((__format__(__printf__, 2, 3)))
    REQUIRES_SHARED(Locks::mutator_lock_);

// As AbortTransactionF, but taking an already-started va_list.
void AbortTransactionV(Thread* self, const char* fmt, va_list args)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Records the first `count` elements of `array` in the active transaction.
void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_);
124
// Invokes the given method. This is part of the invocation support and is used by DoInvoke,
// DoFastInvoke and DoInvokeVirtualQuick functions.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range, bool do_assignability_check>
bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
            const Instruction* inst, uint16_t inst_data, JValue* result);

// Called by the switch interpreter to know if we can stay in it.
bool ShouldStayInSwitchInterpreter(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Throws exception if we are getting close to the end of the stack.
// NO_INLINE keeps this cold path out of the interpreter's hot code.
NO_INLINE bool CheckStackOverflow(Thread* self, size_t frame_size)
    REQUIRES_SHARED(Locks::mutator_lock_);
139
140
// Sends the normal method exit event.
// Returns true if the events succeeded and false if there is a pending exception.
// Templated on the result type T so different result representations can be
// forwarded to the listeners.
template <typename T> bool SendMethodExitEvents(
    Thread* self,
    const instrumentation::Instrumentation* instrumentation,
    ShadowFrame& frame,
    ArtMethod* method,
    T& result) REQUIRES_SHARED(Locks::mutator_lock_);
149
150 static inline ALWAYS_INLINE WARN_UNUSED bool
NeedsMethodExitEvent(const instrumentation::Instrumentation * ins)151 NeedsMethodExitEvent(const instrumentation::Instrumentation* ins)
152 REQUIRES_SHARED(Locks::mutator_lock_) {
153 return ins->HasMethodExitListeners() || ins->HasWatchedFramePopListeners();
154 }
155
// NO_INLINE so we won't bloat the interpreter with this very cold lock-release code.
// Releases every monitor the frame's method currently holds. Only called on
// non-standard exits (forced frame pop or aborted transaction). When the
// method counts its locks, the monitors are read from the shadow frame's
// lock-count data; otherwise the verifier is asked which registers hold
// locks at the current dex pc.
template <bool kMonitorCounting>
static NO_INLINE void UnlockHeldMonitors(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(shadow_frame->GetForcePopFrame() ||
         Runtime::Current()->IsTransactionAborted());
  // Unlock all monitors.
  if (kMonitorCounting && shadow_frame->GetMethod()->MustCountLocks()) {
    // Get the monitors from the shadow-frame monitor-count data.
    shadow_frame->GetLockCountData().VisitMonitors(
      [&](mirror::Object** obj) REQUIRES_SHARED(Locks::mutator_lock_) {
        // Since we don't use the 'obj' pointer after the DoMonitorExit everything should be fine
        // WRT suspension.
        DoMonitorExit<kMonitorCounting>(self, shadow_frame, *obj);
      });
  } else {
    // Fall back to a verifier query for the set of locked dex registers.
    std::vector<verifier::MethodVerifier::DexLockInfo> locks;
    verifier::MethodVerifier::FindLocksAtDexPc(shadow_frame->GetMethod(),
                                               shadow_frame->GetDexPC(),
                                               &locks,
                                               Runtime::Current()->GetTargetSdkVersion());
    for (const auto& reg : locks) {
      if (UNLIKELY(reg.dex_registers.empty())) {
        // The verifier could not tell which register holds the lock; we can
        // only log, not unlock.
        LOG(ERROR) << "Unable to determine reference locked by "
                   << shadow_frame->GetMethod()->PrettyMethod() << " at pc "
                   << shadow_frame->GetDexPC();
      } else {
        DoMonitorExit<kMonitorCounting>(
            self, shadow_frame, shadow_frame->GetVRegReference(*reg.dex_registers.begin()));
      }
    }
  }
}
189
// Describes how the current method's monitors are tracked, used to
// instantiate PerformNonStandardReturn below.
enum class MonitorState {
  kNoMonitorsLocked,   // No monitors held; nothing to release.
  kCountingMonitors,   // Monitors recorded in the shadow frame's lock-count data.
  kNormalMonitors,     // Monitors held but not counted.
};
195
// Performs a non-standard return from the current method (e.g. a forced frame
// pop): suppresses any pending exception, releases held monitors according to
// kMonitorState, clears the result, and reports method-exit events if needed.
template<MonitorState kMonitorState>
static inline ALWAYS_INLINE void PerformNonStandardReturn(
      Thread* self,
      ShadowFrame& frame,
      JValue& result,
      const instrumentation::Instrumentation* instrumentation,
      uint16_t num_dex_inst) REQUIRES_SHARED(Locks::mutator_lock_) {
  static constexpr bool kMonitorCounting = (kMonitorState == MonitorState::kCountingMonitors);
  // NOTE(review): `thiz` and `hs` appear unused in this revision — possibly
  // vestigial from a variant that passed the receiver to the exit events;
  // confirm before removing.
  ObjPtr<mirror::Object> thiz(frame.GetThisObject(num_dex_inst));
  StackHandleScope<1u> hs(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(WARNING) << "Suppressing exception for non-standard method exit: "
                 << self->GetException()->Dump();
    self->ClearException();
  }
  if (kMonitorState != MonitorState::kNoMonitorsLocked) {
    UnlockHeldMonitors<kMonitorCounting>(self, &frame);
  }
  DoMonitorCheckOnExit<kMonitorCounting>(self, &frame);
  result = JValue();
  if (UNLIKELY(NeedsMethodExitEvent(instrumentation))) {
    SendMethodExitEvents(self, instrumentation, frame, frame.GetMethod(), result);
  }
}
220
// Handles all invoke-XXX/range instructions except for invoke-polymorphic[/range].
// Returns true on success, otherwise throws an exception and returns false.
// Resolution results are cached in the thread-local InterpreterCache when
// nterp is not supported (nterp uses that cache in an incompatible way).
template<InvokeType type, bool is_range, bool do_access_check, bool is_mterp>
static ALWAYS_INLINE bool DoInvoke(Thread* self,
                                   ShadowFrame& shadow_frame,
                                   const Instruction* inst,
                                   uint16_t inst_data,
                                   JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions before anything else.
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Decode the method index and the first argument register from the
  // 3rc (range) or 35c (var-args) instruction format.
  const uint32_t method_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
  const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
  ArtMethod* sf_method = shadow_frame.GetMethod();

  // Try to find the method in small thread-local cache first (only used when
  // nterp is not used as mterp and nterp use the cache in an incompatible way).
  InterpreterCache* tls_cache = self->GetInterpreterCache();
  size_t tls_value;
  ArtMethod* resolved_method;
  if (!IsNterpSupported() && LIKELY(tls_cache->Get(self, inst, &tls_value))) {
    resolved_method = reinterpret_cast<ArtMethod*>(tls_value);
  } else {
    // Slow path: resolve through the class linker, with ICCE/IAE checks only
    // when this instantiation performs access checks.
    ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
    constexpr ClassLinker::ResolveMode resolve_mode =
        do_access_check ? ClassLinker::ResolveMode::kCheckICCEAndIAE
                        : ClassLinker::ResolveMode::kNoChecks;
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, sf_method, type);
    if (UNLIKELY(resolved_method == nullptr)) {
      CHECK(self->IsExceptionPending());
      result->SetJ(0);
      return false;
    }
    if (!IsNterpSupported()) {
      tls_cache->Set(self, inst, reinterpret_cast<size_t>(resolved_method));
    }
  }

  // Null pointer check and virtual method resolution.
  ObjPtr<mirror::Object> receiver =
      (type == kStatic) ? nullptr : shadow_frame.GetVRegReference(vregC);
  ArtMethod* called_method;
  called_method = FindMethodToCall<type, do_access_check>(
      method_idx, resolved_method, &receiver, sf_method, self);
  if (UNLIKELY(called_method == nullptr)) {
    CHECK(self->IsExceptionPending());
    result->SetJ(0);
    return false;
  }
  if (UNLIKELY(!called_method->IsInvokable())) {
    called_method->ThrowInvocationTimeError();
    result->SetJ(0);
    return false;
  }

  // Mterp fast path: handle recognized intrinsics without building a new frame.
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (is_mterp && !is_range && called_method->IsIntrinsic()) {
    if (MterpHandleIntrinsic(&shadow_frame, called_method, inst, inst_data,
                             shadow_frame.GetResultRegister())) {
      if (jit != nullptr && sf_method != nullptr) {
        jit->NotifyInterpreterToCompiledCodeTransition(self, sf_method);
      }
      return !self->IsExceptionPending();
    }
  }

  return DoCall<is_range, do_access_check>(called_method, self, shadow_frame, inst, inst_data,
                                           result);
}
292
ResolveMethodHandle(Thread * self,uint32_t method_handle_index,ArtMethod * referrer)293 static inline ObjPtr<mirror::MethodHandle> ResolveMethodHandle(Thread* self,
294 uint32_t method_handle_index,
295 ArtMethod* referrer)
296 REQUIRES_SHARED(Locks::mutator_lock_) {
297 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
298 return class_linker->ResolveMethodHandle(self, method_handle_index, referrer);
299 }
300
ResolveMethodType(Thread * self,dex::ProtoIndex method_type_index,ArtMethod * referrer)301 static inline ObjPtr<mirror::MethodType> ResolveMethodType(Thread* self,
302 dex::ProtoIndex method_type_index,
303 ArtMethod* referrer)
304 REQUIRES_SHARED(Locks::mutator_lock_) {
305 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
306 return class_linker->ResolveMethodType(self, method_type_index, referrer);
307 }
308
// Declares a Do<Name>() handler for every signature-polymorphic intrinsic
// listed in intrinsics_list.h.
#define DECLARE_SIGNATURE_POLYMORPHIC_HANDLER(Name, ...)              \
bool Do ## Name(Thread* self,                                         \
                ShadowFrame& shadow_frame,                            \
                const Instruction* inst,                              \
                uint16_t inst_data,                                   \
                JValue* result) REQUIRES_SHARED(Locks::mutator_lock_);
#include "intrinsics_list.h"
INTRINSICS_LIST(DECLARE_SIGNATURE_POLYMORPHIC_HANDLER)
#undef INTRINSICS_LIST
#undef DECLARE_SIGNATURE_POLYMORPHIC_HANDLER
319
// Performs an invoke-polymorphic or invoke-polymorphic-range.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_);

// Performs the call-site linkage and invocation for invoke-custom, given the
// already-decoded call site index and operand registers.
bool DoInvokeCustom(Thread* self,
                    ShadowFrame& shadow_frame,
                    uint32_t call_site_idx,
                    const InstructionOperands* operands,
                    JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_);
335
336 // Performs a custom invoke (invoke-custom/invoke-custom-range).
337 template<bool is_range>
DoInvokeCustom(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)338 bool DoInvokeCustom(Thread* self,
339 ShadowFrame& shadow_frame,
340 const Instruction* inst,
341 uint16_t inst_data,
342 JValue* result)
343 REQUIRES_SHARED(Locks::mutator_lock_) {
344 const uint32_t call_site_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
345 if (is_range) {
346 RangeInstructionOperands operands(inst->VRegC_3rc(), inst->VRegA_3rc());
347 return DoInvokeCustom(self, shadow_frame, call_site_idx, &operands, result);
348 } else {
349 uint32_t args[Instruction::kMaxVarArgRegs];
350 inst->GetVarArgs(args, inst_data);
351 VarArgsInstructionOperands operands(args, inst->VRegA_35c());
352 return DoInvokeCustom(self, shadow_frame, call_site_idx, &operands, result);
353 }
354 }
355
356 template<Primitive::Type field_type>
GetFieldValue(const ShadowFrame & shadow_frame,uint32_t vreg)357 ALWAYS_INLINE static JValue GetFieldValue(const ShadowFrame& shadow_frame, uint32_t vreg)
358 REQUIRES_SHARED(Locks::mutator_lock_) {
359 JValue field_value;
360 switch (field_type) {
361 case Primitive::kPrimBoolean:
362 field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
363 break;
364 case Primitive::kPrimByte:
365 field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
366 break;
367 case Primitive::kPrimChar:
368 field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
369 break;
370 case Primitive::kPrimShort:
371 field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
372 break;
373 case Primitive::kPrimInt:
374 field_value.SetI(shadow_frame.GetVReg(vreg));
375 break;
376 case Primitive::kPrimLong:
377 field_value.SetJ(shadow_frame.GetVRegLong(vreg));
378 break;
379 case Primitive::kPrimNot:
380 field_value.SetL(shadow_frame.GetVRegReference(vreg));
381 break;
382 default:
383 LOG(FATAL) << "Unreachable: " << field_type;
384 UNREACHABLE();
385 }
386 return field_value;
387 }
388
// Handles iget-XXX and sget-XXX instructions.
// Returns true on success, otherwise throws an exception and returns false.
// For static reads inside a transaction, first checks the transaction's read
// constraint on the declaring class.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active = false>
ALWAYS_INLINE bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
                              uint16_t inst_data) REQUIRES_SHARED(Locks::mutator_lock_) {
  const bool is_static = (find_type == StaticObjectRead) || (find_type == StaticPrimitiveRead);
  // sget uses the 21c format, iget uses 22c.
  const uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtMethod* method = shadow_frame.GetMethod();
  ArtField* f = FindFieldFromCode<find_type, do_access_check>(
      field_idx, method, self, Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
    if (transaction_active) {
      if (Runtime::Current()->GetTransaction()->ReadConstraint(obj)) {
        Runtime::Current()->AbortTransactionAndThrowAbortError(self, "Can't read static fields of "
            + obj->PrettyTypeOf() + " since it does not belong to clinit's class.");
        return false;
      }
    }
  } else {
    // Instance read: the receiver must be non-null.
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionForFieldAccess(f, method, true);
      return false;
    }
  }

  JValue result;
  if (UNLIKELY(!DoFieldGetCommon<field_type>(self, shadow_frame, obj, f, &result))) {
    // Instrumentation threw an error!
    CHECK(self->IsExceptionPending());
    return false;
  }
  // Store the read value into the destination vreg with the width/kind
  // matching the field type.
  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, result.GetZ());
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, result.GetB());
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, result.GetC());
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, result.GetS());
      break;
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, result.GetI());
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, result.GetJ());
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, result.GetL());
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}
457
CheckWriteConstraint(Thread * self,ObjPtr<mirror::Object> obj)458 static inline bool CheckWriteConstraint(Thread* self, ObjPtr<mirror::Object> obj)
459 REQUIRES_SHARED(Locks::mutator_lock_) {
460 Runtime* runtime = Runtime::Current();
461 if (runtime->GetTransaction()->WriteConstraint(obj)) {
462 DCHECK(runtime->GetHeap()->ObjectIsInBootImageSpace(obj) || obj->IsClass());
463 const char* base_msg = runtime->GetHeap()->ObjectIsInBootImageSpace(obj)
464 ? "Can't set fields of boot image "
465 : "Can't set fields of ";
466 runtime->AbortTransactionAndThrowAbortError(self, base_msg + obj->PrettyTypeOf());
467 return false;
468 }
469 return true;
470 }
471
CheckWriteValueConstraint(Thread * self,ObjPtr<mirror::Object> value)472 static inline bool CheckWriteValueConstraint(Thread* self, ObjPtr<mirror::Object> value)
473 REQUIRES_SHARED(Locks::mutator_lock_) {
474 Runtime* runtime = Runtime::Current();
475 if (runtime->GetTransaction()->WriteValueConstraint(value)) {
476 DCHECK(value != nullptr);
477 std::string msg = value->IsClass()
478 ? "Can't store reference to class " + value->AsClass()->PrettyDescriptor()
479 : "Can't store reference to instance of " + value->GetClass()->PrettyDescriptor();
480 runtime->AbortTransactionAndThrowAbortError(self, msg);
481 return false;
482 }
483 return true;
484 }
485
// Handles iput-XXX and sput-XXX instructions.
// Returns true on success, otherwise throws an exception and returns false.
// Inside a transaction, write constraints are checked on both the target
// object and (for reference fields) the stored value.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
ALWAYS_INLINE bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame,
                              const Instruction* inst, uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const bool do_assignability_check = do_access_check;
  bool is_static = (find_type == StaticObjectWrite) || (find_type == StaticPrimitiveWrite);
  // sput uses the 21c format, iput uses 22c.
  uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtMethod* method = shadow_frame.GetMethod();
  ArtField* f = FindFieldFromCode<find_type, do_access_check>(
      field_idx, method, self, Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
  } else {
    // Instance write: the receiver must be non-null.
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionForFieldAccess(f, method, false);
      return false;
    }
  }
  if (transaction_active && !CheckWriteConstraint(self, obj)) {
    return false;
  }

  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  JValue value = GetFieldValue<field_type>(shadow_frame, vregA);

  if (transaction_active &&
      field_type == Primitive::kPrimNot &&
      !CheckWriteValueConstraint(self, value.GetL())) {
    return false;
  }

  return DoFieldPutCommon<field_type, do_assignability_check, transaction_active>(self,
                                                                                  shadow_frame,
                                                                                  obj,
                                                                                  f,
                                                                                  value);
}
532
533 // Handles string resolution for const-string and const-string-jumbo instructions. Also ensures the
534 // java.lang.String class is initialized.
ResolveString(Thread * self,ShadowFrame & shadow_frame,dex::StringIndex string_idx)535 static inline ObjPtr<mirror::String> ResolveString(Thread* self,
536 ShadowFrame& shadow_frame,
537 dex::StringIndex string_idx)
538 REQUIRES_SHARED(Locks::mutator_lock_) {
539 ObjPtr<mirror::Class> java_lang_string_class = GetClassRoot<mirror::String>();
540 if (UNLIKELY(!java_lang_string_class->IsVisiblyInitialized())) {
541 StackHandleScope<1> hs(self);
542 Handle<mirror::Class> h_class(hs.NewHandle(java_lang_string_class));
543 if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
544 self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
545 DCHECK(self->IsExceptionPending());
546 return nullptr;
547 }
548 DCHECK(h_class->IsInitializing());
549 }
550 ArtMethod* method = shadow_frame.GetMethod();
551 ObjPtr<mirror::String> string_ptr =
552 Runtime::Current()->GetClassLinker()->ResolveString(string_idx, method);
553 return string_ptr;
554 }
555
556 // Handles div-int, div-int/2addr, div-int/li16 and div-int/lit8 instructions.
557 // Returns true on success, otherwise throws a java.lang.ArithmeticException and return false.
DoIntDivide(ShadowFrame & shadow_frame,size_t result_reg,int32_t dividend,int32_t divisor)558 static inline bool DoIntDivide(ShadowFrame& shadow_frame, size_t result_reg,
559 int32_t dividend, int32_t divisor)
560 REQUIRES_SHARED(Locks::mutator_lock_) {
561 constexpr int32_t kMinInt = std::numeric_limits<int32_t>::min();
562 if (UNLIKELY(divisor == 0)) {
563 ThrowArithmeticExceptionDivideByZero();
564 return false;
565 }
566 if (UNLIKELY(dividend == kMinInt && divisor == -1)) {
567 shadow_frame.SetVReg(result_reg, kMinInt);
568 } else {
569 shadow_frame.SetVReg(result_reg, dividend / divisor);
570 }
571 return true;
572 }
573
574 // Handles rem-int, rem-int/2addr, rem-int/li16 and rem-int/lit8 instructions.
575 // Returns true on success, otherwise throws a java.lang.ArithmeticException and return false.
DoIntRemainder(ShadowFrame & shadow_frame,size_t result_reg,int32_t dividend,int32_t divisor)576 static inline bool DoIntRemainder(ShadowFrame& shadow_frame, size_t result_reg,
577 int32_t dividend, int32_t divisor)
578 REQUIRES_SHARED(Locks::mutator_lock_) {
579 constexpr int32_t kMinInt = std::numeric_limits<int32_t>::min();
580 if (UNLIKELY(divisor == 0)) {
581 ThrowArithmeticExceptionDivideByZero();
582 return false;
583 }
584 if (UNLIKELY(dividend == kMinInt && divisor == -1)) {
585 shadow_frame.SetVReg(result_reg, 0);
586 } else {
587 shadow_frame.SetVReg(result_reg, dividend % divisor);
588 }
589 return true;
590 }
591
592 // Handles div-long and div-long-2addr instructions.
593 // Returns true on success, otherwise throws a java.lang.ArithmeticException and return false.
DoLongDivide(ShadowFrame & shadow_frame,size_t result_reg,int64_t dividend,int64_t divisor)594 static inline bool DoLongDivide(ShadowFrame& shadow_frame,
595 size_t result_reg,
596 int64_t dividend,
597 int64_t divisor)
598 REQUIRES_SHARED(Locks::mutator_lock_) {
599 const int64_t kMinLong = std::numeric_limits<int64_t>::min();
600 if (UNLIKELY(divisor == 0)) {
601 ThrowArithmeticExceptionDivideByZero();
602 return false;
603 }
604 if (UNLIKELY(dividend == kMinLong && divisor == -1)) {
605 shadow_frame.SetVRegLong(result_reg, kMinLong);
606 } else {
607 shadow_frame.SetVRegLong(result_reg, dividend / divisor);
608 }
609 return true;
610 }
611
612 // Handles rem-long and rem-long-2addr instructions.
613 // Returns true on success, otherwise throws a java.lang.ArithmeticException and return false.
DoLongRemainder(ShadowFrame & shadow_frame,size_t result_reg,int64_t dividend,int64_t divisor)614 static inline bool DoLongRemainder(ShadowFrame& shadow_frame,
615 size_t result_reg,
616 int64_t dividend,
617 int64_t divisor)
618 REQUIRES_SHARED(Locks::mutator_lock_) {
619 const int64_t kMinLong = std::numeric_limits<int64_t>::min();
620 if (UNLIKELY(divisor == 0)) {
621 ThrowArithmeticExceptionDivideByZero();
622 return false;
623 }
624 if (UNLIKELY(dividend == kMinLong && divisor == -1)) {
625 shadow_frame.SetVRegLong(result_reg, 0);
626 } else {
627 shadow_frame.SetVRegLong(result_reg, dividend % divisor);
628 }
629 return true;
630 }
631
// Handles filled-new-array and filled-new-array-range instructions.
// Returns true on success, otherwise throws an exception and returns false.
// The created array is stored in `result`.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst, const ShadowFrame& shadow_frame,
                      Thread* self, JValue* result);
637
638 // Handles packed-switch instruction.
639 // Returns the branch offset to the next instruction to execute.
DoPackedSwitch(const Instruction * inst,const ShadowFrame & shadow_frame,uint16_t inst_data)640 static inline int32_t DoPackedSwitch(const Instruction* inst, const ShadowFrame& shadow_frame,
641 uint16_t inst_data)
642 REQUIRES_SHARED(Locks::mutator_lock_) {
643 DCHECK(inst->Opcode() == Instruction::PACKED_SWITCH);
644 const uint16_t* switch_data = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
645 int32_t test_val = shadow_frame.GetVReg(inst->VRegA_31t(inst_data));
646 DCHECK_EQ(switch_data[0], static_cast<uint16_t>(Instruction::kPackedSwitchSignature));
647 uint16_t size = switch_data[1];
648 if (size == 0) {
649 // Empty packed switch, move forward by 3 (size of PACKED_SWITCH).
650 return 3;
651 }
652 const int32_t* keys = reinterpret_cast<const int32_t*>(&switch_data[2]);
653 DCHECK_ALIGNED(keys, 4);
654 int32_t first_key = keys[0];
655 const int32_t* targets = reinterpret_cast<const int32_t*>(&switch_data[4]);
656 DCHECK_ALIGNED(targets, 4);
657 int32_t index = test_val - first_key;
658 if (index >= 0 && index < size) {
659 return targets[index];
660 } else {
661 // No corresponding value: move forward by 3 (size of PACKED_SWITCH).
662 return 3;
663 }
664 }
665
// Handles sparse-switch instruction.
// Returns the branch offset to the next instruction to execute, or 3 (the
// code-unit size of SPARSE_SWITCH) if no key matches. The payload contains
// `size` sorted keys followed by `size` branch targets; the matching target
// is found by binary search.
static inline int32_t DoSparseSwitch(const Instruction* inst, const ShadowFrame& shadow_frame,
                                     uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(inst->Opcode() == Instruction::SPARSE_SWITCH);
  // The payload lives at a code-unit offset from the instruction itself.
  const uint16_t* switch_data = reinterpret_cast<const uint16_t*>(inst) + inst->VRegB_31t();
  int32_t test_val = shadow_frame.GetVReg(inst->VRegA_31t(inst_data));
  DCHECK_EQ(switch_data[0], static_cast<uint16_t>(Instruction::kSparseSwitchSignature));
  uint16_t size = switch_data[1];
  // Return length of SPARSE_SWITCH if size is 0.
  if (size == 0) {
    return 3;
  }
  const int32_t* keys = reinterpret_cast<const int32_t*>(&switch_data[2]);
  DCHECK_ALIGNED(keys, 4);
  // Targets immediately follow the key array.
  const int32_t* entries = keys + size;
  DCHECK_ALIGNED(entries, 4);
  // Binary search over the sorted keys. size <= 0xffff, so lo + hi cannot
  // overflow.
  int lo = 0;
  int hi = size - 1;
  while (lo <= hi) {
    int mid = (lo + hi) / 2;
    int32_t foundVal = keys[mid];
    if (test_val < foundVal) {
      hi = mid - 1;
    } else if (test_val > foundVal) {
      lo = mid + 1;
    } else {
      return entries[mid];
    }
  }
  // No corresponding value: move forward by 3 (size of SPARSE_SWITCH).
  return 3;
}
700
// We execute any instrumentation events triggered by throwing and/or handing the pending exception
// and change the shadow_frames dex_pc to the appropriate exception handler if the current method
// has one. If the exception has been handled and the shadow_frame is now pointing to a catch clause
// we return true. If the current method is unable to handle the exception we return false.
// This function accepts a null Instrumentation* as a way to cause instrumentation events not to be
// reported.
// TODO We might wish to reconsider how we cause some events to be ignored.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            bool skip_listeners,
                            bool skip_throw_listener) REQUIRES_SHARED(Locks::mutator_lock_);

// Aborts on an opcode the interpreter cannot handle; never returns.
NO_RETURN void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame)
    __attribute__((cold))
    REQUIRES_SHARED(Locks::mutator_lock_);

// Set true if you want TraceExecution invocation before each bytecode execution.
constexpr bool kTraceExecutionEnabled = false;
719
// Dumps the current method, the instruction at `dex_pc`, and all vreg values
// of `shadow_frame` to std::cerr. Only active when kTraceExecutionEnabled
// (above) is set to true.
static inline void TraceExecution(const ShadowFrame& shadow_frame, const Instruction* inst,
                                  const uint32_t dex_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kTraceExecutionEnabled) {
#define TRACE_LOG std::cerr
    std::ostringstream oss;
    oss << shadow_frame.GetMethod()->PrettyMethod()
        << android::base::StringPrintf("\n0x%x: ", dex_pc)
        << inst->DumpString(shadow_frame.GetMethod()->GetDexFile()) << "\n";
    for (uint32_t i = 0; i < shadow_frame.NumberOfVRegs(); ++i) {
      uint32_t raw_value = shadow_frame.GetVReg(i);
      ObjPtr<mirror::Object> ref_value = shadow_frame.GetVRegReference(i);
      oss << android::base::StringPrintf(" vreg%u=0x%08X", i, raw_value);
      if (ref_value != nullptr) {
        // Print string contents when the value array is set; IsValueNull()
        // presumably guards strings that are not fully constructed yet —
        // TODO confirm.
        if (ref_value->GetClass()->IsStringClass() &&
            !ref_value->AsString()->IsValueNull()) {
          oss << "/java.lang.String \"" << ref_value->AsString()->ToModifiedUtf8() << "\"";
        } else {
          oss << "/" << ref_value->PrettyTypeOf();
        }
      }
    }
    TRACE_LOG << oss.str() << "\n";
#undef TRACE_LOG
  }
}
746
// Returns whether `branch_offset` does not move the dex pc forward, i.e. the
// branch target is at or before the branching instruction (offset <= 0).
static inline bool IsBackwardBranch(int32_t branch_offset) {
  return !(branch_offset > 0);
}
750
// The arg_offset is the offset to the first input register in the frame.
// The invocation outcome is written to `result`.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result);
757
IsStringInit(const DexFile * dex_file,uint32_t method_idx)758 static inline bool IsStringInit(const DexFile* dex_file, uint32_t method_idx)
759 REQUIRES_SHARED(Locks::mutator_lock_) {
760 const dex::MethodId& method_id = dex_file->GetMethodId(method_idx);
761 const char* class_name = dex_file->StringByTypeIdx(method_id.class_idx_);
762 const char* method_name = dex_file->GetMethodName(method_id);
763 // Instead of calling ResolveMethod() which has suspend point and can trigger
764 // GC, look up the method symbolically.
765 // Compare method's class name and method name against string init.
766 // It's ok since it's not allowed to create your own java/lang/String.
767 // TODO: verify that assumption.
768 if ((strcmp(class_name, "Ljava/lang/String;") == 0) &&
769 (strcmp(method_name, "<init>") == 0)) {
770 return true;
771 }
772 return false;
773 }
774
IsStringInit(const Instruction * instr,ArtMethod * caller)775 static inline bool IsStringInit(const Instruction* instr, ArtMethod* caller)
776 REQUIRES_SHARED(Locks::mutator_lock_) {
777 if (instr->Opcode() == Instruction::INVOKE_DIRECT ||
778 instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) {
779 uint16_t callee_method_idx = (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
780 instr->VRegB_3rc() : instr->VRegB_35c();
781 return IsStringInit(caller->GetDexFile(), callee_method_idx);
782 }
783 return false;
784 }
785
// Set string value created from StringFactory.newStringFromXXX() into all aliases of
// StringFactory.newEmptyString().
void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
                                    uint16_t this_obj_vreg,
                                    JValue result);
791
792 } // namespace interpreter
793 } // namespace art
794
795 #endif // ART_RUNTIME_INTERPRETER_INTERPRETER_COMMON_H_
796