1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter_common.h"
18
19 #include <cmath>
20
21 #include "base/casts.h"
22 #include "base/enums.h"
23 #include "class_root-inl.h"
24 #include "debugger.h"
25 #include "dex/dex_file_types.h"
26 #include "entrypoints/runtime_asm_entrypoints.h"
27 #include "handle.h"
28 #include "intrinsics_enum.h"
29 #include "jit/jit.h"
30 #include "jvalue-inl.h"
31 #include "method_handles-inl.h"
32 #include "method_handles.h"
33 #include "mirror/array-alloc-inl.h"
34 #include "mirror/array-inl.h"
35 #include "mirror/call_site-inl.h"
36 #include "mirror/class.h"
37 #include "mirror/emulated_stack_frame.h"
38 #include "mirror/method_handle_impl-inl.h"
39 #include "mirror/method_type-inl.h"
40 #include "mirror/object_array-alloc-inl.h"
41 #include "mirror/object_array-inl.h"
42 #include "mirror/var_handle.h"
43 #include "reflection-inl.h"
44 #include "reflection.h"
45 #include "shadow_frame-inl.h"
46 #include "stack.h"
47 #include "thread-inl.h"
48 #include "transaction.h"
49 #include "var_handles.h"
50 #include "well_known_classes.h"
51
52 namespace art {
53 namespace interpreter {
54
// Thin wrapper used by interpreter opcode handlers: throws an NPE whose
// detail message is derived from the current dex pc.
void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}
58
CheckStackOverflow(Thread * self,size_t frame_size)59 bool CheckStackOverflow(Thread* self, size_t frame_size)
60 REQUIRES_SHARED(Locks::mutator_lock_) {
61 bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
62 uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
63 if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
64 ThrowStackOverflowError(self);
65 return false;
66 }
67 return true;
68 }
69
ShouldStayInSwitchInterpreter(ArtMethod * method)70 bool ShouldStayInSwitchInterpreter(ArtMethod* method)
71 REQUIRES_SHARED(Locks::mutator_lock_) {
72 if (!Runtime::Current()->IsStarted()) {
73 // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where
74 // we are doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use
75 // the image pointer size here and this may case an overflow if it is called from the
76 // compiler. b/62402160
77 return true;
78 }
79
80 if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
81 return false;
82 }
83
84 if (Thread::Current()->IsForceInterpreter()) {
85 // Force the use of interpreter when it is required by the debugger.
86 return true;
87 }
88
89 if (Thread::Current()->IsAsyncExceptionPending()) {
90 // Force use of interpreter to handle async-exceptions
91 return true;
92 }
93
94 const void* code = method->GetEntryPointFromQuickCompiledCode();
95 return Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(code);
96 }
97
// Sends method-exit and watched-frame-popped instrumentation events for
// |method| executing in |frame|. |result| carries the return value, either as
// a MutableHandle<mirror::Object> or as a JValue (see the explicit
// instantiations below). Returns true if execution can proceed normally,
// false if an event listener left an exception pending.
template <typename T>
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          T& result) {
  bool had_event = false;
  // We can get additional ForcePopFrame requests during handling of these events. We should
  // respect these and send additional instrumentation events.
  do {
    frame.SetForcePopFrame(false);
    if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetSkipMethodExitEvents())) {
      had_event = true;
      instrumentation->MethodExitEvent(self, method, instrumentation::OptionalFrame{frame}, result);
    }
    // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
    if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
      had_event = true;
      instrumentation->WatchedFramePopped(self, frame);
    }
  } while (UNLIKELY(frame.GetForcePopFrame()));
  // Only listeners can raise an exception here, so skip the pending-exception
  // check when no event fired.
  if (UNLIKELY(had_event)) {
    return !self->IsExceptionPending();
  } else {
    return true;
  }
}
125
// Explicit instantiations for the two result representations callers use:
// a GC-visible handle for reference results and a plain JValue otherwise.
template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          MutableHandle<mirror::Object>& result);

template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          JValue& result);
139
140 // We execute any instrumentation events that are triggered by this exception and change the
141 // shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
142 // Return true if we should continue executing in the current method and false if we need to go up
143 // the stack to find an exception handler.
144 // We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
145 // TODO We should have a better way to skip instrumentation reporting or possibly rethink that
146 // behavior.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            bool skip_listeners,
                            bool skip_throw_listener) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  const instrumentation::Instrumentation* instrumentation =
      Runtime::Current()->GetInstrumentation();
  if (!skip_throw_listener &&
      instrumentation->HasExceptionThrownListeners() &&
      self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    // See b/65049545 for why we don't need to check to see if the exception has changed.
    instrumentation->ExceptionThrownEvent(self, exception.Get());
    if (shadow_frame.GetForcePopFrame()) {
      // We will check in the caller for GetForcePopFrame again. We need to bail out early to
      // prevent an ExceptionHandledEvent from also being sent before popping.
      return true;
    }
  }
  // Look for a catch block in the current method covering the current dex pc.
  bool clear_exception = false;
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
  if (found_dex_pc == dex::kDexNoIndex) {
    // No handler in this method: report frame-pop/unwind events (unless
    // skipped) and tell the caller to unwind.
    if (!skip_listeners) {
      if (shadow_frame.NeedsNotifyPop()) {
        instrumentation->WatchedFramePopped(self, shadow_frame);
        if (shadow_frame.GetForcePopFrame()) {
          // We will check in the caller for GetForcePopFrame again. We need to bail out early to
          // prevent an ExceptionHandledEvent from also being sent before popping and to ensure we
          // handle other types of non-standard-exits.
          return true;
        }
      }
      // Exception is not caught by the current method. We will unwind to the
      // caller. Notify any instrumentation listener.
      instrumentation->MethodUnwindEvent(self,
                                         shadow_frame.GetMethod(),
                                         shadow_frame.GetDexPC());
    }
    return shadow_frame.GetForcePopFrame();
  } else {
    // Handler found: resume execution at the catch block's dex pc.
    shadow_frame.SetDexPC(found_dex_pc);
    if (!skip_listeners && instrumentation->HasExceptionHandledListeners()) {
      // Clear the exception while the listener runs, then restore it unless
      // the handler will re-read it itself (clear_exception).
      self->ClearException();
      instrumentation->ExceptionHandledEvent(self, exception.Get());
      if (UNLIKELY(self->IsExceptionPending())) {
        // Exception handled event threw an exception. Try to find the handler for this one.
        return MoveToExceptionHandler(self, shadow_frame, skip_listeners, skip_throw_listener);
      } else if (!clear_exception) {
        self->SetException(exception.Get());
      }
    } else if (clear_exception) {
      self->ClearException();
    }
    return true;
  }
}
205
// Aborts the runtime, dumping the offending instruction; called from opcode
// handlers that should be unreachable for the current method's dex code.
void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
  LOG(FATAL) << "Unexpected instruction: "
             << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
  UNREACHABLE();
}
211
// Printf-style convenience wrapper around AbortTransactionV().
void AbortTransactionF(Thread* self, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  AbortTransactionV(self, fmt, args);
  va_end(args);
}
218
AbortTransactionV(Thread * self,const char * fmt,va_list args)219 void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
220 CHECK(Runtime::Current()->IsActiveTransaction());
221 // Constructs abort message.
222 std::string abort_msg;
223 android::base::StringAppendV(&abort_msg, fmt, args);
224 // Throws an exception so we can abort the transaction and rollback every change.
225 Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
226 }
227
228 // START DECLARATIONS :
229 //
230 // These additional declarations are required because clang complains
231 // about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
232 //
233
234 template <bool is_range>
235 NO_STACK_PROTECTOR
236 static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
237 Thread* self,
238 ShadowFrame& shadow_frame,
239 JValue* result,
240 uint16_t number_of_inputs,
241 uint32_t (&arg)[Instruction::kMaxVarArgRegs],
242 uint32_t vregC,
243 bool string_init) REQUIRES_SHARED(Locks::mutator_lock_);
244
245 template <bool is_range>
246 ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
247 ShadowFrame* callee_frame,
248 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
249 const size_t first_src_reg,
250 const size_t first_dest_reg,
251 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);
252
253 // END DECLARATIONS.
254
// Transfers control from the interpreter to the callee's compiled code.
// |shadow_frame| belongs to the callee; its vregs starting at |arg_offset|
// hold the outgoing arguments. |caller| may be null (then no JIT transition
// notification is sent). The callee's return value is stored in |result|.
NO_STACK_PROTECTOR
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
  // check that the arg_offset isn't greater than the number of registers. A stronger check is
  // difficult since the frame may contain space for all the registers in the method, or only enough
  // space for the arguments.
  if (kIsDebugBuild) {
    if (method->GetCodeItem() == nullptr) {
      DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
    } else {
      DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
    }
  }
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  // Invoke with the argument words located at the tail of the shadow frame.
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}
282
SetStringInitValueToAllAliases(ShadowFrame * shadow_frame,uint16_t this_obj_vreg,JValue result)283 void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
284 uint16_t this_obj_vreg,
285 JValue result)
286 REQUIRES_SHARED(Locks::mutator_lock_) {
287 ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
288 if (existing == nullptr) {
289 // If it's null, we come from compiled code that was deoptimized. Nothing to do,
290 // as the compiler verified there was no alias.
291 // Set the new string result of the StringFactory.
292 shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
293 return;
294 }
295 // Set the string init result into all aliases.
296 for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
297 if (shadow_frame->GetVRegReference(i) == existing) {
298 DCHECK_EQ(shadow_frame->GetVRegReference(i),
299 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
300 shadow_frame->SetVRegReference(i, result.GetL());
301 DCHECK_EQ(shadow_frame->GetVRegReference(i),
302 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
303 }
304 }
305 }
306
// Shared implementation of invoke-polymorphic{,/range} on a MethodHandle
// receiver. |invoke_exact| selects MethodHandle.invokeExact semantics versus
// MethodHandle.invoke. Returns false with an exception pending on failure;
// on success |result| holds the call's return value.
template<bool is_range>
static bool DoMethodHandleInvokeCommon(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       bool invoke_exact,
                                       const Instruction* inst,
                                       uint16_t inst_data,
                                       JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Invoke-polymorphic instructions always take a receiver. i.e, they are never static.
  const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
  const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();

  // Initialize |result| to 0 as this is the default return value for
  // polymorphic invocations of method handle types with void return
  // and provides a sensible return result in error cases.
  result->SetJ(0);

  // The invoke_method_idx here is the name of the signature polymorphic method that
  // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
  // and not the method that we'll dispatch to in the end.
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
  if (UNLIKELY(method_handle == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic call site.
  const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
  const dex::ProtoIndex callsite_proto_id(vRegH);

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // There is a common dispatch method for method handles that takes
  // arguments either from a range or an array of arguments depending
  // on whether the DEX instruction is invoke-polymorphic/range or
  // invoke-polymorphic. The array here is for the latter.
  if (UNLIKELY(is_range)) {
    // VRegC is the register holding the method handle. Arguments passed
    // to the method handle's target do not include the method handle.
    RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  } else {
    // Get the register arguments for the invoke.
    uint32_t args[Instruction::kMaxVarArgRegs] = {};
    inst->GetVarArgs(args, inst_data);
    // Drop the first register which is the method handle performing the invoke.
    memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
    args[Instruction::kMaxVarArgRegs - 1] = 0;
    VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  }
}
407
DoMethodHandleInvokeExact(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)408 bool DoMethodHandleInvokeExact(Thread* self,
409 ShadowFrame& shadow_frame,
410 const Instruction* inst,
411 uint16_t inst_data,
412 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
413 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
414 static const bool kIsRange = false;
415 return DoMethodHandleInvokeCommon<kIsRange>(
416 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
417 } else {
418 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
419 static const bool kIsRange = true;
420 return DoMethodHandleInvokeCommon<kIsRange>(
421 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
422 }
423 }
424
DoMethodHandleInvoke(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)425 bool DoMethodHandleInvoke(Thread* self,
426 ShadowFrame& shadow_frame,
427 const Instruction* inst,
428 uint16_t inst_data,
429 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
430 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
431 static const bool kIsRange = false;
432 return DoMethodHandleInvokeCommon<kIsRange>(
433 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
434 } else {
435 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
436 static const bool kIsRange = true;
437 return DoMethodHandleInvokeCommon<kIsRange>(
438 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
439 }
440 }
441
// Shared implementation for all VarHandle accessor intrinsics (get/set/CAS/
// get-and-update families). Resolves the callsite MethodType, extracts the
// VarHandle receiver from vregC, and dispatches to VarHandleInvokeAccessor
// with the operands that follow the receiver. Returns false with an
// exception pending on failure.
static bool DoVarHandleInvokeCommon(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    const Instruction* inst,
                                    uint16_t inst_data,
                                    JValue* result,
                                    mirror::VarHandle::AccessMode access_mode)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }

  StackHandleScope<2> hs(self);
  // HasVarArgs() distinguishes invoke-polymorphic (45cc encoding) from
  // invoke-polymorphic/range (4rcc encoding); vregH holds the proto index in
  // both encodings.
  bool is_var_args = inst->HasVarArgs();
  const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, dex::ProtoIndex(vRegH), shadow_frame.GetMethod())));
  // This implies we couldn't resolve one or more types in this VarHandle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // vregC holds the VarHandle receiver; the accessor's operands follow it.
  const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
  ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(vRegC));
  Handle<mirror::VarHandle> var_handle(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
  if (is_var_args) {
    uint32_t args[Instruction::kMaxVarArgRegs];
    inst->GetVarArgs(args, inst_data);
    VarArgsInstructionOperands all_operands(args, inst->VRegA_45cc());
    // Strip the receiver (first operand) before dispatching.
    NoReceiverInstructionOperands operands(&all_operands);
    return VarHandleInvokeAccessor(self,
                                   shadow_frame,
                                   var_handle,
                                   callsite_type,
                                   access_mode,
                                   &operands,
                                   result);
  } else {
    RangeInstructionOperands all_operands(inst->VRegC_4rcc(), inst->VRegA_4rcc());
    // Strip the receiver (first operand) before dispatching.
    NoReceiverInstructionOperands operands(&all_operands);
    return VarHandleInvokeAccessor(self,
                                   shadow_frame,
                                   var_handle,
                                   callsite_type,
                                   access_mode,
                                   &operands,
                                   result);
  }
}
493
// Stamps out one interpreter entry point per VarHandle access mode
// (e.g. DoVarHandleGet, DoVarHandleCompareAndSet). Each generated function
// forwards to DoVarHandleInvokeCommon with the corresponding
// mirror::VarHandle::AccessMode enumerator.
#define DO_VAR_HANDLE_ACCESSOR(_access_mode)                                \
bool DoVarHandle ## _access_mode(Thread* self,                              \
                                 ShadowFrame& shadow_frame,                 \
                                 const Instruction* inst,                   \
                                 uint16_t inst_data,                        \
                                 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) { \
  const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode; \
  return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
}

// One accessor per VarHandle access mode; the names below must match the
// mirror::VarHandle::AccessMode enumerators (k<Name>).
DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
DO_VAR_HANDLE_ACCESSOR(Get)
DO_VAR_HANDLE_ACCESSOR(GetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndSet)
DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
DO_VAR_HANDLE_ACCESSOR(GetOpaque)
DO_VAR_HANDLE_ACCESSOR(GetVolatile)
DO_VAR_HANDLE_ACCESSOR(Set)
DO_VAR_HANDLE_ACCESSOR(SetOpaque)
DO_VAR_HANDLE_ACCESSOR(SetRelease)
DO_VAR_HANDLE_ACCESSOR(SetVolatile)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)

#undef DO_VAR_HANDLE_ACCESSOR
537
// Entry point for invoke-polymorphic{,/range}: resolves the symbolically
// invoked signature-polymorphic method and dispatches to the Do<Intrinsic>
// handler matching its intrinsic id (MethodHandle.invoke/invokeExact or a
// VarHandle accessor).
template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result) {
  const int invoke_method_idx = inst->VRegB();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* invoke_method =
      class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
          self, invoke_method_idx, shadow_frame.GetMethod(), kPolymorphic);
  // NOTE(review): a resolution failure would leave invoke_method null here —
  // presumably unreachable for signature-polymorphic methods; confirm.

  // Ensure intrinsic identifiers are initialized.
  DCHECK(invoke_method->IsIntrinsic());

  // Dispatch based on intrinsic identifier associated with method.
  switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
#define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
    case Intrinsics::k##Name: \
      return Do ## Name(self, shadow_frame, inst, inst_data, result);
#include "intrinsics_list.h"
    SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
#undef INTRINSICS_LIST
#undef SIGNATURE_POLYMORPHIC_INTRINSICS_LIST
#undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
    default:
      LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
      UNREACHABLE();
      return false;
  }
}
569
// Widens a raw JNI jvalue from an encoded bootstrap-argument array into a
// JValue by reading its widest (64-bit) member.
static JValue ConvertScalarBootstrapArgument(jvalue value) {
  // value either contains a primitive scalar value if it corresponds
  // to a primitive type, or it contains an integer value if it
  // corresponds to an object instance reference id (e.g. a string id).
  return JValue::FromPrimitive(value.j);
}
576
// Maps an encoded-array value type to the mirror::Class a bootstrap method
// argument of that type will have, or null for types that cannot appear as
// bootstrap arguments. The switch is exhaustive, so the compiler flags any
// new ValueType enumerator.
static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
      // These types are disallowed by JVMS. Treat as integers. This
      // will result in CCE's being raised if the BSM has one of these
      // types.
    case EncodedArrayValueIterator::ValueType::kInt:
      return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
    case EncodedArrayValueIterator::ValueType::kLong:
      return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
    case EncodedArrayValueIterator::ValueType::kFloat:
      return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
    case EncodedArrayValueIterator::ValueType::kDouble:
      return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodType:
      return GetClassRoot<mirror::MethodType>(class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodHandle:
      return GetClassRoot<mirror::MethodHandle>(class_roots);
    case EncodedArrayValueIterator::ValueType::kString:
      return GetClassRoot<mirror::String>();
    case EncodedArrayValueIterator::ValueType::kType:
      return GetClassRoot<mirror::Class>();
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Not valid as a bootstrap argument type.
      return nullptr;
  }
}
614
// Decodes one bootstrap-method argument. |encoded_value| holds either the
// scalar itself (int/long/float/double) or a DEX index naming a reference
// (MethodType/MethodHandle/String/Class) which is resolved here. On success
// stores the result in |decoded_value| and returns true; on resolution
// failure returns false with an exception pending.
static bool GetArgumentForBootstrapMethod(Thread* self,
                                          ArtMethod* referrer,
                                          EncodedArrayValueIterator::ValueType type,
                                          const JValue* encoded_value,
                                          JValue* decoded_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The encoded_value contains either a scalar value (IJDF) or a
  // scalar DEX file index to a reference type to be materialized.
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kInt:
    case EncodedArrayValueIterator::ValueType::kFloat:
      // 32-bit scalars pass through unchanged.
      decoded_value->SetI(encoded_value->GetI());
      return true;
    case EncodedArrayValueIterator::ValueType::kLong:
    case EncodedArrayValueIterator::ValueType::kDouble:
      // 64-bit scalars pass through unchanged.
      decoded_value->SetJ(encoded_value->GetJ());
      return true;
    case EncodedArrayValueIterator::ValueType::kMethodType: {
      StackHandleScope<2> hs(self);
      Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
      Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
      // The proto index is encoded as a 16-bit value, hence GetC().
      dex::ProtoIndex proto_idx(encoded_value->GetC());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodType> o =
          cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kMethodHandle: {
      uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kString: {
      dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kType: {
      dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unreachable - unsupported types that have been checked when
      // determining the effect call site type based on the bootstrap
      // argument types.
      UNREACHABLE();
  }
}
696
PackArgumentForBootstrapMethod(Thread * self,ArtMethod * referrer,CallSiteArrayValueIterator * it,ShadowFrameSetter * setter)697 static bool PackArgumentForBootstrapMethod(Thread* self,
698 ArtMethod* referrer,
699 CallSiteArrayValueIterator* it,
700 ShadowFrameSetter* setter)
701 REQUIRES_SHARED(Locks::mutator_lock_) {
702 auto type = it->GetValueType();
703 const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
704 JValue decoded_value;
705 if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
706 return false;
707 }
708 switch (it->GetValueType()) {
709 case EncodedArrayValueIterator::ValueType::kInt:
710 case EncodedArrayValueIterator::ValueType::kFloat:
711 setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
712 return true;
713 case EncodedArrayValueIterator::ValueType::kLong:
714 case EncodedArrayValueIterator::ValueType::kDouble:
715 setter->SetLong(decoded_value.GetJ());
716 return true;
717 case EncodedArrayValueIterator::ValueType::kMethodType:
718 case EncodedArrayValueIterator::ValueType::kMethodHandle:
719 case EncodedArrayValueIterator::ValueType::kString:
720 case EncodedArrayValueIterator::ValueType::kType:
721 setter->SetReference(decoded_value.GetL());
722 return true;
723 case EncodedArrayValueIterator::ValueType::kBoolean:
724 case EncodedArrayValueIterator::ValueType::kByte:
725 case EncodedArrayValueIterator::ValueType::kChar:
726 case EncodedArrayValueIterator::ValueType::kShort:
727 case EncodedArrayValueIterator::ValueType::kField:
728 case EncodedArrayValueIterator::ValueType::kMethod:
729 case EncodedArrayValueIterator::ValueType::kEnum:
730 case EncodedArrayValueIterator::ValueType::kArray:
731 case EncodedArrayValueIterator::ValueType::kAnnotation:
732 case EncodedArrayValueIterator::ValueType::kNull:
733 // Unreachable - unsupported types that have been checked when
734 // determining the effect call site type based on the bootstrap
735 // argument types.
736 UNREACHABLE();
737 }
738 }
739
// Collects the trailing static bootstrap arguments of a call site into an
// array for a variable-arity (varargs) bootstrap method, then writes the
// array reference into the bootstrap frame via |setter|.
//
// |array_type| is the BSM's final (collector array) parameter type,
// |array_length| the number of trailing arguments to collect, and |it|
// iterates over the remaining encoded static arguments in the DEX file.
// Returns false on allocation or decoding failure (exception pending).
static bool PackCollectorArrayForBootstrapMethod(Thread* self,
                                                 ArtMethod* referrer,
                                                 ObjPtr<mirror::Class> array_type,
                                                 int32_t array_length,
                                                 CallSiteArrayValueIterator* it,
                                                 ShadowFrameSetter* setter)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  JValue decoded_value;

// Allocates a primitive array and fills it by decoding each remaining
// encoded value.
// NOTE(review): unlike the reference case below, the return value of
// GetArgumentForBootstrapMethod is ignored here — presumably decoding a
// primitive cannot fail; confirm against GetArgumentForBootstrapMethod.
#define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type)                        \
  Handle<mirror::Type ## Array> array =                                  \
      hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length));    \
  if (array.IsNull()) {                                                  \
    return false;                                                        \
  }                                                                      \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {                  \
    auto type = it->GetValueType();                                      \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type);    \
    const JValue encoded_value =                                         \
        ConvertScalarBootstrapArgument(it->GetJavaValue());              \
    GetArgumentForBootstrapMethod(self,                                  \
                                  referrer,                              \
                                  type,                                  \
                                  &encoded_value,                        \
                                  &decoded_value);                       \
    array->Set(i, decoded_value.Get ## Descriptor());                    \
  }                                                                      \
  setter->SetReference(array.Get());                                     \
  return true;

// Allocates a reference array of |array_type| and fills it with the decoded
// values; decoding a reference may fail, hence the error check. Element
// stores use the transaction-aware Set<> template parameter so writes can
// be rolled back when a transaction is active.
#define COLLECT_REFERENCE_ARRAY(T, Type)                                 \
  Handle<mirror::ObjectArray<T>> array =                    /* NOLINT */ \
      hs.NewHandle(mirror::ObjectArray<T>::Alloc(self,                   \
                                                 array_type,             \
                                                 array_length));         \
  if (array.IsNull()) {                                                  \
    return false;                                                        \
  }                                                                      \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {                  \
    auto type = it->GetValueType();                                      \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type);    \
    const JValue encoded_value =                                         \
        ConvertScalarBootstrapArgument(it->GetJavaValue());              \
    if (!GetArgumentForBootstrapMethod(self,                             \
                                       referrer,                         \
                                       type,                             \
                                       &encoded_value,                   \
                                       &decoded_value)) {                \
      return false;                                                      \
    }                                                                    \
    ObjPtr<mirror::Object> o = decoded_value.GetL();                     \
    if (Runtime::Current()->IsActiveTransaction()) {                     \
      array->Set<true>(i, ObjPtr<T>::DownCast(o));                       \
    } else {                                                             \
      array->Set<false>(i, ObjPtr<T>::DownCast(o));                      \
    }                                                                    \
  }                                                                      \
  setter->SetReference(array.Get());                                     \
  return true;

  // Dispatch on the component type of the collector array. Only component
  // types that are valid bootstrap argument types are expected here; any
  // other type was rejected when the call site type was determined.
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
  if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(I, Int);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(J, Long);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(F, Float);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(D, Double);
  } else if (component_type == GetClassRoot<mirror::MethodType>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
  } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
  } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
    COLLECT_REFERENCE_ARRAY(mirror::String, String);
  } else if (component_type == GetClassRoot<mirror::Class>()) {
    COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
  } else {
    UNREACHABLE();
  }
#undef COLLECT_PRIMITIVE_ARRAY
#undef COLLECT_REFERENCE_ARRAY
}
826
// Builds the MethodType used to invoke the bootstrap method of the call
// site at |call_site_idx|. The resulting type has the shape:
//   (MethodHandles.Lookup, <types of the encoded static arguments>)CallSite
// Slot 0 replaces the encoded bootstrap-method-handle entry with the
// Lookup class, since the runtime constructs and passes a Lookup instance.
// Returns null with an exception pending on allocation failure or an
// unsupported bootstrap argument type.
static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
                                                                  const DexFile* dex_file,
                                                                  uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
  CallSiteArrayValueIterator it(*dex_file, csi);
  DCHECK_GE(it.Size(), 1u);  // At least the bootstrap method handle must be present.

  StackHandleScope<2> hs(self);
  // Create array for parameter types.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::Class> class_array_type =
      GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
  Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                class_array_type,
                                                static_cast<int>(it.Size())));
  if (ptypes.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
  // that the runtime will construct.
  ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
  it.Next();  // Skip over the bootstrap method handle entry.

  // The remaining parameter types are derived from the types of
  // arguments present in the DEX file.
  int index = 1;
  while (it.HasNext()) {
    ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
    if (ptype.IsNull()) {
      ThrowClassCastException("Unsupported bootstrap argument type");
      return nullptr;
    }
    ptypes->Set(index, ptype);
    index++;
    it.Next();
  }
  DCHECK_EQ(static_cast<size_t>(index), it.Size());

  // By definition, the return type is always a j.l.i.CallSite.
  Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
  return mirror::MethodType::Create(self, rtype, ptypes);
}
873
InvokeBootstrapMethod(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx)874 static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
875 ShadowFrame& shadow_frame,
876 uint32_t call_site_idx)
877 REQUIRES_SHARED(Locks::mutator_lock_) {
878 StackHandleScope<5> hs(self);
879 // There are three mandatory arguments expected from the call site
880 // value array in the DEX file: the bootstrap method handle, the
881 // method name to pass to the bootstrap method, and the method type
882 // to pass to the bootstrap method.
883 static constexpr size_t kMandatoryArgumentsCount = 3;
884 ArtMethod* referrer = shadow_frame.GetMethod();
885 const DexFile* dex_file = referrer->GetDexFile();
886 const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
887 CallSiteArrayValueIterator it(*dex_file, csi);
888 if (it.Size() < kMandatoryArgumentsCount) {
889 ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
890 it.Size(), kMandatoryArgumentsCount);
891 return nullptr;
892 }
893
894 if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
895 ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
896 return nullptr;
897 }
898
899 uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
900 it.Next();
901
902 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
903 Handle<mirror::MethodHandle> bsm =
904 hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
905 if (bsm.IsNull()) {
906 DCHECK(self->IsExceptionPending());
907 return nullptr;
908 }
909
910 if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
911 // JLS suggests also accepting constructors. This is currently
912 // hard as constructor invocations happen via transformers in ART
913 // today. The constructor would need to be a class derived from java.lang.invoke.CallSite.
914 ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
915 return nullptr;
916 }
917
918 // Construct the local call site type information based on the 3
919 // mandatory arguments provided by the runtime and the static arguments
920 // in the DEX file. We will use these arguments to build a shadow frame.
921 MutableHandle<mirror::MethodType> call_site_type =
922 hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
923 if (call_site_type.IsNull()) {
924 DCHECK(self->IsExceptionPending());
925 return nullptr;
926 }
927
928 // Check if this BSM is targeting a variable arity method. If so,
929 // we'll need to collect the trailing arguments into an array.
930 Handle<mirror::Array> collector_arguments;
931 int32_t collector_arguments_length;
932 if (bsm->GetTargetMethod()->IsVarargs()) {
933 int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
934 if (number_of_bsm_parameters == 0) {
935 ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
936 return nullptr;
937 }
938 Handle<mirror::Class> collector_array_class =
939 hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
940 if (!collector_array_class->IsArrayClass()) {
941 ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
942 return nullptr;
943 }
944 // The call site may include no arguments to be collected. In this
945 // case the number of arguments must be at least the number of BSM
946 // parameters less the collector array.
947 if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
948 ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
949 return nullptr;
950 }
951 // Check all the arguments to be collected match the collector array component type.
952 for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
953 if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
954 ThrowClassCastException(collector_array_class->GetComponentType(),
955 call_site_type->GetPTypes()->Get(i));
956 return nullptr;
957 }
958 }
959 // Update the call site method type so it now includes the collector array.
960 int32_t collector_arguments_start = number_of_bsm_parameters - 1;
961 collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
962 call_site_type.Assign(
963 mirror::MethodType::CollectTrailingArguments(self,
964 call_site_type.Get(),
965 collector_array_class.Get(),
966 collector_arguments_start));
967 if (call_site_type.IsNull()) {
968 DCHECK(self->IsExceptionPending());
969 return nullptr;
970 }
971 } else {
972 collector_arguments_length = 0;
973 }
974
975 if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
976 ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
977 return nullptr;
978 }
979
980 // BSM invocation has a different set of exceptions that
981 // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
982 // "opportunities". Unfortunately we cannot just leave this to the
983 // method handle invocation as this might generate a WMTE.
984 for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
985 ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
986 ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
987 if (!IsParameterTypeConvertible(from, to)) {
988 ThrowClassCastException(from, to);
989 return nullptr;
990 }
991 }
992 if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
993 ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
994 return nullptr;
995 }
996
997 // Set-up a shadow frame for invoking the bootstrap method handle.
998 ShadowFrameAllocaUniquePtr bootstrap_frame =
999 CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
1000 referrer,
1001 shadow_frame.GetDexPC());
1002 ScopedStackedShadowFramePusher pusher(self, bootstrap_frame.get());
1003 ShadowFrameSetter setter(bootstrap_frame.get(), 0u);
1004
1005 // The first parameter is a MethodHandles lookup instance.
1006 Handle<mirror::Class> lookup_class =
1007 hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
1008 ObjPtr<mirror::MethodHandlesLookup> lookup =
1009 mirror::MethodHandlesLookup::Create(self, lookup_class);
1010 if (lookup.IsNull()) {
1011 DCHECK(self->IsExceptionPending());
1012 return nullptr;
1013 }
1014 setter.SetReference(lookup);
1015
1016 // Pack the remaining arguments into the frame.
1017 int number_of_arguments = call_site_type->GetNumberOfPTypes();
1018 int argument_index;
1019 for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
1020 if (argument_index == number_of_arguments - 1 &&
1021 call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
1022 ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
1023 if (!PackCollectorArrayForBootstrapMethod(self,
1024 referrer,
1025 array_type,
1026 collector_arguments_length,
1027 &it,
1028 &setter)) {
1029 DCHECK(self->IsExceptionPending());
1030 return nullptr;
1031 }
1032 } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
1033 DCHECK(self->IsExceptionPending());
1034 return nullptr;
1035 }
1036 it.Next();
1037 }
1038 DCHECK(!it.HasNext());
1039 DCHECK(setter.Done());
1040
1041 // Invoke the bootstrap method handle.
1042 JValue result;
1043 RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
1044 bool invoke_success = MethodHandleInvoke(self,
1045 *bootstrap_frame,
1046 bsm,
1047 call_site_type,
1048 &operands,
1049 &result);
1050 if (!invoke_success) {
1051 DCHECK(self->IsExceptionPending());
1052 return nullptr;
1053 }
1054
1055 Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
1056 if (UNLIKELY(object.IsNull())) {
1057 // This will typically be for LambdaMetafactory which is not supported.
1058 ThrowClassCastException("Bootstrap method returned null");
1059 return nullptr;
1060 }
1061
1062 // Check the result type is a subclass of j.l.i.CallSite.
1063 ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
1064 if (UNLIKELY(!object->InstanceOf(call_site_class))) {
1065 ThrowClassCastException(object->GetClass(), call_site_class);
1066 return nullptr;
1067 }
1068
1069 // Check the call site target is not null as we're going to invoke it.
1070 ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
1071 ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
1072 if (UNLIKELY(target == nullptr)) {
1073 ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
1074 return nullptr;
1075 }
1076 return call_site;
1077 }
1078
1079 namespace {
1080
// Returns the resolved CallSite for |call_site_idx|, consulting the
// DexCache first and otherwise invoking the bootstrap method. A non-Error
// exception thrown by the bootstrap method is wrapped in a
// BootstrapMethodError. Concurrent resolutions are arbitrated by
// DexCache::SetResolvedCallSite, which returns the winning call site.
// Returns null with an exception pending on failure.
ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
                                           ShadowFrame& shadow_frame,
                                           uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));

  // Get the call site from the DexCache if present.
  ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
  if (LIKELY(call_site != nullptr)) {
    return call_site;
  }

  // Invoke the bootstrap method to get a candidate call site.
  call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
  if (UNLIKELY(call_site == nullptr)) {
    if (!self->GetException()->IsError()) {
      // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
      ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
                                       call_site_idx);
    }
    return nullptr;
  }

  // Attempt to place the candidate call site into the DexCache, return the winning call site.
  return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
}
1108
1109 } // namespace
1110
DoInvokeCustom(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx,const InstructionOperands * operands,JValue * result)1111 bool DoInvokeCustom(Thread* self,
1112 ShadowFrame& shadow_frame,
1113 uint32_t call_site_idx,
1114 const InstructionOperands* operands,
1115 JValue* result) {
1116 // Make sure to check for async exceptions
1117 if (UNLIKELY(self->ObserveAsyncException())) {
1118 return false;
1119 }
1120
1121 // invoke-custom is not supported in transactions. In transactions
1122 // there is a limited set of types supported. invoke-custom allows
1123 // running arbitrary code and instantiating arbitrary types.
1124 CHECK(!Runtime::Current()->IsActiveTransaction());
1125
1126 ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1127 if (call_site.IsNull()) {
1128 DCHECK(self->IsExceptionPending());
1129 return false;
1130 }
1131
1132 StackHandleScope<2> hs(self);
1133 Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1134 Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1135 DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1136 << " call_site_idx" << call_site_idx;
1137 return MethodHandleInvokeExact(self,
1138 shadow_frame,
1139 target,
1140 target_method_type,
1141 operands,
1142 result);
1143 }
1144
1145 // Assign register 'src_reg' from shadow_frame to register 'dest_reg' into new_shadow_frame.
AssignRegister(ShadowFrame * new_shadow_frame,const ShadowFrame & shadow_frame,size_t dest_reg,size_t src_reg)1146 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1147 size_t dest_reg, size_t src_reg)
1148 REQUIRES_SHARED(Locks::mutator_lock_) {
1149 // Uint required, so that sign extension does not make this wrong on 64b systems
1150 uint32_t src_value = shadow_frame.GetVReg(src_reg);
1151 ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1152
1153 // If both register locations contains the same value, the register probably holds a reference.
1154 // Note: As an optimization, non-moving collectors leave a stale reference value
1155 // in the references array even after the original vreg was overwritten to a non-reference.
1156 if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1157 new_shadow_frame->SetVRegReference(dest_reg, o);
1158 } else {
1159 new_shadow_frame->SetVReg(dest_reg, src_value);
1160 }
1161 }
1162
1163 template <bool is_range>
CopyRegisters(ShadowFrame & caller_frame,ShadowFrame * callee_frame,const uint32_t (& arg)[Instruction::kMaxVarArgRegs],const size_t first_src_reg,const size_t first_dest_reg,const size_t num_regs)1164 inline void CopyRegisters(ShadowFrame& caller_frame,
1165 ShadowFrame* callee_frame,
1166 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1167 const size_t first_src_reg,
1168 const size_t first_dest_reg,
1169 const size_t num_regs) {
1170 if (is_range) {
1171 const size_t dest_reg_bound = first_dest_reg + num_regs;
1172 for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1173 ++dest_reg, ++src_reg) {
1174 AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1175 }
1176 } else {
1177 DCHECK_LE(num_regs, arraysize(arg));
1178
1179 for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1180 AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1181 }
1182 }
1183 }
1184
// Shared invoke path for the interpreter: sizes and allocates the callee's
// shadow frame, marshals the caller's argument registers into it (with
// reference type checks when the caller's method did not skip access
// checks), and dispatches via PerformCall.
//
// |number_of_inputs| is the number of argument vregs. |arg| holds the
// argument vreg indices for the non-range case; |vregC| is the first
// argument vreg for the range case. |string_init| marks an invocation that
// was rewritten from String.<init> to a StringFactory call (see the in-body
// comment). Returns true iff no exception is pending after the call.
template <bool is_range>
static inline bool DoCallCommon(ArtMethod* called_method,
                                Thread* self,
                                ShadowFrame& shadow_frame,
                                JValue* result,
                                uint16_t number_of_inputs,
                                uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                uint32_t vregC,
                                bool string_init) {
  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  // Number of registers for the callee's call frame.
  uint16_t num_regs;
  // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
  // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
  // entrypoint to use once we start building the shadow frame.

  const bool use_interpreter_entrypoint = ShouldStayInSwitchInterpreter(called_method);
  if (LIKELY(accessor.HasCodeItem())) {
    // When transitioning to compiled code, space only needs to be reserved for the input registers.
    // The rest of the frame gets discarded. This also prevents accessing the called method's code
    // item, saving memory by keeping code items of compiled code untouched.
    if (!use_interpreter_entrypoint) {
      DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
      num_regs = number_of_inputs;
    } else {
      num_regs = accessor.RegistersSize();
      DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
    }
  } else {
    // No code item: native or proxy methods only need the inputs.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = number_of_inputs;
  }

  // Hack for String init:
  //
  // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
  //         invoke-x StringFactory(a, b, c, ...)
  // by effectively dropping the first virtual register from the invoke.
  //
  // (at this point the ArtMethod has already been replaced,
  // so we just need to fix-up the arguments)
  //
  // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
  // to handle the compiler optimization of replacing `this` with null without
  // throwing NullPointerException.
  uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
  if (UNLIKELY(string_init)) {
    DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.

    // The new StringFactory call is static and has one fewer argument.
    if (!accessor.HasCodeItem()) {
      DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
      num_regs--;
    }  // else ... don't need to change num_regs since it comes up from the string_init's code item
    number_of_inputs--;

    // Rewrite the var-args, dropping the 0th argument ("this")
    for (uint32_t i = 1; i < arraysize(arg); ++i) {
      arg[i - 1] = arg[i];
    }
    arg[arraysize(arg) - 1] = 0;

    // Rewrite the non-var-arg case
    vregC++;  // Skips the 0th vreg in the range ("this").
  }

  // Parameter registers go at the end of the shadow frame.
  DCHECK_GE(num_regs, number_of_inputs);
  size_t first_dest_reg = num_regs - number_of_inputs;
  DCHECK_NE(first_dest_reg, (size_t)-1);

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize new shadow frame by copying the registers from the callee shadow frame.
  if (!shadow_frame.GetMethod()->SkipAccessChecks()) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(self, new_shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    // ArtMethod here is needed to check type information of the call site against the callee.
    // Type information is retrieved from a DexFile/DexCache for that respective declared method.
    //
    // As a special case for proxy methods, which are not dex-backed,
    // we have to retrieve type information from the proxy's method
    // interface method instead (which is dex backed since proxies are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do runtime check on reference assignment. We need to load the shorty
    // to get the exact type of each reference argument.
    const dex::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);

    // Handle receiver apart since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (o != nullptr) {
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and GetClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->ResolveClassFromTypeIndex(type_idx);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o);
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
               static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    // Fast path: no per-argument type checks needed; bulk-copy the registers.
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);

  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}
1380
1381 template<bool is_range>
1382 NO_STACK_PROTECTOR
DoCall(ArtMethod * called_method,Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,bool is_string_init,JValue * result)1383 bool DoCall(ArtMethod* called_method,
1384 Thread* self,
1385 ShadowFrame& shadow_frame,
1386 const Instruction* inst,
1387 uint16_t inst_data,
1388 bool is_string_init,
1389 JValue* result) {
1390 // Argument word count.
1391 const uint16_t number_of_inputs =
1392 (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);
1393
1394 // TODO: find a cleaner way to separate non-range and range information without duplicating
1395 // code.
1396 uint32_t arg[Instruction::kMaxVarArgRegs] = {}; // only used in invoke-XXX.
1397 uint32_t vregC = 0;
1398 if (is_range) {
1399 vregC = inst->VRegC_3rc();
1400 } else {
1401 vregC = inst->VRegC_35c();
1402 inst->GetVarArgs(arg, inst_data);
1403 }
1404
1405 return DoCallCommon<is_range>(
1406 called_method,
1407 self,
1408 shadow_frame,
1409 result,
1410 number_of_inputs,
1411 arg,
1412 vregC,
1413 is_string_init);
1414 }
1415
// Implements filled-new-array / filled-new-array-range: resolves and
// initializes the array class, allocates an array of |length| elements, and
// fills it from the instruction's argument registers. Only 'int' and
// reference component types are supported; other primitive component types
// raise an exception. On success the new array is stored in |result|.
// Returns false with an exception pending on failure.
template <bool is_range, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  // Access checks are only needed if the caller's method was not verified
  // with checks elided.
  bool do_access_check = !shadow_frame.GetMethod()->SkipAccessChecks();
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;   // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  // Copy each argument register into the array; element stores honor the
  // transaction_active template parameter so they can be rolled back.
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}
1488
1489 // TODO: Use ObjPtr here.
1490 template<typename T>
RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,int32_t count)1491 static void RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,
1492 int32_t count)
1493 REQUIRES_SHARED(Locks::mutator_lock_) {
1494 Runtime* runtime = Runtime::Current();
1495 for (int32_t i = 0; i < count; ++i) {
1496 runtime->RecordWriteArray(array.Ptr(), i, array->GetWithoutChecks(i));
1497 }
1498 }
1499
RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array,int32_t count)1500 void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
1501 REQUIRES_SHARED(Locks::mutator_lock_) {
1502 DCHECK(Runtime::Current()->IsActiveTransaction());
1503 DCHECK(array != nullptr);
1504 DCHECK_LE(count, array->GetLength());
1505 Primitive::Type primitive_component_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
1506 switch (primitive_component_type) {
1507 case Primitive::kPrimBoolean:
1508 RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
1509 break;
1510 case Primitive::kPrimByte:
1511 RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
1512 break;
1513 case Primitive::kPrimChar:
1514 RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
1515 break;
1516 case Primitive::kPrimShort:
1517 RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
1518 break;
1519 case Primitive::kPrimInt:
1520 RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
1521 break;
1522 case Primitive::kPrimFloat:
1523 RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
1524 break;
1525 case Primitive::kPrimLong:
1526 RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
1527 break;
1528 case Primitive::kPrimDouble:
1529 RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count);
1530 break;
1531 default:
1532 LOG(FATAL) << "Unsupported primitive type " << primitive_component_type
1533 << " in fill-array-data";
1534 UNREACHABLE();
1535 }
1536 }
1537
UnlockHeldMonitors(Thread * self,ShadowFrame * shadow_frame)1538 void UnlockHeldMonitors(Thread* self, ShadowFrame* shadow_frame)
1539 REQUIRES_SHARED(Locks::mutator_lock_) {
1540 DCHECK(shadow_frame->GetForcePopFrame() || Runtime::Current()->IsTransactionAborted());
1541 // Unlock all monitors.
1542 if (shadow_frame->GetMethod()->MustCountLocks()) {
1543 DCHECK(!shadow_frame->GetMethod()->SkipAccessChecks());
1544 // Get the monitors from the shadow-frame monitor-count data.
1545 shadow_frame->GetLockCountData().VisitMonitors(
1546 [&](mirror::Object** obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1547 // Since we don't use the 'obj' pointer after the DoMonitorExit everything should be fine
1548 // WRT suspension.
1549 DoMonitorExit(self, shadow_frame, *obj);
1550 });
1551 } else {
1552 std::vector<verifier::MethodVerifier::DexLockInfo> locks;
1553 verifier::MethodVerifier::FindLocksAtDexPc(shadow_frame->GetMethod(),
1554 shadow_frame->GetDexPC(),
1555 &locks,
1556 Runtime::Current()->GetTargetSdkVersion());
1557 for (const auto& reg : locks) {
1558 if (UNLIKELY(reg.dex_registers.empty())) {
1559 LOG(ERROR) << "Unable to determine reference locked by "
1560 << shadow_frame->GetMethod()->PrettyMethod() << " at pc "
1561 << shadow_frame->GetDexPC();
1562 } else {
1563 DoMonitorExit(
1564 self, shadow_frame, shadow_frame->GetVRegReference(*reg.dex_registers.begin()));
1565 }
1566 }
1567 }
1568 }
1569
// Explicit DoCall template function declarations.
// These instantiate the template for both is_range values in this translation
// unit so other files that call DoCall can link against them.
#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range)                      \
  template REQUIRES_SHARED(Locks::mutator_lock_)                       \
  bool DoCall<_is_range>(ArtMethod* method,                            \
                         Thread* self,                                 \
                         ShadowFrame& shadow_frame,                    \
                         const Instruction* inst,                      \
                         uint16_t inst_data,                           \
                         bool string_init,                             \
                         JValue* result)
EXPLICIT_DO_CALL_TEMPLATE_DECL(false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL

// Explicit DoInvokePolymorphic template function declarations.
// Same pattern as above: one instantiation per is_range value.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)          \
  template REQUIRES_SHARED(Locks::mutator_lock_)                         \
  bool DoInvokePolymorphic<_is_range>(                                   \
      Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,  \
      uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL

// Explicit DoFilledNewArray template function declarations.
// Instantiated for all four combinations of (is_range, transaction_active).
#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _transaction_active)  \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                     \
  bool DoFilledNewArray<_is_range_, _transaction_active>(const Instruction* inst,    \
                                                         const ShadowFrame& shadow_frame, \
                                                         Thread* self,               \
                                                         JValue* result)
#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active)  \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, _transaction_active);    \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, _transaction_active)
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false);
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL
1608
1609 } // namespace interpreter
1610 } // namespace art
1611