1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter_common.h"
18
19 #include <cmath>
20
21 #include "base/casts.h"
22 #include "base/enums.h"
23 #include "class_root-inl.h"
24 #include "debugger.h"
25 #include "dex/dex_file_types.h"
26 #include "entrypoints/runtime_asm_entrypoints.h"
27 #include "handle.h"
28 #include "intrinsics_enum.h"
29 #include "jit/jit.h"
30 #include "jvalue-inl.h"
31 #include "method_handles-inl.h"
32 #include "method_handles.h"
33 #include "mirror/array-alloc-inl.h"
34 #include "mirror/array-inl.h"
35 #include "mirror/call_site-inl.h"
36 #include "mirror/class.h"
37 #include "mirror/emulated_stack_frame.h"
38 #include "mirror/method_handle_impl-inl.h"
39 #include "mirror/method_type-inl.h"
40 #include "mirror/object_array-alloc-inl.h"
41 #include "mirror/object_array-inl.h"
42 #include "mirror/var_handle.h"
43 #include "reflection-inl.h"
44 #include "reflection.h"
45 #include "shadow_frame-inl.h"
46 #include "stack.h"
47 #include "thread-inl.h"
48 #include "transaction.h"
49 #include "var_handles.h"
50 #include "well_known_classes.h"
51
52 namespace art {
53 namespace interpreter {
54
// Throws a NullPointerException attributed to the dex pc of the current
// interpreter frame. Thin convenience wrapper used by interpreter handlers
// when a null receiver or operand is encountered.
void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}
58
CheckStackOverflow(Thread * self,size_t frame_size)59 bool CheckStackOverflow(Thread* self, size_t frame_size)
60 REQUIRES_SHARED(Locks::mutator_lock_) {
61 bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
62 uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
63 if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
64 ThrowStackOverflowError(self);
65 return false;
66 }
67 return true;
68 }
69
ShouldStayInSwitchInterpreter(ArtMethod * method)70 bool ShouldStayInSwitchInterpreter(ArtMethod* method)
71 REQUIRES_SHARED(Locks::mutator_lock_) {
72 if (!Runtime::Current()->IsStarted()) {
73 // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where
74 // we are doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use
75 // the image pointer size here and this may case an overflow if it is called from the
76 // compiler. b/62402160
77 return true;
78 }
79
80 if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
81 return false;
82 }
83
84 if (Thread::Current()->IsForceInterpreter()) {
85 // Force the use of interpreter when it is required by the debugger.
86 return true;
87 }
88
89 if (Thread::Current()->IsAsyncExceptionPending()) {
90 // Force use of interpreter to handle async-exceptions
91 return true;
92 }
93
94 const void* code = method->GetEntryPointFromQuickCompiledCode();
95 if (code == GetQuickInstrumentationEntryPoint()) {
96 code = Runtime::Current()->GetInstrumentation()->GetCodeForInvoke(method);
97 }
98
99 return Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(code);
100 }
101
// Reports method-exit and watched-frame-popped instrumentation events for
// |frame|. Returns true when execution may proceed normally, false when a
// listener left an exception pending on |self|. |result| holds the method's
// return value; it is templated so reference results can be passed as a
// GC-visible handle instead of a raw JValue.
template <typename T>
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          T& result) {
  bool had_event = false;
  // We can get additional ForcePopFrame requests during handling of these events. We should
  // respect these and send additional instrumentation events.
  do {
    frame.SetForcePopFrame(false);
    if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetSkipMethodExitEvents())) {
      had_event = true;
      instrumentation->MethodExitEvent(self, method, instrumentation::OptionalFrame{frame}, result);
    }
    // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
    if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
      had_event = true;
      instrumentation->WatchedFramePopped(self, frame);
    }
  } while (UNLIKELY(frame.GetForcePopFrame()));
  if (UNLIKELY(had_event)) {
    // A listener may have thrown; only in that case is the exit abnormal.
    return !self->IsExceptionPending();
  } else {
    return true;
  }
}
129
// Explicit instantiations for the two result types the interpreter uses: a
// GC-visible handle for reference results, and a plain JValue.
template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          MutableHandle<mirror::Object>& result);

template
bool SendMethodExitEvents(Thread* self,
                          const instrumentation::Instrumentation* instrumentation,
                          ShadowFrame& frame,
                          ArtMethod* method,
                          JValue& result);
143
// We execute any instrumentation events that are triggered by this exception and change the
// shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
// Return true if we should continue executing in the current method and false if we need to go up
// the stack to find an exception handler.
// We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
// TODO We should have a better way to skip instrumentation reporting or possibly rethink that
// behavior.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            bool skip_listeners,
                            bool skip_throw_listener) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  // Hold the pending exception in a handle: listener callbacks and the catch
  // block search below may allocate or suspend.
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  const instrumentation::Instrumentation* instrumentation =
      Runtime::Current()->GetInstrumentation();
  if (!skip_throw_listener &&
      instrumentation->HasExceptionThrownListeners() &&
      self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    // See b/65049545 for why we don't need to check to see if the exception has changed.
    instrumentation->ExceptionThrownEvent(self, exception.Get());
    if (shadow_frame.GetForcePopFrame()) {
      // We will check in the caller for GetForcePopFrame again. We need to bail out early to
      // prevent an ExceptionHandledEvent from also being sent before popping.
      return true;
    }
  }
  bool clear_exception = false;
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
  if (found_dex_pc == dex::kDexNoIndex) {
    // No handler in this method: report the unwind (unless skipped) and let
    // the caller pop the frame.
    if (!skip_listeners) {
      if (shadow_frame.NeedsNotifyPop()) {
        instrumentation->WatchedFramePopped(self, shadow_frame);
        if (shadow_frame.GetForcePopFrame()) {
          // We will check in the caller for GetForcePopFrame again. We need to bail out early to
          // prevent an ExceptionHandledEvent from also being sent before popping and to ensure we
          // handle other types of non-standard-exits.
          return true;
        }
      }
      // Exception is not caught by the current method. We will unwind to the
      // caller. Notify any instrumentation listener.
      instrumentation->MethodUnwindEvent(self,
                                         shadow_frame.GetThisObject(),
                                         shadow_frame.GetMethod(),
                                         shadow_frame.GetDexPC());
    }
    return shadow_frame.GetForcePopFrame();
  } else {
    // Found a handler: point the frame at it.
    shadow_frame.SetDexPC(found_dex_pc);
    if (!skip_listeners && instrumentation->HasExceptionHandledListeners()) {
      self->ClearException();
      instrumentation->ExceptionHandledEvent(self, exception.Get());
      if (UNLIKELY(self->IsExceptionPending())) {
        // Exception handled event threw an exception. Try to find the handler for this one.
        return MoveToExceptionHandler(self, shadow_frame, skip_listeners, skip_throw_listener);
      } else if (!clear_exception) {
        // Re-install the original exception for the handler to inspect.
        self->SetException(exception.Get());
      }
    } else if (clear_exception) {
      self->ClearException();
    }
    return true;
  }
}
210
UnexpectedOpcode(const Instruction * inst,const ShadowFrame & shadow_frame)211 void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
212 LOG(FATAL) << "Unexpected instruction: "
213 << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
214 UNREACHABLE();
215 }
216
// Aborts the active transaction with a printf-style formatted message.
// Varargs convenience wrapper around AbortTransactionV.
void AbortTransactionF(Thread* self, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  AbortTransactionV(self, fmt, args);
  va_end(args);
}
223
AbortTransactionV(Thread * self,const char * fmt,va_list args)224 void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
225 CHECK(Runtime::Current()->IsActiveTransaction());
226 // Constructs abort message.
227 std::string abort_msg;
228 android::base::StringAppendV(&abort_msg, fmt, args);
229 // Throws an exception so we can abort the transaction and rollback every change.
230 Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
231 }
232
// START DECLARATIONS :
//
// These additional declarations are required because clang complains
// about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
// The matching definitions appear later in this file.
//

template <bool is_range, bool do_assignability_check>
static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
                                       Thread* self,
                                       ShadowFrame& shadow_frame,
                                       JValue* result,
                                       uint16_t number_of_inputs,
                                       uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                       uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);

template <bool is_range>
ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
                                 ShadowFrame* callee_frame,
                                 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                 const size_t first_src_reg,
                                 const size_t first_dest_reg,
                                 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);

// END DECLARATIONS.
257
// Transfers control from the interpreter to the compiled-code entry point of
// the method in |shadow_frame|. Arguments are read from the shadow frame's
// vregs starting at |arg_offset|; the callee's return value is stored in
// |result|. |caller| may be null and is only used to notify the JIT of the
// interpreter-to-compiled-code transition.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaringClass = method->GetDeclaringClass();
    if (UNLIKELY(!declaringClass->IsVisiblyInitialized())) {
      // Push the frame so it is visible to stack walks while initialization
      // runs (EnsureInitialized may block / allocate).
      self->PushShadowFrame(shadow_frame);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaringClass));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
                        self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
        self->PopShadowFrame();
        DCHECK(self->IsExceptionPending());
        return;
      }
      self->PopShadowFrame();
      DCHECK(h_class->IsInitializing());
      // Reload from shadow frame in case the method moved, this is faster than adding a handle.
      method = shadow_frame->GetMethod();
    }
  }
  // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
  // check that the arg_offset isn't greater than the number of registers. A stronger check is
  // difficult since the frame may contain space for all the registers in the method, or only enough
  // space for the arguments.
  if (kIsDebugBuild) {
    if (method->GetCodeItem() == nullptr) {
      DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
    } else {
      DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
    }
  }
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  // Invoke through the compiled-code path, passing the vreg array slice that
  // holds the arguments.
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}
303
SetStringInitValueToAllAliases(ShadowFrame * shadow_frame,uint16_t this_obj_vreg,JValue result)304 void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
305 uint16_t this_obj_vreg,
306 JValue result)
307 REQUIRES_SHARED(Locks::mutator_lock_) {
308 ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
309 if (existing == nullptr) {
310 // If it's null, we come from compiled code that was deoptimized. Nothing to do,
311 // as the compiler verified there was no alias.
312 // Set the new string result of the StringFactory.
313 shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
314 return;
315 }
316 // Set the string init result into all aliases.
317 for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
318 if (shadow_frame->GetVRegReference(i) == existing) {
319 DCHECK_EQ(shadow_frame->GetVRegReference(i),
320 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
321 shadow_frame->SetVRegReference(i, result.GetL());
322 DCHECK_EQ(shadow_frame->GetVRegReference(i),
323 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
324 }
325 }
326 }
327
// Shared implementation of invoke-polymorphic[/range] on a MethodHandle
// receiver. Resolves the callsite MethodType from the proto index encoded in
// the instruction, then dispatches through MethodHandleInvoke or
// MethodHandleInvokeExact. Returns false with a pending exception on failure.
template<bool is_range>
static bool DoMethodHandleInvokeCommon(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       bool invoke_exact,
                                       const Instruction* inst,
                                       uint16_t inst_data,
                                       JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Invoke-polymorphic instructions always take a receiver. i.e, they are never static.
  const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
  const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();

  // Initialize |result| to 0 as this is the default return value for
  // polymorphic invocations of method handle types with void return
  // and provides a sensible return result in error cases.
  result->SetJ(0);

  // The invoke_method_idx here is the name of the signature polymorphic method that
  // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
  // and not the method that we'll dispatch to in the end.
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
  if (UNLIKELY(method_handle == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic call site.
  const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
  const dex::ProtoIndex callsite_proto_id(vRegH);

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // There is a common dispatch method for method handles that takes
  // arguments either from a range or an array of arguments depending
  // on whether the DEX instruction is invoke-polymorphic/range or
  // invoke-polymorphic. The array here is for the latter.
  if (UNLIKELY(is_range)) {
    // VRegC is the register holding the method handle. Arguments passed
    // to the method handle's target do not include the method handle.
    RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  } else {
    // Get the register arguments for the invoke.
    uint32_t args[Instruction::kMaxVarArgRegs] = {};
    inst->GetVarArgs(args, inst_data);
    // Drop the first register which is the method handle performing the invoke.
    memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
    args[Instruction::kMaxVarArgRegs - 1] = 0;
    VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  }
}
428
DoMethodHandleInvokeExact(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)429 bool DoMethodHandleInvokeExact(Thread* self,
430 ShadowFrame& shadow_frame,
431 const Instruction* inst,
432 uint16_t inst_data,
433 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
434 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
435 static const bool kIsRange = false;
436 return DoMethodHandleInvokeCommon<kIsRange>(
437 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
438 } else {
439 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
440 static const bool kIsRange = true;
441 return DoMethodHandleInvokeCommon<kIsRange>(
442 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
443 }
444 }
445
DoMethodHandleInvoke(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)446 bool DoMethodHandleInvoke(Thread* self,
447 ShadowFrame& shadow_frame,
448 const Instruction* inst,
449 uint16_t inst_data,
450 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
451 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
452 static const bool kIsRange = false;
453 return DoMethodHandleInvokeCommon<kIsRange>(
454 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
455 } else {
456 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
457 static const bool kIsRange = true;
458 return DoMethodHandleInvokeCommon<kIsRange>(
459 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
460 }
461 }
462
// Shared implementation of the VarHandle accessor intrinsics. Resolves the
// callsite MethodType from the instruction's proto index, reads the VarHandle
// receiver from vRegC, and forwards to VarHandleInvokeAccessor with operands
// that exclude the receiver register. Returns false with a pending exception
// on failure.
static bool DoVarHandleInvokeCommon(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    const Instruction* inst,
                                    uint16_t inst_data,
                                    JValue* result,
                                    mirror::VarHandle::AccessMode access_mode)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }

  StackHandleScope<2> hs(self);
  // Var-args (45cc) vs range (4rcc) instruction format determines where the
  // proto index and receiver register live.
  bool is_var_args = inst->HasVarArgs();
  const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, dex::ProtoIndex(vRegH), shadow_frame.GetMethod())));
  // This implies we couldn't resolve one or more types in this VarHandle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
  ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(vRegC));
  // NOTE(review): there is no explicit null check on the receiver here —
  // presumably VarHandleInvokeAccessor handles a null handle; confirm.
  Handle<mirror::VarHandle> var_handle(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
  if (is_var_args) {
    uint32_t args[Instruction::kMaxVarArgRegs];
    inst->GetVarArgs(args, inst_data);
    // Wrap the raw register list, then strip the leading receiver register.
    VarArgsInstructionOperands all_operands(args, inst->VRegA_45cc());
    NoReceiverInstructionOperands operands(&all_operands);
    return VarHandleInvokeAccessor(self,
                                   shadow_frame,
                                   var_handle,
                                   callsite_type,
                                   access_mode,
                                   &operands,
                                   result);
  } else {
    // Range form: operands are a contiguous run of registers starting at
    // vRegC; again strip the leading receiver register.
    RangeInstructionOperands all_operands(inst->VRegC_4rcc(), inst->VRegA_4rcc());
    NoReceiverInstructionOperands operands(&all_operands);
    return VarHandleInvokeAccessor(self,
                                   shadow_frame,
                                   var_handle,
                                   callsite_type,
                                   access_mode,
                                   &operands,
                                   result);
  }
}
514
// Generates one interpreter entry point per VarHandle accessor, e.g.
// DoVarHandleGet(...). Each generated function forwards to
// DoVarHandleInvokeCommon with the matching mirror::VarHandle::AccessMode.
#define DO_VAR_HANDLE_ACCESSOR(_access_mode)                                \
bool DoVarHandle ## _access_mode(Thread* self,                              \
                                 ShadowFrame& shadow_frame,                 \
                                 const Instruction* inst,                   \
                                 uint16_t inst_data,                        \
                                 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) { \
  const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode; \
  return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
}

DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
DO_VAR_HANDLE_ACCESSOR(Get)
DO_VAR_HANDLE_ACCESSOR(GetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndSet)
DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
DO_VAR_HANDLE_ACCESSOR(GetOpaque)
DO_VAR_HANDLE_ACCESSOR(GetVolatile)
DO_VAR_HANDLE_ACCESSOR(Set)
DO_VAR_HANDLE_ACCESSOR(SetOpaque)
DO_VAR_HANDLE_ACCESSOR(SetRelease)
DO_VAR_HANDLE_ACCESSOR(SetVolatile)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)

#undef DO_VAR_HANDLE_ACCESSOR
558
// Interpreter entry for invoke-polymorphic[/range]. Resolves the symbolic
// signature-polymorphic method (MethodHandle.invoke/invokeExact or a
// VarHandle accessor) and dispatches to the Do<Name> handler that matches
// the resolved method's intrinsic identifier.
template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
                         ShadowFrame& shadow_frame,
                         const Instruction* inst,
                         uint16_t inst_data,
                         JValue* result) {
  const int invoke_method_idx = inst->VRegB();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* invoke_method =
      class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
          self, invoke_method_idx, shadow_frame.GetMethod(), kPolymorphic);

  // Ensure intrinsic identifiers are initialized.
  DCHECK(invoke_method->IsIntrinsic());

  // Dispatch based on intrinsic identifier associated with method.
  // The case labels are generated from intrinsics_list.h; each expands to a
  // call of the corresponding Do<Name> handler defined above.
  switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
#define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
    case Intrinsics::k##Name:                           \
      return Do ## Name(self, shadow_frame, inst, inst_data, result);
#include "intrinsics_list.h"
    SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
#undef INTRINSICS_LIST
#undef SIGNATURE_POLYMORPHIC_INTRINSICS_LIST
#undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
    default:
      LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
      UNREACHABLE();
      return false;
  }
}
590
// Reinterprets a raw jvalue from the encoded bootstrap argument array as a
// JValue, reading the widest (64-bit) member so no bits are lost.
static JValue ConvertScalarBootstrapArgument(jvalue value) {
  // value either contains a primitive scalar value if it corresponds
  // to a primitive type, or it contains an integer value if it
  // corresponds to an object instance reference id (e.g. a string id).
  return JValue::FromPrimitive(value.j);
}
597
// Maps an encoded bootstrap argument value type to the mirror::Class used
// for the corresponding bootstrap method parameter. Returns null for value
// types that cannot appear as bootstrap arguments.
static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
      // These types are disallowed by JVMS. Treat as integers. This
      // will result in CCE's being raised if the BSM has one of these
      // types.
    case EncodedArrayValueIterator::ValueType::kInt:
      return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
    case EncodedArrayValueIterator::ValueType::kLong:
      return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
    case EncodedArrayValueIterator::ValueType::kFloat:
      return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
    case EncodedArrayValueIterator::ValueType::kDouble:
      return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodType:
      return GetClassRoot<mirror::MethodType>(class_roots);
    case EncodedArrayValueIterator::ValueType::kMethodHandle:
      return GetClassRoot<mirror::MethodHandle>(class_roots);
    case EncodedArrayValueIterator::ValueType::kString:
      return GetClassRoot<mirror::String>();
    case EncodedArrayValueIterator::ValueType::kType:
      return GetClassRoot<mirror::Class>();
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unsupported as bootstrap arguments.
      return nullptr;
  }
}
635
// Decodes one bootstrap method argument. |encoded_value| holds either a
// scalar (I/J/F/D) or a DEX-file index naming a reference to materialize;
// the decoded result is written to |decoded_value|. Reference types are
// resolved through the ClassLinker using |referrer|'s dex file/class loader.
// Returns false with a pending exception if resolution fails.
static bool GetArgumentForBootstrapMethod(Thread* self,
                                          ArtMethod* referrer,
                                          EncodedArrayValueIterator::ValueType type,
                                          const JValue* encoded_value,
                                          JValue* decoded_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The encoded_value contains either a scalar value (IJDF) or a
  // scalar DEX file index to a reference type to be materialized.
  switch (type) {
    case EncodedArrayValueIterator::ValueType::kInt:
    case EncodedArrayValueIterator::ValueType::kFloat:
      decoded_value->SetI(encoded_value->GetI());
      return true;
    case EncodedArrayValueIterator::ValueType::kLong:
    case EncodedArrayValueIterator::ValueType::kDouble:
      decoded_value->SetJ(encoded_value->GetJ());
      return true;
    case EncodedArrayValueIterator::ValueType::kMethodType: {
      StackHandleScope<2> hs(self);
      Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
      Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
      // The proto index is read as the 16-bit (char) component of the value.
      dex::ProtoIndex proto_idx(encoded_value->GetC());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodType> o =
          cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kMethodHandle: {
      uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kString: {
      dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kType: {
      dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
      ClassLinker* cl = Runtime::Current()->GetClassLinker();
      ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
      if (UNLIKELY(o.IsNull())) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      decoded_value->SetL(o);
      return true;
    }
    case EncodedArrayValueIterator::ValueType::kBoolean:
    case EncodedArrayValueIterator::ValueType::kByte:
    case EncodedArrayValueIterator::ValueType::kChar:
    case EncodedArrayValueIterator::ValueType::kShort:
    case EncodedArrayValueIterator::ValueType::kField:
    case EncodedArrayValueIterator::ValueType::kMethod:
    case EncodedArrayValueIterator::ValueType::kEnum:
    case EncodedArrayValueIterator::ValueType::kArray:
    case EncodedArrayValueIterator::ValueType::kAnnotation:
    case EncodedArrayValueIterator::ValueType::kNull:
      // Unreachable - unsupported types that have been checked when
      // determining the effect call site type based on the bootstrap
      // argument types.
      UNREACHABLE();
  }
}
717
PackArgumentForBootstrapMethod(Thread * self,ArtMethod * referrer,CallSiteArrayValueIterator * it,ShadowFrameSetter * setter)718 static bool PackArgumentForBootstrapMethod(Thread* self,
719 ArtMethod* referrer,
720 CallSiteArrayValueIterator* it,
721 ShadowFrameSetter* setter)
722 REQUIRES_SHARED(Locks::mutator_lock_) {
723 auto type = it->GetValueType();
724 const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
725 JValue decoded_value;
726 if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
727 return false;
728 }
729 switch (it->GetValueType()) {
730 case EncodedArrayValueIterator::ValueType::kInt:
731 case EncodedArrayValueIterator::ValueType::kFloat:
732 setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
733 return true;
734 case EncodedArrayValueIterator::ValueType::kLong:
735 case EncodedArrayValueIterator::ValueType::kDouble:
736 setter->SetLong(decoded_value.GetJ());
737 return true;
738 case EncodedArrayValueIterator::ValueType::kMethodType:
739 case EncodedArrayValueIterator::ValueType::kMethodHandle:
740 case EncodedArrayValueIterator::ValueType::kString:
741 case EncodedArrayValueIterator::ValueType::kType:
742 setter->SetReference(decoded_value.GetL());
743 return true;
744 case EncodedArrayValueIterator::ValueType::kBoolean:
745 case EncodedArrayValueIterator::ValueType::kByte:
746 case EncodedArrayValueIterator::ValueType::kChar:
747 case EncodedArrayValueIterator::ValueType::kShort:
748 case EncodedArrayValueIterator::ValueType::kField:
749 case EncodedArrayValueIterator::ValueType::kMethod:
750 case EncodedArrayValueIterator::ValueType::kEnum:
751 case EncodedArrayValueIterator::ValueType::kArray:
752 case EncodedArrayValueIterator::ValueType::kAnnotation:
753 case EncodedArrayValueIterator::ValueType::kNull:
754 // Unreachable - unsupported types that have been checked when
755 // determining the effect call site type based on the bootstrap
756 // argument types.
757 UNREACHABLE();
758 }
759 }
760
// Collects the trailing static bootstrap arguments of a variable-arity
// bootstrap method into a freshly allocated array of |array_type| and
// stores that array into the bootstrap frame via |setter|.
// |array_length| is the number of trailing arguments to collect; the
// iterator |it| is expected to yield exactly those values.
// Returns false with a pending exception on failure (e.g. array
// allocation failure, or a failure to decode a reference argument).
static bool PackCollectorArrayForBootstrapMethod(Thread* self,
                                                 ArtMethod* referrer,
                                                 ObjPtr<mirror::Class> array_type,
                                                 int32_t array_length,
                                                 CallSiteArrayValueIterator* it,
                                                 ShadowFrameSetter* setter)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  JValue decoded_value;

// Allocates a mirror::<Type>Array and fills it by decoding the remaining
// iterator values.
// NOTE(review): unlike COLLECT_REFERENCE_ARRAY below, the return value of
// GetArgumentForBootstrapMethod() is not checked here - presumably
// decoding a primitive value cannot fail; confirm.
#define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type)                     \
  Handle<mirror::Type ## Array> array =                               \
      hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length)); \
  if (array.IsNull()) {                                               \
    return false;                                                     \
  }                                                                   \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {               \
    auto type = it->GetValueType();                                   \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
    const JValue encoded_value =                                      \
        ConvertScalarBootstrapArgument(it->GetJavaValue());           \
    GetArgumentForBootstrapMethod(self,                               \
                                  referrer,                           \
                                  type,                               \
                                  &encoded_value,                     \
                                  &decoded_value);                    \
    array->Set(i, decoded_value.Get ## Descriptor());                 \
  }                                                                   \
  setter->SetReference(array.Get());                                  \
  return true;

// Allocates an ObjectArray<T> of |array_type| and fills it with the decoded
// remaining iterator values. Element stores are transaction-aware: the
// Set<true>/Set<false> split selects transactional vs plain writes.
#define COLLECT_REFERENCE_ARRAY(T, Type)                              \
  Handle<mirror::ObjectArray<T>> array =                  /* NOLINT */\
      hs.NewHandle(mirror::ObjectArray<T>::Alloc(self,                \
                                                 array_type,          \
                                                 array_length));      \
  if (array.IsNull()) {                                               \
    return false;                                                     \
  }                                                                   \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {               \
    auto type = it->GetValueType();                                   \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
    const JValue encoded_value =                                      \
        ConvertScalarBootstrapArgument(it->GetJavaValue());           \
    if (!GetArgumentForBootstrapMethod(self,                          \
                                       referrer,                      \
                                       type,                          \
                                       &encoded_value,                \
                                       &decoded_value)) {             \
      return false;                                                   \
    }                                                                 \
    ObjPtr<mirror::Object> o = decoded_value.GetL();                  \
    if (Runtime::Current()->IsActiveTransaction()) {                  \
      array->Set<true>(i, ObjPtr<T>::DownCast(o));                    \
    } else {                                                          \
      array->Set<false>(i, ObjPtr<T>::DownCast(o));                   \
    }                                                                 \
  }                                                                   \
  setter->SetReference(array.Get());                                  \
  return true;

  // Dispatch on the array's component type. Only the component types
  // below are supported; anything else was rejected earlier when the
  // effective call site type was determined.
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
  if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(I, Int);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(J, Long);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(F, Float);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(D, Double);
  } else if (component_type == GetClassRoot<mirror::MethodType>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
  } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
  } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
    COLLECT_REFERENCE_ARRAY(mirror::String, String);
  } else if (component_type == GetClassRoot<mirror::Class>()) {
    COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
  } else {
    UNREACHABLE();
  }
#undef COLLECT_PRIMITIVE_ARRAY
#undef COLLECT_REFERENCE_ARRAY
}
847
// Builds the MethodType describing the invocation of the bootstrap method
// for the call site at |call_site_idx| (despite the name, the return value
// is a MethodType, not a CallSite). The first parameter is always a
// j.l.i.MethodHandles.Lookup supplied by the runtime, the remaining
// parameters mirror the static bootstrap argument types in the DEX file,
// and the return type is always j.l.i.CallSite.
// Returns null with a pending exception on failure (allocation failure or
// an unsupported bootstrap argument type).
static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
                                                                  const DexFile* dex_file,
                                                                  uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
  CallSiteArrayValueIterator it(*dex_file, csi);
  // At minimum the bootstrap method handle itself must be present.
  DCHECK_GE(it.Size(), 1u);

  StackHandleScope<2> hs(self);
  // Create array for parameter types.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ObjPtr<mirror::Class> class_array_type =
      GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
  Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                class_array_type,
                                                static_cast<int>(it.Size())));
  if (ptypes.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
  // that the runtime will construct.
  ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
  it.Next();

  // The remaining parameter types are derived from the types of
  // arguments present in the DEX file.
  int index = 1;
  while (it.HasNext()) {
    ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
    if (ptype.IsNull()) {
      ThrowClassCastException("Unsupported bootstrap argument type");
      return nullptr;
    }
    ptypes->Set(index, ptype);
    index++;
    it.Next();
  }
  // Every encoded value must have produced exactly one parameter type.
  DCHECK_EQ(static_cast<size_t>(index), it.Size());

  // By definition, the return type is always a j.l.i.CallSite.
  Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
  return mirror::MethodType::Create(self, rtype, ptypes);
}
894
// Invokes the bootstrap method of the call site at |call_site_idx| in the
// DEX file of the method executing in |shadow_frame| and returns the
// resulting j.l.i.CallSite. Handles variable-arity bootstrap methods by
// collecting trailing arguments into an array. Returns null with a pending
// exception on any failure (malformed arguments, resolution failure, type
// mismatch, BSM throwing, or a bad BSM result).
static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
                                                      ShadowFrame& shadow_frame,
                                                      uint32_t call_site_idx)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<5> hs(self);
  // There are three mandatory arguments expected from the call site
  // value array in the DEX file: the bootstrap method handle, the
  // method name to pass to the bootstrap method, and the method type
  // to pass to the bootstrap method.
  static constexpr size_t kMandatoryArgumentsCount = 3;
  ArtMethod* referrer = shadow_frame.GetMethod();
  const DexFile* dex_file = referrer->GetDexFile();
  const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
  CallSiteArrayValueIterator it(*dex_file, csi);
  if (it.Size() < kMandatoryArgumentsCount) {
    ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
                              it.Size(), kMandatoryArgumentsCount);
    return nullptr;
  }

  if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
    ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
    return nullptr;
  }

  // The first encoded value is the method handle index of the BSM itself.
  uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
  it.Next();

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodHandle> bsm =
      hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
  if (bsm.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
    // JLS suggests also accepting constructors. This is currently
    // hard as constructor invocations happen via transformers in ART
    // today. The constructor would need to be a class derived from java.lang.invoke.CallSite.
    ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
    return nullptr;
  }

  // Construct the local call site type information based on the 3
  // mandatory arguments provided by the runtime and the static arguments
  // in the DEX file. We will use these arguments to build a shadow frame.
  MutableHandle<mirror::MethodType> call_site_type =
      hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
  if (call_site_type.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Check if this BSM is targeting a variable arity method. If so,
  // we'll need to collect the trailing arguments into an array.
  Handle<mirror::Array> collector_arguments;
  int32_t collector_arguments_length;
  if (bsm->GetTargetMethod()->IsVarargs()) {
    int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
    if (number_of_bsm_parameters == 0) {
      ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
      return nullptr;
    }
    // The final BSM parameter is the collector array type.
    Handle<mirror::Class> collector_array_class =
        hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
    if (!collector_array_class->IsArrayClass()) {
      ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
      return nullptr;
    }
    // The call site may include no arguments to be collected. In this
    // case the number of arguments must be at least the number of BSM
    // parameters less the collector array.
    if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
      ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
      return nullptr;
    }
    // Check all the arguments to be collected match the collector array component type.
    for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
      if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
        ThrowClassCastException(collector_array_class->GetComponentType(),
                                call_site_type->GetPTypes()->Get(i));
        return nullptr;
      }
    }
    // Update the call site method type so it now includes the collector array.
    int32_t collector_arguments_start = number_of_bsm_parameters - 1;
    collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
    call_site_type.Assign(
        mirror::MethodType::CollectTrailingArguments(self,
                                                     call_site_type.Get(),
                                                     collector_array_class.Get(),
                                                     collector_arguments_start));
    if (call_site_type.IsNull()) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  } else {
    collector_arguments_length = 0;
  }

  // After collection, the (possibly rewritten) call site type must have
  // exactly as many parameters as the BSM.
  if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
    ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
    return nullptr;
  }

  // BSM invocation has a different set of exceptions that
  // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
  // "opportunities". Unfortunately we cannot just leave this to the
  // method handle invocation as this might generate a WMTE.
  for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
    ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
    ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
    if (!IsParameterTypeConvertible(from, to)) {
      ThrowClassCastException(from, to);
      return nullptr;
    }
  }
  if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
    ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
    return nullptr;
  }

  // Set-up a shadow frame for invoking the bootstrap method handle.
  ShadowFrameAllocaUniquePtr bootstrap_frame =
      CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
                          nullptr,
                          referrer,
                          shadow_frame.GetDexPC());
  // Register the frame as under construction so the GC can visit its
  // references while we populate it.
  ScopedStackedShadowFramePusher pusher(
      self, bootstrap_frame.get(), StackedShadowFrameType::kShadowFrameUnderConstruction);
  ShadowFrameSetter setter(bootstrap_frame.get(), 0u);

  // The first parameter is a MethodHandles lookup instance.
  Handle<mirror::Class> lookup_class =
      hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
  ObjPtr<mirror::MethodHandlesLookup> lookup =
      mirror::MethodHandlesLookup::Create(self, lookup_class);
  if (lookup.IsNull()) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }
  setter.SetReference(lookup);

  // Pack the remaining arguments into the frame.
  int number_of_arguments = call_site_type->GetNumberOfPTypes();
  int argument_index;
  for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
    if (argument_index == number_of_arguments - 1 &&
        call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
      // Final parameter of a var-args BSM: collect the trailing static
      // arguments into the collector array.
      ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
      if (!PackCollectorArrayForBootstrapMethod(self,
                                                referrer,
                                                array_type,
                                                collector_arguments_length,
                                                &it,
                                                &setter)) {
        DCHECK(self->IsExceptionPending());
        return nullptr;
      }
    } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    it.Next();
  }
  DCHECK(!it.HasNext());
  DCHECK(setter.Done());

  // Invoke the bootstrap method handle.
  JValue result;
  RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
  bool invoke_success = MethodHandleInvoke(self,
                                           *bootstrap_frame,
                                           bsm,
                                           call_site_type,
                                           &operands,
                                           &result);
  if (!invoke_success) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
  if (UNLIKELY(object.IsNull())) {
    // This will typically be for LambdaMetafactory which is not supported.
    ThrowClassCastException("Bootstrap method returned null");
    return nullptr;
  }

  // Check the result type is a subclass of j.l.i.CallSite.
  ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
  if (UNLIKELY(!object->InstanceOf(call_site_class))) {
    ThrowClassCastException(object->GetClass(), call_site_class);
    return nullptr;
  }

  // Check the call site target is not null as we're going to invoke it.
  ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
  ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
  if (UNLIKELY(target == nullptr)) {
    ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
    return nullptr;
  }
  return call_site;
}
1101
1102 namespace {
1103
DoResolveCallSite(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx)1104 ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
1105 ShadowFrame& shadow_frame,
1106 uint32_t call_site_idx)
1107 REQUIRES_SHARED(Locks::mutator_lock_) {
1108 StackHandleScope<1> hs(self);
1109 Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
1110
1111 // Get the call site from the DexCache if present.
1112 ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
1113 if (LIKELY(call_site != nullptr)) {
1114 return call_site;
1115 }
1116
1117 // Invoke the bootstrap method to get a candidate call site.
1118 call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
1119 if (UNLIKELY(call_site == nullptr)) {
1120 if (!self->GetException()->IsError()) {
1121 // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
1122 ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
1123 call_site_idx);
1124 }
1125 return nullptr;
1126 }
1127
1128 // Attempt to place the candidate call site into the DexCache, return the winning call site.
1129 return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
1130 }
1131
1132 } // namespace
1133
DoInvokeCustom(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx,const InstructionOperands * operands,JValue * result)1134 bool DoInvokeCustom(Thread* self,
1135 ShadowFrame& shadow_frame,
1136 uint32_t call_site_idx,
1137 const InstructionOperands* operands,
1138 JValue* result) {
1139 // Make sure to check for async exceptions
1140 if (UNLIKELY(self->ObserveAsyncException())) {
1141 return false;
1142 }
1143
1144 // invoke-custom is not supported in transactions. In transactions
1145 // there is a limited set of types supported. invoke-custom allows
1146 // running arbitrary code and instantiating arbitrary types.
1147 CHECK(!Runtime::Current()->IsActiveTransaction());
1148
1149 ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1150 if (call_site.IsNull()) {
1151 DCHECK(self->IsExceptionPending());
1152 return false;
1153 }
1154
1155 StackHandleScope<2> hs(self);
1156 Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1157 Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1158 DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1159 << " call_site_idx" << call_site_idx;
1160 return MethodHandleInvokeExact(self,
1161 shadow_frame,
1162 target,
1163 target_method_type,
1164 operands,
1165 result);
1166 }
1167
1168 // Assign register 'src_reg' from shadow_frame to register 'dest_reg' into new_shadow_frame.
AssignRegister(ShadowFrame * new_shadow_frame,const ShadowFrame & shadow_frame,size_t dest_reg,size_t src_reg)1169 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1170 size_t dest_reg, size_t src_reg)
1171 REQUIRES_SHARED(Locks::mutator_lock_) {
1172 // Uint required, so that sign extension does not make this wrong on 64b systems
1173 uint32_t src_value = shadow_frame.GetVReg(src_reg);
1174 ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1175
1176 // If both register locations contains the same value, the register probably holds a reference.
1177 // Note: As an optimization, non-moving collectors leave a stale reference value
1178 // in the references array even after the original vreg was overwritten to a non-reference.
1179 if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1180 new_shadow_frame->SetVRegReference(dest_reg, o);
1181 } else {
1182 new_shadow_frame->SetVReg(dest_reg, src_value);
1183 }
1184 }
1185
1186 template <bool is_range>
CopyRegisters(ShadowFrame & caller_frame,ShadowFrame * callee_frame,const uint32_t (& arg)[Instruction::kMaxVarArgRegs],const size_t first_src_reg,const size_t first_dest_reg,const size_t num_regs)1187 inline void CopyRegisters(ShadowFrame& caller_frame,
1188 ShadowFrame* callee_frame,
1189 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1190 const size_t first_src_reg,
1191 const size_t first_dest_reg,
1192 const size_t num_regs) {
1193 if (is_range) {
1194 const size_t dest_reg_bound = first_dest_reg + num_regs;
1195 for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1196 ++dest_reg, ++src_reg) {
1197 AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1198 }
1199 } else {
1200 DCHECK_LE(num_regs, arraysize(arg));
1201
1202 for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1203 AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1204 }
1205 }
1206 }
1207
// Shared invoke path for range and non-range calls. Builds the callee's
// shadow frame, copies the arguments into it (optionally running the
// verifier's assignability checks), performs the call, and propagates the
// String.<init> -> StringFactory rewrite back to aliasing vregs.
// |number_of_inputs| is the number of argument vregs; for non-range
// invokes |arg| holds the explicit vreg list, for range invokes |vregC|
// is the first vreg of the contiguous range.
// Returns false if an exception is pending after the call.
template <bool is_range,
          bool do_assignability_check>
static inline bool DoCallCommon(ArtMethod* called_method,
                                Thread* self,
                                ShadowFrame& shadow_frame,
                                JValue* result,
                                uint16_t number_of_inputs,
                                uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                uint32_t vregC) {
  bool string_init = false;
  // Replace calls to String.<init> with equivalent StringFactory call.
  if (UNLIKELY(called_method->GetDeclaringClass()->IsStringClass()
               && called_method->IsConstructor())) {
    called_method = WellKnownClasses::StringInitToStringFactory(called_method);
    string_init = true;
  }

  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  // Number of registers for the callee's call frame.
  uint16_t num_regs;
  // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
  // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
  // entrypoint to use once we start building the shadow frame.

  const bool use_interpreter_entrypoint = ShouldStayInSwitchInterpreter(called_method);
  if (LIKELY(accessor.HasCodeItem())) {
    // When transitioning to compiled code, space only needs to be reserved for the input registers.
    // The rest of the frame gets discarded. This also prevents accessing the called method's code
    // item, saving memory by keeping code items of compiled code untouched.
    if (!use_interpreter_entrypoint) {
      DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
      num_regs = number_of_inputs;
    } else {
      num_regs = accessor.RegistersSize();
      DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
    }
  } else {
    // Native and proxy methods have no code item; the frame only holds inputs.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = number_of_inputs;
  }

  // Hack for String init:
  //
  // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
  //         invoke-x StringFactory(a, b, c, ...)
  // by effectively dropping the first virtual register from the invoke.
  //
  // (at this point the ArtMethod has already been replaced,
  // so we just need to fix-up the arguments)
  //
  // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
  // to handle the compiler optimization of replacing `this` with null without
  // throwing NullPointerException.
  uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
  if (UNLIKELY(string_init)) {
    DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.

    // The new StringFactory call is static and has one fewer argument.
    if (!accessor.HasCodeItem()) {
      DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
      num_regs--;
    }  // else ... don't need to change num_regs since it comes up from the string_init's code item
    number_of_inputs--;

    // Rewrite the var-args, dropping the 0th argument ("this")
    for (uint32_t i = 1; i < arraysize(arg); ++i) {
      arg[i - 1] = arg[i];
    }
    arg[arraysize(arg) - 1] = 0;

    // Rewrite the non-var-arg case
    vregC++;  // Skips the 0th vreg in the range ("this").
  }

  // Parameter registers go at the end of the shadow frame.
  DCHECK_GE(num_regs, number_of_inputs);
  size_t first_dest_reg = num_regs - number_of_inputs;
  DCHECK_NE(first_dest_reg, (size_t)-1);

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize new shadow frame by copying the registers from the callee shadow frame.
  if (do_assignability_check) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    self->EndAssertNoThreadSuspension(old_cause);

    // ArtMethod here is needed to check type information of the call site against the callee.
    // Type information is retrieved from a DexFile/DexCache for that respective declared method.
    //
    // As a special case for proxy methods, which are not dex-backed,
    // we have to retrieve type information from the proxy's method
    // interface method instead (which is dex backed since proxies are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do runtime check on reference assignment. We need to load the shorty
    // to get the exact type of each reference argument.
    const dex::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);

    // Handle receiver apart since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (do_assignability_check && o != nullptr) {
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and GetClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->ResolveClassFromTypeIndex(type_idx);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o);
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
               static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    // Fast path: raw register copy, no type checking needed.
    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);

  // Propagate the StringFactory result to every vreg aliasing the original
  // uninitialized "this" String.
  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}
1412
1413 template<bool is_range, bool do_assignability_check>
DoCall(ArtMethod * called_method,Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)1414 bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
1415 const Instruction* inst, uint16_t inst_data, JValue* result) {
1416 // Argument word count.
1417 const uint16_t number_of_inputs =
1418 (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);
1419
1420 // TODO: find a cleaner way to separate non-range and range information without duplicating
1421 // code.
1422 uint32_t arg[Instruction::kMaxVarArgRegs] = {}; // only used in invoke-XXX.
1423 uint32_t vregC = 0;
1424 if (is_range) {
1425 vregC = inst->VRegC_3rc();
1426 } else {
1427 vregC = inst->VRegC_35c();
1428 inst->GetVarArgs(arg, inst_data);
1429 }
1430
1431 return DoCallCommon<is_range, do_assignability_check>(
1432 called_method, self, shadow_frame,
1433 result, number_of_inputs, arg, vregC);
1434 }
1435
// Implements FILLED_NEW_ARRAY and FILLED_NEW_ARRAY_RANGE: allocates an
// array of the instruction's type and fills it from the argument vregs.
// Only 'int' and reference component types are supported; other primitive
// component types raise an exception. On success stores the new array in
// |result| and returns true; on failure returns false with a pending
// exception.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  // Only int[] and reference arrays are supported by this instruction.
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;   // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  // Copy each source vreg into the array; stores are transaction-aware via
  // the transaction_active template parameter.
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}
1507
1508 // TODO: Use ObjPtr here.
1509 template<typename T>
RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,int32_t count)1510 static void RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,
1511 int32_t count)
1512 REQUIRES_SHARED(Locks::mutator_lock_) {
1513 Runtime* runtime = Runtime::Current();
1514 for (int32_t i = 0; i < count; ++i) {
1515 runtime->RecordWriteArray(array.Ptr(), i, array->GetWithoutChecks(i));
1516 }
1517 }
1518
RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array,int32_t count)1519 void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
1520 REQUIRES_SHARED(Locks::mutator_lock_) {
1521 DCHECK(Runtime::Current()->IsActiveTransaction());
1522 DCHECK(array != nullptr);
1523 DCHECK_LE(count, array->GetLength());
1524 Primitive::Type primitive_component_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
1525 switch (primitive_component_type) {
1526 case Primitive::kPrimBoolean:
1527 RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
1528 break;
1529 case Primitive::kPrimByte:
1530 RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
1531 break;
1532 case Primitive::kPrimChar:
1533 RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
1534 break;
1535 case Primitive::kPrimShort:
1536 RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
1537 break;
1538 case Primitive::kPrimInt:
1539 RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
1540 break;
1541 case Primitive::kPrimFloat:
1542 RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
1543 break;
1544 case Primitive::kPrimLong:
1545 RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
1546 break;
1547 case Primitive::kPrimDouble:
1548 RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count);
1549 break;
1550 default:
1551 LOG(FATAL) << "Unsupported primitive type " << primitive_component_type
1552 << " in fill-array-data";
1553 UNREACHABLE();
1554 }
1555 }
1556
// Explicit DoCall template function declarations.
// The template definitions live in this translation unit, so every
// <is_range, do_assignability_check> combination used by the interpreter entry
// points must be explicitly instantiated here for other TUs to link against.
#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range, _do_assignability_check) \
template REQUIRES_SHARED(Locks::mutator_lock_) \
bool DoCall<_is_range, _do_assignability_check>(ArtMethod* method, Thread* self, \
ShadowFrame& shadow_frame, \
const Instruction* inst, uint16_t inst_data, \
JValue* result)
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, true);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL

// Explicit DoInvokePolymorphic template function declarations.
// Instantiated for both the regular and /range forms of invoke-polymorphic.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range) \
template REQUIRES_SHARED(Locks::mutator_lock_) \
bool DoInvokePolymorphic<_is_range>( \
Thread* self, ShadowFrame& shadow_frame, const Instruction* inst, \
uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL

// Explicit DoFilledNewArray template function declarations.
// All eight combinations of <is_range, do_access_check, transaction_active>
// are instantiated; the inner macro expands the four non-transaction axes.
#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _check, _transaction_active) \
template REQUIRES_SHARED(Locks::mutator_lock_) \
bool DoFilledNewArray<_is_range_, _check, _transaction_active>(const Instruction* inst, \
const ShadowFrame& shadow_frame, \
Thread* self, JValue* result)
#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active) \
EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, false, _transaction_active); \
EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, true, _transaction_active); \
EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, false, _transaction_active); \
EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, true, _transaction_active)
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false);
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL
1595
1596 } // namespace interpreter
1597 } // namespace art
1598