1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "method_handles-inl.h"
18
19 #include "android-base/stringprintf.h"
20
21 #include "common_dex_operations.h"
22 #include "jvalue.h"
23 #include "jvalue-inl.h"
24 #include "mirror/emulated_stack_frame.h"
25 #include "mirror/method_handle_impl-inl.h"
26 #include "mirror/method_type.h"
27 #include "reflection.h"
28 #include "reflection-inl.h"
29 #include "well_known_classes.h"
30
31 namespace art {
32
33 using android::base::StringPrintf;
34
35 namespace {
36
// X-macro table of the Java primitive types. Each V(...) row supplies:
//   1. the Primitive::Type constant,
//   2. the ArtField/JValue accessor suffix (GetInt, GetLong, ...),
//   3. the java.lang box class name (Integer, Long, ...),
//   4. the shorty character / JValue setter suffix (SetI, SetJ, ...).
#define PRIMITIVES_LIST(V) \
  V(Primitive::kPrimBoolean, Boolean, Boolean, Z) \
  V(Primitive::kPrimByte, Byte, Byte, B) \
  V(Primitive::kPrimChar, Char, Character, C) \
  V(Primitive::kPrimShort, Short, Short, S) \
  V(Primitive::kPrimInt, Int, Integer, I) \
  V(Primitive::kPrimLong, Long, Long, J) \
  V(Primitive::kPrimFloat, Float, Float, F) \
  V(Primitive::kPrimDouble, Double, Double, D)
46
47 // Assigns |type| to the primitive type associated with |klass|. Returns
48 // true iff. |klass| was a boxed type (Integer, Long etc.), false otherwise.
bool GetUnboxedPrimitiveType(ObjPtr<mirror::Class> klass, Primitive::Type* type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The raw |descriptor| pointer below must stay valid, so assert that this
  // thread cannot be suspended while we hold it.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  std::string storage;
  const char* descriptor = klass->GetDescriptor(&storage);
  // All boxed primitive classes live in java.lang; reject anything else
  // cheaply before comparing against each boxed class name.
  static const char kJavaLangPrefix[] = "Ljava/lang/";
  static const size_t kJavaLangPrefixSize = sizeof(kJavaLangPrefix) - 1;  // Exclude the '\0'.
  if (strncmp(descriptor, kJavaLangPrefix, kJavaLangPrefixSize) != 0) {
    return false;
  }

  descriptor += kJavaLangPrefixSize;
  // Compare the remainder of the descriptor ("Integer;", "Long;", ...)
  // against each boxed class name from the primitives table.
#define LOOKUP_PRIMITIVE(primitive, _, java_name, ___) \
  if (strcmp(descriptor, #java_name ";") == 0) { \
    *type = primitive; \
    return true; \
  }

  PRIMITIVES_LIST(LOOKUP_PRIMITIVE);
#undef LOOKUP_PRIMITIVE
  return false;
}
71
// Returns the java.lang box class (Integer, Long, ...) for primitive |type|,
// or null for the void and reference pseudo-types.
ObjPtr<mirror::Class> GetBoxedPrimitiveClass(Primitive::Type type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  // Resolve the box class via the cached valueOf() jmethodID rather than a
  // class-name lookup.
  jmethodID m = nullptr;
  switch (type) {
#define CASE_PRIMITIVE(primitive, _, java_name, __) \
    case primitive: \
      m = WellKnownClasses::java_lang_ ## java_name ## _valueOf; \
      break;
    PRIMITIVES_LIST(CASE_PRIMITIVE);
#undef CASE_PRIMITIVE
    case Primitive::Type::kPrimNot:
    case Primitive::Type::kPrimVoid:
      return nullptr;
  }
  return jni::DecodeArtMethod(m)->GetDeclaringClass();
}
89
GetUnboxedTypeAndValue(ObjPtr<mirror::Object> o,Primitive::Type * type,JValue * value)90 bool GetUnboxedTypeAndValue(ObjPtr<mirror::Object> o, Primitive::Type* type, JValue* value)
91 REQUIRES_SHARED(Locks::mutator_lock_) {
92 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
93 ObjPtr<mirror::Class> klass = o->GetClass();
94 ArtField* primitive_field = &klass->GetIFieldsPtr()->At(0);
95 #define CASE_PRIMITIVE(primitive, abbrev, _, shorthand) \
96 if (klass == GetBoxedPrimitiveClass(primitive)) { \
97 *type = primitive; \
98 value->Set ## shorthand(primitive_field->Get ## abbrev(o)); \
99 return true; \
100 }
101 PRIMITIVES_LIST(CASE_PRIMITIVE)
102 #undef CASE_PRIMITIVE
103 return false;
104 }
105
// kPrimNot is the only non-primitive entry in Primitive::Type.
inline bool IsReferenceType(Primitive::Type type) {
  return type == Primitive::kPrimNot;
}
109
IsPrimitiveType(Primitive::Type type)110 inline bool IsPrimitiveType(Primitive::Type type) {
111 return !IsReferenceType(type);
112 }
113
114 } // namespace
115
IsParameterTypeConvertible(ObjPtr<mirror::Class> from,ObjPtr<mirror::Class> to)116 bool IsParameterTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
117 REQUIRES_SHARED(Locks::mutator_lock_) {
118 // This function returns true if there's any conceivable conversion
119 // between |from| and |to|. It's expected this method will be used
120 // to determine if a WrongMethodTypeException should be raised. The
121 // decision logic follows the documentation for MethodType.asType().
122 if (from == to) {
123 return true;
124 }
125
126 Primitive::Type from_primitive = from->GetPrimitiveType();
127 Primitive::Type to_primitive = to->GetPrimitiveType();
128 DCHECK(from_primitive != Primitive::Type::kPrimVoid);
129 DCHECK(to_primitive != Primitive::Type::kPrimVoid);
130
131 // If |to| and |from| are references.
132 if (IsReferenceType(from_primitive) && IsReferenceType(to_primitive)) {
133 // Assignability is determined during parameter conversion when
134 // invoking the associated method handle.
135 return true;
136 }
137
138 // If |to| and |from| are primitives and a widening conversion exists.
139 if (Primitive::IsWidenable(from_primitive, to_primitive)) {
140 return true;
141 }
142
143 // If |to| is a reference and |from| is a primitive, then boxing conversion.
144 if (IsReferenceType(to_primitive) && IsPrimitiveType(from_primitive)) {
145 return to->IsAssignableFrom(GetBoxedPrimitiveClass(from_primitive));
146 }
147
148 // If |from| is a reference and |to| is a primitive, then unboxing conversion.
149 if (IsPrimitiveType(to_primitive) && IsReferenceType(from_primitive)) {
150 if (from->DescriptorEquals("Ljava/lang/Object;")) {
151 // Object might be converted into a primitive during unboxing.
152 return true;
153 }
154
155 if (Primitive::IsNumericType(to_primitive) && from->DescriptorEquals("Ljava/lang/Number;")) {
156 // Number might be unboxed into any of the number primitive types.
157 return true;
158 }
159
160 Primitive::Type unboxed_type;
161 if (GetUnboxedPrimitiveType(from, &unboxed_type)) {
162 if (unboxed_type == to_primitive) {
163 // Straightforward unboxing conversion such as Boolean => boolean.
164 return true;
165 }
166
167 // Check if widening operations for numeric primitives would work,
168 // such as Byte => byte => long.
169 return Primitive::IsWidenable(unboxed_type, to_primitive);
170 }
171 }
172
173 return false;
174 }
175
IsReturnTypeConvertible(ObjPtr<mirror::Class> from,ObjPtr<mirror::Class> to)176 bool IsReturnTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
177 REQUIRES_SHARED(Locks::mutator_lock_) {
178 if (to->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
179 // Result will be ignored.
180 return true;
181 } else if (from->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
182 // Returned value will be 0 / null.
183 return true;
184 } else {
185 // Otherwise apply usual parameter conversion rules.
186 return IsParameterTypeConvertible(from, to);
187 }
188 }
189
// Converts |value| in place from type |from| to type |to|, applying the
// primitive widening, boxing and unboxing conversions implemented below.
// On failure raises an appropriate exception (WrongMethodType, ClassCast or
// NullPointer), returns false and leaves |value| zeroed.
bool ConvertJValueCommon(
    Handle<mirror::MethodType> callsite_type,
    Handle<mirror::MethodType> callee_type,
    ObjPtr<mirror::Class> from,
    ObjPtr<mirror::Class> to,
    JValue* value) {
  // The reader may be concerned about the safety of the heap object
  // that may be in |value|. There is only one case where allocation
  // is obviously needed and that's for boxing. However, in the case
  // of boxing |value| contains a non-reference type.

  const Primitive::Type from_type = from->GetPrimitiveType();
  const Primitive::Type to_type = to->GetPrimitiveType();

  // Put incoming value into |src_value| and set return value to 0.
  // Errors and conversions from void require the return value to be 0.
  const JValue src_value(*value);
  value->SetJ(0);

  // Conversion from void sets the result to zero (done above).
  if (from_type == Primitive::kPrimVoid) {
    return true;
  }

  // This method must be called only when the types don't match.
  DCHECK(from != to);

  if (IsPrimitiveType(from_type) && IsPrimitiveType(to_type)) {
    // The source and target types are both primitives: widening conversion.
    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, to_type, src_value, value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }
    return true;
  } else if (IsReferenceType(from_type) && IsReferenceType(to_type)) {
    // They're both reference types. If "from" is null, we can pass it
    // through unchanged. If not, we must generate a cast exception if
    // |to| is not assignable from the dynamic type of |ref|.
    //
    // Playing it safe with StackHandleScope here, not expecting any allocation
    // in mirror::Class::IsAssignable().
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Class> h_to(hs.NewHandle(to));
    Handle<mirror::Object> h_obj(hs.NewHandle(src_value.GetL()));
    if (h_obj != nullptr && !to->IsAssignableFrom(h_obj->GetClass())) {
      ThrowClassCastException(h_to.Get(), h_obj->GetClass());
      return false;
    }
    value->SetL(h_obj.Get());
    return true;
  } else if (IsReferenceType(to_type)) {
    DCHECK(IsPrimitiveType(from_type));
    // The source type is a primitive and the target type is a reference, so we must box.
    // The target type may be a super class of the boxed source type, for example,
    // if the source type is int, its boxed type is java.lang.Integer, and the target
    // type could be java.lang.Number.
    Primitive::Type type;
    if (!GetUnboxedPrimitiveType(to, &type)) {
      // |to| is not itself a box class; accept it only if the source's box
      // class is assignable to it (e.g. Number or Object).
      ObjPtr<mirror::Class> boxed_from_class = GetBoxedPrimitiveClass(from_type);
      if (boxed_from_class->IsSubClass(to)) {
        type = from_type;
      } else {
        ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
        return false;
      }
    }

    // Boxing never widens first (an int cannot become a java.lang.Long), so
    // the primitive types must match exactly.
    if (UNLIKELY(from_type != type)) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    if (!ConvertPrimitiveValueNoThrow(from_type, type, src_value, value)) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    // Then perform the actual boxing, and then set the reference.
    ObjPtr<mirror::Object> boxed = BoxPrimitive(type, src_value);
    value->SetL(boxed.Ptr());
    return true;
  } else {
    // The source type is a reference and the target type is a primitive, so we must unbox.
    DCHECK(IsReferenceType(from_type));
    DCHECK(IsPrimitiveType(to_type));

    ObjPtr<mirror::Object> from_obj(src_value.GetL());
    if (UNLIKELY(from_obj == nullptr)) {
      ThrowNullPointerException(
          StringPrintf("Expected to unbox a '%s' primitive type but was returned null",
                       from->PrettyDescriptor().c_str()).c_str());
      return false;
    }

    Primitive::Type unboxed_type;
    JValue unboxed_value;
    if (UNLIKELY(!GetUnboxedTypeAndValue(from_obj, &unboxed_type, &unboxed_value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    // Unboxing may be followed by a widening conversion (Byte => byte => long).
    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(unboxed_type, to_type, unboxed_value, value))) {
      ThrowClassCastException(from, to);
      return false;
    }

    return true;
  }
}
299
300 namespace {
301
302 template <bool is_range>
CopyArgumentsFromCallerFrame(const ShadowFrame & caller_frame,ShadowFrame * callee_frame,const uint32_t (& args)[Instruction::kMaxVarArgRegs],uint32_t first_arg,const size_t first_dst_reg,const size_t num_regs)303 inline void CopyArgumentsFromCallerFrame(const ShadowFrame& caller_frame,
304 ShadowFrame* callee_frame,
305 const uint32_t (&args)[Instruction::kMaxVarArgRegs],
306 uint32_t first_arg,
307 const size_t first_dst_reg,
308 const size_t num_regs)
309 REQUIRES_SHARED(Locks::mutator_lock_) {
310 for (size_t i = 0; i < num_regs; ++i) {
311 size_t dst_reg = first_dst_reg + i;
312 size_t src_reg = is_range ? (first_arg + i) : args[i];
313 // Uint required, so that sign extension does not make this wrong on 64-bit systems
314 uint32_t src_value = caller_frame.GetVReg(src_reg);
315 ObjPtr<mirror::Object> o = caller_frame.GetVRegReference<kVerifyNone>(src_reg);
316 // If both register locations contains the same value, the register probably holds a reference.
317 // Note: As an optimization, non-moving collectors leave a stale reference value
318 // in the references array even after the original vreg was overwritten to a non-reference.
319 if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
320 callee_frame->SetVRegReference(dst_reg, o.Ptr());
321 } else {
322 callee_frame->SetVReg(dst_reg, src_value);
323 }
324 }
325 }
326
// Converts each argument from its |callsite_type| parameter type to the
// corresponding |callee_type| parameter type while copying it from
// |caller_frame| into |callee_frame|. Returns false, with a pending
// exception, on arity mismatch or failed conversion.
template <bool is_range>
inline bool ConvertAndCopyArgumentsFromCallerFrame(
    Thread* self,
    Handle<mirror::MethodType> callsite_type,
    Handle<mirror::MethodType> callee_type,
    const ShadowFrame& caller_frame,
    const uint32_t (&args)[Instruction::kMaxVarArgRegs],
    uint32_t first_arg,
    uint32_t first_dst_reg,
    ShadowFrame* callee_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ObjectArray<mirror::Class>> from_types(callsite_type->GetPTypes());
  ObjPtr<mirror::ObjectArray<mirror::Class>> to_types(callee_type->GetPTypes());

  // An arity mismatch can never be converted away.
  const int32_t num_method_params = from_types->GetLength();
  if (to_types->GetLength() != num_method_params) {
    ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
    return false;
  }

  ShadowFrameGetter<is_range> getter(first_arg, args, caller_frame);
  ShadowFrameSetter setter(callee_frame, first_dst_reg);

  // PerformConversions applies the per-argument widening/boxing/unboxing.
  return PerformConversions<ShadowFrameGetter<is_range>, ShadowFrameSetter>(self,
                                                                            callsite_type,
                                                                            callee_type,
                                                                            &getter,
                                                                            &setter,
                                                                            num_method_params);
}
357
IsMethodHandleInvokeExact(const ArtMethod * const method)358 inline bool IsMethodHandleInvokeExact(const ArtMethod* const method) {
359 if (method == jni::DecodeArtMethod(WellKnownClasses::java_lang_invoke_MethodHandle_invokeExact)) {
360 return true;
361 } else {
362 DCHECK_EQ(method, jni::DecodeArtMethod(WellKnownClasses::java_lang_invoke_MethodHandle_invoke));
363 return false;
364 }
365 }
366
// Invoke kinds occupy the leading range of the Kind enumeration, up to and
// including kLastInvokeKind.
inline bool IsInvoke(const mirror::MethodHandle::Kind handle_kind) {
  return handle_kind <= mirror::MethodHandle::Kind::kLastInvokeKind;
}
370
IsInvokeTransform(const mirror::MethodHandle::Kind handle_kind)371 inline bool IsInvokeTransform(const mirror::MethodHandle::Kind handle_kind) {
372 return (handle_kind == mirror::MethodHandle::Kind::kInvokeTransform
373 || handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform);
374 }
375
// Accessor (field get/set) kinds occupy a contiguous range of the Kind
// enumeration, delimited by kFirstAccessorKind and kLastAccessorKind.
inline bool IsFieldAccess(mirror::MethodHandle::Kind handle_kind) {
  return (handle_kind >= mirror::MethodHandle::Kind::kFirstAccessorKind
          && handle_kind <= mirror::MethodHandle::Kind::kLastAccessorKind);
}
380
381 // Calculate the number of ins for a proxy or native method, where we
382 // can't just look at the code item.
GetInsForProxyOrNativeMethod(ArtMethod * method)383 static inline size_t GetInsForProxyOrNativeMethod(ArtMethod* method)
384 REQUIRES_SHARED(Locks::mutator_lock_) {
385 DCHECK(method->IsNative() || method->IsProxyMethod());
386 method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
387 uint32_t shorty_length = 0;
388 const char* shorty = method->GetShorty(&shorty_length);
389
390 // Static methods do not include the receiver. The receiver isn't included
391 // in the shorty_length though the return value is.
392 size_t num_ins = method->IsStatic() ? shorty_length - 1 : shorty_length;
393 for (const char* c = shorty + 1; *c != '\0'; ++c) {
394 if (*c == 'J' || *c == 'D') {
395 ++num_ins;
396 }
397 }
398 return num_ins;
399 }
400
401 // Returns true iff. the callsite type for a polymorphic invoke is transformer
402 // like, i.e that it has a single input argument whose type is
403 // dalvik.system.EmulatedStackFrame.
IsCallerTransformer(Handle<mirror::MethodType> callsite_type)404 static inline bool IsCallerTransformer(Handle<mirror::MethodType> callsite_type)
405 REQUIRES_SHARED(Locks::mutator_lock_) {
406 ObjPtr<mirror::ObjectArray<mirror::Class>> param_types(callsite_type->GetPTypes());
407 if (param_types->GetLength() == 1) {
408 ObjPtr<mirror::Class> param(param_types->GetWithoutChecks(0));
409 // NB Comparing descriptor here as it appears faster in cycle simulation than using:
410 // param == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_EmulatedStackFrame)
411 // Costs are 98 vs 173 cycles per invocation.
412 return param->DescriptorEquals("Ldalvik/system/EmulatedStackFrame;");
413 }
414
415 return false;
416 }
417
// Invokes |called_method| with the arguments held in |shadow_frame| /
// |args|, converting them from |callsite_type| to |target_type| where
// necessary and converting the return value back afterwards. Returns false
// with a pending exception on failure.
template <bool is_range>
static inline bool DoCallPolymorphic(ArtMethod* called_method,
                                     Handle<mirror::MethodType> callsite_type,
                                     Handle<mirror::MethodType> target_type,
                                     Thread* self,
                                     ShadowFrame& shadow_frame,
                                     const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                     uint32_t first_arg,
                                     JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Compute method information.
  const DexFile::CodeItem* code_item = called_method->GetCodeItem();

  // Number of registers for the callee's call frame. Note that for non-exact
  // invokes, we always derive this information from the callee method. We
  // cannot guarantee during verification that the number of registers encoded
  // in the invoke is equal to the number of ins for the callee. This is because
  // some transformations (such as boxing a long -> Long or widening an
  // int -> long) will change that number.
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(code_item != nullptr)) {
    num_regs = code_item->registers_size_;
    first_dest_reg = num_regs - code_item->ins_size_;
    num_input_regs = code_item->ins_size_;
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack.
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Whether this polymorphic invoke was issued by a transformer method.
  bool is_caller_transformer = false;
  // Thread might be suspended during PerformArgumentConversions due to the
  // allocations performed during boxing, so keep the under-construction
  // frame visible to the GC via the pusher.
  {
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    if (callsite_type->IsExactMatch(target_type.Get())) {
      // This is an exact invoke, we can take the fast path of just copying all
      // registers without performing any argument conversions.
      CopyArgumentsFromCallerFrame<is_range>(shadow_frame,
                                             new_shadow_frame,
                                             args,
                                             first_arg,
                                             first_dest_reg,
                                             num_input_regs);
    } else {
      // This includes the case where we're entering this invoke-polymorphic
      // from a transformer method. In that case, the callsite_type will contain
      // a single argument of type dalvik.system.EmulatedStackFrame. In that
      // case, we'll have to unmarshal the EmulatedStackFrame into the
      // new_shadow_frame and perform argument conversions on it.
      if (IsCallerTransformer(callsite_type)) {
        is_caller_transformer = true;
        // The emulated stack frame is the first and only argument when we're coming
        // through from a transformer.
        size_t first_arg_register = (is_range) ? first_arg : args[0];
        ObjPtr<mirror::EmulatedStackFrame> emulated_stack_frame(
            reinterpret_cast<mirror::EmulatedStackFrame*>(
                shadow_frame.GetVRegReference(first_arg_register)));
        if (!emulated_stack_frame->WriteToShadowFrame(self,
                                                      target_type,
                                                      first_dest_reg,
                                                      new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(0);
          return false;
        }
      } else {
        if (!callsite_type->IsConvertible(target_type.Get())) {
          ThrowWrongMethodTypeException(target_type.Get(), callsite_type.Get());
          return false;
        }
        if (!ConvertAndCopyArgumentsFromCallerFrame<is_range>(self,
                                                              callsite_type,
                                                              target_type,
                                                              shadow_frame,
                                                              args,
                                                              first_arg,
                                                              first_dest_reg,
                                                              new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(0);
          return false;
        }
      }
    }
  }

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the caller of this signature polymorphic method was a transformer,
  // we need to copy the result back out to the emulated stack frame.
  if (is_caller_transformer) {
    StackHandleScope<2> hs(self);
    size_t first_callee_register = is_range ? (first_arg) : args[0];
    Handle<mirror::EmulatedStackFrame> emulated_stack_frame(
        hs.NewHandle(reinterpret_cast<mirror::EmulatedStackFrame*>(
            shadow_frame.GetVRegReference(first_callee_register))));
    Handle<mirror::MethodType> emulated_stack_type(hs.NewHandle(emulated_stack_frame->GetType()));
    JValue local_result;
    local_result.SetJ(result->GetJ());

    if (ConvertReturnValue(emulated_stack_type, target_type, &local_result)) {
      emulated_stack_frame->SetReturnValue(self, local_result);
      return true;
    }

    DCHECK(self->IsExceptionPending());
    return false;
  }

  return ConvertReturnValue(callsite_type, target_type, result);
}
553
// Invokes the transformer method handle |receiver|: the caller's arguments
// are marshalled into an EmulatedStackFrame and
// MethodHandle.transformInternal(EmulatedStackFrame) is invoked on it.
template <bool is_range>
static inline bool DoCallTransform(ArtMethod* called_method,
                                   Handle<mirror::MethodType> callsite_type,
                                   Handle<mirror::MethodType> callee_type,
                                   Thread* self,
                                   ShadowFrame& shadow_frame,
                                   Handle<mirror::MethodHandle> receiver,
                                   const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                   uint32_t first_arg,
                                   JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This can be fixed to two, because the method we're calling here
  // (MethodHandle.transformInternal) doesn't have any locals and the signature
  // is known :
  //
  // private MethodHandle.transformInternal(EmulatedStackFrame sf);
  //
  // This means we need only two vregs :
  // - One for the receiver object.
  // - One for the only method argument (an EmulatedStackFrame).
  static constexpr size_t kNumRegsForTransform = 2;

  const DexFile::CodeItem* code_item = called_method->GetCodeItem();
  DCHECK(code_item != nullptr);
  DCHECK_EQ(kNumRegsForTransform, code_item->registers_size_);
  DCHECK_EQ(kNumRegsForTransform, code_item->ins_size_);

  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(kNumRegsForTransform, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  StackHandleScope<1> hs(self);
  MutableHandle<mirror::EmulatedStackFrame> sf(hs.NewHandle<mirror::EmulatedStackFrame>(nullptr));
  if (IsCallerTransformer(callsite_type)) {
    // If we're entering this transformer from another transformer, we can pass
    // through the handle directly to the callee, instead of having to
    // instantiate a new stack frame based on the shadow frame.
    size_t first_callee_register = is_range ? first_arg : args[0];
    sf.Assign(reinterpret_cast<mirror::EmulatedStackFrame*>(
        shadow_frame.GetVRegReference(first_callee_register)));
  } else {
    sf.Assign(mirror::EmulatedStackFrame::CreateFromShadowFrameAndArgs<is_range>(self,
                                                                                 callsite_type,
                                                                                 callee_type,
                                                                                 shadow_frame,
                                                                                 first_arg,
                                                                                 args));

    // Something went wrong while creating the emulated stack frame, we should
    // throw the pending exception.
    if (sf == nullptr) {
      DCHECK(self->IsExceptionPending());
      return false;
    }
  }

  // vreg 0: the transformer (receiver); vreg 1: the emulated stack frame.
  new_shadow_frame->SetVRegReference(0, receiver.Get());
  new_shadow_frame->SetVRegReference(1, sf.Get());

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              0 /* first destination register */,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the called transformer method we called has returned a value, then we
  // need to copy it back to |result|.
  sf->GetReturnValue(self, result);
  return ConvertReturnValue(callsite_type, callee_type, result);
}
631
// Returns |field|'s declaring class, running its static initializer first if
// needed; returns null with a pending exception when initialization fails.
inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self, ArtField* field)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Method handle invocations on static fields should ensure class is
  // initialized. This usually happens when an instance is constructed
  // or class members referenced, but this is not guaranteed when
  // looking up method handles.
  ObjPtr<mirror::Class> klass = field->GetDeclaringClass();
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    // Wrap |klass| so the local is updated if the GC moves the class while
    // the thread is suspended inside EnsureInitialized.
    HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(&klass));
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  return klass;
}
649
// Refines |target_method| into the method that should actually be invoked
// for |handle_kind|: receiver-based dispatch for virtual/interface invokes,
// the StringFactory rewrite for String constructors on direct invokes, and
// vtable / interface lookup for super invokes. Returns null with a pending
// exception if the receiver fails verification; other kinds pass
// |target_method| through unchanged.
ArtMethod* RefineTargetMethod(Thread* self,
                              ShadowFrame& shadow_frame,
                              const mirror::MethodHandle::Kind& handle_kind,
                              Handle<mirror::MethodType> handle_type,
                              Handle<mirror::MethodType> callsite_type,
                              const uint32_t receiver_reg,
                              ArtMethod* target_method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (handle_kind == mirror::MethodHandle::Kind::kInvokeVirtual ||
      handle_kind == mirror::MethodHandle::Kind::kInvokeInterface) {
    // For virtual and interface methods ensure target_method points to
    // the actual method to invoke.
    ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(receiver_reg));
    if (IsCallerTransformer(callsite_type)) {
      // The current receiver is an emulated stack frame, the method's
      // receiver needs to be fetched from there as the emulated frame
      // will be unpacked into a new frame.
      receiver = ObjPtr<mirror::EmulatedStackFrame>::DownCast(receiver)->GetReceiver();
    }

    ObjPtr<mirror::Class> declaring_class(target_method->GetDeclaringClass());
    if (receiver == nullptr || receiver->GetClass() != declaring_class) {
      // Verify that _vRegC is an object reference and of the type expected by
      // the receiver.
      if (!VerifyObjectIsClass(receiver, declaring_class)) {
        DCHECK(self->IsExceptionPending());
        return nullptr;
      }
      return receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(
          target_method, kRuntimePointerSize);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeDirect) {
    // String constructors are a special case, they are replaced with
    // StringFactory methods.
    if (target_method->IsConstructor() && target_method->GetDeclaringClass()->IsStringClass()) {
      DCHECK(handle_type->GetRType()->IsStringClass());
      return WellKnownClasses::StringInitToStringFactory(target_method);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeSuper) {
    ObjPtr<mirror::Class> declaring_class = target_method->GetDeclaringClass();

    // Note that we're not dynamically dispatching on the type of the receiver
    // here. We use the static type of the "receiver" object that we've
    // recorded in the method handle's type, which will be the same as the
    // special caller that was specified at the point of lookup.
    ObjPtr<mirror::Class> referrer_class = handle_type->GetPTypes()->Get(0);
    if (!declaring_class->IsInterface()) {
      ObjPtr<mirror::Class> super_class = referrer_class->GetSuperClass();
      uint16_t vtable_index = target_method->GetMethodIndex();
      DCHECK(super_class != nullptr);
      DCHECK(super_class->HasVTable());
      // Note that super_class is a super of referrer_class and target_method
      // will always be declared by super_class (or one of its super classes).
      DCHECK_LT(vtable_index, super_class->GetVTableLength());
      return super_class->GetVTableEntry(vtable_index, kRuntimePointerSize);
    } else {
      return referrer_class->FindVirtualMethodForInterfaceSuper(target_method, kRuntimePointerSize);
    }
  }
  return target_method;
}
711
712 template <bool is_range>
// Performs an invoke-kind method handle invocation: refines the target
// method from the handle, then dispatches either through the transformer
// path (DoCallTransform) or the regular polymorphic path
// (DoCallPolymorphic). Returns false with a pending exception on failure.
template <bool is_range>
bool DoInvokePolymorphicMethod(Thread* self,
                               ShadowFrame& shadow_frame,
                               Handle<mirror::MethodHandle> method_handle,
                               Handle<mirror::MethodType> callsite_type,
                               const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                               uint32_t first_arg,
                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  DCHECK(IsInvoke(handle_kind));

  // Get the method we're actually invoking along with the kind of
  // invoke that is desired. We don't need to perform access checks at this
  // point because they would have been performed on our behalf at the point
  // of creation of the method handle.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = is_range ? first_arg: args[0];
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                handle_kind,
                                                handle_type,
                                                callsite_type,
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  if (IsInvokeTransform(handle_kind)) {
    // There are two cases here - method handles representing regular
    // transforms and those representing call site transforms. Method
    // handles for call site transforms adapt their MethodType to match
    // the call site. For these, the |callee_type| is the same as the
    // |callsite_type|. The VarargsCollector is such a transform, its
    // method type depends on the call site, ie. x(a) or x(a, b), or
    // x(a, b, c). The VarargsCollector invokes a variable arity method
    // with the arity arguments in an array.
    Handle<mirror::MethodType> callee_type =
        (handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform) ? callsite_type
        : handle_type;
    return DoCallTransform<is_range>(called_method,
                                     callsite_type,
                                     callee_type,
                                     self,
                                     shadow_frame,
                                     method_handle /* receiver */,
                                     args,
                                     first_arg,
                                     result);
  } else {
    return DoCallPolymorphic<is_range>(called_method,
                                       callsite_type,
                                       handle_type,
                                       self,
                                       shadow_frame,
                                       args,
                                       first_arg,
                                       result);
  }
}
776
777 // Helper for getters in invoke-polymorphic.
DoFieldGetForInvokePolymorphic(Thread * self,const ShadowFrame & shadow_frame,ObjPtr<mirror::Object> & obj,ArtField * field,Primitive::Type field_type,JValue * result)778 inline static void DoFieldGetForInvokePolymorphic(Thread* self,
779 const ShadowFrame& shadow_frame,
780 ObjPtr<mirror::Object>& obj,
781 ArtField* field,
782 Primitive::Type field_type,
783 JValue* result)
784 REQUIRES_SHARED(Locks::mutator_lock_) {
785 switch (field_type) {
786 case Primitive::kPrimBoolean:
787 DoFieldGetCommon<Primitive::kPrimBoolean>(self, shadow_frame, obj, field, result);
788 break;
789 case Primitive::kPrimByte:
790 DoFieldGetCommon<Primitive::kPrimByte>(self, shadow_frame, obj, field, result);
791 break;
792 case Primitive::kPrimChar:
793 DoFieldGetCommon<Primitive::kPrimChar>(self, shadow_frame, obj, field, result);
794 break;
795 case Primitive::kPrimShort:
796 DoFieldGetCommon<Primitive::kPrimShort>(self, shadow_frame, obj, field, result);
797 break;
798 case Primitive::kPrimInt:
799 DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
800 break;
801 case Primitive::kPrimLong:
802 DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
803 break;
804 case Primitive::kPrimFloat:
805 DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
806 break;
807 case Primitive::kPrimDouble:
808 DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
809 break;
810 case Primitive::kPrimNot:
811 DoFieldGetCommon<Primitive::kPrimNot>(self, shadow_frame, obj, field, result);
812 break;
813 case Primitive::kPrimVoid:
814 LOG(FATAL) << "Unreachable: " << field_type;
815 UNREACHABLE();
816 }
817 }
818
819 // Helper for setters in invoke-polymorphic.
DoFieldPutForInvokePolymorphic(Thread * self,ShadowFrame & shadow_frame,ObjPtr<mirror::Object> & obj,ArtField * field,Primitive::Type field_type,JValue & value)820 inline bool DoFieldPutForInvokePolymorphic(Thread* self,
821 ShadowFrame& shadow_frame,
822 ObjPtr<mirror::Object>& obj,
823 ArtField* field,
824 Primitive::Type field_type,
825 JValue& value)
826 REQUIRES_SHARED(Locks::mutator_lock_) {
827 DCHECK(!Runtime::Current()->IsActiveTransaction());
828 static const bool kTransaction = false; // Not in a transaction.
829 static const bool kAssignabilityCheck = false; // No access check.
830 switch (field_type) {
831 case Primitive::kPrimBoolean:
832 return
833 DoFieldPutCommon<Primitive::kPrimBoolean, kAssignabilityCheck, kTransaction>(
834 self, shadow_frame, obj, field, value);
835 case Primitive::kPrimByte:
836 return DoFieldPutCommon<Primitive::kPrimByte, kAssignabilityCheck, kTransaction>(
837 self, shadow_frame, obj, field, value);
838 case Primitive::kPrimChar:
839 return DoFieldPutCommon<Primitive::kPrimChar, kAssignabilityCheck, kTransaction>(
840 self, shadow_frame, obj, field, value);
841 case Primitive::kPrimShort:
842 return DoFieldPutCommon<Primitive::kPrimShort, kAssignabilityCheck, kTransaction>(
843 self, shadow_frame, obj, field, value);
844 case Primitive::kPrimInt:
845 case Primitive::kPrimFloat:
846 return DoFieldPutCommon<Primitive::kPrimInt, kAssignabilityCheck, kTransaction>(
847 self, shadow_frame, obj, field, value);
848 case Primitive::kPrimLong:
849 case Primitive::kPrimDouble:
850 return DoFieldPutCommon<Primitive::kPrimLong, kAssignabilityCheck, kTransaction>(
851 self, shadow_frame, obj, field, value);
852 case Primitive::kPrimNot:
853 return DoFieldPutCommon<Primitive::kPrimNot, kAssignabilityCheck, kTransaction>(
854 self, shadow_frame, obj, field, value);
855 case Primitive::kPrimVoid:
856 LOG(FATAL) << "Unreachable: " << field_type;
857 UNREACHABLE();
858 }
859 }
860
GetValueFromShadowFrame(const ShadowFrame & shadow_frame,Primitive::Type field_type,uint32_t vreg)861 static JValue GetValueFromShadowFrame(const ShadowFrame& shadow_frame,
862 Primitive::Type field_type,
863 uint32_t vreg)
864 REQUIRES_SHARED(Locks::mutator_lock_) {
865 JValue field_value;
866 switch (field_type) {
867 case Primitive::kPrimBoolean:
868 field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
869 break;
870 case Primitive::kPrimByte:
871 field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
872 break;
873 case Primitive::kPrimChar:
874 field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
875 break;
876 case Primitive::kPrimShort:
877 field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
878 break;
879 case Primitive::kPrimInt:
880 case Primitive::kPrimFloat:
881 field_value.SetI(shadow_frame.GetVReg(vreg));
882 break;
883 case Primitive::kPrimLong:
884 case Primitive::kPrimDouble:
885 field_value.SetJ(shadow_frame.GetVRegLong(vreg));
886 break;
887 case Primitive::kPrimNot:
888 field_value.SetL(shadow_frame.GetVRegReference(vreg));
889 break;
890 case Primitive::kPrimVoid:
891 LOG(FATAL) << "Unreachable: " << field_type;
892 UNREACHABLE();
893 }
894 return field_value;
895 }
896
// Dispatches an invoke-polymorphic on a field-accessor method handle
// (instance/static get/put). When |do_conversions| is true, argument and
// return values are converted between |callsite_type| and the handle's type;
// the exact-invoke path instantiates this with do_conversions == false.
// Returns false with an exception pending on failure.
template <bool is_range, bool do_conversions>
bool DoInvokePolymorphicFieldAccess(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    Handle<mirror::MethodHandle> method_handle,
                                    Handle<mirror::MethodType> callsite_type,
                                    const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                    uint32_t first_arg,
                                    JValue* result)
  REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  ArtField* field = method_handle->GetTargetField();
  Primitive::Type field_type = field->GetTypeAsPrimitiveType();

  switch (handle_kind) {
    case mirror::MethodHandle::kInstanceGet: {
      // The receiver object is the first argument at the call site.
      size_t obj_reg = is_range ? first_arg : args[0];
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      DoFieldGetForInvokePolymorphic(self, shadow_frame, obj, field, field_type, result);
      if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return true;
    }
    case mirror::MethodHandle::kStaticGet: {
      // For statics, ensure the declaring class is initialized; |obj| is the
      // class itself. Null means initialization threw.
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      DoFieldGetForInvokePolymorphic(self, shadow_frame, obj, field, field_type, result);
      if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return true;
    }
    case mirror::MethodHandle::kInstancePut: {
      // args = [receiver, value]; the value is parameter index 1 in the
      // call-site MethodType.
      size_t obj_reg = is_range ? first_arg : args[0];
      size_t value_reg = is_range ? (first_arg + 1) : args[1];
      const size_t kPTypeIndex = 1;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      if (do_conversions && !ConvertArgumentValue(callsite_type,
                                                  handle_type,
                                                  kPTypeIndex,
                                                  &value)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      // NOTE(review): the receiver is deliberately read AFTER the conversion
      // above — conversion may allocate/suspend, which would presumably
      // invalidate an ObjPtr held across it. Keep this ordering.
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      return DoFieldPutForInvokePolymorphic(self, shadow_frame, obj, field, field_type, value);
    }
    case mirror::MethodHandle::kStaticPut: {
      // Initialize the declaring class first; the value is the sole argument
      // (parameter index 0 in the call-site MethodType).
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      size_t value_reg = is_range ? first_arg : args[0];
      const size_t kPTypeIndex = 0;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      if (do_conversions && !ConvertArgumentValue(callsite_type,
                                                  handle_type,
                                                  kPTypeIndex,
                                                  &value)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return DoFieldPutForInvokePolymorphic(self, shadow_frame, obj, field, field_type, value);
    }
    default:
      LOG(FATAL) << "Unreachable: " << handle_kind;
      UNREACHABLE();
  }
}
984
985 template <bool is_range>
DoInvokePolymorphicNonExact(Thread * self,ShadowFrame & shadow_frame,Handle<mirror::MethodHandle> method_handle,Handle<mirror::MethodType> callsite_type,const uint32_t (& args)[Instruction::kMaxVarArgRegs],uint32_t first_arg,JValue * result)986 static inline bool DoInvokePolymorphicNonExact(Thread* self,
987 ShadowFrame& shadow_frame,
988 Handle<mirror::MethodHandle> method_handle,
989 Handle<mirror::MethodType> callsite_type,
990 const uint32_t (&args)[Instruction::kMaxVarArgRegs],
991 uint32_t first_arg,
992 JValue* result)
993 REQUIRES_SHARED(Locks::mutator_lock_) {
994 const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
995 ObjPtr<mirror::MethodType> handle_type(method_handle->GetMethodType());
996 CHECK(handle_type != nullptr);
997
998 if (IsFieldAccess(handle_kind)) {
999 DCHECK(!callsite_type->IsExactMatch(handle_type.Ptr()));
1000 if (!callsite_type->IsConvertible(handle_type.Ptr())) {
1001 ThrowWrongMethodTypeException(handle_type.Ptr(), callsite_type.Get());
1002 return false;
1003 }
1004 const bool do_convert = true;
1005 return DoInvokePolymorphicFieldAccess<is_range, do_convert>(
1006 self,
1007 shadow_frame,
1008 method_handle,
1009 callsite_type,
1010 args,
1011 first_arg,
1012 result);
1013 }
1014
1015 return DoInvokePolymorphicMethod<is_range>(self,
1016 shadow_frame,
1017 method_handle,
1018 callsite_type,
1019 args,
1020 first_arg,
1021 result);
1022 }
1023
// Fast path for invokeExact (or an invoke whose call site matches exactly):
// no argument or return conversions are performed. Field accesses and
// transforms are routed to their dedicated paths; everything else is called
// by copying arguments straight into a fresh shadow frame.
// Returns false with an exception pending on failure.
template <bool is_range>
bool DoInvokePolymorphicExact(Thread* self,
                              ShadowFrame& shadow_frame,
                              Handle<mirror::MethodHandle> method_handle,
                              Handle<mirror::MethodType> callsite_type,
                              const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                              uint32_t first_arg,
                              JValue* result)
  REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  Handle<mirror::MethodType> method_handle_type(hs.NewHandle(method_handle->GetMethodType()));
  if (IsFieldAccess(handle_kind)) {
    // Exact match means no conversions are needed for the field access.
    const bool do_convert = false;
    return DoInvokePolymorphicFieldAccess<is_range, do_convert>(
        self,
        shadow_frame,
        method_handle,
        callsite_type,
        args,
        first_arg,
        result);
  }

  // Slow-path check.
  if (IsInvokeTransform(handle_kind) || IsCallerTransformer(callsite_type)) {
    return DoInvokePolymorphicMethod<is_range>(self,
                                               shadow_frame,
                                               method_handle,
                                               callsite_type,
                                               args,
                                               first_arg,
                                               result);
  }

  // On the fast-path. This is equivalent to DoCallPolymorphic without the conversion paths.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = is_range ? first_arg : args[0];
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                handle_kind,
                                                method_handle_type,
                                                callsite_type,
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  // Compute method information: register counts come from the code item for
  // dex methods; proxy/native methods have no locals, only inputs.
  const DexFile::CodeItem* code_item = called_method->GetCodeItem();
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(code_item != nullptr)) {
    num_regs = code_item->registers_size_;
    first_dest_reg = num_regs - code_item->ins_size_;
    num_input_regs = code_item->ins_size_;
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack. Argument copying must not suspend:
  // the new frame holds raw references until PerformCall takes over.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
  CopyArgumentsFromCallerFrame<is_range>(shadow_frame,
                                         new_shadow_frame,
                                         args,
                                         first_arg,
                                         first_dest_reg,
                                         num_input_regs);
  self->EndAssertNoThreadSuspension(old_cause);

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }
  return true;
}
1119
1120 } // namespace
1121
1122 template <bool is_range>
DoInvokePolymorphic(Thread * self,ArtMethod * invoke_method,ShadowFrame & shadow_frame,Handle<mirror::MethodHandle> method_handle,Handle<mirror::MethodType> callsite_type,const uint32_t (& args)[Instruction::kMaxVarArgRegs],uint32_t first_arg,JValue * result)1123 bool DoInvokePolymorphic(Thread* self,
1124 ArtMethod* invoke_method,
1125 ShadowFrame& shadow_frame,
1126 Handle<mirror::MethodHandle> method_handle,
1127 Handle<mirror::MethodType> callsite_type,
1128 const uint32_t (&args)[Instruction::kMaxVarArgRegs],
1129 uint32_t first_arg,
1130 JValue* result)
1131 REQUIRES_SHARED(Locks::mutator_lock_) {
1132 ObjPtr<mirror::MethodType> method_handle_type = method_handle->GetMethodType();
1133 if (IsMethodHandleInvokeExact(invoke_method)) {
1134 // We need to check the nominal type of the handle in addition to the
1135 // real type. The "nominal" type is present when MethodHandle.asType is
1136 // called any handle, and results in the declared type of the handle
1137 // changing.
1138 ObjPtr<mirror::MethodType> nominal_type(method_handle->GetNominalType());
1139 if (UNLIKELY(nominal_type != nullptr)) {
1140 if (UNLIKELY(!callsite_type->IsExactMatch(nominal_type.Ptr()))) {
1141 ThrowWrongMethodTypeException(nominal_type.Ptr(), callsite_type.Get());
1142 return false;
1143 }
1144
1145 if (LIKELY(!nominal_type->IsExactMatch(method_handle_type.Ptr()))) {
1146 // Different nominal type means we have to treat as non-exact.
1147 return DoInvokePolymorphicNonExact<is_range>(self,
1148 shadow_frame,
1149 method_handle,
1150 callsite_type,
1151 args,
1152 first_arg,
1153 result);
1154 }
1155 }
1156
1157 if (!callsite_type->IsExactMatch(method_handle_type.Ptr())) {
1158 ThrowWrongMethodTypeException(method_handle_type.Ptr(), callsite_type.Get());
1159 return false;
1160 }
1161 return DoInvokePolymorphicExact<is_range>(self,
1162 shadow_frame,
1163 method_handle,
1164 callsite_type,
1165 args,
1166 first_arg,
1167 result);
1168 } else {
1169 if (UNLIKELY(callsite_type->IsExactMatch(method_handle_type.Ptr()))) {
1170 // A non-exact invoke that can be invoked exactly.
1171 return DoInvokePolymorphicExact<is_range>(self,
1172 shadow_frame,
1173 method_handle,
1174 callsite_type,
1175 args,
1176 first_arg,
1177 result);
1178 }
1179 return DoInvokePolymorphicNonExact<is_range>(self,
1180 shadow_frame,
1181 method_handle,
1182 callsite_type,
1183 args,
1184 first_arg,
1185 result);
1186 }
1187 }
1188
// Explicitly instantiate DoInvokePolymorphic for both values of |is_range|
// so the definitions in this translation unit are available to callers
// (e.g. the interpreter) that link against them.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)  \
  template REQUIRES_SHARED(Locks::mutator_lock_)                 \
  bool DoInvokePolymorphic<_is_range>(                           \
      Thread* self,                                              \
      ArtMethod* invoke_method,                                  \
      ShadowFrame& shadow_frame,                                 \
      Handle<mirror::MethodHandle> method_handle,                \
      Handle<mirror::MethodType> callsite_type,                  \
      const uint32_t (&args)[Instruction::kMaxVarArgRegs],       \
      uint32_t first_arg,                                        \
      JValue* result)

EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL
1204
1205 } // namespace art
1206