/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "method_handles-inl.h"

#include "android-base/stringprintf.h"

#include "class_root-inl.h"
#include "common_dex_operations.h"
#include "common_throws.h"
#include "interpreter/shadow_frame-inl.h"
#include "jvalue-inl.h"
#include "mirror/class-inl.h"
#include "mirror/emulated_stack_frame-inl.h"
#include "mirror/method_handle_impl-inl.h"
#include "mirror/method_type-inl.h"
#include "mirror/var_handle.h"
#include "reflection-inl.h"
#include "reflection.h"
#include "well_known_classes.h"

namespace art {

using android::base::StringPrintf;

namespace {
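
// Each V() row of PRIMITIVES_LIST is:
//   (primitive type, ArtField getter suffix, boxed java.lang class name,
//    JValue setter suffix / shorty character).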
#define PRIMITIVES_LIST(V) \
  V(Primitive::kPrimBoolean, Boolean, Boolean, Z) \
  V(Primitive::kPrimByte, Byte, Byte, B) \
  V(Primitive::kPrimChar, Char, Character, C) \
  V(Primitive::kPrimShort, Short, Short, S) \
  V(Primitive::kPrimInt, Int, Integer, I) \
  V(Primitive::kPrimLong, Long, Long, J) \
  V(Primitive::kPrimFloat, Float, Float, F) \
  V(Primitive::kPrimDouble, Double, Double, D)

// Assigns |type| to the primitive type associated with |klass|. Returns
// true iff |klass| is a boxed type (Integer, Long, etc.), false otherwise.
bool GetUnboxedPrimitiveType(ObjPtr<mirror::Class> klass, Primitive::Type* type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  std::string storage;
  const char* descriptor = klass->GetDescriptor(&storage);
  static const char kJavaLangPrefix[] = "Ljava/lang/";
  static const size_t kJavaLangPrefixSize = sizeof(kJavaLangPrefix) - 1;
  if (strncmp(descriptor, kJavaLangPrefix, kJavaLangPrefixSize) != 0) {
    return false;
  }

  descriptor += kJavaLangPrefixSize;
#define LOOKUP_PRIMITIVE(primitive, _, java_name, ___) \
  if (strcmp(descriptor, #java_name ";") == 0) { \
    *type = primitive; \
    return true; \
  }

  PRIMITIVES_LIST(LOOKUP_PRIMITIVE);
#undef LOOKUP_PRIMITIVE
  return false;
}

ObjPtr<mirror::Class> GetBoxedPrimitiveClass(Primitive::Type type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
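  // Resolve the boxed class via the declaring class of the well-known valueOf() method.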
  jmethodID m = nullptr;
  switch (type) {
#define CASE_PRIMITIVE(primitive, _, java_name, __) \
    case primitive: \
      m = WellKnownClasses::java_lang_ ## java_name ## _valueOf; \
      break;
    PRIMITIVES_LIST(CASE_PRIMITIVE);
#undef CASE_PRIMITIVE
    case Primitive::Type::kPrimNot:
    case Primitive::Type::kPrimVoid:
      return nullptr;
  }
  return jni::DecodeArtMethod(m)->GetDeclaringClass();
}

bool GetUnboxedTypeAndValue(ObjPtr<mirror::Object> o, Primitive::Type* type, JValue* value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ObjPtr<mirror::Class> klass = o->GetClass();
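  // Each boxed class stores its value in a single instance field (e.g. Integer.value),
  // so the first declared instance field holds the primitive.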
  ArtField* primitive_field = &klass->GetIFieldsPtr()->At(0);
#define CASE_PRIMITIVE(primitive, abbrev, _, shorthand) \
  if (klass == GetBoxedPrimitiveClass(primitive)) { \
    *type = primitive; \
    value->Set ## shorthand(primitive_field->Get ## abbrev(o)); \
    return true; \
  }
  PRIMITIVES_LIST(CASE_PRIMITIVE)
#undef CASE_PRIMITIVE
  return false;
}

inline bool IsReferenceType(Primitive::Type type) {
  return type == Primitive::kPrimNot;
}

inline bool IsPrimitiveType(Primitive::Type type) {
  return !IsReferenceType(type);
}

}  // namespace

bool IsParameterTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This function returns true if there's any conceivable conversion
  // between |from| and |to|. It's expected this method will be used
  // to determine if a WrongMethodTypeException should be raised. The
  // decision logic follows the documentation for MethodType.asType().
  if (from == to) {
    return true;
  }

  Primitive::Type from_primitive = from->GetPrimitiveType();
  Primitive::Type to_primitive = to->GetPrimitiveType();
  DCHECK(from_primitive != Primitive::Type::kPrimVoid);
  DCHECK(to_primitive != Primitive::Type::kPrimVoid);

  // If |to| and |from| are references.
  if (IsReferenceType(from_primitive) && IsReferenceType(to_primitive)) {
    // Assignability is determined during parameter conversion when
    // invoking the associated method handle.
    return true;
  }

  // If |to| and |from| are primitives and a widening conversion exists.
  if (Primitive::IsWidenable(from_primitive, to_primitive)) {
    return true;
  }

  // If |to| is a reference and |from| is a primitive, then boxing conversion.
  if (IsReferenceType(to_primitive) && IsPrimitiveType(from_primitive)) {
    return to->IsAssignableFrom(GetBoxedPrimitiveClass(from_primitive));
  }

  // If |from| is a reference and |to| is a primitive, then unboxing conversion.
  if (IsPrimitiveType(to_primitive) && IsReferenceType(from_primitive)) {
    if (from->DescriptorEquals("Ljava/lang/Object;")) {
      // Object might be converted into a primitive during unboxing.
      return true;
    }

    if (Primitive::IsNumericType(to_primitive) && from->DescriptorEquals("Ljava/lang/Number;")) {
      // Number might be unboxed into any of the number primitive types.
      return true;
    }

    Primitive::Type unboxed_type;
    if (GetUnboxedPrimitiveType(from, &unboxed_type)) {
      if (unboxed_type == to_primitive) {
        // Straightforward unboxing conversion such as Boolean => boolean.
        return true;
      }

      // Check if widening operations for numeric primitives would work,
      // such as Byte => byte => long.
      return Primitive::IsWidenable(unboxed_type, to_primitive);
    }
  }

  return false;
}

bool IsReturnTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (to->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
    // Result will be ignored.
    return true;
  } else if (from->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
    // Returned value will be 0 / null.
    return true;
  } else {
    // Otherwise apply usual parameter conversion rules.
    return IsParameterTypeConvertible(from, to);
  }
}

bool ConvertJValueCommon(
    Handle<mirror::MethodType> callsite_type,
    Handle<mirror::MethodType> callee_type,
    ObjPtr<mirror::Class> from,
    ObjPtr<mirror::Class> to,
    JValue* value) {
  // The reader may be concerned about the safety of the heap object
  // that may be in |value|. There is only one case where allocation
  // is obviously needed and that's for boxing. However, in the case
  // of boxing |value| contains a non-reference type.

  const Primitive::Type from_type = from->GetPrimitiveType();
  const Primitive::Type to_type = to->GetPrimitiveType();

  // Put incoming value into |src_value| and set return value to 0.
  // Errors and conversions from void require the return value to be 0.
  const JValue src_value(*value);
  value->SetJ(0);

  // Conversions from void set the result to zero.
  if (from_type == Primitive::kPrimVoid) {
    return true;
  }

  // This method must be called only when the types don't match.
  DCHECK(from != to);

  if (IsPrimitiveType(from_type) && IsPrimitiveType(to_type)) {
    // The source and target types are both primitives.
    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, to_type, src_value, value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }
    return true;
  } else if (IsReferenceType(from_type) && IsReferenceType(to_type)) {
    // They're both reference types. If "from" is null, we can pass it
    // through unchanged. If not, we must generate a cast exception if
    // |to| is not assignable from the dynamic type of |ref|.
    //
    // Playing it safe with StackHandleScope here, not expecting any allocation
    // in mirror::Class::IsAssignable().
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Class> h_to(hs.NewHandle(to));
    Handle<mirror::Object> h_obj(hs.NewHandle(src_value.GetL()));
    if (UNLIKELY(!h_obj.IsNull() && !to->IsAssignableFrom(h_obj->GetClass()))) {
      ThrowClassCastException(h_to.Get(), h_obj->GetClass());
      return false;
    }
    value->SetL(h_obj.Get());
    return true;
  } else if (IsReferenceType(to_type)) {
    DCHECK(IsPrimitiveType(from_type));
    // The source type is a primitive and the target type is a reference, so we must box.
    // The target type may be a superclass of the boxed source type, for example,
    // if the source type is int, its boxed type is java.lang.Integer, and the target
    // type could be java.lang.Number.
    Primitive::Type type;
    if (!GetUnboxedPrimitiveType(to, &type)) {
      ObjPtr<mirror::Class> boxed_from_class = GetBoxedPrimitiveClass(from_type);
      if (LIKELY(boxed_from_class->IsSubClass(to))) {
        type = from_type;
      } else {
        ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
        return false;
      }
    }

    if (UNLIKELY(from_type != type)) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, type, src_value, value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    // Then perform the actual boxing, and then set the reference.
    ObjPtr<mirror::Object> boxed = BoxPrimitive(type, src_value);
    value->SetL(boxed);
    return true;
  } else {
    // The source type is a reference and the target type is a primitive, so we must unbox.
    DCHECK(IsReferenceType(from_type));
    DCHECK(IsPrimitiveType(to_type));

    ObjPtr<mirror::Object> from_obj(src_value.GetL());
    if (UNLIKELY(from_obj.IsNull())) {
      ThrowNullPointerException(
          StringPrintf("Expected to unbox a '%s' primitive type but was returned null",
                       from->PrettyDescriptor().c_str()).c_str());
      return false;
    }

    Primitive::Type unboxed_type;
    JValue unboxed_value;
    if (UNLIKELY(!GetUnboxedTypeAndValue(from_obj, &unboxed_type, &unboxed_value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(unboxed_type, to_type, unboxed_value, value))) {
      if (from->IsAssignableFrom(GetBoxedPrimitiveClass(to_type))) {
        // CallSite may be Number, but the Number object is
        // incompatible, e.g. Number (Integer) for a short.
        ThrowClassCastException(from, to);
      } else {
        // CallSite is incompatible, e.g. Integer for a short.
        ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      }
      return false;
    }

    return true;
  }
}

namespace {

inline void CopyArgumentsFromCallerFrame(const ShadowFrame& caller_frame,
                                         ShadowFrame* callee_frame,
                                         const InstructionOperands* const operands,
                                         const size_t first_dst_reg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  for (size_t i = 0; i < operands->GetNumberOfOperands(); ++i) {
    size_t dst_reg = first_dst_reg + i;
    size_t src_reg = operands->GetOperand(i);
    // Uint required, so that sign extension does not make this wrong on 64-bit systems.
    uint32_t src_value = caller_frame.GetVReg(src_reg);
    ObjPtr<mirror::Object> o = caller_frame.GetVRegReference<kVerifyNone>(src_reg);
    // If both register locations contain the same value, the register probably holds a reference.
    // Note: As an optimization, non-moving collectors leave a stale reference value
    // in the references array even after the original vreg was overwritten to a non-reference.
    if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
      callee_frame->SetVRegReference(dst_reg, o);
    } else {
      callee_frame->SetVReg(dst_reg, src_value);
    }
  }
}

inline bool ConvertAndCopyArgumentsFromCallerFrame(
    Thread* self,
    Handle<mirror::MethodType> callsite_type,
    Handle<mirror::MethodType> callee_type,
    const ShadowFrame& caller_frame,
    uint32_t first_dest_reg,
    const InstructionOperands* const operands,
    ShadowFrame* callee_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ObjectArray<mirror::Class>> from_types(callsite_type->GetPTypes());
  ObjPtr<mirror::ObjectArray<mirror::Class>> to_types(callee_type->GetPTypes());

  const int32_t num_method_params = from_types->GetLength();
  if (to_types->GetLength() != num_method_params) {
    ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
    return false;
  }

  ShadowFrameGetter getter(caller_frame, operands);
  ShadowFrameSetter setter(callee_frame, first_dest_reg);
  return PerformConversions<ShadowFrameGetter, ShadowFrameSetter>(self,
                                                                  callsite_type,
                                                                  callee_type,
                                                                  &getter,
                                                                  &setter,
                                                                  num_method_params);
}
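
// Note: relies on the MethodHandle::Kind enum ordering, where every invoke
// kind is ordered before kLastInvokeKind.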
inline bool IsInvoke(const mirror::MethodHandle::Kind handle_kind) {
  return handle_kind <= mirror::MethodHandle::Kind::kLastInvokeKind;
}

inline bool IsInvokeTransform(const mirror::MethodHandle::Kind handle_kind) {
  return (handle_kind == mirror::MethodHandle::Kind::kInvokeTransform
          || handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform);
}

inline bool IsInvokeVarHandle(const mirror::MethodHandle::Kind handle_kind) {
  return (handle_kind == mirror::MethodHandle::Kind::kInvokeVarHandle ||
          handle_kind == mirror::MethodHandle::Kind::kInvokeVarHandleExact);
}

inline bool IsFieldAccess(mirror::MethodHandle::Kind handle_kind) {
  return (handle_kind >= mirror::MethodHandle::Kind::kFirstAccessorKind
          && handle_kind <= mirror::MethodHandle::Kind::kLastAccessorKind);
}

// Calculate the number of ins for a proxy or native method, where we
// can't just look at the code item.
static inline size_t GetInsForProxyOrNativeMethod(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(method->IsNative() || method->IsProxyMethod());
  method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  uint32_t shorty_length = 0;
  const char* shorty = method->GetShorty(&shorty_length);

  // Static methods do not include the receiver. The receiver isn't included
  // in the shorty_length though the return value is.
  size_t num_ins = method->IsStatic() ? shorty_length - 1 : shorty_length;
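  // Long and double arguments occupy two vregs each, so count an extra slot for them.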
  for (const char* c = shorty + 1; *c != '\0'; ++c) {
    if (*c == 'J' || *c == 'D') {
      ++num_ins;
    }
  }
  return num_ins;
}

// Returns true iff the callsite type for a polymorphic invoke is transformer-like,
// i.e. it has a single input argument whose type is
// dalvik.system.EmulatedStackFrame.
static inline bool IsCallerTransformer(Handle<mirror::MethodType> callsite_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ObjectArray<mirror::Class>> param_types(callsite_type->GetPTypes());
  if (param_types->GetLength() == 1) {
    ObjPtr<mirror::Class> param(param_types->GetWithoutChecks(0));
    // NB Comparing descriptor here as it appears faster in cycle simulation than using:
    //   param == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_EmulatedStackFrame)
    // Costs are 98 vs 173 cycles per invocation.
    return param->DescriptorEquals("Ldalvik/system/EmulatedStackFrame;");
  }

  return false;
}

static inline bool MethodHandleInvokeMethod(ArtMethod* called_method,
                                            Handle<mirror::MethodType> callsite_type,
                                            Handle<mirror::MethodType> target_type,
                                            Handle<mirror::MethodType> nominal_type,
                                            Thread* self,
                                            ShadowFrame& shadow_frame,
                                            const InstructionOperands* const operands,
                                            JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());

  // Number of registers for the callee's call frame. Note that for non-exact
  // invokes, we always derive this information from the callee method. We
  // cannot guarantee during verification that the number of registers encoded
  // in the invoke is equal to the number of ins for the callee. This is because
  // some transformations (such as boxing a long -> Long or widening an
  // int -> long) will change that number.
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(accessor.HasCodeItem())) {
    num_regs = accessor.RegistersSize();
    first_dest_reg = num_regs - accessor.InsSize();
    num_input_regs = accessor.InsSize();
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack.
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Whether this polymorphic invoke was issued by a transformer method.
  bool is_caller_transformer = false;
  // Thread might be suspended during PerformArgumentConversions due to the
  // allocations performed during boxing.
  {
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    if (callsite_type->IsExactMatch(target_type.Get())) {
      // This is an exact invoke, we can take the fast path of just copying all
      // registers without performing any argument conversions.
      CopyArgumentsFromCallerFrame(shadow_frame,
                                   new_shadow_frame,
                                   operands,
                                   first_dest_reg);
    } else {
      // This includes the case where we're entering this invoke-polymorphic
      // from a transformer method. In that case, the callsite_type will contain
      // a single argument of type dalvik.system.EmulatedStackFrame. In that
      // case, we'll have to unmarshal the EmulatedStackFrame into the
      // new_shadow_frame and perform argument conversions on it.
      if (IsCallerTransformer(callsite_type)) {
        is_caller_transformer = true;
        // The emulated stack frame is the first and only argument when we're coming
        // through from a transformer.
        size_t first_arg_register = operands->GetOperand(0);
        ObjPtr<mirror::EmulatedStackFrame> emulated_stack_frame(
            ObjPtr<mirror::EmulatedStackFrame>::DownCast(
                shadow_frame.GetVRegReference(first_arg_register)));
        if (!emulated_stack_frame->WriteToShadowFrame(self,
                                                      target_type,
                                                      first_dest_reg,
                                                      new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(nullptr);
          return false;
        }
      } else {
        if (!callsite_type->IsConvertible(target_type.Get())) {
          ThrowWrongMethodTypeException(target_type.Get(), callsite_type.Get());
          return false;
        }
        if (!ConvertAndCopyArgumentsFromCallerFrame(self,
                                                    callsite_type,
                                                    target_type,
                                                    shadow_frame,
                                                    first_dest_reg,
                                                    operands,
                                                    new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(nullptr);
          return false;
        }
      }
    }
  }

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the caller of this signature polymorphic method was a transformer,
  // we need to copy the result back out to the emulated stack frame.
  if (is_caller_transformer) {
    StackHandleScope<2> hs(self);
    size_t first_callee_register = operands->GetOperand(0);
    Handle<mirror::EmulatedStackFrame> emulated_stack_frame(
        hs.NewHandle(ObjPtr<mirror::EmulatedStackFrame>::DownCast(
            shadow_frame.GetVRegReference(first_callee_register))));
    Handle<mirror::MethodType> emulated_stack_type(hs.NewHandle(emulated_stack_frame->GetType()));
    JValue local_result;
    local_result.SetJ(result->GetJ());

    if (ConvertReturnValue(emulated_stack_type, target_type, &local_result)) {
      emulated_stack_frame->SetReturnValue(self, local_result);
      return true;
    }

    DCHECK(self->IsExceptionPending());
    return false;
  }

  if (nominal_type != nullptr) {
    return ConvertReturnValue(nominal_type, target_type, result) &&
           ConvertReturnValue(callsite_type, nominal_type, result);
  }

  return ConvertReturnValue(callsite_type, target_type, result);
}

static inline bool MethodHandleInvokeTransform(ArtMethod* called_method,
                                               Handle<mirror::MethodType> callsite_type,
                                               Handle<mirror::MethodType> callee_type,
                                               Thread* self,
                                               ShadowFrame& shadow_frame,
                                               Handle<mirror::MethodHandle> receiver,
                                               const InstructionOperands* const operands,
                                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This can be fixed to two, because the method we're calling here
  // (MethodHandle.transformInternal) doesn't have any locals and the signature
  // is known:
  //
  // private MethodHandle.transformInternal(EmulatedStackFrame sf);
  //
  // This means we need only two vregs:
  // - One for the receiver object.
  // - One for the only method argument (an EmulatedStackFrame).
  static constexpr size_t kNumRegsForTransform = 2;

  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  DCHECK_EQ(kNumRegsForTransform, accessor.RegistersSize());
  DCHECK_EQ(kNumRegsForTransform, accessor.InsSize());

  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(kNumRegsForTransform, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  StackHandleScope<1> hs(self);
  MutableHandle<mirror::EmulatedStackFrame> sf(hs.NewHandle<mirror::EmulatedStackFrame>(nullptr));
  if (IsCallerTransformer(callsite_type)) {
    // If we're entering this transformer from another transformer, we can pass
    // through the handle directly to the callee, instead of having to
    // instantiate a new stack frame based on the shadow frame.
    size_t first_callee_register = operands->GetOperand(0);
    sf.Assign(ObjPtr<mirror::EmulatedStackFrame>::DownCast(
        shadow_frame.GetVRegReference(first_callee_register)));
  } else {
    sf.Assign(mirror::EmulatedStackFrame::CreateFromShadowFrameAndArgs(self,
                                                                       callsite_type,
                                                                       callee_type,
                                                                       shadow_frame,
                                                                       operands));

    // If something went wrong while creating the emulated stack frame, we should
    // throw the pending exception.
    if (sf == nullptr) {
      DCHECK(self->IsExceptionPending());
      return false;
    }
  }

  new_shadow_frame->SetVRegReference(0, receiver.Get());
  new_shadow_frame->SetVRegReference(1, sf.Get());

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              0 /* first destination register */,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the transformer method we called has returned a value, then we
  // need to copy it back to |result|.
  sf->GetReturnValue(self, result);
  return ConvertReturnValue(callsite_type, callee_type, result);
}

inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self, ArtField* field)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Method handle invocations on static fields should ensure the class is
  // initialized. This usually happens when an instance is constructed
  // or class members are referenced, but it is not guaranteed when
  // looking up method handles.
  ObjPtr<mirror::Class> klass = field->GetDeclaringClass();
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(&klass));
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  return klass;
}

ArtMethod* RefineTargetMethod(Thread* self,
                              ShadowFrame& shadow_frame,
                              const mirror::MethodHandle::Kind& handle_kind,
                              Handle<mirror::MethodType> handle_type,
                              Handle<mirror::MethodType> callsite_type,
                              const uint32_t receiver_reg,
                              ArtMethod* target_method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (handle_kind == mirror::MethodHandle::Kind::kInvokeVirtual ||
      handle_kind == mirror::MethodHandle::Kind::kInvokeInterface) {
    // For virtual and interface methods ensure target_method points to
    // the actual method to invoke.
    ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(receiver_reg));
    if (IsCallerTransformer(callsite_type)) {
      // The current receiver is an emulated stack frame, so the method's
      // receiver needs to be fetched from there as the emulated frame
      // will be unpacked into a new frame.
      receiver = ObjPtr<mirror::EmulatedStackFrame>::DownCast(receiver)->GetReceiver();
    }

    ObjPtr<mirror::Class> declaring_class(target_method->GetDeclaringClass());
    if (receiver == nullptr || receiver->GetClass() != declaring_class) {
      // Verify that _vRegC is an object reference and of the type expected by
      // the receiver.
      if (!VerifyObjectIsClass(receiver, declaring_class)) {
        DCHECK(self->IsExceptionPending());
        return nullptr;
      }
      return receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(
          target_method, kRuntimePointerSize);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeDirect) {
    // String constructors are a special case; they are replaced with
    // StringFactory methods.
    if (target_method->IsConstructor() && target_method->GetDeclaringClass()->IsStringClass()) {
      DCHECK(handle_type->GetRType()->IsStringClass());
      return WellKnownClasses::StringInitToStringFactory(target_method);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeSuper) {
    // Note that we're not dynamically dispatching on the type of the receiver
    // here. We use the static type of the "receiver" object that we've
    // recorded in the method handle's type, which will be the same as the
    // special caller that was specified at the point of lookup.
    ObjPtr<mirror::Class> referrer_class = handle_type->GetPTypes()->Get(0);
    ObjPtr<mirror::Class> declaring_class = target_method->GetDeclaringClass();
    if (referrer_class == declaring_class) {
      return target_method;
    }
    if (declaring_class->IsInterface()) {
      if (target_method->IsAbstract()) {
        std::string msg =
            "Method " + target_method->PrettyMethod() + " is abstract interface method!";
        ThrowIllegalAccessException(msg.c_str());
        return nullptr;
      }
    } else {
      ObjPtr<mirror::Class> super_class = referrer_class->GetSuperClass();
      uint16_t vtable_index = target_method->GetMethodIndex();
      DCHECK(super_class != nullptr);
      DCHECK(super_class->HasVTable());
      // Note that super_class is a super of referrer_class and target_method
      // will always be declared by super_class (or one of its super classes).
      DCHECK_LT(vtable_index, super_class->GetVTableLength());
      return super_class->GetVTableEntry(vtable_index, kRuntimePointerSize);
    }
  }
  return target_method;
}

bool DoInvokePolymorphicMethod(Thread* self,
                               ShadowFrame& shadow_frame,
                               Handle<mirror::MethodHandle> method_handle,
                               Handle<mirror::MethodType> callsite_type,
                               const InstructionOperands* const operands,
                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  Handle<mirror::MethodType> nominal_handle_type(hs.NewHandle(method_handle->GetNominalType()));
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  DCHECK(IsInvoke(handle_kind));

  // Get the method we're actually invoking along with the kind of
  // invoke that is desired. We don't need to perform access checks at this
  // point because they would have been performed on our behalf at the point
  // of creation of the method handle.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = (operands->GetNumberOfOperands() > 0) ? operands->GetOperand(0) : 0u;
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                handle_kind,
                                                handle_type,
                                                callsite_type,
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  if (IsInvokeTransform(handle_kind)) {
    // There are two cases here: method handles representing regular
    // transforms and those representing call site transforms. Method
    // handles for call site transforms adapt their MethodType to match
    // the call site. For these, the |callee_type| is the same as the
    // |callsite_type|. The VarargsCollector is such a transform; its
    // method type depends on the call site, i.e. x(a) or x(a, b), or
    // x(a, b, c). The VarargsCollector invokes a variable arity method
    // with the arity arguments in an array.
    Handle<mirror::MethodType> callee_type =
        (handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform) ? callsite_type
                                                                              : handle_type;
    return MethodHandleInvokeTransform(called_method,
                                       callsite_type,
                                       callee_type,
                                       self,
                                       shadow_frame,
                                       /* receiver= */ method_handle,
                                       operands,
                                       result);
  } else {
    return MethodHandleInvokeMethod(called_method,
                                    callsite_type,
                                    handle_type,
                                    nominal_handle_type,
                                    self,
                                    shadow_frame,
                                    operands,
                                    result);
  }
}

// Helper for getters in invoke-polymorphic.
inline static void MethodHandleFieldGet(Thread* self,
                                        const ShadowFrame& shadow_frame,
                                        ObjPtr<mirror::Object>& obj,
                                        ArtField* field,
                                        Primitive::Type field_type,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  switch (field_type) {
    case Primitive::kPrimBoolean:
      DoFieldGetCommon<Primitive::kPrimBoolean>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimByte:
      DoFieldGetCommon<Primitive::kPrimByte>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimChar:
      DoFieldGetCommon<Primitive::kPrimChar>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimShort:
      DoFieldGetCommon<Primitive::kPrimShort>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimInt:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimLong:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
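    // Note: float and double fields are read through their same-width integer
    // representations; JValue stores the raw bits.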
    case Primitive::kPrimFloat:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimDouble:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimNot:
      DoFieldGetCommon<Primitive::kPrimNot>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

// Helper for setters in invoke-polymorphic.
inline bool MethodHandleFieldPut(Thread* self,
                                 ShadowFrame& shadow_frame,
                                 ObjPtr<mirror::Object>& obj,
                                 ArtField* field,
                                 Primitive::Type field_type,
                                 JValue& value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  static const bool kTransaction = false;         // Not in a transaction.
  static const bool kAssignabilityCheck = false;  // No access check.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      return DoFieldPutCommon<Primitive::kPrimBoolean, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimByte:
      return DoFieldPutCommon<Primitive::kPrimByte, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimChar:
      return DoFieldPutCommon<Primitive::kPrimChar, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimShort:
      return DoFieldPutCommon<Primitive::kPrimShort, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return DoFieldPutCommon<Primitive::kPrimInt, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return DoFieldPutCommon<Primitive::kPrimLong, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimNot:
      return DoFieldPutCommon<Primitive::kPrimNot, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}
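
// Reads vreg |vreg| of |shadow_frame| as a value of the given primitive type.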
static JValue GetValueFromShadowFrame(const ShadowFrame& shadow_frame,
                                      Primitive::Type field_type,
                                      uint32_t vreg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}

template <bool do_conversions>
bool MethodHandleFieldAccess(Thread* self,
                             ShadowFrame& shadow_frame,
                             Handle<mirror::MethodHandle> method_handle,
                             Handle<mirror::MethodType> callsite_type,
                             const InstructionOperands* const operands,
                             JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  ArtField* field = method_handle->GetTargetField();
  Primitive::Type field_type = field->GetTypeAsPrimitiveType();
  switch (handle_kind) {
    case mirror::MethodHandle::kInstanceGet: {
      size_t obj_reg = operands->GetOperand(0);
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
      if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return true;
    }
    case mirror::MethodHandle::kStaticGet: {
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
      if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return true;
    }
    case mirror::MethodHandle::kInstancePut: {
      size_t obj_reg = operands->GetOperand(0);
      size_t value_reg = operands->GetOperand(1);
      const size_t kPTypeIndex = 1;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      if (do_conversions && !ConvertArgumentValue(callsite_type,
                                                  handle_type,
                                                  kPTypeIndex,
                                                  &value)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
    }
    case mirror::MethodHandle::kStaticPut: {
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      size_t value_reg = operands->GetOperand(0);
      const size_t kPTypeIndex = 0;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      if (do_conversions && !ConvertArgumentValue(callsite_type,
                                                  handle_type,
                                                  kPTypeIndex,
                                                  &value)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
    }
    default:
      LOG(FATAL) << "Unreachable: " << handle_kind;
      UNREACHABLE();
  }
}

bool DoVarHandleInvokeTranslationUnchecked(Thread* self,
                                           ShadowFrame& shadow_frame,
                                           mirror::VarHandle::AccessMode access_mode,
                                           Handle<mirror::VarHandle> vh,
                                           Handle<mirror::MethodType> vh_type,
                                           Handle<mirror::MethodType> callsite_type,
                                           const InstructionOperands* const operands,
                                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK_EQ(operands->GetNumberOfOperands(), static_cast<uint32_t>(vh_type->GetNumberOfPTypes()));
  DCHECK_EQ(operands->GetNumberOfOperands(),
            static_cast<uint32_t>(callsite_type->GetNumberOfPTypes()));
  const size_t vreg_count = vh_type->NumberOfVRegs();
  ShadowFrameAllocaUniquePtr accessor_frame =
      CREATE_SHADOW_FRAME(vreg_count, nullptr, shadow_frame.GetMethod(), shadow_frame.GetDexPC());
  ShadowFrameGetter getter(shadow_frame, operands);
  static const uint32_t kFirstAccessorReg = 0;
  ShadowFrameSetter setter(accessor_frame.get(), kFirstAccessorReg);
  if (!PerformConversions(self, callsite_type, vh_type, &getter, &setter)) {
    return false;
  }
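  // The converted arguments now occupy vregs [0, vreg_count) of the accessor frame.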
  RangeInstructionOperands accessor_operands(kFirstAccessorReg, kFirstAccessorReg + vreg_count);
  if (!vh->Access(access_mode, accessor_frame.get(), &accessor_operands, result)) {
    return false;
  }
  return ConvertReturnValue(callsite_type, vh_type, result);
}

bool DoVarHandleInvokeTranslation(Thread* self,
                                  ShadowFrame& shadow_frame,
                                  bool invokeExact,
                                  Handle<mirror::MethodHandle> method_handle,
                                  Handle<mirror::MethodType> callsite_type,
                                  const InstructionOperands* const operands,
                                  JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (!invokeExact) {
    // Exact invokes are checked for compatibility higher up. The
    // non-exact invoke path doesn't have a similar check due to
    // transformers, which have EmulatedStackFrame arguments with the
    // actual method type associated with the frame.
    if (UNLIKELY(!callsite_type->IsConvertible(method_handle->GetMethodType()))) {
      ThrowWrongMethodTypeException(method_handle->GetMethodType(), callsite_type.Get());
      return false;
    }
  }

  //
  // Basic checks that apply in all cases.
  //
  StackHandleScope<6> hs(self);
  Handle<mirror::ObjectArray<mirror::Class>>
      callsite_ptypes(hs.NewHandle(callsite_type->GetPTypes()));
  Handle<mirror::ObjectArray<mirror::Class>>
      mh_ptypes(hs.NewHandle(method_handle->GetMethodType()->GetPTypes()));

  // Check that the first parameter is a VarHandle.
  if (callsite_ptypes->GetLength() < 1 ||
      !mh_ptypes->Get(0)->IsAssignableFrom(callsite_ptypes->Get(0)) ||
      mh_ptypes->Get(0) != GetClassRoot<mirror::VarHandle>()) {
    ThrowWrongMethodTypeException(method_handle->GetMethodType(), callsite_type.Get());
    return false;
  }

  // Get the receiver.
  ObjPtr<mirror::Object> receiver = shadow_frame.GetVRegReference(operands->GetOperand(0));
  if (receiver == nullptr) {
    ThrowNullPointerException("Expected argument 1 to be a non-null VarHandle");
    return false;
  }

  // Cast to VarHandle instance.
  Handle<mirror::VarHandle> vh(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
  DCHECK(GetClassRoot<mirror::VarHandle>()->IsAssignableFrom(vh->GetClass()));

  // Determine the accessor kind to dispatch.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  int intrinsic_index = target_method->GetIntrinsic();
  mirror::VarHandle::AccessMode access_mode =
      mirror::VarHandle::GetAccessModeByIntrinsic(static_cast<Intrinsics>(intrinsic_index));
  Handle<mirror::MethodType> vh_type =
      hs.NewHandle(vh->GetMethodTypeForAccessMode(self, access_mode));
  Handle<mirror::MethodType> mh_invoke_type = hs.NewHandle(
      mirror::MethodType::CloneWithoutLeadingParameter(self, method_handle->GetMethodType()));
  if (method_handle->GetHandleKind() == mirror::MethodHandle::Kind::kInvokeVarHandleExact) {
    if (!mh_invoke_type->IsExactMatch(vh_type.Get())) {
      ThrowWrongMethodTypeException(vh_type.Get(), mh_invoke_type.Get());
      return false;
    }
  } else {
    DCHECK_EQ(method_handle->GetHandleKind(), mirror::MethodHandle::Kind::kInvokeVarHandle);
    if (!mh_invoke_type->IsConvertible(vh_type.Get())) {
      ThrowWrongMethodTypeException(vh_type.Get(), mh_invoke_type.Get());
      return false;
    }
  }

  Handle<mirror::MethodType> callsite_type_without_varhandle =
      hs.NewHandle(mirror::MethodType::CloneWithoutLeadingParameter(self, callsite_type.Get()));
  NoReceiverInstructionOperands varhandle_operands(operands);
  DCHECK_EQ(static_cast<int32_t>(varhandle_operands.GetNumberOfOperands()),
            callsite_type_without_varhandle->GetPTypes()->GetLength());
  return DoVarHandleInvokeTranslationUnchecked(self,
                                               shadow_frame,
                                               access_mode,
                                               vh,
                                               vh_type,
                                               callsite_type_without_varhandle,
                                               &varhandle_operands,
                                               result);
}

static inline bool MethodHandleInvokeInternal(Thread* self,
                                              ShadowFrame& shadow_frame,
                                              Handle<mirror::MethodHandle> method_handle,
                                              Handle<mirror::MethodType> callsite_type,
                                              const InstructionOperands* const operands,
                                              JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  if (IsFieldAccess(handle_kind)) {
    ObjPtr<mirror::MethodType> handle_type(method_handle->GetMethodType());
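    // Exact matches are routed to MethodHandleInvokeExactInternal by
    // MethodHandleInvoke, so a conversion check is always required here.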
    DCHECK(!callsite_type->IsExactMatch(handle_type.Ptr()));
    if (!callsite_type->IsConvertible(handle_type.Ptr())) {
      ThrowWrongMethodTypeException(handle_type.Ptr(), callsite_type.Get());
      return false;
    }
    const bool do_convert = true;
    return MethodHandleFieldAccess<do_convert>(
        self,
        shadow_frame,
        method_handle,
        callsite_type,
        operands,
        result);
  }
  if (IsInvokeVarHandle(handle_kind)) {
    return DoVarHandleInvokeTranslation(self,
                                        shadow_frame,
                                        /*invokeExact=*/ false,
                                        method_handle,
                                        callsite_type,
                                        operands,
                                        result);
  }
  return DoInvokePolymorphicMethod(self,
                                   shadow_frame,
                                   method_handle,
                                   callsite_type,
                                   operands,
                                   result);
}

static inline bool MethodHandleInvokeExactInternal(
    Thread* self,
    ShadowFrame& shadow_frame,
    Handle<mirror::MethodHandle> method_handle,
    Handle<mirror::MethodType> callsite_type,
    const InstructionOperands* const operands,
    JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> method_handle_type(hs.NewHandle(method_handle->GetMethodType()));
  if (!callsite_type->IsExactMatch(method_handle_type.Get())) {
    ThrowWrongMethodTypeException(method_handle_type.Get(), callsite_type.Get());
    return false;
  }

  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  if (IsFieldAccess(handle_kind)) {
    const bool do_convert = false;
    return MethodHandleFieldAccess<do_convert>(self,
                                               shadow_frame,
                                               method_handle,
                                               callsite_type,
                                               operands,
                                               result);
  }

  // Slow-path check.
  if (IsInvokeTransform(handle_kind) ||
      IsCallerTransformer(callsite_type)) {
    return DoInvokePolymorphicMethod(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     operands,
                                     result);
  } else if (IsInvokeVarHandle(handle_kind)) {
    return DoVarHandleInvokeTranslation(self,
                                        shadow_frame,
                                        /*invokeExact=*/ true,
                                        method_handle,
                                        callsite_type,
                                        operands,
                                        result);
  }

  // On the fast path. This is equivalent to DoCallPolymorphic without the conversion paths.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = (operands->GetNumberOfOperands() > 0) ? operands->GetOperand(0) : 0u;
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                handle_kind,
                                                method_handle_type,
                                                callsite_type,
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(accessor.HasCodeItem())) {
    num_regs = accessor.RegistersSize();
    first_dest_reg = num_regs - accessor.InsSize();
    num_input_regs = accessor.InsSize();
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack.
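  // No argument conversions happen on this path, so copying the arguments below
  // cannot allocate or suspend.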
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
  CopyArgumentsFromCallerFrame(shadow_frame,
                               new_shadow_frame,
                               operands,
                               first_dest_reg);
  self->EndAssertNoThreadSuspension(old_cause);

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }
  return true;
}

}  // namespace

bool MethodHandleInvoke(Thread* self,
                        ShadowFrame& shadow_frame,
                        Handle<mirror::MethodHandle> method_handle,
                        Handle<mirror::MethodType> callsite_type,
                        const InstructionOperands* const operands,
                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (UNLIKELY(callsite_type->IsExactMatch(method_handle->GetMethodType()))) {
    // A non-exact invoke that can be invoked exactly.
    return MethodHandleInvokeExactInternal(self,
                                           shadow_frame,
                                           method_handle,
                                           callsite_type,
                                           operands,
                                           result);
  } else {
    return MethodHandleInvokeInternal(self,
                                      shadow_frame,
                                      method_handle,
                                      callsite_type,
                                      operands,
                                      result);
  }
}

bool MethodHandleInvokeExact(Thread* self,
                             ShadowFrame& shadow_frame,
                             Handle<mirror::MethodHandle> method_handle,
                             Handle<mirror::MethodType> callsite_type,
                             const InstructionOperands* const operands,
                             JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // We need to check the nominal type of the handle in addition to the
  // real type. The "nominal" type is present when MethodHandle.asType is
  // called on a handle, and results in the declared type of the handle
  // changing.
  ObjPtr<mirror::MethodType> nominal_type(method_handle->GetNominalType());
  if (UNLIKELY(nominal_type != nullptr)) {
    if (UNLIKELY(!callsite_type->IsExactMatch(nominal_type.Ptr()))) {
      ThrowWrongMethodTypeException(nominal_type.Ptr(), callsite_type.Get());
      return false;
    }
    if (LIKELY(!nominal_type->IsExactMatch(method_handle->GetMethodType()))) {
      // A different nominal type means we have to treat the invoke as non-exact.
      return MethodHandleInvokeInternal(self,
                                        shadow_frame,
                                        method_handle,
                                        callsite_type,
                                        operands,
                                        result);
    }
  }
  return MethodHandleInvokeExactInternal(self,
                                         shadow_frame,
                                         method_handle,
                                         callsite_type,
                                         operands,
                                         result);
}

}  // namespace art