1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter_common.h"
18
19 #include <cmath>
20
21 #include "base/casts.h"
22 #include "base/enums.h"
23 #include "class_root.h"
24 #include "debugger.h"
25 #include "dex/dex_file_types.h"
26 #include "entrypoints/runtime_asm_entrypoints.h"
27 #include "intrinsics_enum.h"
28 #include "jit/jit.h"
29 #include "jvalue-inl.h"
30 #include "method_handles-inl.h"
31 #include "method_handles.h"
32 #include "mirror/array-alloc-inl.h"
33 #include "mirror/array-inl.h"
34 #include "mirror/call_site-inl.h"
35 #include "mirror/class.h"
36 #include "mirror/emulated_stack_frame.h"
37 #include "mirror/method_handle_impl-inl.h"
38 #include "mirror/method_type-inl.h"
39 #include "mirror/object_array-alloc-inl.h"
40 #include "mirror/object_array-inl.h"
41 #include "mirror/var_handle.h"
42 #include "reflection-inl.h"
43 #include "reflection.h"
44 #include "shadow_frame-inl.h"
45 #include "stack.h"
46 #include "thread-inl.h"
47 #include "transaction.h"
48 #include "var_handles.h"
49 #include "well_known_classes.h"
50
51 namespace art {
52 namespace interpreter {
53
// Throws a NullPointerException attributed to the dex PC of the interpreter's
// current frame (thin wrapper so the interpreter has a single entry point).
void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}
57
CheckStackOverflow(Thread * self,size_t frame_size)58 bool CheckStackOverflow(Thread* self, size_t frame_size)
59 REQUIRES_SHARED(Locks::mutator_lock_) {
60 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
61 uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
62 if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
63 ThrowStackOverflowError(self);
64 return false;
65 }
66 return true;
67 }
68
UseFastInterpreterToInterpreterInvoke(ArtMethod * method)69 bool UseFastInterpreterToInterpreterInvoke(ArtMethod* method) {
70 Runtime* runtime = Runtime::Current();
71 const void* quick_code = method->GetEntryPointFromQuickCompiledCode();
72 if (!runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
73 return false;
74 }
75 if (!method->SkipAccessChecks() || method->IsNative() || method->IsProxyMethod()) {
76 return false;
77 }
78 if (method->IsIntrinsic()) {
79 return false;
80 }
81 if (method->GetDeclaringClass()->IsStringClass() && method->IsConstructor()) {
82 return false;
83 }
84 if (method->IsStatic() && !method->GetDeclaringClass()->IsInitialized()) {
85 return false;
86 }
87 ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
88 if ((profiling_info != nullptr) && (profiling_info->GetSavedEntryPoint() != nullptr)) {
89 return false;
90 }
91 return true;
92 }
93
// Handles iget-XXX and sget-XXX instructions. The template parameters select
// static vs. instance access, the field's primitive type, whether access
// checks are performed during resolution, and whether a transaction is active.
// Returns true on success, otherwise throws an exception and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) {
  const bool is_static = (find_type == StaticObjectRead) || (find_type == StaticPrimitiveRead);
  // Static accesses use the 21c format (field index in vB); instance accesses
  // use 22c (field index in vC, receiver in vB).
  const uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* f =
      FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
                                                    Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    // Resolution failed and must have raised an exception.
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
    if (transaction_active) {
      // The active transaction may forbid reading statics that do not belong
      // to the class currently being initialized; abort in that case.
      if (Runtime::Current()->GetTransaction()->ReadConstraint(obj.Ptr(), f)) {
        Runtime::Current()->AbortTransactionAndThrowAbortError(self, "Can't read static fields of "
            + obj->PrettyTypeOf() + " since it does not belong to clinit's class.");
        return false;
      }
    }
  } else {
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionForFieldAccess(f, true);
      return false;
    }
  }

  JValue result;
  if (UNLIKELY(!DoFieldGetCommon<field_type>(self, shadow_frame, obj, f, &result))) {
    // Instrumentation threw an error!
    CHECK(self->IsExceptionPending());
    return false;
  }
  // Store the loaded value into the destination register, using the setter
  // matching the field's primitive type (wide and reference types use
  // dedicated vreg accessors).
  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, result.GetZ());
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, result.GetB());
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, result.GetC());
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, result.GetS());
      break;
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, result.GetI());
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, result.GetJ());
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, result.GetL());
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}
160
// Explicitly instantiate all DoFieldGet functions.
#define EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, _do_check, _transaction_active) \
  template bool DoFieldGet<_find_type, _field_type, _do_check, _transaction_active>(Thread* self, \
                                                                                    ShadowFrame& shadow_frame, \
                                                                                    const Instruction* inst, \
                                                                                    uint16_t inst_data)

// Instantiates one (find_type, field_type) pair over the full
// {do_access_check} x {transaction_active} cross product.
#define EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(_find_type, _field_type)  \
    EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, false, true);  \
    EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, false, false);  \
    EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, true, true);  \
    EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, true, false);

// iget-XXX
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstanceObjectRead, Primitive::kPrimNot)

// sget-XXX
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticObjectRead, Primitive::kPrimNot)

#undef EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL
194
// Handles iget-quick, iget-wide-quick and iget-object-quick instructions.
// These quickened forms carry the field's byte offset (in vC) instead of a
// field index, so no field resolution is needed for the load itself.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type>
bool DoIGetQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
  ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    // We lost the reference to the field index so we cannot get a more
    // precised exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  MemberOffset field_offset(inst->VRegC_22c());
  // Report this field access to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldReadListeners())) {
    ArtField* f = ArtField::FindInstanceFieldWithOffset(obj->GetClass(),
                                                        field_offset.Uint32Value());
    DCHECK(f != nullptr);
    DCHECK(!f->IsStatic());
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    // Save obj in case the instrumentation event has thread suspension.
    HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&obj);
    instrumentation->FieldReadEvent(self,
                                    obj.Ptr(),
                                    shadow_frame.GetMethod(),
                                    shadow_frame.GetDexPC(),
                                    f);
    // The listener may have thrown; bail before touching the object.
    if (UNLIKELY(self->IsExceptionPending())) {
      return false;
    }
  }
  // Note: iget-x-quick instructions are only for non-volatile fields.
  const uint32_t vregA = inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetField32(field_offset)));
      break;
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldBoolean(field_offset)));
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldByte(field_offset)));
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldChar(field_offset)));
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldShort(field_offset)));
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, static_cast<int64_t>(obj->GetField64(field_offset)));
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, obj->GetFieldObject<mirror::Object>(field_offset));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}
258
// Explicitly instantiate all DoIGetQuick functions, one per quickened
// iget opcode.
#define EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(_field_type) \
  template bool DoIGetQuick<_field_type>(ShadowFrame& shadow_frame, const Instruction* inst, \
                                         uint16_t inst_data)

EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimInt);      // iget-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimBoolean);  // iget-boolean-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimByte);     // iget-byte-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimChar);     // iget-char-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimShort);    // iget-short-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimLong);     // iget-wide-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimNot);      // iget-object-quick.
#undef EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL
272
// Reads vreg |vreg| of |shadow_frame| and packages it as a JValue of the
// given primitive type, applying the narrowing cast appropriate for
// sub-int types (boolean/byte/char/short).
template<Primitive::Type field_type>
static JValue GetFieldValue(const ShadowFrame& shadow_frame, uint32_t vreg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}
305
// Handles iput-XXX and sput-XXX instructions; template parameters mirror
// those of DoFieldGet above.
// Returns true on success, otherwise throws an exception and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) {
  // Reference assignability is only verified when access checks are enabled.
  const bool do_assignability_check = do_access_check;
  bool is_static = (find_type == StaticObjectWrite) || (find_type == StaticPrimitiveWrite);
  uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* f =
      FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
                                                    Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    // Field resolution failed; an exception must already be pending.
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
    if (transaction_active) {
      // The active transaction may forbid writing this class's statics.
      if (Runtime::Current()->GetTransaction()->WriteConstraint(obj.Ptr(), f)) {
        Runtime::Current()->AbortTransactionAndThrowAbortError(
            self, "Can't set fields of " + obj->PrettyTypeOf());
        return false;
      }
    }

  } else {
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      ThrowNullPointerExceptionForFieldAccess(f, false);
      return false;
    }
  }

  // Read the source register, then delegate the actual store to
  // DoFieldPutCommon.
  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  JValue value = GetFieldValue<field_type>(shadow_frame, vregA);
  return DoFieldPutCommon<field_type, do_assignability_check, transaction_active>(self,
                                                                                  shadow_frame,
                                                                                  obj,
                                                                                  f,
                                                                                  value);
}
347
// Explicitly instantiate all DoFieldPut functions.
#define EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, _do_check, _transaction_active) \
  template bool DoFieldPut<_find_type, _field_type, _do_check, _transaction_active>(Thread* self, \
      const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)

// Instantiates one (find_type, field_type) pair over the full
// {do_access_check} x {transaction_active} cross product.
#define EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(_find_type, _field_type)  \
    EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, false, false);  \
    EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, true, false);  \
    EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, false, true);  \
    EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, true, true);

// iput-XXX
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstanceObjectWrite, Primitive::kPrimNot)

// sput-XXX
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticObjectWrite, Primitive::kPrimNot)

#undef EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL
379
// Handles iput-quick, iput-wide-quick, iput-object-quick and the narrow
// iput-x-quick instructions: quickened writes addressed by the field's byte
// offset carried in vC, so no field resolution is needed for the store.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type, bool transaction_active>
bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
  ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    // We lost the reference to the field index so we cannot get a more
    // precised exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  MemberOffset field_offset(inst->VRegC_22c());
  const uint32_t vregA = inst->VRegA_22c(inst_data);
  // Report this field modification to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldWriteListeners())) {
    ArtField* f = ArtField::FindInstanceFieldWithOffset(obj->GetClass(),
                                                        field_offset.Uint32Value());
    DCHECK(f != nullptr);
    DCHECK(!f->IsStatic());
    JValue field_value = GetFieldValue<field_type>(shadow_frame, vregA);
    Thread* self = Thread::Current();
    StackHandleScope<2> hs(self);
    // Save obj in case the instrumentation event has thread suspension.
    HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&obj);
    // For reference writes, also keep the value-to-store visible as a GC root
    // across the (possibly suspending) event; a dummy root is used otherwise.
    mirror::Object* fake_root = nullptr;
    HandleWrapper<mirror::Object> ret(hs.NewHandleWrapper<mirror::Object>(
        field_type == Primitive::kPrimNot ? field_value.GetGCRoot() : &fake_root));
    instrumentation->FieldWriteEvent(self,
                                     obj.Ptr(),
                                     shadow_frame.GetMethod(),
                                     shadow_frame.GetDexPC(),
                                     f,
                                     field_value);
    if (UNLIKELY(self->IsExceptionPending())) {
      return false;
    }
    if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
      // Don't actually set the field. The next instruction will force us to pop.
      DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
      DCHECK(PrevFrameWillRetry(self, shadow_frame));
      return true;
    }
  }
  // Note: iput-x-quick instructions are only for non-volatile fields.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      obj->SetFieldBoolean<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimByte:
      obj->SetFieldByte<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimChar:
      obj->SetFieldChar<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimShort:
      obj->SetFieldShort<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimInt:
      obj->SetField32<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimLong:
      obj->SetField64<transaction_active>(field_offset, shadow_frame.GetVRegLong(vregA));
      break;
    case Primitive::kPrimNot:
      obj->SetFieldObject<transaction_active>(field_offset, shadow_frame.GetVRegReference(vregA));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}
452
// Explicitly instantiate all DoIPutQuick functions, one per quickened iput
// opcode, each with and without an active transaction.
#define EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, _transaction_active) \
  template bool DoIPutQuick<_field_type, _transaction_active>(const ShadowFrame& shadow_frame, \
                                                              const Instruction* inst, \
                                                              uint16_t inst_data)

#define EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(_field_type)   \
  EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, false);     \
  EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, true);

EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimInt)      // iput-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimBoolean)  // iput-boolean-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimByte)     // iput-byte-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimChar)     // iput-char-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimShort)    // iput-short-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimLong)     // iput-wide-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimNot)      // iput-object-quick.
#undef EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL
472
// We execute any instrumentation events that are triggered by this exception and change the
// shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
// Return true if we should continue executing in the current method and false if we need to go up
// the stack to find an exception handler.
// We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
// TODO We should have a better way to skip instrumentation reporting or possibly rethink that
// behavior.
bool MoveToExceptionHandler(Thread* self,
                            ShadowFrame& shadow_frame,
                            const instrumentation::Instrumentation* instrumentation) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  // Only report a throw event for exceptions thrown by the current method.
  if (instrumentation != nullptr &&
      instrumentation->HasExceptionThrownListeners() &&
      self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    // See b/65049545 for why we don't need to check to see if the exception has changed.
    instrumentation->ExceptionThrownEvent(self, exception.Get());
    if (shadow_frame.GetForcePopFrame()) {
      // We will check in the caller for GetForcePopFrame again. We need to bail out early to
      // prevent an ExceptionHandledEvent from also being sent before popping.
      return true;
    }
  }
  bool clear_exception = false;
  // Search this method's catch handlers for one covering the current dex pc;
  // FindCatchBlock also tells us whether the exception should be cleared.
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
  if (found_dex_pc == dex::kDexNoIndex) {
    if (instrumentation != nullptr) {
      if (shadow_frame.NeedsNotifyPop()) {
        instrumentation->WatchedFramePopped(self, shadow_frame);
      }
      // Exception is not caught by the current method. We will unwind to the
      // caller. Notify any instrumentation listener.
      instrumentation->MethodUnwindEvent(self,
                                         shadow_frame.GetThisObject(),
                                         shadow_frame.GetMethod(),
                                         shadow_frame.GetDexPC());
    }
    return false;
  } else {
    // A handler exists: resume execution at its dex pc.
    shadow_frame.SetDexPC(found_dex_pc);
    if (instrumentation != nullptr && instrumentation->HasExceptionHandledListeners()) {
      self->ClearException();
      instrumentation->ExceptionHandledEvent(self, exception.Get());
      if (UNLIKELY(self->IsExceptionPending())) {
        // Exception handled event threw an exception. Try to find the handler for this one.
        return MoveToExceptionHandler(self, shadow_frame, instrumentation);
      } else if (!clear_exception) {
        // The handler still expects the exception to be set; restore it.
        self->SetException(exception.Get());
      }
    } else if (clear_exception) {
      self->ClearException();
    }
    return true;
  }
}
530
UnexpectedOpcode(const Instruction * inst,const ShadowFrame & shadow_frame)531 void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
532 LOG(FATAL) << "Unexpected instruction: "
533 << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
534 UNREACHABLE();
535 }
536
// Aborts the active transaction with a printf-style formatted message.
// Variadic front-end for AbortTransactionV below.
void AbortTransactionF(Thread* self, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  AbortTransactionV(self, fmt, args);
  va_end(args);
}
543
AbortTransactionV(Thread * self,const char * fmt,va_list args)544 void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
545 CHECK(Runtime::Current()->IsActiveTransaction());
546 // Constructs abort message.
547 std::string abort_msg;
548 android::base::StringAppendV(&abort_msg, fmt, args);
549 // Throws an exception so we can abort the transaction and rollback every change.
550 Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
551 }
552
// START DECLARATIONS :
//
// These additional declarations are required because clang complains
// about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
//

// Shared invoke helper: marshals up to kMaxVarArgRegs arguments (or a range
// starting at vregC) and performs the call; defined later in this file.
template <bool is_range, bool do_assignability_check>
static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
                                       Thread* self,
                                       ShadowFrame& shadow_frame,
                                       JValue* result,
                                       uint16_t number_of_inputs,
                                       uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                       uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);

// Copies argument registers from the caller's frame into the callee's frame;
// defined later in this file.
template <bool is_range>
ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
                                 ShadowFrame* callee_frame,
                                 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                 const size_t first_src_reg,
                                 const size_t first_dest_reg,
                                 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);

// END DECLARATIONS.
577
// Bridges an interpreter caller into quick compiled code: ensures the
// callee's class is initialized for static methods, notifies the JIT of the
// transition, and invokes the callee with arguments taken directly from the
// caller's shadow frame starting at |arg_offset|.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        ShadowFrame* shadow_frame,
                                        uint16_t arg_offset,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaringClass = method->GetDeclaringClass();
    if (UNLIKELY(!declaringClass->IsInitialized())) {
      // Push the frame so it is visible on the thread while initialization
      // runs; EnsureInitialized can throw (checked below).
      self->PushShadowFrame(shadow_frame);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaringClass));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true,
                                                                            true))) {
        self->PopShadowFrame();
        DCHECK(self->IsExceptionPending());
        return;
      }
      self->PopShadowFrame();
      CHECK(h_class->IsInitializing());
      // Reload from shadow frame in case the method moved, this is faster than adding a handle.
      method = shadow_frame->GetMethod();
    }
  }
  // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
  // check that the arg_offset isn't greater than the number of registers. A stronger check is
  // difficult since the frame may contain space for all the registers in the method, or only enough
  // space for the arguments.
  if (kIsDebugBuild) {
    if (method->GetCodeItem() == nullptr) {
      DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
    } else {
      DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
    }
  }
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  // The outgoing arguments occupy the tail of the shadow frame's registers.
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}
623
SetStringInitValueToAllAliases(ShadowFrame * shadow_frame,uint16_t this_obj_vreg,JValue result)624 void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
625 uint16_t this_obj_vreg,
626 JValue result)
627 REQUIRES_SHARED(Locks::mutator_lock_) {
628 ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
629 if (existing == nullptr) {
630 // If it's null, we come from compiled code that was deoptimized. Nothing to do,
631 // as the compiler verified there was no alias.
632 // Set the new string result of the StringFactory.
633 shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
634 return;
635 }
636 // Set the string init result into all aliases.
637 for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
638 if (shadow_frame->GetVRegReference(i) == existing) {
639 DCHECK_EQ(shadow_frame->GetVRegReference(i),
640 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
641 shadow_frame->SetVRegReference(i, result.GetL());
642 DCHECK_EQ(shadow_frame->GetVRegReference(i),
643 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
644 }
645 }
646 }
647
// Common implementation of invoke-polymorphic and invoke-polymorphic/range on
// a MethodHandle receiver: resolves the callsite's MethodType from the dex
// proto index, then dispatches through MethodHandleInvokeExact or
// MethodHandleInvoke depending on |invoke_exact|.
// Returns true on success; otherwise an exception is pending and false is
// returned (with *result zeroed).
template<bool is_range>
static bool DoMethodHandleInvokeCommon(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       bool invoke_exact,
                                       const Instruction* inst,
                                       uint16_t inst_data,
                                       JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Make sure to check for async exceptions
  if (UNLIKELY(self->ObserveAsyncException())) {
    return false;
  }
  // Invoke-polymorphic instructions always take a receiver. i.e, they are never static.
  const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
  const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();

  // Initialize |result| to 0 as this is the default return value for
  // polymorphic invocations of method handle types with void return
  // and provides sane return result in error cases.
  result->SetJ(0);

  // The invoke_method_idx here is the name of the signature polymorphic method that
  // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
  // and not the method that we'll dispatch to in the end.
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
  if (UNLIKELY(method_handle == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic call site.
  const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
  const dex::ProtoIndex callsite_proto_id(vRegH);

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(
      class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  // There is a common dispatch method for method handles that takes
  // arguments either from a range or an array of arguments depending
  // on whether the DEX instruction is invoke-polymorphic/range or
  // invoke-polymorphic. The array here is for the latter.
  if (UNLIKELY(is_range)) {
    // VRegC is the register holding the method handle. Arguments passed
    // to the method handle's target do not include the method handle.
    RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  } else {
    // Get the register arguments for the invoke.
    uint32_t args[Instruction::kMaxVarArgRegs] = {};
    inst->GetVarArgs(args, inst_data);
    // Drop the first register which is the method handle performing the invoke.
    memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
    args[Instruction::kMaxVarArgRegs - 1] = 0;
    VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
    if (invoke_exact) {
      return MethodHandleInvokeExact(self,
                                     shadow_frame,
                                     method_handle,
                                     callsite_type,
                                     &operands,
                                     result);
    } else {
      return MethodHandleInvoke(self,
                                shadow_frame,
                                method_handle,
                                callsite_type,
                                &operands,
                                result);
    }
  }
}
748
DoMethodHandleInvokeExact(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)749 bool DoMethodHandleInvokeExact(Thread* self,
750 ShadowFrame& shadow_frame,
751 const Instruction* inst,
752 uint16_t inst_data,
753 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
754 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
755 static const bool kIsRange = false;
756 return DoMethodHandleInvokeCommon<kIsRange>(
757 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
758 } else {
759 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
760 static const bool kIsRange = true;
761 return DoMethodHandleInvokeCommon<kIsRange>(
762 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
763 }
764 }
765
DoMethodHandleInvoke(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)766 bool DoMethodHandleInvoke(Thread* self,
767 ShadowFrame& shadow_frame,
768 const Instruction* inst,
769 uint16_t inst_data,
770 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
771 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
772 static const bool kIsRange = false;
773 return DoMethodHandleInvokeCommon<kIsRange>(
774 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
775 } else {
776 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
777 static const bool kIsRange = true;
778 return DoMethodHandleInvokeCommon<kIsRange>(
779 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
780 }
781 }
782
DoVarHandleInvokeCommon(Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result,mirror::VarHandle::AccessMode access_mode)783 static bool DoVarHandleInvokeCommon(Thread* self,
784 ShadowFrame& shadow_frame,
785 const Instruction* inst,
786 uint16_t inst_data,
787 JValue* result,
788 mirror::VarHandle::AccessMode access_mode)
789 REQUIRES_SHARED(Locks::mutator_lock_) {
790 // Make sure to check for async exceptions
791 if (UNLIKELY(self->ObserveAsyncException())) {
792 return false;
793 }
794
795 StackHandleScope<2> hs(self);
796 bool is_var_args = inst->HasVarArgs();
797 const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
798 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
799 Handle<mirror::MethodType> callsite_type(hs.NewHandle(
800 class_linker->ResolveMethodType(self, dex::ProtoIndex(vRegH), shadow_frame.GetMethod())));
801 // This implies we couldn't resolve one or more types in this VarHandle.
802 if (UNLIKELY(callsite_type == nullptr)) {
803 CHECK(self->IsExceptionPending());
804 return false;
805 }
806
807 const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
808 ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(vRegC));
809 Handle<mirror::VarHandle> var_handle(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
810 if (is_var_args) {
811 uint32_t args[Instruction::kMaxVarArgRegs];
812 inst->GetVarArgs(args, inst_data);
813 VarArgsInstructionOperands all_operands(args, inst->VRegA_45cc());
814 NoReceiverInstructionOperands operands(&all_operands);
815 return VarHandleInvokeAccessor(self,
816 shadow_frame,
817 var_handle,
818 callsite_type,
819 access_mode,
820 &operands,
821 result);
822 } else {
823 RangeInstructionOperands all_operands(inst->VRegC_4rcc(), inst->VRegA_4rcc());
824 NoReceiverInstructionOperands operands(&all_operands);
825 return VarHandleInvokeAccessor(self,
826 shadow_frame,
827 var_handle,
828 callsite_type,
829 access_mode,
830 &operands,
831 result);
832 }
833 }
834
// Generates the interpreter entry point DoVarHandle<_access_mode>() for one
// VarHandle access mode. Each generated function simply forwards to
// DoVarHandleInvokeCommon() with the matching
// mirror::VarHandle::AccessMode::k<_access_mode> value.
#define DO_VAR_HANDLE_ACCESSOR(_access_mode)                                \
bool DoVarHandle ## _access_mode(Thread* self,                              \
                                 ShadowFrame& shadow_frame,                 \
                                 const Instruction* inst,                   \
                                 uint16_t inst_data,                        \
                                 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) { \
  const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode; \
  return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
}

// One entry point per VarHandle access mode; the set below mirrors the
// accessors declared on java.lang.invoke.VarHandle.
DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
DO_VAR_HANDLE_ACCESSOR(Get)
DO_VAR_HANDLE_ACCESSOR(GetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
DO_VAR_HANDLE_ACCESSOR(GetAndSet)
DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
DO_VAR_HANDLE_ACCESSOR(GetOpaque)
DO_VAR_HANDLE_ACCESSOR(GetVolatile)
DO_VAR_HANDLE_ACCESSOR(Set)
DO_VAR_HANDLE_ACCESSOR(SetOpaque)
DO_VAR_HANDLE_ACCESSOR(SetRelease)
DO_VAR_HANDLE_ACCESSOR(SetVolatile)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)

#undef DO_VAR_HANDLE_ACCESSOR
878
879 template<bool is_range>
880 bool DoInvokePolymorphic(Thread* self,
881 ShadowFrame& shadow_frame,
882 const Instruction* inst,
883 uint16_t inst_data,
884 JValue* result) {
885 const int invoke_method_idx = inst->VRegB();
886 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
887 ArtMethod* invoke_method =
888 class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
889 self, invoke_method_idx, shadow_frame.GetMethod(), kVirtual);
890
891 // Ensure intrinsic identifiers are initialized.
892 DCHECK(invoke_method->IsIntrinsic());
893
894 // Dispatch based on intrinsic identifier associated with method.
895 switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
896 #define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
897 case Intrinsics::k##Name: \
898 return Do ## Name(self, shadow_frame, inst, inst_data, result);
899 #include "intrinsics_list.h"
900 SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
901 #undef INTRINSICS_LIST
902 #undef SIGNATURE_POLYMORPHIC_INTRINSICS_LIST
903 #undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
904 default:
905 LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
906 UNREACHABLE();
907 return false;
908 }
909 }
910
ConvertScalarBootstrapArgument(jvalue value)911 static JValue ConvertScalarBootstrapArgument(jvalue value) {
912 // value either contains a primitive scalar value if it corresponds
913 // to a primitive type, or it contains an integer value if it
914 // corresponds to an object instance reference id (e.g. a string id).
915 return JValue::FromPrimitive(value.j);
916 }
917
GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)918 static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
919 REQUIRES_SHARED(Locks::mutator_lock_) {
920 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
921 ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
922 switch (type) {
923 case EncodedArrayValueIterator::ValueType::kBoolean:
924 case EncodedArrayValueIterator::ValueType::kByte:
925 case EncodedArrayValueIterator::ValueType::kChar:
926 case EncodedArrayValueIterator::ValueType::kShort:
927 // These types are disallowed by JVMS. Treat as integers. This
928 // will result in CCE's being raised if the BSM has one of these
929 // types.
930 case EncodedArrayValueIterator::ValueType::kInt:
931 return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
932 case EncodedArrayValueIterator::ValueType::kLong:
933 return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
934 case EncodedArrayValueIterator::ValueType::kFloat:
935 return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
936 case EncodedArrayValueIterator::ValueType::kDouble:
937 return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
938 case EncodedArrayValueIterator::ValueType::kMethodType:
939 return GetClassRoot<mirror::MethodType>(class_roots);
940 case EncodedArrayValueIterator::ValueType::kMethodHandle:
941 return GetClassRoot<mirror::MethodHandle>(class_roots);
942 case EncodedArrayValueIterator::ValueType::kString:
943 return GetClassRoot<mirror::String>();
944 case EncodedArrayValueIterator::ValueType::kType:
945 return GetClassRoot<mirror::Class>();
946 case EncodedArrayValueIterator::ValueType::kField:
947 case EncodedArrayValueIterator::ValueType::kMethod:
948 case EncodedArrayValueIterator::ValueType::kEnum:
949 case EncodedArrayValueIterator::ValueType::kArray:
950 case EncodedArrayValueIterator::ValueType::kAnnotation:
951 case EncodedArrayValueIterator::ValueType::kNull:
952 return nullptr;
953 }
954 }
955
GetArgumentForBootstrapMethod(Thread * self,ArtMethod * referrer,EncodedArrayValueIterator::ValueType type,const JValue * encoded_value,JValue * decoded_value)956 static bool GetArgumentForBootstrapMethod(Thread* self,
957 ArtMethod* referrer,
958 EncodedArrayValueIterator::ValueType type,
959 const JValue* encoded_value,
960 JValue* decoded_value)
961 REQUIRES_SHARED(Locks::mutator_lock_) {
962 // The encoded_value contains either a scalar value (IJDF) or a
963 // scalar DEX file index to a reference type to be materialized.
964 switch (type) {
965 case EncodedArrayValueIterator::ValueType::kInt:
966 case EncodedArrayValueIterator::ValueType::kFloat:
967 decoded_value->SetI(encoded_value->GetI());
968 return true;
969 case EncodedArrayValueIterator::ValueType::kLong:
970 case EncodedArrayValueIterator::ValueType::kDouble:
971 decoded_value->SetJ(encoded_value->GetJ());
972 return true;
973 case EncodedArrayValueIterator::ValueType::kMethodType: {
974 StackHandleScope<2> hs(self);
975 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
976 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
977 dex::ProtoIndex proto_idx(encoded_value->GetC());
978 ClassLinker* cl = Runtime::Current()->GetClassLinker();
979 ObjPtr<mirror::MethodType> o =
980 cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
981 if (UNLIKELY(o.IsNull())) {
982 DCHECK(self->IsExceptionPending());
983 return false;
984 }
985 decoded_value->SetL(o);
986 return true;
987 }
988 case EncodedArrayValueIterator::ValueType::kMethodHandle: {
989 uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
990 ClassLinker* cl = Runtime::Current()->GetClassLinker();
991 ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
992 if (UNLIKELY(o.IsNull())) {
993 DCHECK(self->IsExceptionPending());
994 return false;
995 }
996 decoded_value->SetL(o);
997 return true;
998 }
999 case EncodedArrayValueIterator::ValueType::kString: {
1000 dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
1001 ClassLinker* cl = Runtime::Current()->GetClassLinker();
1002 ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
1003 if (UNLIKELY(o.IsNull())) {
1004 DCHECK(self->IsExceptionPending());
1005 return false;
1006 }
1007 decoded_value->SetL(o);
1008 return true;
1009 }
1010 case EncodedArrayValueIterator::ValueType::kType: {
1011 dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
1012 ClassLinker* cl = Runtime::Current()->GetClassLinker();
1013 ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
1014 if (UNLIKELY(o.IsNull())) {
1015 DCHECK(self->IsExceptionPending());
1016 return false;
1017 }
1018 decoded_value->SetL(o);
1019 return true;
1020 }
1021 case EncodedArrayValueIterator::ValueType::kBoolean:
1022 case EncodedArrayValueIterator::ValueType::kByte:
1023 case EncodedArrayValueIterator::ValueType::kChar:
1024 case EncodedArrayValueIterator::ValueType::kShort:
1025 case EncodedArrayValueIterator::ValueType::kField:
1026 case EncodedArrayValueIterator::ValueType::kMethod:
1027 case EncodedArrayValueIterator::ValueType::kEnum:
1028 case EncodedArrayValueIterator::ValueType::kArray:
1029 case EncodedArrayValueIterator::ValueType::kAnnotation:
1030 case EncodedArrayValueIterator::ValueType::kNull:
1031 // Unreachable - unsupported types that have been checked when
1032 // determining the effect call site type based on the bootstrap
1033 // argument types.
1034 UNREACHABLE();
1035 }
1036 }
1037
PackArgumentForBootstrapMethod(Thread * self,ArtMethod * referrer,CallSiteArrayValueIterator * it,ShadowFrameSetter * setter)1038 static bool PackArgumentForBootstrapMethod(Thread* self,
1039 ArtMethod* referrer,
1040 CallSiteArrayValueIterator* it,
1041 ShadowFrameSetter* setter)
1042 REQUIRES_SHARED(Locks::mutator_lock_) {
1043 auto type = it->GetValueType();
1044 const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
1045 JValue decoded_value;
1046 if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
1047 return false;
1048 }
1049 switch (it->GetValueType()) {
1050 case EncodedArrayValueIterator::ValueType::kInt:
1051 case EncodedArrayValueIterator::ValueType::kFloat:
1052 setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
1053 return true;
1054 case EncodedArrayValueIterator::ValueType::kLong:
1055 case EncodedArrayValueIterator::ValueType::kDouble:
1056 setter->SetLong(decoded_value.GetJ());
1057 return true;
1058 case EncodedArrayValueIterator::ValueType::kMethodType:
1059 case EncodedArrayValueIterator::ValueType::kMethodHandle:
1060 case EncodedArrayValueIterator::ValueType::kString:
1061 case EncodedArrayValueIterator::ValueType::kType:
1062 setter->SetReference(decoded_value.GetL());
1063 return true;
1064 case EncodedArrayValueIterator::ValueType::kBoolean:
1065 case EncodedArrayValueIterator::ValueType::kByte:
1066 case EncodedArrayValueIterator::ValueType::kChar:
1067 case EncodedArrayValueIterator::ValueType::kShort:
1068 case EncodedArrayValueIterator::ValueType::kField:
1069 case EncodedArrayValueIterator::ValueType::kMethod:
1070 case EncodedArrayValueIterator::ValueType::kEnum:
1071 case EncodedArrayValueIterator::ValueType::kArray:
1072 case EncodedArrayValueIterator::ValueType::kAnnotation:
1073 case EncodedArrayValueIterator::ValueType::kNull:
1074 // Unreachable - unsupported types that have been checked when
1075 // determining the effect call site type based on the bootstrap
1076 // argument types.
1077 UNREACHABLE();
1078 }
1079 }
1080
// Packs the trailing bootstrap arguments of a variable-arity bootstrap
// method into a freshly allocated array whose class is |array_type| (the
// BSM's final parameter type), and stores that array into the bootstrap
// frame through |setter|. Consumes all remaining values from |it|. Returns
// false with a pending exception if allocation or argument resolution
// fails.
static bool PackCollectorArrayForBootstrapMethod(Thread* self,
                                                 ArtMethod* referrer,
                                                 ObjPtr<mirror::Class> array_type,
                                                 int32_t array_length,
                                                 CallSiteArrayValueIterator* it,
                                                 ShadowFrameSetter* setter)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  JValue decoded_value;

// Collects remaining encoded values into a primitive array (int[], long[],
// float[] or double[]). The GetArgumentForBootstrapMethod() result is not
// checked: for primitive value types it always succeeds (it only copies
// bits and cannot throw).
#define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type) \
  Handle<mirror::Type ## Array> array = \
      hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length)); \
  if (array.IsNull()) { \
    return false; \
  } \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) { \
    auto type = it->GetValueType(); \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
    const JValue encoded_value = \
        ConvertScalarBootstrapArgument(it->GetJavaValue()); \
    GetArgumentForBootstrapMethod(self, \
                                  referrer, \
                                  type, \
                                  &encoded_value, \
                                  &decoded_value); \
    array->Set(i, decoded_value.Get ## Descriptor()); \
  } \
  setter->SetReference(array.Get()); \
  return true;

// Collects remaining encoded values into an ObjectArray<T>, resolving each
// DEX index to its mirror object. Resolution can fail, so the result of
// GetArgumentForBootstrapMethod() is checked here. Stores honor the active
// transaction state.
#define COLLECT_REFERENCE_ARRAY(T, Type) \
  Handle<mirror::ObjectArray<T>> array = /* NOLINT */ \
      hs.NewHandle(mirror::ObjectArray<T>::Alloc(self, \
                                                 array_type, \
                                                 array_length)); \
  if (array.IsNull()) { \
    return false; \
  } \
  for (int32_t i = 0; it->HasNext(); it->Next(), ++i) { \
    auto type = it->GetValueType(); \
    DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
    const JValue encoded_value = \
        ConvertScalarBootstrapArgument(it->GetJavaValue()); \
    if (!GetArgumentForBootstrapMethod(self, \
                                       referrer, \
                                       type, \
                                       &encoded_value, \
                                       &decoded_value)) { \
      return false; \
    } \
    ObjPtr<mirror::Object> o = decoded_value.GetL(); \
    if (Runtime::Current()->IsActiveTransaction()) { \
      array->Set<true>(i, ObjPtr<T>::DownCast(o)); \
    } else { \
      array->Set<false>(i, ObjPtr<T>::DownCast(o)); \
    } \
  } \
  setter->SetReference(array.Get()); \
  return true;

  // Dispatch on the collector array's component type to instantiate the
  // matching collection loop.
  ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
  ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
  if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(I, Int);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(J, Long);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(F, Float);
  } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
    COLLECT_PRIMITIVE_ARRAY(D, Double);
  } else if (component_type == GetClassRoot<mirror::MethodType>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
  } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
    COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
  } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
    COLLECT_REFERENCE_ARRAY(mirror::String, String);
  } else if (component_type == GetClassRoot<mirror::Class>()) {
    COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
  } else {
    // Caller guarantees the component type is one of the above.
    UNREACHABLE();
  }
#undef COLLECT_PRIMITIVE_ARRAY
#undef COLLECT_REFERENCE_ARRAY
}
1167
BuildCallSiteForBootstrapMethod(Thread * self,const DexFile * dex_file,uint32_t call_site_idx)1168 static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
1169 const DexFile* dex_file,
1170 uint32_t call_site_idx)
1171 REQUIRES_SHARED(Locks::mutator_lock_) {
1172 const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
1173 CallSiteArrayValueIterator it(*dex_file, csi);
1174 DCHECK_GE(it.Size(), 1u);
1175
1176 StackHandleScope<2> hs(self);
1177 // Create array for parameter types.
1178 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1179 ObjPtr<mirror::Class> class_array_type =
1180 GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
1181 Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
1182 mirror::ObjectArray<mirror::Class>::Alloc(self,
1183 class_array_type,
1184 static_cast<int>(it.Size())));
1185 if (ptypes.IsNull()) {
1186 DCHECK(self->IsExceptionPending());
1187 return nullptr;
1188 }
1189
1190 // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
1191 // that the runtime will construct.
1192 ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
1193 it.Next();
1194
1195 // The remaining parameter types are derived from the types of
1196 // arguments present in the DEX file.
1197 int index = 1;
1198 while (it.HasNext()) {
1199 ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
1200 if (ptype.IsNull()) {
1201 ThrowClassCastException("Unsupported bootstrap argument type");
1202 return nullptr;
1203 }
1204 ptypes->Set(index, ptype);
1205 index++;
1206 it.Next();
1207 }
1208 DCHECK_EQ(static_cast<size_t>(index), it.Size());
1209
1210 // By definition, the return type is always a j.l.i.CallSite.
1211 Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
1212 return mirror::MethodType::Create(self, rtype, ptypes);
1213 }
1214
InvokeBootstrapMethod(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx)1215 static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
1216 ShadowFrame& shadow_frame,
1217 uint32_t call_site_idx)
1218 REQUIRES_SHARED(Locks::mutator_lock_) {
1219 StackHandleScope<5> hs(self);
1220 // There are three mandatory arguments expected from the call site
1221 // value array in the DEX file: the bootstrap method handle, the
1222 // method name to pass to the bootstrap method, and the method type
1223 // to pass to the bootstrap method.
1224 static constexpr size_t kMandatoryArgumentsCount = 3;
1225 ArtMethod* referrer = shadow_frame.GetMethod();
1226 const DexFile* dex_file = referrer->GetDexFile();
1227 const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
1228 CallSiteArrayValueIterator it(*dex_file, csi);
1229 if (it.Size() < kMandatoryArgumentsCount) {
1230 ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
1231 it.Size(), kMandatoryArgumentsCount);
1232 return nullptr;
1233 }
1234
1235 if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
1236 ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
1237 return nullptr;
1238 }
1239
1240 uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
1241 it.Next();
1242
1243 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1244 Handle<mirror::MethodHandle> bsm =
1245 hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
1246 if (bsm.IsNull()) {
1247 DCHECK(self->IsExceptionPending());
1248 return nullptr;
1249 }
1250
1251 if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
1252 // JLS suggests also accepting constructors. This is currently
1253 // hard as constructor invocations happen via transformers in ART
1254 // today. The constructor would need to be a class derived from java.lang.invoke.CallSite.
1255 ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
1256 return nullptr;
1257 }
1258
1259 // Construct the local call site type information based on the 3
1260 // mandatory arguments provided by the runtime and the static arguments
1261 // in the DEX file. We will use these arguments to build a shadow frame.
1262 MutableHandle<mirror::MethodType> call_site_type =
1263 hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
1264 if (call_site_type.IsNull()) {
1265 DCHECK(self->IsExceptionPending());
1266 return nullptr;
1267 }
1268
1269 // Check if this BSM is targeting a variable arity method. If so,
1270 // we'll need to collect the trailing arguments into an array.
1271 Handle<mirror::Array> collector_arguments;
1272 int32_t collector_arguments_length;
1273 if (bsm->GetTargetMethod()->IsVarargs()) {
1274 int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
1275 if (number_of_bsm_parameters == 0) {
1276 ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
1277 return nullptr;
1278 }
1279 Handle<mirror::Class> collector_array_class =
1280 hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
1281 if (!collector_array_class->IsArrayClass()) {
1282 ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
1283 return nullptr;
1284 }
1285 // The call site may include no arguments to be collected. In this
1286 // case the number of arguments must be at least the number of BSM
1287 // parameters less the collector array.
1288 if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
1289 ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
1290 return nullptr;
1291 }
1292 // Check all the arguments to be collected match the collector array component type.
1293 for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
1294 if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
1295 ThrowClassCastException(collector_array_class->GetComponentType(),
1296 call_site_type->GetPTypes()->Get(i));
1297 return nullptr;
1298 }
1299 }
1300 // Update the call site method type so it now includes the collector array.
1301 int32_t collector_arguments_start = number_of_bsm_parameters - 1;
1302 collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
1303 call_site_type.Assign(
1304 mirror::MethodType::CollectTrailingArguments(self,
1305 call_site_type.Get(),
1306 collector_array_class.Get(),
1307 collector_arguments_start));
1308 if (call_site_type.IsNull()) {
1309 DCHECK(self->IsExceptionPending());
1310 return nullptr;
1311 }
1312 } else {
1313 collector_arguments_length = 0;
1314 }
1315
1316 if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
1317 ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
1318 return nullptr;
1319 }
1320
1321 // BSM invocation has a different set of exceptions that
1322 // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
1323 // "opportunities". Unfortunately we cannot just leave this to the
1324 // method handle invocation as this might generate a WMTE.
1325 for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
1326 ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
1327 ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
1328 if (!IsParameterTypeConvertible(from, to)) {
1329 ThrowClassCastException(from, to);
1330 return nullptr;
1331 }
1332 }
1333 if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
1334 ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
1335 return nullptr;
1336 }
1337
1338 // Set-up a shadow frame for invoking the bootstrap method handle.
1339 ShadowFrameAllocaUniquePtr bootstrap_frame =
1340 CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
1341 nullptr,
1342 referrer,
1343 shadow_frame.GetDexPC());
1344 ScopedStackedShadowFramePusher pusher(
1345 self, bootstrap_frame.get(), StackedShadowFrameType::kShadowFrameUnderConstruction);
1346 ShadowFrameSetter setter(bootstrap_frame.get(), 0u);
1347
1348 // The first parameter is a MethodHandles lookup instance.
1349 Handle<mirror::Class> lookup_class =
1350 hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
1351 ObjPtr<mirror::MethodHandlesLookup> lookup =
1352 mirror::MethodHandlesLookup::Create(self, lookup_class);
1353 if (lookup.IsNull()) {
1354 DCHECK(self->IsExceptionPending());
1355 return nullptr;
1356 }
1357 setter.SetReference(lookup);
1358
1359 // Pack the remaining arguments into the frame.
1360 int number_of_arguments = call_site_type->GetNumberOfPTypes();
1361 int argument_index;
1362 for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
1363 if (argument_index == number_of_arguments - 1 &&
1364 call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
1365 ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
1366 if (!PackCollectorArrayForBootstrapMethod(self,
1367 referrer,
1368 array_type,
1369 collector_arguments_length,
1370 &it,
1371 &setter)) {
1372 DCHECK(self->IsExceptionPending());
1373 return nullptr;
1374 }
1375 } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
1376 DCHECK(self->IsExceptionPending());
1377 return nullptr;
1378 }
1379 it.Next();
1380 }
1381 DCHECK(!it.HasNext());
1382 DCHECK(setter.Done());
1383
1384 // Invoke the bootstrap method handle.
1385 JValue result;
1386 RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
1387 bool invoke_success = MethodHandleInvoke(self,
1388 *bootstrap_frame,
1389 bsm,
1390 call_site_type,
1391 &operands,
1392 &result);
1393 if (!invoke_success) {
1394 DCHECK(self->IsExceptionPending());
1395 return nullptr;
1396 }
1397
1398 Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
1399 if (UNLIKELY(object.IsNull())) {
1400 // This will typically be for LambdaMetafactory which is not supported.
1401 ThrowClassCastException("Bootstrap method returned null");
1402 return nullptr;
1403 }
1404
1405 // Check the result type is a subclass of j.l.i.CallSite.
1406 ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
1407 if (UNLIKELY(!object->InstanceOf(call_site_class))) {
1408 ThrowClassCastException(object->GetClass(), call_site_class);
1409 return nullptr;
1410 }
1411
1412 // Check the call site target is not null as we're going to invoke it.
1413 ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
1414 ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
1415 if (UNLIKELY(target == nullptr)) {
1416 ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
1417 return nullptr;
1418 }
1419 return call_site;
1420 }
1421
1422 namespace {
1423
DoResolveCallSite(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx)1424 ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
1425 ShadowFrame& shadow_frame,
1426 uint32_t call_site_idx)
1427 REQUIRES_SHARED(Locks::mutator_lock_) {
1428 StackHandleScope<1> hs(self);
1429 Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
1430
1431 // Get the call site from the DexCache if present.
1432 ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
1433 if (LIKELY(call_site != nullptr)) {
1434 return call_site;
1435 }
1436
1437 // Invoke the bootstrap method to get a candidate call site.
1438 call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
1439 if (UNLIKELY(call_site == nullptr)) {
1440 if (!self->GetException()->IsError()) {
1441 // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
1442 ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
1443 call_site_idx);
1444 }
1445 return nullptr;
1446 }
1447
1448 // Attempt to place the candidate call site into the DexCache, return the winning call site.
1449 return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
1450 }
1451
1452 } // namespace
1453
DoInvokeCustom(Thread * self,ShadowFrame & shadow_frame,uint32_t call_site_idx,const InstructionOperands * operands,JValue * result)1454 bool DoInvokeCustom(Thread* self,
1455 ShadowFrame& shadow_frame,
1456 uint32_t call_site_idx,
1457 const InstructionOperands* operands,
1458 JValue* result) {
1459 // Make sure to check for async exceptions
1460 if (UNLIKELY(self->ObserveAsyncException())) {
1461 return false;
1462 }
1463
1464 // invoke-custom is not supported in transactions. In transactions
1465 // there is a limited set of types supported. invoke-custom allows
1466 // running arbitrary code and instantiating arbitrary types.
1467 CHECK(!Runtime::Current()->IsActiveTransaction());
1468
1469 ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1470 if (call_site.IsNull()) {
1471 DCHECK(self->IsExceptionPending());
1472 return false;
1473 }
1474
1475 StackHandleScope<2> hs(self);
1476 Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1477 Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1478 DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1479 << " call_site_idx" << call_site_idx;
1480 return MethodHandleInvokeExact(self,
1481 shadow_frame,
1482 target,
1483 target_method_type,
1484 operands,
1485 result);
1486 }
1487
1488 // Assign register 'src_reg' from shadow_frame to register 'dest_reg' into new_shadow_frame.
AssignRegister(ShadowFrame * new_shadow_frame,const ShadowFrame & shadow_frame,size_t dest_reg,size_t src_reg)1489 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1490 size_t dest_reg, size_t src_reg)
1491 REQUIRES_SHARED(Locks::mutator_lock_) {
1492 // Uint required, so that sign extension does not make this wrong on 64b systems
1493 uint32_t src_value = shadow_frame.GetVReg(src_reg);
1494 ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1495
1496 // If both register locations contains the same value, the register probably holds a reference.
1497 // Note: As an optimization, non-moving collectors leave a stale reference value
1498 // in the references array even after the original vreg was overwritten to a non-reference.
1499 if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1500 new_shadow_frame->SetVRegReference(dest_reg, o);
1501 } else {
1502 new_shadow_frame->SetVReg(dest_reg, src_value);
1503 }
1504 }
1505
1506 template <bool is_range>
CopyRegisters(ShadowFrame & caller_frame,ShadowFrame * callee_frame,const uint32_t (& arg)[Instruction::kMaxVarArgRegs],const size_t first_src_reg,const size_t first_dest_reg,const size_t num_regs)1507 inline void CopyRegisters(ShadowFrame& caller_frame,
1508 ShadowFrame* callee_frame,
1509 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1510 const size_t first_src_reg,
1511 const size_t first_dest_reg,
1512 const size_t num_regs) {
1513 if (is_range) {
1514 const size_t dest_reg_bound = first_dest_reg + num_regs;
1515 for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1516 ++dest_reg, ++src_reg) {
1517 AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1518 }
1519 } else {
1520 DCHECK_LE(num_regs, arraysize(arg));
1521
1522 for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1523 AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1524 }
1525 }
1526 }
1527
// Common argument-marshalling path for interpreter invokes. Builds the
// callee's shadow frame, copies `number_of_inputs` argument registers from
// `shadow_frame` (a contiguous window starting at `vregC` when is_range, else
// the var-arg list in `arg`), optionally type-checks reference arguments
// against the callee's declared parameter types, then performs the call and
// stores the outcome in `result`. Returns false iff an exception is pending.
template <bool is_range,
          bool do_assignability_check>
static inline bool DoCallCommon(ArtMethod* called_method,
                                Thread* self,
                                ShadowFrame& shadow_frame,
                                JValue* result,
                                uint16_t number_of_inputs,
                                uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                uint32_t vregC) {
  bool string_init = false;
  // Replace calls to String.<init> with equivalent StringFactory call.
  if (UNLIKELY(called_method->GetDeclaringClass()->IsStringClass()
               && called_method->IsConstructor())) {
    called_method = WellKnownClasses::StringInitToStringFactory(called_method);
    string_init = true;
  }

  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  // Number of registers for the callee's call frame.
  uint16_t num_regs;
  // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
  // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
  // entrypoint to use once we start building the shadow frame.

  // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where we are
  // doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use the image
  // pointer size here and this may cause an overflow if it is called from the compiler. b/62402160
  const bool use_interpreter_entrypoint = !Runtime::Current()->IsStarted() ||
      ClassLinker::ShouldUseInterpreterEntrypoint(
          called_method,
          called_method->GetEntryPointFromQuickCompiledCode());
  if (LIKELY(accessor.HasCodeItem())) {
    // When transitioning to compiled code, space only needs to be reserved for the input registers.
    // The rest of the frame gets discarded. This also prevents accessing the called method's code
    // item, saving memory by keeping code items of compiled code untouched.
    if (!use_interpreter_entrypoint) {
      DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
      num_regs = number_of_inputs;
    } else {
      num_regs = accessor.RegistersSize();
      DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
    }
  } else {
    // Native and proxy methods carry no code item; the frame only needs to
    // hold the inputs.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = number_of_inputs;
  }

  // Hack for String init:
  //
  // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
  //         invoke-x StringFactory(a, b, c, ...)
  // by effectively dropping the first virtual register from the invoke.
  //
  // (at this point the ArtMethod has already been replaced,
  // so we just need to fix-up the arguments)
  //
  // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
  // to handle the compiler optimization of replacing `this` with null without
  // throwing NullPointerException.
  uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
  if (UNLIKELY(string_init)) {
    DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.

    // The new StringFactory call is static and has one fewer argument.
    if (!accessor.HasCodeItem()) {
      DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
      num_regs--;
    }  // else ... don't need to change num_regs since it comes up from the string_init's code item
    number_of_inputs--;

    // Rewrite the var-args, dropping the 0th argument ("this")
    for (uint32_t i = 1; i < arraysize(arg); ++i) {
      arg[i - 1] = arg[i];
    }
    arg[arraysize(arg) - 1] = 0;

    // Rewrite the non-var-arg case
    vregC++;  // Skips the 0th vreg in the range ("this").
  }

  // Parameter registers go at the end of the shadow frame.
  DCHECK_GE(num_regs, number_of_inputs);
  size_t first_dest_reg = num_regs - number_of_inputs;
  DCHECK_NE(first_dest_reg, (size_t)-1);

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize new shadow frame by copying the registers from the caller shadow frame.
  if (do_assignability_check) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    self->EndAssertNoThreadSuspension(old_cause);

    // ArtMethod here is needed to check type information of the call site against the callee.
    // Type information is retrieved from a DexFile/DexCache for that respective declared method.
    //
    // As a special case for proxy methods, which are not dex-backed,
    // we have to retrieve type information from the proxy's method
    // interface method instead (which is dex backed since proxies are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do runtime check on reference assignment. We need to load the shorty
    // to get the exact type of each reference argument.
    const dex::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);

    // Handle receiver apart since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (do_assignability_check && o != nullptr) {
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and GetClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->ResolveClassFromTypeIndex(type_idx);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o);
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
          // Reassemble the 64-bit value from the low (src_reg) and high
          // (src_reg + 1) halves.
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
               static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    // Fast path: plain register copy with no per-argument type checks.
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);

  // String.<init> produces a new String; propagate it to every caller vreg
  // aliasing the original uninitialized "this".
  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}
1738
1739 template<bool is_range, bool do_assignability_check>
DoCall(ArtMethod * called_method,Thread * self,ShadowFrame & shadow_frame,const Instruction * inst,uint16_t inst_data,JValue * result)1740 bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
1741 const Instruction* inst, uint16_t inst_data, JValue* result) {
1742 // Argument word count.
1743 const uint16_t number_of_inputs =
1744 (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);
1745
1746 // TODO: find a cleaner way to separate non-range and range information without duplicating
1747 // code.
1748 uint32_t arg[Instruction::kMaxVarArgRegs] = {}; // only used in invoke-XXX.
1749 uint32_t vregC = 0;
1750 if (is_range) {
1751 vregC = inst->VRegC_3rc();
1752 } else {
1753 vregC = inst->VRegC_35c();
1754 inst->GetVarArgs(arg, inst_data);
1755 }
1756
1757 return DoCallCommon<is_range, do_assignability_check>(
1758 called_method, self, shadow_frame,
1759 result, number_of_inputs, arg, vregC);
1760 }
1761
// Interprets filled-new-array / filled-new-array-range: allocates an array of
// `length` elements of the instruction's declared type and fills it from the
// argument registers. Only reference components and 'int' primitive
// components are supported; other primitive component types throw. Stores the
// new array in `result` and returns false iff an exception is pending.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  // Resolve (and if needed initialize) the array class named by the instruction.
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    // long/double would need two vregs per element; other narrow primitives
    // are simply not implemented for this instruction.
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc<true>(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  // Decode the source registers (explicit list for 35c, contiguous window for 3rc).
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;   // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  // Copy each source vreg into the freshly allocated array, using the
  // correctly-typed accessor so transactions record the writes properly.
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}
1833
1834 // TODO: Use ObjPtr here.
1835 template<typename T>
RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,int32_t count)1836 static void RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,
1837 int32_t count)
1838 REQUIRES_SHARED(Locks::mutator_lock_) {
1839 Runtime* runtime = Runtime::Current();
1840 for (int32_t i = 0; i < count; ++i) {
1841 runtime->RecordWriteArray(array.Ptr(), i, array->GetWithoutChecks(i));
1842 }
1843 }
1844
RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array,int32_t count)1845 void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
1846 REQUIRES_SHARED(Locks::mutator_lock_) {
1847 DCHECK(Runtime::Current()->IsActiveTransaction());
1848 DCHECK(array != nullptr);
1849 DCHECK_LE(count, array->GetLength());
1850 Primitive::Type primitive_component_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
1851 switch (primitive_component_type) {
1852 case Primitive::kPrimBoolean:
1853 RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
1854 break;
1855 case Primitive::kPrimByte:
1856 RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
1857 break;
1858 case Primitive::kPrimChar:
1859 RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
1860 break;
1861 case Primitive::kPrimShort:
1862 RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
1863 break;
1864 case Primitive::kPrimInt:
1865 RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
1866 break;
1867 case Primitive::kPrimFloat:
1868 RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
1869 break;
1870 case Primitive::kPrimLong:
1871 RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
1872 break;
1873 case Primitive::kPrimDouble:
1874 RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count);
1875 break;
1876 default:
1877 LOG(FATAL) << "Unsupported primitive type " << primitive_component_type
1878 << " in fill-array-data";
1879 UNREACHABLE();
1880 }
1881 }
1882
// Explicit DoCall template function declarations.
// One instantiation for each (is_range, do_assignability_check) combination,
// so the definitions above are emitted from this translation unit.
#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range, _do_assignability_check)                      \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                \
  bool DoCall<_is_range, _do_assignability_check>(ArtMethod* method, Thread* self,              \
                                                  ShadowFrame& shadow_frame,                    \
                                                  const Instruction* inst, uint16_t inst_data,  \
                                                  JValue* result)
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, true);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL

// Explicit DoInvokePolymorphic template function declarations.
// Instantiated for both the non-range and range forms of the instruction.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)          \
  template REQUIRES_SHARED(Locks::mutator_lock_)                         \
  bool DoInvokePolymorphic<_is_range>(                                   \
      Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,  \
      uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL

// Explicit DoFilledNewArray template function declarations.
// All eight (is_range, do_access_check, transaction_active) combinations.
#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _check, _transaction_active)       \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                  \
  bool DoFilledNewArray<_is_range_, _check, _transaction_active>(const Instruction* inst,         \
                                                                 const ShadowFrame& shadow_frame, \
                                                                 Thread* self, JValue* result)
#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active)       \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, false, _transaction_active);  \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, true, _transaction_active);   \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, false, _transaction_active);   \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, true, _transaction_active)
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false);
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL
1921
1922 } // namespace interpreter
1923 } // namespace art
1924