1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instruction_builder.h"
18
19 #include "art_method-inl.h"
20 #include "base/arena_bit_vector.h"
21 #include "base/bit_vector-inl.h"
22 #include "base/logging.h"
23 #include "block_builder.h"
24 #include "class_linker-inl.h"
25 #include "code_generator.h"
26 #include "data_type-inl.h"
27 #include "dex/bytecode_utils.h"
28 #include "dex/dex_instruction-inl.h"
29 #include "driver/compiler_options.h"
30 #include "driver/dex_compilation_unit.h"
31 #include "entrypoints/entrypoint_utils-inl.h"
32 #include "handle_cache-inl.h"
33 #include "imtable-inl.h"
34 #include "intrinsics.h"
35 #include "intrinsics_enum.h"
36 #include "intrinsics_utils.h"
37 #include "jit/jit.h"
38 #include "jit/profiling_info.h"
39 #include "mirror/dex_cache.h"
40 #include "oat/oat_file.h"
41 #include "optimizing/data_type.h"
42 #include "optimizing_compiler_stats.h"
43 #include "reflective_handle_scope-inl.h"
44 #include "scoped_thread_state_change-inl.h"
45 #include "sharpening.h"
46 #include "ssa_builder.h"
47 #include "well_known_classes.h"
48
49 namespace art HIDDEN {
50
51 namespace {
52
53 class SamePackageCompare {
54 public:
SamePackageCompare(const DexCompilationUnit & dex_compilation_unit)55 explicit SamePackageCompare(const DexCompilationUnit& dex_compilation_unit)
56 : dex_compilation_unit_(dex_compilation_unit) {}
57
operator ()(ObjPtr<mirror::Class> klass)58 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
59 if (klass->GetClassLoader() != dex_compilation_unit_.GetClassLoader().Get()) {
60 return false;
61 }
62 if (referrers_descriptor_ == nullptr) {
63 const DexFile* dex_file = dex_compilation_unit_.GetDexFile();
64 uint32_t referrers_method_idx = dex_compilation_unit_.GetDexMethodIndex();
65 referrers_descriptor_ =
66 dex_file->GetMethodDeclaringClassDescriptor(dex_file->GetMethodId(referrers_method_idx));
67 referrers_package_length_ = PackageLength(referrers_descriptor_);
68 }
69 std::string temp;
70 const char* klass_descriptor = klass->GetDescriptor(&temp);
71 size_t klass_package_length = PackageLength(klass_descriptor);
72 return (referrers_package_length_ == klass_package_length) &&
73 memcmp(referrers_descriptor_, klass_descriptor, referrers_package_length_) == 0;
74 };
75
76 private:
PackageLength(const char * descriptor)77 static size_t PackageLength(const char* descriptor) {
78 const char* slash_pos = strrchr(descriptor, '/');
79 return (slash_pos != nullptr) ? static_cast<size_t>(slash_pos - descriptor) : 0u;
80 }
81
82 const DexCompilationUnit& dex_compilation_unit_;
83 const char* referrers_descriptor_ = nullptr;
84 size_t referrers_package_length_ = 0u;
85 };
86
87 } // anonymous namespace
88
// Constructs the instruction builder. All per-build scratch state
// (locals_for_, loop_headers_, class_cache_) is allocated from
// `local_allocator` so it is released when the builder goes away, while
// instructions themselves come from the graph's arena (`allocator_`).
HInstructionBuilder::HInstructionBuilder(HGraph* graph,
                                         HBasicBlockBuilder* block_builder,
                                         SsaBuilder* ssa_builder,
                                         const DexFile* dex_file,
                                         const CodeItemDebugInfoAccessor& accessor,
                                         DataType::Type return_type,
                                         const DexCompilationUnit* dex_compilation_unit,
                                         const DexCompilationUnit* outer_compilation_unit,
                                         CodeGenerator* code_generator,
                                         OptimizingCompilerStats* compiler_stats,
                                         ScopedArenaAllocator* local_allocator)
    : allocator_(graph->GetAllocator()),
      graph_(graph),
      dex_file_(dex_file),
      code_item_accessor_(accessor),
      return_type_(return_type),
      block_builder_(block_builder),
      ssa_builder_(ssa_builder),
      code_generator_(code_generator),
      dex_compilation_unit_(dex_compilation_unit),
      outer_compilation_unit_(outer_compilation_unit),
      compilation_stats_(compiler_stats),
      local_allocator_(local_allocator),
      locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
      current_block_(nullptr),
      current_locals_(nullptr),
      latest_result_(nullptr),
      current_this_parameter_(nullptr),
      loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
      class_cache_(std::less<dex::TypeIndex>(), local_allocator->Adapter(kArenaAllocGraphBuilder)) {
  // Avoid early reallocations for the common case of a few loops per method.
  loop_headers_.reserve(kDefaultNumberOfLoops);
}
121
FindBlockStartingAt(uint32_t dex_pc) const122 HBasicBlock* HInstructionBuilder::FindBlockStartingAt(uint32_t dex_pc) const {
123 return block_builder_->GetBlockAt(dex_pc);
124 }
125
GetLocalsFor(HBasicBlock * block)126 inline ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsFor(HBasicBlock* block) {
127 ScopedArenaVector<HInstruction*>* locals = &locals_for_[block->GetBlockId()];
128 const size_t vregs = graph_->GetNumberOfVRegs();
129 if (locals->size() == vregs) {
130 return locals;
131 }
132 return GetLocalsForWithAllocation(block, locals, vregs);
133 }
134
GetLocalsForWithAllocation(HBasicBlock * block,ScopedArenaVector<HInstruction * > * locals,const size_t vregs)135 ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsForWithAllocation(
136 HBasicBlock* block,
137 ScopedArenaVector<HInstruction*>* locals,
138 const size_t vregs) {
139 DCHECK_NE(locals->size(), vregs);
140 locals->resize(vregs, nullptr);
141 if (block->IsCatchBlock()) {
142 // We record incoming inputs of catch phis at throwing instructions and
143 // must therefore eagerly create the phis. Phis for undefined vregs will
144 // be deleted when the first throwing instruction with the vreg undefined
145 // is encountered. Unused phis will be removed by dead phi analysis.
146 for (size_t i = 0; i < vregs; ++i) {
147 // No point in creating the catch phi if it is already undefined at
148 // the first throwing instruction.
149 HInstruction* current_local_value = (*current_locals_)[i];
150 if (current_local_value != nullptr) {
151 HPhi* phi = new (allocator_) HPhi(
152 allocator_,
153 i,
154 0,
155 current_local_value->GetType());
156 block->AddPhi(phi);
157 (*locals)[i] = phi;
158 }
159 }
160 }
161 return locals;
162 }
163
ValueOfLocalAt(HBasicBlock * block,size_t local)164 inline HInstruction* HInstructionBuilder::ValueOfLocalAt(HBasicBlock* block, size_t local) {
165 ScopedArenaVector<HInstruction*>* locals = GetLocalsFor(block);
166 return (*locals)[local];
167 }
168
InitializeBlockLocals()169 void HInstructionBuilder::InitializeBlockLocals() {
170 current_locals_ = GetLocalsFor(current_block_);
171
172 if (current_block_->IsCatchBlock()) {
173 // Catch phis were already created and inputs collected from throwing sites.
174 if (kIsDebugBuild) {
175 // Make sure there was at least one throwing instruction which initialized
176 // locals (guaranteed by HGraphBuilder) and that all try blocks have been
177 // visited already (from HTryBoundary scoping and reverse post order).
178 bool catch_block_visited = false;
179 for (HBasicBlock* current : graph_->GetReversePostOrder()) {
180 if (current == current_block_) {
181 catch_block_visited = true;
182 } else if (current->IsTryBlock()) {
183 const HTryBoundary& try_entry = current->GetTryCatchInformation()->GetTryEntry();
184 if (try_entry.HasExceptionHandler(*current_block_)) {
185 DCHECK(!catch_block_visited) << "Catch block visited before its try block.";
186 }
187 }
188 }
189 DCHECK_EQ(current_locals_->size(), graph_->GetNumberOfVRegs())
190 << "No instructions throwing into a live catch block.";
191 }
192 } else if (current_block_->IsLoopHeader()) {
193 // If the block is a loop header, we know we only have visited the pre header
194 // because we are visiting in reverse post order. We create phis for all initialized
195 // locals from the pre header. Their inputs will be populated at the end of
196 // the analysis.
197 for (size_t local = 0; local < current_locals_->size(); ++local) {
198 HInstruction* incoming =
199 ValueOfLocalAt(current_block_->GetLoopInformation()->GetPreHeader(), local);
200 if (incoming != nullptr) {
201 HPhi* phi = new (allocator_) HPhi(
202 allocator_,
203 local,
204 0,
205 incoming->GetType());
206 current_block_->AddPhi(phi);
207 (*current_locals_)[local] = phi;
208 }
209 }
210
211 // Save the loop header so that the last phase of the analysis knows which
212 // blocks need to be updated.
213 loop_headers_.push_back(current_block_);
214 } else if (current_block_->GetPredecessors().size() > 0) {
215 // All predecessors have already been visited because we are visiting in reverse post order.
216 // We merge the values of all locals, creating phis if those values differ.
217 for (size_t local = 0; local < current_locals_->size(); ++local) {
218 bool one_predecessor_has_no_value = false;
219 bool is_different = false;
220 HInstruction* value = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
221
222 for (HBasicBlock* predecessor : current_block_->GetPredecessors()) {
223 HInstruction* current = ValueOfLocalAt(predecessor, local);
224 if (current == nullptr) {
225 one_predecessor_has_no_value = true;
226 break;
227 } else if (current != value) {
228 is_different = true;
229 }
230 }
231
232 if (one_predecessor_has_no_value) {
233 // If one predecessor has no value for this local, we trust the verifier has
234 // successfully checked that there is a store dominating any read after this block.
235 continue;
236 }
237
238 if (is_different) {
239 HInstruction* first_input = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
240 HPhi* phi = new (allocator_) HPhi(
241 allocator_,
242 local,
243 current_block_->GetPredecessors().size(),
244 first_input->GetType());
245 for (size_t i = 0; i < current_block_->GetPredecessors().size(); i++) {
246 HInstruction* pred_value = ValueOfLocalAt(current_block_->GetPredecessors()[i], local);
247 phi->SetRawInputAt(i, pred_value);
248 }
249 current_block_->AddPhi(phi);
250 value = phi;
251 }
252 (*current_locals_)[local] = value;
253 }
254 }
255 }
256
// Records the current vreg values into the catch phis of every handler of the
// current try block. Called at each throwing instruction inside a try block.
// A vreg that is undefined at any throwing site invalidates its catch phi:
// the phi is deleted and the slot nulled so later sites skip it.
void HInstructionBuilder::PropagateLocalsToCatchBlocks() {
  const HTryBoundary& try_entry = current_block_->GetTryCatchInformation()->GetTryEntry();
  for (HBasicBlock* catch_block : try_entry.GetExceptionHandlers()) {
    ScopedArenaVector<HInstruction*>* handler_locals = GetLocalsFor(catch_block);
    DCHECK_EQ(handler_locals->size(), current_locals_->size());
    for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
      HInstruction* handler_value = (*handler_locals)[vreg];
      if (handler_value == nullptr) {
        // Vreg was undefined at a previously encountered throwing instruction
        // and the catch phi was deleted. Do not record the local value.
        continue;
      }
      // Catch-block locals only ever hold the eagerly-created phis.
      DCHECK(handler_value->IsPhi());

      HInstruction* local_value = (*current_locals_)[vreg];
      if (local_value == nullptr) {
        // This is the first instruction throwing into `catch_block` where
        // `vreg` is undefined. Delete the catch phi.
        catch_block->RemovePhi(handler_value->AsPhi());
        (*handler_locals)[vreg] = nullptr;
      } else {
        // Vreg has been defined at all instructions throwing into `catch_block`
        // encountered so far. Record the local value in the catch phi.
        handler_value->AsPhi()->AddInput(local_value);
      }
    }
  }
}
285
AppendInstruction(HInstruction * instruction)286 void HInstructionBuilder::AppendInstruction(HInstruction* instruction) {
287 current_block_->AddInstruction(instruction);
288 InitializeInstruction(instruction);
289 }
290
InsertInstructionAtTop(HInstruction * instruction)291 void HInstructionBuilder::InsertInstructionAtTop(HInstruction* instruction) {
292 if (current_block_->GetInstructions().IsEmpty()) {
293 current_block_->AddInstruction(instruction);
294 } else {
295 current_block_->InsertInstructionBefore(instruction, current_block_->GetFirstInstruction());
296 }
297 InitializeInstruction(instruction);
298 }
299
InitializeInstruction(HInstruction * instruction)300 void HInstructionBuilder::InitializeInstruction(HInstruction* instruction) {
301 if (instruction->NeedsEnvironment()) {
302 HEnvironment* environment = HEnvironment::Create(
303 allocator_,
304 current_locals_->size(),
305 graph_->GetArtMethod(),
306 instruction->GetDexPc(),
307 instruction);
308 environment->CopyFrom(allocator_, ArrayRef<HInstruction* const>(*current_locals_));
309 instruction->SetRawEnvironment(environment);
310 }
311 }
312
LoadNullCheckedLocal(uint32_t register_index,uint32_t dex_pc)313 HInstruction* HInstructionBuilder::LoadNullCheckedLocal(uint32_t register_index, uint32_t dex_pc) {
314 HInstruction* ref = LoadLocal(register_index, DataType::Type::kReference);
315 if (!ref->CanBeNull()) {
316 return ref;
317 }
318
319 HNullCheck* null_check = new (allocator_) HNullCheck(ref, dex_pc);
320 AppendInstruction(null_check);
321 return null_check;
322 }
323
SetLoopHeaderPhiInputs()324 void HInstructionBuilder::SetLoopHeaderPhiInputs() {
325 for (size_t i = loop_headers_.size(); i > 0; --i) {
326 HBasicBlock* block = loop_headers_[i - 1];
327 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
328 HPhi* phi = it.Current()->AsPhi();
329 size_t vreg = phi->GetRegNumber();
330 for (HBasicBlock* predecessor : block->GetPredecessors()) {
331 HInstruction* value = ValueOfLocalAt(predecessor, vreg);
332 if (value == nullptr) {
333 // Vreg is undefined at this predecessor. Mark it dead and leave with
334 // fewer inputs than predecessors. SsaChecker will fail if not removed.
335 phi->SetDead();
336 break;
337 } else {
338 phi->AddInput(value);
339 }
340 }
341 }
342 }
343 }
344
IsBlockPopulated(HBasicBlock * block)345 static bool IsBlockPopulated(HBasicBlock* block) {
346 if (block->IsLoopHeader()) {
347 // Suspend checks were inserted into loop headers during building of dominator tree.
348 DCHECK(block->GetFirstInstruction()->IsSuspendCheck());
349 return block->GetFirstInstruction() != block->GetLastInstruction();
350 } else if (block->IsCatchBlock()) {
351 // Nops were inserted into the beginning of catch blocks.
352 DCHECK(block->GetFirstInstruction()->IsNop());
353 return block->GetFirstInstruction() != block->GetLastInstruction();
354 } else {
355 return !block->GetInstructions().IsEmpty();
356 }
357 }
358
// Main entry point: decodes the dex code item and populates every reachable
// block with HIR instructions, visiting blocks in reverse post order so that
// definitions are seen before uses. Loop-header phi inputs are fixed up at
// the end. Returns false if ProcessDexInstruction rejects an instruction.
bool HInstructionBuilder::Build() {
  DCHECK(code_item_accessor_.HasCodeItem());
  locals_for_.resize(
      graph_->GetBlocks().size(),
      ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));

  // Find locations where we want to generate extra stackmaps for native debugging.
  // This allows us to generate the info only at interesting points (for example,
  // at start of java statement) rather than before every dex instruction.
  const bool native_debuggable = code_generator_ != nullptr &&
      code_generator_->GetCompilerOptions().GetNativeDebuggable();
  ArenaBitVector* native_debug_info_locations = nullptr;
  if (native_debuggable) {
    native_debug_info_locations = FindNativeDebugInfoLocations();
  }

  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    current_block_ = block;
    uint32_t block_dex_pc = current_block_->GetDexPc();

    InitializeBlockLocals();

    if (current_block_->IsEntryBlock()) {
      // Entry block: parameters, initial suspend check, optional JIT entry
      // hook, then fall through to the first real block.
      InitializeParameters();
      AppendInstruction(new (allocator_) HSuspendCheck(0u));
      if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
        AppendInstruction(new (allocator_) HMethodEntryHook(0u));
      }
      AppendInstruction(new (allocator_) HGoto(0u));
      continue;
    } else if (current_block_->IsExitBlock()) {
      AppendInstruction(new (allocator_) HExit());
      continue;
    } else if (current_block_->IsLoopHeader()) {
      HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(current_block_->GetDexPc());
      current_block_->GetLoopInformation()->SetSuspendCheck(suspend_check);
      // This is slightly odd because the loop header might not be empty (TryBoundary).
      // But we're still creating the environment with locals from the top of the block.
      InsertInstructionAtTop(suspend_check);
    } else if (current_block_->IsCatchBlock()) {
      // We add an environment emitting instruction at the beginning of each catch block, in order
      // to support try catch inlining.
      // This is slightly odd because the catch block might not be empty (TryBoundary).
      InsertInstructionAtTop(new (allocator_) HNop(block_dex_pc, /* needs_environment= */ true));
    }

    if (block_dex_pc == kNoDexPc || current_block_ != block_builder_->GetBlockAt(block_dex_pc)) {
      // Synthetic block that does not need to be populated.
      DCHECK(IsBlockPopulated(current_block_));
      continue;
    }

    DCHECK(!IsBlockPopulated(current_block_));

    // Decode instructions until the end of this block (a branching instruction
    // clears `current_block_`, and a dex pc that starts another block ends it).
    for (const DexInstructionPcPair& pair : code_item_accessor_.InstructionsFrom(block_dex_pc)) {
      if (current_block_ == nullptr) {
        // The previous instruction ended this block.
        break;
      }

      const uint32_t dex_pc = pair.DexPc();
      if (dex_pc != block_dex_pc && FindBlockStartingAt(dex_pc) != nullptr) {
        // This dex_pc starts a new basic block.
        break;
      }

      if (current_block_->IsTryBlock() && IsThrowingDexInstruction(pair.Inst())) {
        // Record current vreg values into the catch phis of all handlers.
        PropagateLocalsToCatchBlocks();
      }

      if (native_debuggable && native_debug_info_locations->IsBitSet(dex_pc)) {
        AppendInstruction(new (allocator_) HNop(dex_pc, /* needs_environment= */ true));
      }

      // Note: There may be no Thread for gtests.
      DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
          << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
          << " " << pair.Inst().Name() << "@" << dex_pc;
      if (!ProcessDexInstruction(pair.Inst(), dex_pc)) {
        return false;
      }
      DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
          << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
          << " " << pair.Inst().Name() << "@" << dex_pc;
    }

    if (current_block_ != nullptr) {
      // Branching instructions clear current_block, so we know the last
      // instruction of the current block is not a branching instruction.
      // We add an unconditional Goto to the next block.
      DCHECK_EQ(current_block_->GetSuccessors().size(), 1u);
      AppendInstruction(new (allocator_) HGoto());
    }
  }

  SetLoopHeaderPhiInputs();

  return true;
}
458
// Builds the body for an intrinsic method that has no code item. The graph is
// expected to be entry block -> body block -> exit block; the body is filled
// with either the intrinsic's intermediate representation or, failing that, a
// (sharpened) HInvokeStaticOrDirect to the method itself, followed by the
// return. No suspend checks are added: intrinsics are supposed to be fast.
void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
  DCHECK(!code_item_accessor_.HasCodeItem());
  DCHECK(method->IsIntrinsic());
  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CHECK(!method->IsSignaturePolymorphic());
  }

  locals_for_.resize(
      graph_->GetBlocks().size(),
      ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));

  // Fill the entry block. Do not add suspend check, we do not want a suspend
  // check in intrinsics; intrinsic methods are supposed to be fast.
  current_block_ = graph_->GetEntryBlock();
  InitializeBlockLocals();
  InitializeParameters();
  if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
    AppendInstruction(new (allocator_) HMethodEntryHook(0u));
  }
  AppendInstruction(new (allocator_) HGoto(0u));

  // Fill the body.
  current_block_ = current_block_->GetSingleSuccessor();
  InitializeBlockLocals();
  DCHECK(!IsBlockPopulated(current_block_));

  // Add the intermediate representation, if available, or invoke instruction.
  size_t in_vregs = graph_->GetNumberOfInVRegs();
  // Arguments occupy the last `in_vregs` slots; wide arguments leave their
  // high-half slot null, so count only the non-null entries.
  size_t number_of_arguments =
      in_vregs - std::count(current_locals_->end() - in_vregs, current_locals_->end(), nullptr);
  uint32_t method_idx = dex_compilation_unit_->GetDexMethodIndex();
  const char* shorty = dex_file_->GetMethodShorty(method_idx);
  RangeInstructionOperands operands(graph_->GetNumberOfVRegs() - in_vregs, in_vregs);
  if (!BuildSimpleIntrinsic(method, kNoDexPc, operands, shorty)) {
    // Some intrinsics without intermediate representation still yield a leaf method,
    // so build the invoke. Use HInvokeStaticOrDirect even for methods that would
    // normally use an HInvokeVirtual (sharpen the call).
    MethodReference target_method(dex_file_, method_idx);
    HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
        MethodLoadKind::kRuntimeCall,
        CodePtrLocation::kCallArtMethod,
        /* method_load_data= */ 0u
    };
    InvokeType invoke_type = dex_compilation_unit_->IsStatic() ? kStatic : kDirect;
    HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
        allocator_,
        number_of_arguments,
        /* number_of_out_vregs= */ in_vregs,
        return_type_,
        kNoDexPc,
        target_method,
        method,
        dispatch_info,
        invoke_type,
        target_method,
        HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
        !graph_->IsDebuggable());
    HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
  }

  // Add the return instruction, preceded by the JIT exit hook when debuggable.
  if (return_type_ == DataType::Type::kVoid) {
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), kNoDexPc));
    }
    AppendInstruction(new (allocator_) HReturnVoid());
  } else {
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      AppendInstruction(new (allocator_) HMethodExitHook(latest_result_, kNoDexPc));
    }
    AppendInstruction(new (allocator_) HReturn(latest_result_));
  }

  // Fill the exit block.
  DCHECK_EQ(current_block_->GetSingleSuccessor(), graph_->GetExitBlock());
  current_block_ = graph_->GetExitBlock();
  InitializeBlockLocals();
  AppendInstruction(new (allocator_) HExit());
}
539
// Computes the set of dex pcs where extra stackmaps should be generated for
// native debugging: the start of each java statement (from the debug position
// table), with a tweak for move-exception which must stay the first
// instruction of its block.
ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() {
  ArenaBitVector* locations = ArenaBitVector::Create(local_allocator_,
                                                     code_item_accessor_.InsnsSizeInCodeUnits(),
                                                     /* expandable= */ false,
                                                     kArenaAllocGraphBuilder);
  // The visitor gets called when the line number changes.
  // In other words, it marks the start of new java statement.
  code_item_accessor_.DecodeDebugPositionInfo([&](const DexFile::PositionInfo& entry) {
    locations->SetBit(entry.address_);
    return false;  // Keep iterating over all position entries.
  });
  // Instruction-specific tweaks.
  for (const DexInstructionPcPair& inst : code_item_accessor_) {
    switch (inst->Opcode()) {
      case Instruction::MOVE_EXCEPTION: {
        // Stop in native debugger after the exception has been moved.
        // The compiler also expects the move at the start of basic block so
        // we do not want to interfere by inserting native-debug-info before it.
        locations->ClearBit(inst.DexPc());
        DexInstructionIterator next = std::next(DexInstructionIterator(inst));
        DCHECK(next.DexPc() != inst.DexPc());
        if (next != code_item_accessor_.end()) {
          locations->SetBit(next.DexPc());
        }
        break;
      }
      default:
        break;
    }
  }
  return locations;
}
572
LoadLocal(uint32_t reg_number,DataType::Type type) const573 HInstruction* HInstructionBuilder::LoadLocal(uint32_t reg_number, DataType::Type type) const {
574 HInstruction* value = (*current_locals_)[reg_number];
575 DCHECK(value != nullptr);
576
577 // If the operation requests a specific type, we make sure its input is of that type.
578 if (type != value->GetType()) {
579 if (DataType::IsFloatingPointType(type)) {
580 value = ssa_builder_->GetFloatOrDoubleEquivalent(value, type);
581 } else if (type == DataType::Type::kReference) {
582 value = ssa_builder_->GetReferenceTypeEquivalent(value);
583 }
584 DCHECK(value != nullptr);
585 }
586
587 return value;
588 }
589
UpdateLocal(uint32_t reg_number,HInstruction * stored_value)590 void HInstructionBuilder::UpdateLocal(uint32_t reg_number, HInstruction* stored_value) {
591 DataType::Type stored_type = stored_value->GetType();
592 DCHECK_NE(stored_type, DataType::Type::kVoid);
593
594 // Storing into vreg `reg_number` may implicitly invalidate the surrounding
595 // registers. Consider the following cases:
596 // (1) Storing a wide value must overwrite previous values in both `reg_number`
597 // and `reg_number+1`. We store `nullptr` in `reg_number+1`.
598 // (2) If vreg `reg_number-1` holds a wide value, writing into `reg_number`
599 // must invalidate it. We store `nullptr` in `reg_number-1`.
600 // Consequently, storing a wide value into the high vreg of another wide value
601 // will invalidate both `reg_number-1` and `reg_number+1`.
602
603 if (reg_number != 0) {
604 HInstruction* local_low = (*current_locals_)[reg_number - 1];
605 if (local_low != nullptr && DataType::Is64BitType(local_low->GetType())) {
606 // The vreg we are storing into was previously the high vreg of a pair.
607 // We need to invalidate its low vreg.
608 DCHECK((*current_locals_)[reg_number] == nullptr);
609 (*current_locals_)[reg_number - 1] = nullptr;
610 }
611 }
612
613 (*current_locals_)[reg_number] = stored_value;
614 if (DataType::Is64BitType(stored_type)) {
615 // We are storing a pair. Invalidate the instruction in the high vreg.
616 (*current_locals_)[reg_number + 1] = nullptr;
617 }
618 }
619
// Creates HParameterValue instructions for the implicit 'this' (for instance
// methods) and each declared parameter, and stores them into the locals that
// the dex code uses to reference them (parameters live in the highest vregs,
// starting at GetNumberOfLocalVRegs()).
void HInstructionBuilder::InitializeParameters() {
  DCHECK(current_block_->IsEntryBlock());

  // outer_compilation_unit_ is null only when unit testing.
  if (outer_compilation_unit_ == nullptr) {
    return;
  }

  const char* shorty = dex_compilation_unit_->GetShorty();
  uint16_t number_of_parameters = graph_->GetNumberOfInVRegs();
  uint16_t locals_index = graph_->GetNumberOfLocalVRegs();
  uint16_t parameter_index = 0;

  const dex::MethodId& referrer_method_id =
      dex_file_->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
  if (!dex_compilation_unit_->IsStatic()) {
    // Add the implicit 'this' argument, not expressed in the signature.
    HParameterValue* parameter = new (allocator_) HParameterValue(*dex_file_,
                                                                  referrer_method_id.class_idx_,
                                                                  parameter_index++,
                                                                  DataType::Type::kReference,
                                                                  /* is_this= */ true);
    AppendInstruction(parameter);
    UpdateLocal(locals_index++, parameter);
    number_of_parameters--;
    current_this_parameter_ = parameter;
  } else {
    DCHECK(current_this_parameter_ == nullptr);
  }

  // NOTE(review): `arg_types` is presumably null for a no-argument method;
  // the loop below is then skipped because `number_of_parameters` is 0 —
  // confirm against DexFile::GetProtoParameters.
  const dex::ProtoId& proto = dex_file_->GetMethodPrototype(referrer_method_id);
  const dex::TypeList* arg_types = dex_file_->GetProtoParameters(proto);
  // `shorty_pos` starts at 1 to skip the return-type character of the shorty.
  for (int i = 0, shorty_pos = 1; i < number_of_parameters; i++) {
    HParameterValue* parameter = new (allocator_) HParameterValue(
        *dex_file_,
        arg_types->GetTypeItem(shorty_pos - 1).type_idx_,
        parameter_index++,
        DataType::FromShorty(shorty[shorty_pos]),
        /* is_this= */ false);
    ++shorty_pos;
    AppendInstruction(parameter);
    // Store the parameter value in the local that the dex code will use
    // to reference that parameter.
    UpdateLocal(locals_index++, parameter);
    if (DataType::Is64BitType(parameter->GetType())) {
      // Wide parameters occupy two vregs (and two in-vreg slots).
      i++;
      locals_index++;
      parameter_index++;
    }
  }
}
671
// Builds a conditional branch from an if / if-zero dex instruction.
// `T` is the comparison instruction type (HEqual, HLessThan, ...);
// `kCompareWithZero` selects the 21t form (compare a vreg against zero)
// versus the 22t form (compare two vregs).
template<typename T, bool kCompareWithZero>
void HInstructionBuilder::If_21_22t(const Instruction& instruction, uint32_t dex_pc) {
  DCHECK_EQ(kCompareWithZero ? Instruction::Format::k21t : Instruction::Format::k22t,
            Instruction::FormatOf(instruction.Opcode()));
  HInstruction* value = LoadLocal(
      kCompareWithZero ? instruction.VRegA_21t() : instruction.VRegA_22t(),
      DataType::Type::kInt32);
  T* comparison = nullptr;
  if (kCompareWithZero) {
    comparison = new (allocator_) T(value, graph_->GetIntConstant(0), dex_pc);
  } else {
    HInstruction* second = LoadLocal(instruction.VRegB_22t(), DataType::Type::kInt32);
    comparison = new (allocator_) T(value, second, dex_pc);
  }
  AppendInstruction(comparison);
  HIf* if_instr = new (allocator_) HIf(comparison, dex_pc);

  // When branch profiling data is available (and we are not compiling
  // baseline), seed the HIf with the observed taken/not-taken counts.
  ProfilingInfo* info = graph_->GetProfilingInfo();
  if (info != nullptr && !graph_->IsCompilingBaseline()) {
    BranchCache* cache = info->GetBranchCache(dex_pc);
    if (cache != nullptr) {
      if_instr->SetTrueCount(cache->GetTrue());
      if_instr->SetFalseCount(cache->GetFalse());
    }
  }

  // Append after setting true/false count, so that the builder knows if the
  // instruction needs an environment.
  AppendInstruction(if_instr);
  // A branch ends the current block; the decode loop in Build() stops here.
  current_block_ = nullptr;
}
703
704 template<typename T>
Unop_12x(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)705 void HInstructionBuilder::Unop_12x(const Instruction& instruction,
706 DataType::Type type,
707 uint32_t dex_pc) {
708 HInstruction* first = LoadLocal(instruction.VRegB_12x(), type);
709 AppendInstruction(new (allocator_) T(type, first, dex_pc));
710 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
711 }
712
Conversion_12x(const Instruction & instruction,DataType::Type input_type,DataType::Type result_type,uint32_t dex_pc)713 void HInstructionBuilder::Conversion_12x(const Instruction& instruction,
714 DataType::Type input_type,
715 DataType::Type result_type,
716 uint32_t dex_pc) {
717 HInstruction* first = LoadLocal(instruction.VRegB_12x(), input_type);
718 AppendInstruction(new (allocator_) HTypeConversion(result_type, first, dex_pc));
719 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
720 }
721
722 template<typename T>
Binop_23x(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)723 void HInstructionBuilder::Binop_23x(const Instruction& instruction,
724 DataType::Type type,
725 uint32_t dex_pc) {
726 HInstruction* first = LoadLocal(instruction.VRegB_23x(), type);
727 HInstruction* second = LoadLocal(instruction.VRegC_23x(), type);
728 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
729 UpdateLocal(instruction.VRegA_23x(), current_block_->GetLastInstruction());
730 }
731
732 template<typename T>
Binop_23x_shift(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)733 void HInstructionBuilder::Binop_23x_shift(const Instruction& instruction,
734 DataType::Type type,
735 uint32_t dex_pc) {
736 HInstruction* first = LoadLocal(instruction.VRegB_23x(), type);
737 HInstruction* second = LoadLocal(instruction.VRegC_23x(), DataType::Type::kInt32);
738 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
739 UpdateLocal(instruction.VRegA_23x(), current_block_->GetLastInstruction());
740 }
741
Binop_23x_cmp(const Instruction & instruction,DataType::Type type,ComparisonBias bias,uint32_t dex_pc)742 void HInstructionBuilder::Binop_23x_cmp(const Instruction& instruction,
743 DataType::Type type,
744 ComparisonBias bias,
745 uint32_t dex_pc) {
746 HInstruction* first = LoadLocal(instruction.VRegB_23x(), type);
747 HInstruction* second = LoadLocal(instruction.VRegC_23x(), type);
748 AppendInstruction(new (allocator_) HCompare(type, first, second, bias, dex_pc));
749 UpdateLocal(instruction.VRegA_23x(), current_block_->GetLastInstruction());
750 }
751
752 template<typename T>
Binop_12x_shift(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)753 void HInstructionBuilder::Binop_12x_shift(const Instruction& instruction,
754 DataType::Type type,
755 uint32_t dex_pc) {
756 HInstruction* first = LoadLocal(instruction.VRegA_12x(), type);
757 HInstruction* second = LoadLocal(instruction.VRegB_12x(), DataType::Type::kInt32);
758 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
759 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
760 }
761
762 template<typename T>
Binop_12x(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)763 void HInstructionBuilder::Binop_12x(const Instruction& instruction,
764 DataType::Type type,
765 uint32_t dex_pc) {
766 HInstruction* first = LoadLocal(instruction.VRegA_12x(), type);
767 HInstruction* second = LoadLocal(instruction.VRegB_12x(), type);
768 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
769 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
770 }
771
772 template<typename T>
Binop_22s(const Instruction & instruction,bool reverse,uint32_t dex_pc)773 void HInstructionBuilder::Binop_22s(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
774 HInstruction* first = LoadLocal(instruction.VRegB_22s(), DataType::Type::kInt32);
775 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22s());
776 if (reverse) {
777 std::swap(first, second);
778 }
779 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
780 UpdateLocal(instruction.VRegA_22s(), current_block_->GetLastInstruction());
781 }
782
783 template<typename T>
Binop_22b(const Instruction & instruction,bool reverse,uint32_t dex_pc)784 void HInstructionBuilder::Binop_22b(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
785 HInstruction* first = LoadLocal(instruction.VRegB_22b(), DataType::Type::kInt32);
786 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22b());
787 if (reverse) {
788 std::swap(first, second);
789 }
790 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
791 UpdateLocal(instruction.VRegA_22b(), current_block_->GetLastInstruction());
792 }
793
794 // Does the method being compiled need any constructor barriers being inserted?
795 // (Always 'false' for methods that aren't <init>.)
RequiresConstructorBarrier(const DexCompilationUnit * cu)796 static bool RequiresConstructorBarrier(const DexCompilationUnit* cu) {
797 // Can be null in unit tests only.
798 if (UNLIKELY(cu == nullptr)) {
799 return false;
800 }
801
802 // Constructor barriers are applicable only for <init> methods.
803 if (LIKELY(!cu->IsConstructor() || cu->IsStatic())) {
804 return false;
805 }
806
807 return cu->RequiresConstructorBarrier();
808 }
809
810 // Returns true if `block` has only one successor which starts at the next
811 // dex_pc after `instruction` at `dex_pc`.
IsFallthroughInstruction(const Instruction & instruction,uint32_t dex_pc,HBasicBlock * block)812 static bool IsFallthroughInstruction(const Instruction& instruction,
813 uint32_t dex_pc,
814 HBasicBlock* block) {
815 uint32_t next_dex_pc = dex_pc + instruction.SizeInCodeUnits();
816 return block->GetSingleSuccessor()->GetDexPc() == next_dex_pc;
817 }
818
void HInstructionBuilder::BuildSwitch(const Instruction& instruction, uint32_t dex_pc) {
  // Builds HIR for a packed-switch/sparse-switch (31t format) on vAA.
  HInstruction* value = LoadLocal(instruction.VRegA_31t(), DataType::Type::kInt32);
  DexSwitchTable table(instruction, dex_pc);

  if (table.GetNumEntries() == 0) {
    // Empty Switch. Code falls through to the next block.
    DCHECK(IsFallthroughInstruction(instruction, dex_pc, current_block_));
    AppendInstruction(new (allocator_) HGoto(dex_pc));
  } else if (table.ShouldBuildDecisionTree()) {
    // Lower the switch to a chain of HEqual + HIf, one comparison per case.
    for (DexSwitchTableIterator it(table); !it.Done(); it.Advance()) {
      HInstruction* case_value = graph_->GetIntConstant(it.CurrentKey());
      HEqual* comparison = new (allocator_) HEqual(value, case_value, dex_pc);
      AppendInstruction(comparison);
      AppendInstruction(new (allocator_) HIf(comparison, dex_pc));

      if (!it.IsLast()) {
        // Continue emitting the remaining comparisons in the block that
        // handles the "not equal" fall-through of the current case.
        current_block_ = FindBlockStartingAt(it.GetDexPcForCurrentIndex());
      }
    }
  } else {
    // Large/dense tables are kept as a single HPackedSwitch instruction.
    AppendInstruction(
        new (allocator_) HPackedSwitch(table.GetEntryAt(0), table.GetNumEntries(), value, dex_pc));
  }

  // A switch always terminates the current block.
  current_block_ = nullptr;
}
845
template <DataType::Type type>
ALWAYS_INLINE inline void HInstructionBuilder::BuildMove(uint32_t dest_reg, uint32_t src_reg) {
  // Copies the value of `src_reg` into `dest_reg` for a move/move-object.
  // The verifier has no notion of a null type, so a move-object of constant 0
  // will lead to the same constant 0 in the destination register. To mimic
  // this behavior, we just pretend we haven't seen a type change (int to reference)
  // for the 0 constant and phis. We rely on our type propagation to eventually get the
  // types correct.
  constexpr bool is_reference = type == DataType::Type::kReference;
  HInstruction* value = is_reference ? (*current_locals_)[src_reg] : /* not needed */ nullptr;
  if (is_reference && value->IsIntConstant()) {
    // The only int constant the verifier allows in an object register is 0 (null).
    DCHECK_EQ(value->AsIntConstant()->GetValue(), 0);
  } else if (is_reference && value->IsPhi()) {
    // A phi may still carry an int type until type propagation fixes it up.
    DCHECK(value->GetType() == DataType::Type::kInt32 ||
           value->GetType() == DataType::Type::kReference);
  } else {
    // Normal case: reload the local with the requested type.
    value = LoadLocal(src_reg, type);
  }
  UpdateLocal(dest_reg, value);
}
865
void HInstructionBuilder::BuildReturn(const Instruction& instruction,
                                      DataType::Type type,
                                      uint32_t dex_pc) {
  // Builds HIR for return/return-void, inserting a constructor fence and/or
  // JIT method-exit hooks where required.
  if (type == DataType::Type::kVoid) {
    // Only <init> (which is a return-void) could possibly have a constructor fence.
    // This may insert additional redundant constructor fences from the super constructors.
    // TODO: remove redundant constructor fences (b/36656456).
    if (RequiresConstructorBarrier(dex_compilation_unit_)) {
      // Compiling instance constructor.
      DCHECK_STREQ("<init>", graph_->GetMethodName());

      // The fence targets the object under construction, i.e. `this`.
      HInstruction* fence_target = current_this_parameter_;
      DCHECK(fence_target != nullptr);

      AppendInstruction(new (allocator_) HConstructorFence(fence_target, dex_pc, allocator_));
      MaybeRecordStat(
          compilation_stats_,
          MethodCompilationStat::kConstructorFenceGeneratedFinal);
    }
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      // Return value is not used for void functions. We pass NullConstant to
      // avoid special cases when generating code.
      AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), dex_pc));
    }
    AppendInstruction(new (allocator_) HReturnVoid(dex_pc));
  } else {
    DCHECK(!RequiresConstructorBarrier(dex_compilation_unit_));
    HInstruction* value = LoadLocal(instruction.VRegA_11x(), type);
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      // Debuggable JIT code observes the returned value in the exit hook.
      AppendInstruction(new (allocator_) HMethodExitHook(value, dex_pc));
    }
    AppendInstruction(new (allocator_) HReturn(value, dex_pc));
  }
  // A return always terminates the current block.
  current_block_ = nullptr;
}
901
GetInvokeTypeFromOpCode(Instruction::Code opcode)902 static InvokeType GetInvokeTypeFromOpCode(Instruction::Code opcode) {
903 switch (opcode) {
904 case Instruction::INVOKE_STATIC:
905 case Instruction::INVOKE_STATIC_RANGE:
906 return kStatic;
907 case Instruction::INVOKE_DIRECT:
908 case Instruction::INVOKE_DIRECT_RANGE:
909 return kDirect;
910 case Instruction::INVOKE_VIRTUAL:
911 case Instruction::INVOKE_VIRTUAL_RANGE:
912 return kVirtual;
913 case Instruction::INVOKE_INTERFACE:
914 case Instruction::INVOKE_INTERFACE_RANGE:
915 return kInterface;
916 case Instruction::INVOKE_SUPER_RANGE:
917 case Instruction::INVOKE_SUPER:
918 return kSuper;
919 default:
920 LOG(FATAL) << "Unexpected invoke opcode: " << opcode;
921 UNREACHABLE();
922 }
923 }
924
// Try to resolve a method using the class linker. Return null if a method could
// not be resolved or the resolved method cannot be used for some reason.
// Also retrieve method data needed for creating the invoke intermediate
// representation while we hold the mutator lock here.
static ArtMethod* ResolveMethod(uint16_t method_idx,
                                ArtMethod* referrer,
                                const DexCompilationUnit& dex_compilation_unit,
                                /*inout*/InvokeType* invoke_type,
                                /*out*/MethodReference* resolved_method_info,
                                /*out*/uint16_t* imt_or_vtable_index,
                                /*out*/bool* is_string_constructor) {
  // All class linker and runtime-object accesses below require the mutator lock.
  ScopedObjectAccess soa(Thread::Current());

  ClassLinker* class_linker = dex_compilation_unit.GetClassLinker();
  Handle<mirror::ClassLoader> class_loader = dex_compilation_unit.GetClassLoader();

  ArtMethod* resolved_method = nullptr;
  if (referrer == nullptr) {
    // The referrer may be unresolved for AOT if we're compiling a class that cannot be
    // resolved because, for example, we don't find a superclass in the classpath.
    resolved_method = class_linker->ResolveMethodId(
        method_idx, dex_compilation_unit.GetDexCache(), class_loader);
  } else if (referrer->SkipAccessChecks()) {
    // Access checks were already validated at verification time; fast path.
    resolved_method = class_linker->ResolveMethodId(method_idx, referrer);
  } else {
    // Resolve with the full invoke-kind and access checks.
    resolved_method = class_linker->ResolveMethodWithChecks(
        method_idx,
        referrer,
        *invoke_type);
  }

  if (UNLIKELY(resolved_method == nullptr)) {
    // Clean up any exception left by type resolution.
    soa.Self()->ClearException();
    return nullptr;
  }
  DCHECK(!soa.Self()->IsExceptionPending());

  if (referrer == nullptr) {
    // With an unresolved referrer we must redo the checks the class linker
    // could not perform: invoke-kind/class-kind mismatch and access checks.
    ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
        dex_compilation_unit.GetDexFile()->GetMethodId(method_idx).class_idx_,
        dex_compilation_unit.GetDexCache().Get(),
        class_loader.Get());
    DCHECK(referenced_class != nullptr);  // Must have been resolved when resolving the method.
    if (class_linker->ThrowIfInvokeClassMismatch(referenced_class,
                                                 *dex_compilation_unit.GetDexFile(),
                                                 *invoke_type)) {
      soa.Self()->ClearException();
      return nullptr;
    }
    // The class linker cannot check access without a referrer, so we have to do it.
    // Check if the declaring class or referencing class is accessible.
    SamePackageCompare same_package(dex_compilation_unit);
    ObjPtr<mirror::Class> declaring_class = resolved_method->GetDeclaringClass();
    bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
    if (!declaring_class_accessible) {
      // It is possible to access members from an inaccessible superclass
      // by referencing them through an accessible subclass.
      if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
        return nullptr;
      }
    }
    // Check whether the method itself is accessible.
    // Since the referrer is unresolved but the method is resolved, it cannot be
    // inside the same class, so a private method is known to be inaccessible.
    // And without a resolved referrer, we cannot check for protected member access
    // in superclass, so we handle only access to public member or within the package.
    if (resolved_method->IsPrivate() ||
        (!resolved_method->IsPublic() && !declaring_class_accessible)) {
      return nullptr;
    }

    // Reject e.g. invoke-virtual on a static method (ICCE at runtime).
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(*invoke_type))) {
      return nullptr;
    }
  }

  // We have to special case the invoke-super case, as ClassLinker::ResolveMethod does not.
  // We need to look at the referrer's super class vtable. We need to do this to know if we need to
  // make this an invoke-unresolved to handle cross-dex invokes or abstract super methods, both of
  // which require runtime handling.
  if (*invoke_type == kSuper) {
    if (referrer == nullptr) {
      // We could not determine the method's class we need to wait until runtime.
      DCHECK(Runtime::Current()->IsAotCompiler());
      return nullptr;
    }
    ArtMethod* actual_method = FindSuperMethodToCall</*access_check=*/true>(
        method_idx, resolved_method, referrer, soa.Self());
    if (actual_method == nullptr) {
      // Clean up any exception left by method resolution.
      soa.Self()->ClearException();
      return nullptr;
    }
    if (!actual_method->IsInvokable()) {
      // Fail if the actual method cannot be invoked. Otherwise, the runtime resolution stub
      // could resolve the callee to the wrong method.
      return nullptr;
    }
    // Call GetCanonicalMethod in case the resolved method is a copy: for super calls, the encoding
    // of ArtMethod in BSS relies on not having copies there.
    resolved_method = actual_method->GetCanonicalMethod(class_linker->GetImagePointerSize());
  }

  if (*invoke_type == kInterface) {
    if (resolved_method->GetDeclaringClass()->IsObjectClass()) {
      // If the resolved method is from j.l.Object, emit a virtual call instead.
      // The IMT conflict stub only handles interface methods.
      *invoke_type = kVirtual;
    } else {
      DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
    }
  }

  // Record the dispatch data needed by the IR while we still hold the lock.
  *resolved_method_info =
      MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
  if (*invoke_type == kVirtual) {
    // For HInvokeVirtual we need the vtable index.
    *imt_or_vtable_index = resolved_method->GetVtableIndex();
  } else if (*invoke_type == kInterface) {
    // For HInvokeInterface we need the IMT index.
    *imt_or_vtable_index = resolved_method->GetImtIndex();
    DCHECK_EQ(*imt_or_vtable_index, ImTable::GetImtIndex(resolved_method));
  }

  *is_string_constructor = resolved_method->IsStringConstructor();

  return resolved_method;
}
1054
IsSignaturePolymorphic(ArtMethod * method)1055 static bool IsSignaturePolymorphic(ArtMethod* method) {
1056 if (!method->IsIntrinsic()) {
1057 return false;
1058 }
1059 Intrinsics intrinsic = method->GetIntrinsic();
1060
1061 switch (intrinsic) {
1062 #define IS_POLYMOPHIC(Name, ...) \
1063 case Intrinsics::k ## Name:
1064 ART_SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(IS_POLYMOPHIC)
1065 #undef IS_POLYMOPHIC
1066 return true;
1067 default:
1068 return false;
1069 }
1070 }
1071
bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
                                      uint32_t dex_pc,
                                      uint32_t method_idx,
                                      const InstructionOperands& operands) {
  // Builds the HIR invoke node for an invoke-* instruction, resolving the
  // callee and choosing between HInvokeUnresolved, HInvokeStaticOrDirect,
  // HInvokeVirtual and HInvokeInterface. Returns the result of HandleInvoke /
  // HandleStringInit (or true when the call was replaced by a simple intrinsic).
  InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode());
  const char* shorty = dex_file_->GetMethodShorty(method_idx);
  DataType::Type return_type = DataType::FromShorty(shorty[0]);

  // Remove the return type from the 'proto'.
  size_t number_of_arguments = strlen(shorty) - 1;
  if (invoke_type != kStatic) {  // instance call
    // One extra argument for 'this'.
    number_of_arguments++;
  }

  // Resolve the callee and collect dispatch data (vtable/IMT index, etc.).
  MethodReference resolved_method_reference(nullptr, 0u);
  bool is_string_constructor = false;
  uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
  ArtMethod* resolved_method = ResolveMethod(method_idx,
                                             graph_->GetArtMethod(),
                                             *dex_compilation_unit_,
                                             &invoke_type,
                                             &resolved_method_reference,
                                             &imt_or_vtable_index,
                                             &is_string_constructor);

  MethodReference method_reference(&graph_->GetDexFile(), method_idx);

  // In the wild there are apps which have invoke-virtual targeting signature polymorphic methods
  // like MethodHandle.invokeExact. It never worked in the first place: such calls were dispatched
  // to the JNI implementation, which throws UOE.
  // Now, when a signature-polymorphic method is implemented as an intrinsic, compiler's attempt to
  // devirtualize such ill-formed virtual calls can lead to compiler crashes as an intrinsic
  // (like MethodHandle.invokeExact) might expect arguments to be set up in a different manner than
  // it's done for virtual calls.
  // Create HInvokeUnresolved to make sure that such invoke-virtual calls are not devirtualized
  // and are treated as native method calls.
  if (kIsDebugBuild && resolved_method != nullptr) {
    ScopedObjectAccess soa(Thread::Current());
    CHECK_EQ(IsSignaturePolymorphic(resolved_method), resolved_method->IsSignaturePolymorphic());
  }

  if (UNLIKELY(resolved_method == nullptr ||
               (invoke_type != kPolymorphic && IsSignaturePolymorphic(resolved_method)))) {
    DCHECK(!Thread::Current()->IsExceptionPending());
    if (resolved_method == nullptr) {
      MaybeRecordStat(compilation_stats_,
                      MethodCompilationStat::kUnresolvedMethod);
    }
    // Defer resolution (and the signature-polymorphic UOE path) to runtime.
    HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
                                                         number_of_arguments,
                                                         operands.GetNumberOfOperands(),
                                                         return_type,
                                                         dex_pc,
                                                         method_reference,
                                                         invoke_type);
    return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ true);
  }

  // Replace calls to String.<init> with StringFactory.
  if (is_string_constructor) {
    uint32_t string_init_entry_point = WellKnownClasses::StringInitToEntryPoint(resolved_method);
    HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
        MethodLoadKind::kStringInit,
        CodePtrLocation::kCallArtMethod,
        dchecked_integral_cast<uint64_t>(string_init_entry_point)
    };
    // We pass null for the resolved_method to ensure optimizations
    // don't rely on it.
    // Note: argument counts drop by one because the factory call has no `this`.
    HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
        allocator_,
        number_of_arguments - 1,
        operands.GetNumberOfOperands() - 1,
        /* return_type= */ DataType::Type::kReference,
        dex_pc,
        method_reference,
        /* resolved_method= */ nullptr,
        dispatch_info,
        invoke_type,
        resolved_method_reference,
        HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit,
        !graph_->IsDebuggable());
    return HandleStringInit(invoke, operands, shorty);
  }

  // Potential class initialization check, in the case of a static method call.
  HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement =
      HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
  HClinitCheck* clinit_check = nullptr;
  if (invoke_type == kStatic) {
    clinit_check = ProcessClinitCheckForInvoke(dex_pc, resolved_method, &clinit_check_requirement);
  }

  // Try to build an HIR replacement for the intrinsic.
  if (UNLIKELY(resolved_method->IsIntrinsic()) && !graph_->IsDebuggable()) {
    // All intrinsics are in the primary boot image, so their class can always be referenced
    // and we do not need to rely on the implicit class initialization check. The class should
    // be initialized but we do not require that here.
    DCHECK_NE(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
    if (BuildSimpleIntrinsic(resolved_method, dex_pc, operands, shorty)) {
      return true;
    }
  }

  HInvoke* invoke = nullptr;
  if (invoke_type == kDirect || invoke_type == kStatic || invoke_type == kSuper) {
    // For sharpening, we create another MethodReference, to account for the
    // kSuper case below where we cannot find a dex method index.
    bool has_method_id = true;
    if (invoke_type == kSuper) {
      uint32_t dex_method_index = method_reference.index;
      if (IsSameDexFile(*resolved_method_reference.dex_file,
                        *dex_compilation_unit_->GetDexFile())) {
        // Update the method index to the one resolved. Note that this may be a no-op if
        // we resolved to the method referenced by the instruction.
        dex_method_index = resolved_method_reference.index;
      } else {
        // Try to find a dex method index in this caller's dex file.
        ScopedObjectAccess soa(Thread::Current());
        dex_method_index = resolved_method->FindDexMethodIndexInOtherDexFile(
            *dex_compilation_unit_->GetDexFile(), method_idx);
      }
      if (dex_method_index == dex::kDexNoIndex) {
        has_method_id = false;
      } else {
        method_reference.index = dex_method_index;
      }
    }
    // Pick the best way to load and call the target method.
    HInvokeStaticOrDirect::DispatchInfo dispatch_info =
        HSharpening::SharpenLoadMethod(resolved_method,
                                       has_method_id,
                                       /* for_interface_call= */ false,
                                       code_generator_);
    if (dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
      graph_->SetHasDirectCriticalNativeCall(true);
    }
    invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
                                                    number_of_arguments,
                                                    operands.GetNumberOfOperands(),
                                                    return_type,
                                                    dex_pc,
                                                    method_reference,
                                                    resolved_method,
                                                    dispatch_info,
                                                    invoke_type,
                                                    resolved_method_reference,
                                                    clinit_check_requirement,
                                                    !graph_->IsDebuggable());
    if (clinit_check != nullptr) {
      // Add the class initialization check as last input of `invoke`.
      DCHECK_EQ(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit);
      size_t clinit_check_index = invoke->InputCount() - 1u;
      DCHECK(invoke->InputAt(clinit_check_index) == nullptr);
      invoke->SetArgumentAt(clinit_check_index, clinit_check);
    }
  } else if (invoke_type == kVirtual) {
    invoke = new (allocator_) HInvokeVirtual(allocator_,
                                             number_of_arguments,
                                             operands.GetNumberOfOperands(),
                                             return_type,
                                             dex_pc,
                                             method_reference,
                                             resolved_method,
                                             resolved_method_reference,
                                             /*vtable_index=*/ imt_or_vtable_index,
                                             !graph_->IsDebuggable());
  } else {
    DCHECK_EQ(invoke_type, kInterface);
    if (kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
    }
    // Interface calls can still be sharpened to a direct method load kind.
    MethodLoadKind load_kind = HSharpening::SharpenLoadMethod(
        resolved_method,
        /* has_method_id= */ true,
        /* for_interface_call= */ true,
        code_generator_)
            .method_load_kind;
    invoke = new (allocator_) HInvokeInterface(allocator_,
                                               number_of_arguments,
                                               operands.GetNumberOfOperands(),
                                               return_type,
                                               dex_pc,
                                               method_reference,
                                               resolved_method,
                                               resolved_method_reference,
                                               /*imt_index=*/ imt_or_vtable_index,
                                               load_kind,
                                               !graph_->IsDebuggable());
  }
  return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
}
1264
VarHandleAccessorNeedsReturnTypeCheck(HInvoke * invoke,DataType::Type return_type)1265 static bool VarHandleAccessorNeedsReturnTypeCheck(HInvoke* invoke, DataType::Type return_type) {
1266 mirror::VarHandle::AccessModeTemplate access_mode_template =
1267 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1268
1269 switch (access_mode_template) {
1270 case mirror::VarHandle::AccessModeTemplate::kGet:
1271 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate:
1272 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange:
1273 return return_type == DataType::Type::kReference;
1274 case mirror::VarHandle::AccessModeTemplate::kSet:
1275 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet:
1276 return false;
1277 }
1278 }
1279
// This function initializes `VarHandleOptimizations`, does a number of static checks and disables
// the intrinsic if some of the checks fail. This is necessary for the code generator to work (for
// both the baseline and the optimizing compiler).
static void DecideVarHandleIntrinsic(HInvoke* invoke) {
  // Only VarHandle accessor intrinsics are considered; everything else is skipped.
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleCompareAndExchange:
    case Intrinsics::kVarHandleCompareAndExchangeAcquire:
    case Intrinsics::kVarHandleCompareAndExchangeRelease:
    case Intrinsics::kVarHandleCompareAndSet:
    case Intrinsics::kVarHandleGet:
    case Intrinsics::kVarHandleGetAcquire:
    case Intrinsics::kVarHandleGetAndAdd:
    case Intrinsics::kVarHandleGetAndAddAcquire:
    case Intrinsics::kVarHandleGetAndAddRelease:
    case Intrinsics::kVarHandleGetAndBitwiseAnd:
    case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
    case Intrinsics::kVarHandleGetAndBitwiseOr:
    case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
    case Intrinsics::kVarHandleGetAndBitwiseXor:
    case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
    case Intrinsics::kVarHandleGetAndSet:
    case Intrinsics::kVarHandleGetAndSetAcquire:
    case Intrinsics::kVarHandleGetAndSetRelease:
    case Intrinsics::kVarHandleGetOpaque:
    case Intrinsics::kVarHandleGetVolatile:
    case Intrinsics::kVarHandleSet:
    case Intrinsics::kVarHandleSetOpaque:
    case Intrinsics::kVarHandleSetRelease:
    case Intrinsics::kVarHandleSetVolatile:
    case Intrinsics::kVarHandleWeakCompareAndSet:
    case Intrinsics::kVarHandleWeakCompareAndSetAcquire:
    case Intrinsics::kVarHandleWeakCompareAndSetPlain:
    case Intrinsics::kVarHandleWeakCompareAndSetRelease:
      break;
    default:
      return;  // Not a VarHandle intrinsic, skip.
  }

  DCHECK(invoke->IsInvokePolymorphic());
  VarHandleOptimizations optimizations(invoke);

  // Do only simple static checks here (those for which we have enough information). More complex
  // checks should be done in instruction simplifier, which runs after other optimization passes
  // that may provide useful information.

  size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
  if (expected_coordinates_count > 2u) {
    // More than (object, index) coordinates is not supported by the intrinsic.
    optimizations.SetDoNotIntrinsify();
    return;
  }
  if (expected_coordinates_count != 0u) {
    // Except for static fields (no coordinates), the first coordinate must be a reference.
    // Do not intrinsify if the reference is null as we would always go to slow path anyway.
    HInstruction* object = invoke->InputAt(1);
    if (object->GetType() != DataType::Type::kReference || object->IsNullConstant()) {
      optimizations.SetDoNotIntrinsify();
      return;
    }
  }
  if (expected_coordinates_count == 2u) {
    // For arrays and views, the second coordinate must be convertible to `int`.
    // In this context, `boolean` is not convertible but we have to look at the shorty
    // as compiler transformations can give the invoke a valid boolean input.
    DataType::Type index_type = GetDataTypeFromShorty(invoke, 2);
    if (index_type == DataType::Type::kBool ||
        DataType::Kind(index_type) != DataType::Type::kInt32) {
      optimizations.SetDoNotIntrinsify();
      return;
    }
  }

  // Validate that the call-site argument/return types are consistent with the
  // access mode template of the accessor.
  uint32_t number_of_arguments = invoke->GetNumberOfArguments();
  DataType::Type return_type = invoke->GetType();
  mirror::VarHandle::AccessModeTemplate access_mode_template =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
  switch (access_mode_template) {
    case mirror::VarHandle::AccessModeTemplate::kGet:
      // The return type should be the same as varType, so it shouldn't be void.
      if (return_type == DataType::Type::kVoid) {
        optimizations.SetDoNotIntrinsify();
        return;
      }
      break;
    case mirror::VarHandle::AccessModeTemplate::kSet:
      if (return_type != DataType::Type::kVoid) {
        optimizations.SetDoNotIntrinsify();
        return;
      }
      break;
    case mirror::VarHandle::AccessModeTemplate::kCompareAndSet: {
      if (return_type != DataType::Type::kBool) {
        optimizations.SetDoNotIntrinsify();
        return;
      }
      // The expected value and the new value must be of the same type.
      uint32_t expected_value_index = number_of_arguments - 2;
      uint32_t new_value_index = number_of_arguments - 1;
      DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
      DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
      if (expected_value_type != new_value_type) {
        optimizations.SetDoNotIntrinsify();
        return;
      }
      break;
    }
    case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange: {
      // The expected value, the new value and the returned value must all agree.
      uint32_t expected_value_index = number_of_arguments - 2;
      uint32_t new_value_index = number_of_arguments - 1;
      DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
      DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
      if (expected_value_type != new_value_type || return_type != expected_value_type) {
        optimizations.SetDoNotIntrinsify();
        return;
      }
      break;
    }
    case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate: {
      DataType::Type value_type = GetDataTypeFromShorty(invoke, number_of_arguments - 1);
      if (IsVarHandleGetAndAdd(invoke) &&
          (value_type == DataType::Type::kReference || value_type == DataType::Type::kBool)) {
        // We should only add numerical types.
        //
        // For byte array views floating-point types are not allowed, see javadoc comments for
        // java.lang.invoke.MethodHandles.byteArrayViewVarHandle(). But ART treats them as numeric
        // types in ByteArrayViewVarHandle::Access(). Consequently we do generate intrinsic code,
        // but it always fails access mode check at runtime.
        optimizations.SetDoNotIntrinsify();
        return;
      } else if (IsVarHandleGetAndBitwiseOp(invoke) && !DataType::IsIntegralType(value_type)) {
        // We can only apply operators to bitwise integral types.
        // Note that bitwise VarHandle operations accept a non-integral boolean type and
        // perform the appropriate logical operation. However, the result is the same as
        // using the bitwise operation on our boolean representation and this fits well
        // with DataType::IsIntegralType() treating the compiler type kBool as integral.
        optimizations.SetDoNotIntrinsify();
        return;
      }
      if (value_type != return_type && return_type != DataType::Type::kVoid) {
        optimizations.SetDoNotIntrinsify();
        return;
      }
      break;
    }
  }
}
1427
bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
                                                 uint32_t method_idx,
                                                 dex::ProtoIndex proto_idx,
                                                 const InstructionOperands& operands) {
  // Builds an HInvokePolymorphic for invoke-polymorphic (MethodHandle/VarHandle
  // accessors). Returns false if HandleInvoke fails.
  const char* shorty = dex_file_->GetShorty(proto_idx);
  DCHECK_EQ(1 + ArtMethod::NumArgRegisters(shorty), operands.GetNumberOfOperands());
  DataType::Type return_type = DataType::FromShorty(shorty[0]);
  size_t number_of_arguments = strlen(shorty);
  // We use ResolveMethod which is also used in BuildInvoke in order to
  // not duplicate code. As such, we need to provide is_string_constructor
  // even if we don't need it afterwards.
  InvokeType invoke_type = InvokeType::kPolymorphic;
  bool is_string_constructor = false;
  uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
  MethodReference resolved_method_reference(nullptr, 0u);
  ArtMethod* resolved_method = ResolveMethod(method_idx,
                                             graph_->GetArtMethod(),
                                             *dex_compilation_unit_,
                                             &invoke_type,
                                             &resolved_method_reference,
                                             &imt_or_vtable_index,
                                             &is_string_constructor);

  MethodReference method_reference(&graph_->GetDexFile(), method_idx);

  // MethodHandle.invokeExact intrinsic needs to check whether call-site matches with MethodHandle's
  // type. To do that, MethodType corresponding to the call-site is passed as an extra input.
  // Other invoke-polymorphic calls do not need it.
  // NOTE(review): `resolved_method` is dereferenced here without a null check,
  // while ResolveMethod can return null; presumably invoke-polymorphic targets
  // (MethodHandle/VarHandle methods from the boot image) always resolve — confirm.
  bool can_be_intrinsified =
      static_cast<Intrinsics>(resolved_method->GetIntrinsic()) ==
          Intrinsics::kMethodHandleInvokeExact;

  uint32_t number_of_other_inputs = can_be_intrinsified ? 1u : 0u;

  HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
                                                        number_of_arguments,
                                                        operands.GetNumberOfOperands(),
                                                        number_of_other_inputs,
                                                        return_type,
                                                        dex_pc,
                                                        method_reference,
                                                        resolved_method,
                                                        resolved_method_reference,
                                                        proto_idx);
  if (!HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false)) {
    return false;
  }

  DCHECK_EQ(invoke->AsInvokePolymorphic()->IsMethodHandleInvokeExact(), can_be_intrinsified);

  if (invoke->GetIntrinsic() != Intrinsics::kNone &&
      invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvoke &&
      invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvokeExact &&
      VarHandleAccessorNeedsReturnTypeCheck(invoke, return_type)) {
    // Type check is needed because VarHandle intrinsics do not type check the retrieved reference.
    ScopedObjectAccess soa(Thread::Current());
    ArtMethod* referrer = graph_->GetArtMethod();
    dex::TypeIndex return_type_index =
        referrer->GetDexFile()->GetProtoId(proto_idx).return_type_idx_;

    BuildTypeCheck(/* is_instance_of= */ false, invoke, return_type_index, dex_pc);
    latest_result_ = current_block_->GetLastInstruction();
  }

  // Run the static VarHandle checks and record them on the invoke.
  DecideVarHandleIntrinsic(invoke);

  return true;
}
1496
1497
// Builds an HInvokeCustom node for an invoke-custom/range dex instruction.
// The call-site index identifies the bootstrap method data; there is no
// resolved ArtMethod for such calls.
bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
                                            uint32_t call_site_idx,
                                            const InstructionOperands& operands) {
  dex::ProtoIndex proto_idx = dex_file_->GetProtoIndexForCallSite(call_site_idx);
  const char* shorty = dex_file_->GetShorty(proto_idx);
  DataType::Type return_type = DataType::FromShorty(shorty[0]);
  size_t number_of_arguments = strlen(shorty) - 1;  // Minus the return type.
  // HInvokeCustom takes a kDexNoIndex method reference.
  MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
  HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
                                                   number_of_arguments,
                                                   operands.GetNumberOfOperands(),
                                                   call_site_idx,
                                                   return_type,
                                                   dex_pc,
                                                   method_reference,
                                                   !graph_->IsDebuggable());
  return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
}
1517
// Builds an HNewInstance for a new-instance dex instruction, preceded by an
// HClinitCheck when the class is not provably initialized, and selects the
// most specific allocation entrypoint that is safe for the resolved class.
HNewInstance* HInstructionBuilder::BuildNewInstance(dex::TypeIndex type_index, uint32_t dex_pc) {
  ScopedObjectAccess soa(Thread::Current());

  HLoadClass* load_class = BuildLoadClass(type_index, dex_pc);

  HInstruction* cls = load_class;
  Handle<mirror::Class> klass = load_class->GetClass();

  if (!IsInitialized(klass.Get())) {
    // The class must be initialization-checked before the allocation.
    cls = new (allocator_) HClinitCheck(load_class, dex_pc);
    AppendInstruction(cls);
  }

  // Only the access check entrypoint handles the finalizable class case. If we
  // need access checks, then we haven't resolved the method and the class may
  // again be finalizable.
  QuickEntrypointEnum entrypoint = kQuickAllocObjectInitialized;
  if (load_class->NeedsAccessCheck() ||
      klass == nullptr ||  // Finalizable/instantiable is unknown.
      klass->IsFinalizable() ||
      klass.Get() == klass->GetClass() ||  // Classes cannot be allocated in code
      !klass->IsInstantiable()) {
    entrypoint = kQuickAllocObjectWithChecks;
  }
  // We will always be able to resolve the string class since it is in the BCP.
  if (!klass.IsNull() && klass->IsStringClass()) {
    entrypoint = kQuickAllocStringObject;
  }

  // Consider classes we haven't resolved as potentially finalizable.
  bool finalizable = (klass == nullptr) || klass->IsFinalizable();

  HNewInstance* new_instance = new (allocator_) HNewInstance(
      cls,
      dex_pc,
      type_index,
      *dex_compilation_unit_->GetDexFile(),
      finalizable,
      entrypoint);
  AppendInstruction(new_instance);

  return new_instance;
}
1561
// Emits an HConstructorFence after an allocation (HNewInstance/HNewArray) so
// that the object's default initialization is visible to other threads if the
// object escapes, per JLS 17.4.5. String allocations are deliberately skipped
// (see the special handling below).
void HInstructionBuilder::BuildConstructorFenceForAllocation(HInstruction* allocation) {
  DCHECK(allocation != nullptr &&
         (allocation->IsNewInstance() ||
          allocation->IsNewArray()));  // corresponding to "new" keyword in JLS.

  if (allocation->IsNewInstance()) {
    // STRING SPECIAL HANDLING:
    // -------------------------------
    // Strings have a real HNewInstance node but they end up always having 0 uses.
    // All uses of a String HNewInstance are always transformed to replace their input
    // of the HNewInstance with an input of the invoke to StringFactory.
    //
    // Do not emit an HConstructorFence here since it can inhibit some String new-instance
    // optimizations (to pass checker tests that rely on those optimizations).
    HNewInstance* new_inst = allocation->AsNewInstance();
    HLoadClass* load_class = new_inst->GetLoadClass();

    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> klass = load_class->GetClass();
    if (klass != nullptr && klass->IsStringClass()) {
      return;
      // Note: Do not use allocation->IsStringAlloc which requires
      // a valid ReferenceTypeInfo, but that doesn't get made until after reference type
      // propagation (and instruction builder is too early).
    }
    // (In terms of correctness, the StringFactory needs to provide its own
    // default initialization barrier, see below.)
  }

  // JLS 17.4.5 "Happens-before Order" describes:
  //
  // The default initialization of any object happens-before any other actions (other than
  // default-writes) of a program.
  //
  // In our implementation the default initialization of an object to type T means
  // setting all of its initial data (object[0..size)) to 0, and setting the
  // object's class header (i.e. object.getClass() == T.class).
  //
  // In practice this fence ensures that the writes to the object header
  // are visible to other threads if this object escapes the current thread.
  // (and in theory the 0-initializing, but that happens automatically
  // when new memory pages are mapped in by the OS).
  HConstructorFence* ctor_fence =
      new (allocator_) HConstructorFence(allocation, allocation->GetDexPc(), allocator_);
  AppendInstruction(ctor_fence);
  MaybeRecordStat(
      compilation_stats_,
      MethodCompilationStat::kConstructorFenceGeneratedNew);
}
1613
IsInImage(ObjPtr<mirror::Class> cls,const CompilerOptions & compiler_options)1614 static bool IsInImage(ObjPtr<mirror::Class> cls, const CompilerOptions& compiler_options)
1615 REQUIRES_SHARED(Locks::mutator_lock_) {
1616 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(cls)) {
1617 return true;
1618 }
1619 if (compiler_options.IsGeneratingImage()) {
1620 std::string temp;
1621 const char* descriptor = cls->GetDescriptor(&temp);
1622 return compiler_options.IsImageClass(descriptor);
1623 } else {
1624 return false;
1625 }
1626 }
1627
IsSubClass(ObjPtr<mirror::Class> to_test,ObjPtr<mirror::Class> super_class)1628 static bool IsSubClass(ObjPtr<mirror::Class> to_test, ObjPtr<mirror::Class> super_class)
1629 REQUIRES_SHARED(Locks::mutator_lock_) {
1630 return to_test != nullptr && !to_test->IsInterface() && to_test->IsSubClass(super_class);
1631 }
1632
// Returns whether class initialization of `klass` is trivial, i.e. both its
// encoded static field values and its <clinit> bytecode only initialize the
// class's own static fields with constants (primitives, null, j.l.String) or
// primitive arrays, so that running the initializer cannot execute arbitrary
// code (no class loading, no calls, no writes to other classes).
static bool HasTrivialClinit(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Check if the class has encoded fields that trigger bytecode execution.
  // (Encoded fields are just a different representation of <clinit>.)
  if (klass->HasStaticFields()) {
    DCHECK(klass->GetClassDef() != nullptr);
    EncodedStaticFieldValueIterator it(klass->GetDexFile(), *klass->GetClassDef());
    for (; it.HasNext(); it.Next()) {
      switch (it.GetValueType()) {
        case EncodedArrayValueIterator::ValueType::kBoolean:
        case EncodedArrayValueIterator::ValueType::kByte:
        case EncodedArrayValueIterator::ValueType::kShort:
        case EncodedArrayValueIterator::ValueType::kChar:
        case EncodedArrayValueIterator::ValueType::kInt:
        case EncodedArrayValueIterator::ValueType::kLong:
        case EncodedArrayValueIterator::ValueType::kFloat:
        case EncodedArrayValueIterator::ValueType::kDouble:
        case EncodedArrayValueIterator::ValueType::kNull:
        case EncodedArrayValueIterator::ValueType::kString:
          // Primitive, null or j.l.String initialization is permitted.
          break;
        case EncodedArrayValueIterator::ValueType::kType:
          // Type initialization can load classes and execute bytecode through a class loader
          // which can execute arbitrary bytecode. We do not optimize for known class loaders;
          // kType is rarely used (if ever).
          return false;
        default:
          // Other types in the encoded static field list are rejected by the DexFileVerifier.
          LOG(FATAL) << "Unexpected type " << it.GetValueType();
          UNREACHABLE();
      }
    }
  }
  // Check if the class has <clinit> that executes arbitrary code.
  // Initialization of static fields of the class itself with constants is allowed.
  ArtMethod* clinit = klass->FindClassInitializer(pointer_size);
  if (clinit != nullptr) {
    const DexFile& dex_file = *clinit->GetDexFile();
    CodeItemInstructionAccessor accessor(dex_file, clinit->GetCodeItem());
    // Scan every instruction; any opcode outside the allow-list below makes
    // the <clinit> non-trivial.
    for (DexInstructionPcPair it : accessor) {
      switch (it->Opcode()) {
        case Instruction::CONST_4:
        case Instruction::CONST_16:
        case Instruction::CONST:
        case Instruction::CONST_HIGH16:
        case Instruction::CONST_WIDE_16:
        case Instruction::CONST_WIDE_32:
        case Instruction::CONST_WIDE:
        case Instruction::CONST_WIDE_HIGH16:
        case Instruction::CONST_STRING:
        case Instruction::CONST_STRING_JUMBO:
          // Primitive, null or j.l.String initialization is permitted.
          break;
        case Instruction::RETURN_VOID:
          break;
        case Instruction::SPUT:
        case Instruction::SPUT_WIDE:
        case Instruction::SPUT_OBJECT:
        case Instruction::SPUT_BOOLEAN:
        case Instruction::SPUT_BYTE:
        case Instruction::SPUT_CHAR:
        case Instruction::SPUT_SHORT:
          // Only initialization of a static field of the same class is permitted.
          if (dex_file.GetFieldId(it->VRegB_21c()).class_idx_ != klass->GetDexTypeIndex()) {
            return false;
          }
          break;
        case Instruction::NEW_ARRAY:
          // Only primitive arrays are permitted.
          // (The descriptor's second character is the element type, e.g. "[I".)
          if (Primitive::GetType(dex_file.GetTypeDescriptor(dex_file.GetTypeId(
                  dex::TypeIndex(it->VRegC_22c())))[1]) == Primitive::kPrimNot) {
            return false;
          }
          break;
        case Instruction::APUT:
        case Instruction::APUT_WIDE:
        case Instruction::APUT_BOOLEAN:
        case Instruction::APUT_BYTE:
        case Instruction::APUT_CHAR:
        case Instruction::APUT_SHORT:
        case Instruction::FILL_ARRAY_DATA:
        case Instruction::NOP:
          // Allow initialization of primitive arrays (only constants can be stored).
          // Note: We expect NOPs used for fill-array-data-payload but accept all NOPs
          // (even unreferenced switch payloads if they make it through the verifier).
          break;
        default:
          return false;
      }
    }
  }
  return true;
}
1726
// Returns whether the full initialization of `cls` is trivial: every class in
// its superclass chain and every implemented interface with default methods
// has a trivial <clinit> (see HasTrivialClinit), skipping classes that are
// already initialized in the boot or app image.
static bool HasTrivialInitialization(ObjPtr<mirror::Class> cls,
                                     const CompilerOptions& compiler_options)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  PointerSize pointer_size = runtime->GetClassLinker()->GetImagePointerSize();

  // Check the superclass chain.
  for (ObjPtr<mirror::Class> klass = cls; klass != nullptr; klass = klass->GetSuperClass()) {
    if (klass->IsInitialized() && IsInImage(klass, compiler_options)) {
      break;  // `klass` and its superclasses are already initialized in the boot or app image.
    }
    if (!HasTrivialClinit(klass, pointer_size)) {
      return false;
    }
  }

  // Also check interfaces with default methods as they need to be initialized as well.
  ObjPtr<mirror::IfTable> iftable = cls->GetIfTable();
  DCHECK(iftable != nullptr);
  for (int32_t i = 0, count = iftable->Count(); i != count; ++i) {
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    if (!iface->HasDefaultMethods()) {
      continue;  // Initializing `cls` does not initialize this interface.
    }
    if (iface->IsInitialized() && IsInImage(iface, compiler_options)) {
      continue;  // This interface is already initialized in the boot or app image.
    }
    if (!HasTrivialClinit(iface, pointer_size)) {
      return false;
    }
  }
  return true;
}
1760
// Returns whether `cls` can be assumed initialized at the code location being
// built, allowing a ClinitCheck to be elided. The checks are, in order:
// already-initialized classes that cannot revert (image/JIT), static methods
// and constructors of `cls` itself, and subclass methods when `cls` has a
// provably trivial initialization.
bool HInstructionBuilder::IsInitialized(ObjPtr<mirror::Class> cls) const {
  if (cls == nullptr) {
    return false;
  }

  // Check if the class will be initialized at runtime.
  if (cls->IsInitialized()) {
    const CompilerOptions& compiler_options = code_generator_->GetCompilerOptions();
    if (compiler_options.IsAotCompiler()) {
      // Assume loaded only if klass is in the boot or app image.
      if (IsInImage(cls, compiler_options)) {
        return true;
      }
    } else {
      DCHECK(compiler_options.IsJitCompiler());
      if (Runtime::Current()->GetJit()->CanAssumeInitialized(
              cls,
              compiler_options.IsJitCompilerForSharedCode())) {
        // For JIT, the class cannot revert to an uninitialized state.
        return true;
      }
    }
  }

  // We can avoid the class initialization check for `cls` in static methods and constructors
  // in the very same class; invoking a static method involves a class initialization check
  // and so does the instance allocation that must be executed before invoking a constructor.
  // Other instance methods of the same class can run on an escaped instance
  // of an erroneous class. Even a superclass may need to be checked as the subclass
  // can be completely initialized while the superclass is initializing and the subclass
  // remains initialized when the superclass initializer throws afterwards. b/62478025
  // Note: The HClinitCheck+HInvokeStaticOrDirect merging can still apply.
  auto is_static_method_or_constructor_of_cls = [cls](const DexCompilationUnit& compilation_unit)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return (compilation_unit.GetAccessFlags() & (kAccStatic | kAccConstructor)) != 0u &&
           compilation_unit.GetCompilingClass().Get() == cls;
  };
  if (is_static_method_or_constructor_of_cls(*outer_compilation_unit_) ||
      // Check also the innermost method. Though excessive copies of ClinitCheck can be
      // eliminated by GVN, that happens only after the decision whether to inline the
      // graph or not and that may depend on the presence of the ClinitCheck.
      // TODO: We should walk over the entire inlined method chain, but we don't pass that
      // information to the builder.
      is_static_method_or_constructor_of_cls(*dex_compilation_unit_)) {
    return true;
  }

  // Otherwise, we may be able to avoid the check if `cls` is a superclass of a method being
  // compiled here (anywhere in the inlining chain) as the `cls` must have started initializing
  // before calling any `cls` or subclass methods. Static methods require a clinit check and
  // instance methods require an instance which cannot be created before doing a clinit check.
  // When a subclass of `cls` starts initializing, it starts initializing its superclass
  // chain up to `cls` without running any bytecode, i.e. without any opportunity for circular
  // initialization weirdness.
  //
  // If the initialization of `cls` is trivial (`cls` and its superclasses and superinterfaces
  // with default methods initialize only their own static fields using constant values), it must
  // complete, either successfully or by throwing and marking `cls` erroneous, without allocating
  // any instances of `cls` or subclasses (or any other class) and without calling any methods.
  // If it completes by throwing, no instances of `cls` shall be created and no subclass method
  // bytecode shall execute (see above), therefore the instruction we're building shall be
  // unreachable. By reaching the instruction, we know that `cls` was initialized successfully.
  //
  // TODO: We should walk over the entire inlined methods chain, but we don't pass that
  // information to the builder. (We could also check if we're guaranteed a non-null instance
  // of `cls` at this location but that's outside the scope of the instruction builder.)
  bool is_subclass = IsSubClass(outer_compilation_unit_->GetCompilingClass().Get(), cls);
  if (dex_compilation_unit_ != outer_compilation_unit_) {
    is_subclass = is_subclass ||
                  IsSubClass(dex_compilation_unit_->GetCompilingClass().Get(), cls);
  }
  if (is_subclass && HasTrivialInitialization(cls, code_generator_->GetCompilerOptions())) {
    return true;
  }

  return false;
}
1838
// Determines the class initialization check requirement for a static invoke:
// kNone if the declaring class is provably initialized here, kExplicit with a
// freshly appended HClinitCheck if we can build a load of the class, or
// kImplicit (the invoke performs the check itself) otherwise. Returns the
// HClinitCheck for the explicit case, or nullptr.
HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
    uint32_t dex_pc,
    ArtMethod* resolved_method,
    HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) {
  ScopedObjectAccess soa(Thread::Current());
  ObjPtr<mirror::Class> klass = resolved_method->GetDeclaringClass();

  HClinitCheck* clinit_check = nullptr;
  if (IsInitialized(klass)) {
    *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
  } else {
    Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
    HLoadClass* cls = BuildLoadClass(h_klass->GetDexTypeIndex(),
                                     h_klass->GetDexFile(),
                                     h_klass,
                                     dex_pc,
                                     /* needs_access_check= */ false);
    if (cls != nullptr) {
      *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
      clinit_check = new (allocator_) HClinitCheck(cls, dex_pc);
      AppendInstruction(clinit_check);
    } else {
      // Let the invoke handle this with an implicit class initialization check.
      *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit;
    }
  }
  return clinit_check;
}
1867
// Loads the dex registers given by `operands` into the inputs of `invoke`
// according to the `shorty` signature, handling the receiver per
// `receiver_arg` and wide (long/double) register pairs. Also wires up special
// extra inputs: the current method, the interface hidden argument, and the
// call-site MethodType for MethodHandle.invokeExact. Returns false when the
// operands do not match the signature (only logging for real HInvokes).
bool HInstructionBuilder::SetupInvokeArguments(HInstruction* invoke,
                                               const InstructionOperands& operands,
                                               const char* shorty,
                                               ReceiverArg receiver_arg) {
  // Note: The `invoke` can be an intrinsic replacement, so not necessarily HInvoke.
  // In that case, do not log errors, they shall be reported when we try to build the HInvoke.
  uint32_t shorty_index = 1;  // Skip the return type.
  const size_t number_of_operands = operands.GetNumberOfOperands();
  bool argument_length_error = false;

  size_t start_index = 0u;
  size_t argument_index = 0u;
  if (receiver_arg != ReceiverArg::kNone) {
    if (number_of_operands == 0u) {
      // An instance call must have at least the receiver operand.
      argument_length_error = true;
    } else {
      start_index = 1u;
      if (receiver_arg != ReceiverArg::kIgnored) {
        uint32_t obj_reg = operands.GetOperand(0u);
        HInstruction* arg = (receiver_arg == ReceiverArg::kPlainArg)
            ? LoadLocal(obj_reg, DataType::Type::kReference)
            : LoadNullCheckedLocal(obj_reg, invoke->GetDexPc());
        if (receiver_arg != ReceiverArg::kNullCheckedOnly) {
          invoke->SetRawInputAt(0u, arg);
          argument_index = 1u;
        }
      }
    }
  }

  for (size_t i = start_index; i < number_of_operands; ++i, ++argument_index) {
    // Make sure we don't go over the expected arguments or over the number of
    // dex registers given. If the instruction was seen as dead by the verifier,
    // it hasn't been properly checked.
    if (UNLIKELY(shorty[shorty_index] == 0)) {
      argument_length_error = true;
      break;
    }
    DataType::Type type = DataType::FromShorty(shorty[shorty_index++]);
    bool is_wide = (type == DataType::Type::kInt64) || (type == DataType::Type::kFloat64);
    if (is_wide && ((i + 1 == number_of_operands) ||
                    (operands.GetOperand(i) + 1 != operands.GetOperand(i + 1)))) {
      if (invoke->IsInvoke()) {
        // Longs and doubles should be in pairs, that is, sequential registers. The verifier should
        // reject any class where this is violated. However, the verifier only does these checks
        // on non trivially dead instructions, so we just bailout the compilation.
        VLOG(compiler) << "Did not compile "
                       << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
                       << " because of non-sequential dex register pair in wide argument";
        MaybeRecordStat(compilation_stats_,
                        MethodCompilationStat::kNotCompiledMalformedOpcode);
      }
      return false;
    }
    HInstruction* arg = LoadLocal(operands.GetOperand(i), type);
    DCHECK(invoke->InputAt(argument_index) == nullptr);
    invoke->SetRawInputAt(argument_index, arg);
    if (is_wide) {
      // A wide argument consumes two consecutive dex registers.
      ++i;
    }
  }

  // After consuming all operands, the shorty must be exhausted too.
  argument_length_error = argument_length_error || shorty[shorty_index] != 0;
  if (argument_length_error) {
    if (invoke->IsInvoke()) {
      VLOG(compiler) << "Did not compile "
                     << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
                     << " because of wrong number of arguments in invoke instruction";
      MaybeRecordStat(compilation_stats_,
                      MethodCompilationStat::kNotCompiledMalformedOpcode);
    }
    return false;
  }

  if (invoke->IsInvokeStaticOrDirect() &&
      HInvokeStaticOrDirect::NeedsCurrentMethodInput(
          invoke->AsInvokeStaticOrDirect()->GetDispatchInfo())) {
    DCHECK_EQ(argument_index, invoke->AsInvokeStaticOrDirect()->GetCurrentMethodIndex());
    DCHECK(invoke->InputAt(argument_index) == nullptr);
    invoke->SetRawInputAt(argument_index, graph_->GetCurrentMethod());
  }

  if (invoke->IsInvokeInterface() &&
      (invoke->AsInvokeInterface()->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive)) {
    invoke->SetRawInputAt(invoke->AsInvokeInterface()->GetNumberOfArguments() - 1,
                          graph_->GetCurrentMethod());
  }

  if (invoke->IsInvokePolymorphic()) {
    HInvokePolymorphic* invoke_polymorphic = invoke->AsInvokePolymorphic();

    // MethodHandle.invokeExact intrinsic expects MethodType corresponding to the call-site as an
    // extra input to determine whether to throw WrongMethodTypeException or execute target method.
    if (invoke_polymorphic->IsMethodHandleInvokeExact()) {
      HLoadMethodType* load_method_type =
          new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(),
                                           invoke_polymorphic->GetProtoIndex(),
                                           graph_->GetDexFile(),
                                           invoke_polymorphic->GetDexPc());
      HSharpening::ProcessLoadMethodType(load_method_type,
                                         code_generator_,
                                         *dex_compilation_unit_,
                                         graph_->GetHandleCache()->GetHandles());
      invoke->SetRawInputAt(invoke_polymorphic->GetNumberOfArguments(), load_method_type);
      AppendInstruction(load_method_type);
    }
  }

  return true;
}
1978
HandleInvoke(HInvoke * invoke,const InstructionOperands & operands,const char * shorty,bool is_unresolved)1979 bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
1980 const InstructionOperands& operands,
1981 const char* shorty,
1982 bool is_unresolved) {
1983 DCHECK_IMPLIES(invoke->IsInvokeStaticOrDirect(),
1984 !invoke->AsInvokeStaticOrDirect()->IsStringInit());
1985
1986 ReceiverArg receiver_arg = (invoke->GetInvokeType() == InvokeType::kStatic)
1987 ? ReceiverArg::kNone
1988 : (is_unresolved ? ReceiverArg::kPlainArg : ReceiverArg::kNullCheckedArg);
1989 if (!SetupInvokeArguments(invoke, operands, shorty, receiver_arg)) {
1990 return false;
1991 }
1992
1993 AppendInstruction(invoke);
1994 latest_result_ = invoke;
1995
1996 return true;
1997 }
1998
// Tries to replace a call to an intrinsic `method` with equivalent HIR
// (HRol/HRor, HCompare, HMin/HMax/HAbs, HArrayGet/HArrayLength for String,
// memory barriers, ...), avoiding the invoke entirely. Returns false when the
// intrinsic has no HIR replacement or the arguments are malformed, in which
// case the caller must build a regular invoke instead.
bool HInstructionBuilder::BuildSimpleIntrinsic(ArtMethod* method,
                                               uint32_t dex_pc,
                                               const InstructionOperands& operands,
                                               const char* shorty) {
  Intrinsics intrinsic = method->GetIntrinsic();
  DCHECK_NE(intrinsic, Intrinsics::kNone);
  constexpr DataType::Type kInt32 = DataType::Type::kInt32;
  constexpr DataType::Type kInt64 = DataType::Type::kInt64;
  constexpr DataType::Type kFloat32 = DataType::Type::kFloat32;
  constexpr DataType::Type kFloat64 = DataType::Type::kFloat64;
  ReceiverArg receiver_arg = method->IsStatic() ? ReceiverArg::kNone : ReceiverArg::kNullCheckedArg;
  HInstruction* instruction = nullptr;
  // First, create the replacement instruction with null inputs; the inputs are
  // filled in by SetupInvokeArguments() below.
  switch (intrinsic) {
    case Intrinsics::kIntegerRotateLeft:
      instruction = new (allocator_) HRol(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
      break;
    case Intrinsics::kIntegerRotateRight:
      instruction = new (allocator_) HRor(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
      break;
    case Intrinsics::kLongRotateLeft:
      instruction = new (allocator_) HRol(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
      break;
    case Intrinsics::kLongRotateRight:
      instruction = new (allocator_) HRor(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
      break;
    case Intrinsics::kIntegerCompare:
      instruction = new (allocator_) HCompare(
          kInt32, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
      break;
    case Intrinsics::kLongCompare:
      instruction = new (allocator_) HCompare(
          kInt64, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
      break;
    case Intrinsics::kIntegerSignum:
      // signum(x) == compare(x, 0).
      instruction = new (allocator_) HCompare(
          kInt32, /*first=*/ nullptr, graph_->GetIntConstant(0), ComparisonBias::kNoBias, dex_pc);
      break;
    case Intrinsics::kLongSignum:
      instruction = new (allocator_) HCompare(
          kInt64, /*first=*/ nullptr, graph_->GetLongConstant(0), ComparisonBias::kNoBias, dex_pc);
      break;
    case Intrinsics::kFloatIsNaN:
    case Intrinsics::kDoubleIsNaN: {
      // IsNaN(x) is the same as x != x.
      instruction = new (allocator_) HNotEqual(/*first=*/ nullptr, /*second=*/ nullptr, dex_pc);
      instruction->AsCondition()->SetBias(ComparisonBias::kLtBias);
      break;
    }
    case Intrinsics::kStringCharAt:
      // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
      instruction = new (allocator_) HArrayGet(/*array=*/ nullptr,
                                               /*index=*/ nullptr,
                                               DataType::Type::kUint16,
                                               SideEffects::None(),  // Strings are immutable.
                                               dex_pc,
                                               /*is_string_char_at=*/ true);
      break;
    case Intrinsics::kStringIsEmpty:
    case Intrinsics::kStringLength:
      // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
      // For String.isEmpty(), we add a comparison with 0 below.
      instruction =
          new (allocator_) HArrayLength(/*array=*/ nullptr, dex_pc, /* is_string_length= */ true);
      break;
    case Intrinsics::kUnsafeLoadFence:
    case Intrinsics::kJdkUnsafeLoadFence:
      // The Unsafe receiver is only null-checked; the barrier takes no inputs.
      receiver_arg = ReceiverArg::kNullCheckedOnly;
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
      break;
    case Intrinsics::kUnsafeStoreFence:
    case Intrinsics::kJdkUnsafeStoreFence:
      receiver_arg = ReceiverArg::kNullCheckedOnly;
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
      break;
    case Intrinsics::kUnsafeFullFence:
    case Intrinsics::kJdkUnsafeFullFence:
      receiver_arg = ReceiverArg::kNullCheckedOnly;
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
      break;
    case Intrinsics::kVarHandleFullFence:
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
      break;
    case Intrinsics::kVarHandleAcquireFence:
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
      break;
    case Intrinsics::kVarHandleReleaseFence:
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
      break;
    case Intrinsics::kVarHandleLoadLoadFence:
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
      break;
    case Intrinsics::kVarHandleStoreStoreFence:
      instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kStoreStore, dex_pc);
      break;
    case Intrinsics::kMathMinIntInt:
      instruction = new (allocator_) HMin(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMinLongLong:
      instruction = new (allocator_) HMin(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMinFloatFloat:
      instruction = new (allocator_) HMin(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMinDoubleDouble:
      instruction = new (allocator_) HMin(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMaxIntInt:
      instruction = new (allocator_) HMax(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMaxLongLong:
      instruction = new (allocator_) HMax(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMaxFloatFloat:
      instruction = new (allocator_) HMax(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathMaxDoubleDouble:
      instruction = new (allocator_) HMax(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathAbsInt:
      instruction = new (allocator_) HAbs(kInt32, /*input=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathAbsLong:
      instruction = new (allocator_) HAbs(kInt64, /*input=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathAbsFloat:
      instruction = new (allocator_) HAbs(kFloat32, /*input=*/ nullptr, dex_pc);
      break;
    case Intrinsics::kMathAbsDouble:
      instruction = new (allocator_) HAbs(kFloat64, /*input=*/ nullptr, dex_pc);
      break;
    default:
      // We do not have intermediate representation for other intrinsics.
      DCHECK(!IsIntrinsicWithSpecializedHir(intrinsic));
      return false;
  }
  DCHECK(instruction != nullptr);
  if (!SetupInvokeArguments(instruction, operands, shorty, receiver_arg)) {
    return false;
  }

  // Second pass: patch up intrinsics that need extra instructions around the
  // replacement now that the argument inputs are in place.
  switch (intrinsic) {
    case Intrinsics::kFloatIsNaN:
    case Intrinsics::kDoubleIsNaN:
      // Set the second input to be the same as first.
      DCHECK(instruction->IsNotEqual());
      DCHECK(instruction->InputAt(1u) == nullptr);
      instruction->SetRawInputAt(1u, instruction->InputAt(0u));
      break;
    case Intrinsics::kStringCharAt: {
      // Add bounds check.
      HInstruction* array = instruction->InputAt(0u);
      HInstruction* index = instruction->InputAt(1u);
      HInstruction* length =
          new (allocator_) HArrayLength(array, dex_pc, /*is_string_length=*/ true);
      AppendInstruction(length);
      HBoundsCheck* bounds_check =
          new (allocator_) HBoundsCheck(index, length, dex_pc, /*is_string_char_at=*/ true);
      AppendInstruction(bounds_check);
      graph_->SetHasBoundsChecks(true);
      instruction->SetRawInputAt(1u, bounds_check);
      break;
    }
    case Intrinsics::kStringIsEmpty: {
      // Compare the length with 0.
      DCHECK(instruction->IsArrayLength());
      AppendInstruction(instruction);
      HEqual* equal = new (allocator_) HEqual(instruction, graph_->GetIntConstant(0), dex_pc);
      instruction = equal;
      break;
    }
    default:
      break;
  }

  AppendInstruction(instruction);
  latest_result_ = instruction;

  return true;
}
2178
// Lowers a String.<init> call, which the compiler models as a StringFactory
// invocation: the factory's result replaces every alias of the String object
// that was pre-allocated by the preceding NewInstance. Returns false if the
// invoke arguments cannot be set up.
bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
                                           const InstructionOperands& operands,
                                           const char* shorty) {
  DCHECK(invoke->IsInvokeStaticOrDirect());
  DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit());

  // The receiver vreg is deliberately ignored here; it is handled below by
  // replacing its value with the invoke's result.
  if (!SetupInvokeArguments(invoke, operands, shorty, ReceiverArg::kIgnored)) {
    return false;
  }

  AppendInstruction(invoke);

  // This is a StringFactory call, not an actual String constructor. Its result
  // replaces the empty String pre-allocated by NewInstance.
  uint32_t orig_this_reg = operands.GetOperand(0);
  HInstruction* arg_this = LoadLocal(orig_this_reg, DataType::Type::kReference);

  // Replacing the NewInstance might render it redundant. Keep a list of these
  // to be visited once it is clear whether it has remaining uses.
  if (arg_this->IsNewInstance()) {
    ssa_builder_->AddUninitializedString(arg_this->AsNewInstance());
  } else {
    DCHECK(arg_this->IsPhi());
    // We can get a phi as input of a String.<init> if there is a loop between the
    // allocation and the String.<init> call. As we don't know which other phis might alias
    // with `arg_this`, we keep a record of those invocations so we can later replace
    // the allocation with the invocation.
    // Add the actual 'this' input so the analysis knows what is the allocation instruction.
    // The input will be removed during the analysis.
    invoke->AddInput(arg_this);
    ssa_builder_->AddUninitializedStringPhi(invoke);
  }
  // Walk over all vregs and replace any occurrence of `arg_this` with `invoke`.
  for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
    if ((*current_locals_)[vreg] == arg_this) {
      (*current_locals_)[vreg] = invoke;
    }
  }
  return true;
}
2219
GetFieldAccessType(const DexFile & dex_file,uint16_t field_index)2220 static DataType::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) {
2221 const dex::FieldId& field_id = dex_file.GetFieldId(field_index);
2222 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
2223 return DataType::FromShorty(type[0]);
2224 }
2225
// Lowers an iget/iput instruction (format 22c). Emits a resolved field access
// when the field can be resolved and passes access checks, otherwise an
// unresolved access that defers all checks to the runtime. Always returns true.
bool HInstructionBuilder::BuildInstanceFieldAccess(const Instruction& instruction,
                                                   uint32_t dex_pc,
                                                   bool is_put) {
  uint32_t source_or_dest_reg = instruction.VRegA_22c();
  uint32_t obj_reg = instruction.VRegB_22c();
  uint16_t field_index = instruction.VRegC_22c();

  ScopedObjectAccess soa(Thread::Current());
  ArtField* resolved_field = ResolveField(field_index, /* is_static= */ false, is_put);

  // Generate an explicit null check on the reference, unless the field access
  // is unresolved. In that case, we rely on the runtime to perform various
  // checks first, followed by a null check.
  HInstruction* object = (resolved_field == nullptr)
      ? LoadLocal(obj_reg, DataType::Type::kReference)
      : LoadNullCheckedLocal(obj_reg, dex_pc);

  DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
  if (is_put) {
    HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
    HInstruction* field_set = nullptr;
    if (resolved_field == nullptr) {
      MaybeRecordStat(compilation_stats_,
                      MethodCompilationStat::kUnresolvedField);
      field_set = new (allocator_) HUnresolvedInstanceFieldSet(object,
                                                               value,
                                                               field_type,
                                                               field_index,
                                                               dex_pc);
    } else {
      uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
      field_set = new (allocator_) HInstanceFieldSet(object,
                                                     value,
                                                     resolved_field,
                                                     field_type,
                                                     resolved_field->GetOffset(),
                                                     resolved_field->IsVolatile(),
                                                     field_index,
                                                     class_def_index,
                                                     *dex_file_,
                                                     dex_pc);
    }
    AppendInstruction(field_set);
  } else {
    HInstruction* field_get = nullptr;
    if (resolved_field == nullptr) {
      MaybeRecordStat(compilation_stats_,
                      MethodCompilationStat::kUnresolvedField);
      field_get = new (allocator_) HUnresolvedInstanceFieldGet(object,
                                                               field_type,
                                                               field_index,
                                                               dex_pc);
    } else {
      uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
      field_get = new (allocator_) HInstanceFieldGet(object,
                                                     resolved_field,
                                                     field_type,
                                                     resolved_field->GetOffset(),
                                                     resolved_field->IsVolatile(),
                                                     field_index,
                                                     class_def_index,
                                                     *dex_file_,
                                                     dex_pc);
    }
    AppendInstruction(field_get);
    // The loaded value becomes the new value of the destination vreg.
    UpdateLocal(source_or_dest_reg, field_get);
  }

  return true;
}
2296
BuildUnresolvedStaticFieldAccess(const Instruction & instruction,uint32_t dex_pc,bool is_put,DataType::Type field_type)2297 void HInstructionBuilder::BuildUnresolvedStaticFieldAccess(const Instruction& instruction,
2298 uint32_t dex_pc,
2299 bool is_put,
2300 DataType::Type field_type) {
2301 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2302 uint16_t field_index = instruction.VRegB_21c();
2303
2304 if (is_put) {
2305 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2306 AppendInstruction(
2307 new (allocator_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc));
2308 } else {
2309 AppendInstruction(new (allocator_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc));
2310 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2311 }
2312 }
2313
// Resolves `field_idx` and performs compile-time access checks. Returns null
// (after clearing any pending exception) when the field cannot be resolved,
// has a static/instance mismatch, is inaccessible from the compiling context,
// or — for puts — is an unassignable final field or has an unresolvable type.
ArtField* HInstructionBuilder::ResolveField(uint16_t field_idx, bool is_static, bool is_put) {
  ScopedObjectAccess soa(Thread::Current());

  ClassLinker* class_linker = dex_compilation_unit_->GetClassLinker();
  Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();

  ArtField* resolved_field = class_linker->ResolveFieldJLS(field_idx,
                                                           dex_compilation_unit_->GetDexCache(),
                                                           class_loader);
  DCHECK_EQ(resolved_field == nullptr, soa.Self()->IsExceptionPending())
      << "field="
      << ((resolved_field == nullptr) ? "null" : resolved_field->PrettyField())
      << ", exception="
      << (soa.Self()->IsExceptionPending() ? soa.Self()->GetException()->Dump() : "null");
  if (UNLIKELY(resolved_field == nullptr)) {
    // Clean up any exception left by field resolution.
    soa.Self()->ClearException();
    return nullptr;
  }

  // The verifier tolerates a static/instance mismatch at compile time; treat
  // it as unresolved so the runtime throws the appropriate error.
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    return nullptr;
  }

  // Check access.
  Handle<mirror::Class> compiling_class = dex_compilation_unit_->GetCompilingClass();
  if (compiling_class == nullptr) {
    // Check if the declaring class or referencing class is accessible.
    SamePackageCompare same_package(*dex_compilation_unit_);
    ObjPtr<mirror::Class> declaring_class = resolved_field->GetDeclaringClass();
    bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
    if (!declaring_class_accessible) {
      // It is possible to access members from an inaccessible superclass
      // by referencing them through an accessible subclass.
      ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
          dex_compilation_unit_->GetDexFile()->GetFieldId(field_idx).class_idx_,
          dex_compilation_unit_->GetDexCache().Get(),
          class_loader.Get());
      DCHECK(referenced_class != nullptr);  // Must have been resolved when resolving the field.
      if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
        return nullptr;
      }
    }
    // Check whether the field itself is accessible.
    // Since the referrer is unresolved but the field is resolved, it cannot be
    // inside the same class, so a private field is known to be inaccessible.
    // And without a resolved referrer, we cannot check for protected member access
    // in superclass, so we handle only access to public member or within the package.
    if (resolved_field->IsPrivate() ||
        (!resolved_field->IsPublic() && !declaring_class_accessible)) {
      return nullptr;
    }
  } else if (!compiling_class->CanAccessResolvedField(resolved_field->GetDeclaringClass(),
                                                      resolved_field,
                                                      dex_compilation_unit_->GetDexCache().Get(),
                                                      field_idx)) {
    return nullptr;
  }

  if (is_put) {
    if (resolved_field->IsFinal() &&
        (compiling_class.Get() != resolved_field->GetDeclaringClass())) {
      // Final fields can only be updated within their own class.
      // TODO: Only allow it in constructors. b/34966607.
      return nullptr;
    }

    // Note: We do not need to resolve the field type for `get` opcodes.
    // Keep the field in a reflective handle scope: ResolveType() can suspend
    // and the underlying ArtField could otherwise be moved.
    StackArtFieldHandleScope<1> rhs(soa.Self());
    ReflectiveHandle<ArtField> resolved_field_handle(rhs.NewHandle(resolved_field));
    if (resolved_field->ResolveType().IsNull()) {
      // ArtField::ResolveType() may fail as evidenced with a dexing bug (b/78788577).
      soa.Self()->ClearException();
      return nullptr;  // Failure
    }
    resolved_field = resolved_field_handle.Get();
  }

  return resolved_field;
}
2394
// Lowers an sget/sput instruction (format 21c). Falls back to an unresolved
// access when the field cannot be resolved or its declaring class cannot be
// referenced from the compiled code; otherwise emits a class load (plus a
// clinit check if needed) followed by the static field access.
void HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
                                                 uint32_t dex_pc,
                                                 bool is_put) {
  uint32_t source_or_dest_reg = instruction.VRegA_21c();
  uint16_t field_index = instruction.VRegB_21c();

  ScopedObjectAccess soa(Thread::Current());
  ArtField* resolved_field = ResolveField(field_index, /* is_static= */ true, is_put);

  if (resolved_field == nullptr) {
    MaybeRecordStat(compilation_stats_,
                    MethodCompilationStat::kUnresolvedField);
    DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
    BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
    return;
  }

  DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);

  // Keep the declaring class alive via the graph's handle cache.
  Handle<mirror::Class> klass =
      graph_->GetHandleCache()->NewHandle(resolved_field->GetDeclaringClass());
  HLoadClass* constant = BuildLoadClass(klass->GetDexTypeIndex(),
                                        klass->GetDexFile(),
                                        klass,
                                        dex_pc,
                                        /* needs_access_check= */ false);

  if (constant == nullptr) {
    // The class cannot be referenced from this compiled code. Generate
    // an unresolved access.
    MaybeRecordStat(compilation_stats_,
                    MethodCompilationStat::kUnresolvedFieldNotAFastAccess);
    BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
    return;
  }

  // Insert an explicit initialization check if the class may not be initialized
  // at this point; the field access then depends on the check.
  HInstruction* cls = constant;
  if (!IsInitialized(klass.Get())) {
    cls = new (allocator_) HClinitCheck(constant, dex_pc);
    AppendInstruction(cls);
  }

  uint16_t class_def_index = klass->GetDexClassDefIndex();
  if (is_put) {
    // We need to keep the class alive before loading the value.
    HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
    DCHECK_EQ(HPhi::ToPhiType(value->GetType()), HPhi::ToPhiType(field_type));
    AppendInstruction(new (allocator_) HStaticFieldSet(cls,
                                                       value,
                                                       resolved_field,
                                                       field_type,
                                                       resolved_field->GetOffset(),
                                                       resolved_field->IsVolatile(),
                                                       field_index,
                                                       class_def_index,
                                                       *dex_file_,
                                                       dex_pc));
  } else {
    AppendInstruction(new (allocator_) HStaticFieldGet(cls,
                                                       resolved_field,
                                                       field_type,
                                                       resolved_field->GetOffset(),
                                                       resolved_field->IsVolatile(),
                                                       field_index,
                                                       class_def_index,
                                                       *dex_file_,
                                                       dex_pc));
    UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
  }
}
2465
BuildCheckedDivRem(uint16_t out_vreg,uint16_t first_vreg,int64_t second_vreg_or_constant,uint32_t dex_pc,DataType::Type type,bool second_is_constant,bool is_div)2466 void HInstructionBuilder::BuildCheckedDivRem(uint16_t out_vreg,
2467 uint16_t first_vreg,
2468 int64_t second_vreg_or_constant,
2469 uint32_t dex_pc,
2470 DataType::Type type,
2471 bool second_is_constant,
2472 bool is_div) {
2473 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
2474
2475 HInstruction* first = LoadLocal(first_vreg, type);
2476 HInstruction* second = nullptr;
2477 if (second_is_constant) {
2478 if (type == DataType::Type::kInt32) {
2479 second = graph_->GetIntConstant(second_vreg_or_constant);
2480 } else {
2481 second = graph_->GetLongConstant(second_vreg_or_constant);
2482 }
2483 } else {
2484 second = LoadLocal(second_vreg_or_constant, type);
2485 }
2486
2487 if (!second_is_constant ||
2488 (type == DataType::Type::kInt32 && second->AsIntConstant()->GetValue() == 0) ||
2489 (type == DataType::Type::kInt64 && second->AsLongConstant()->GetValue() == 0)) {
2490 second = new (allocator_) HDivZeroCheck(second, dex_pc);
2491 AppendInstruction(second);
2492 }
2493
2494 if (is_div) {
2495 AppendInstruction(new (allocator_) HDiv(type, first, second, dex_pc));
2496 } else {
2497 AppendInstruction(new (allocator_) HRem(type, first, second, dex_pc));
2498 }
2499 UpdateLocal(out_vreg, current_block_->GetLastInstruction());
2500 }
2501
BuildArrayAccess(const Instruction & instruction,uint32_t dex_pc,bool is_put,DataType::Type anticipated_type)2502 void HInstructionBuilder::BuildArrayAccess(const Instruction& instruction,
2503 uint32_t dex_pc,
2504 bool is_put,
2505 DataType::Type anticipated_type) {
2506 uint8_t source_or_dest_reg = instruction.VRegA_23x();
2507 uint8_t array_reg = instruction.VRegB_23x();
2508 uint8_t index_reg = instruction.VRegC_23x();
2509
2510 HInstruction* object = LoadNullCheckedLocal(array_reg, dex_pc);
2511 HInstruction* length = new (allocator_) HArrayLength(object, dex_pc);
2512 AppendInstruction(length);
2513 HInstruction* index = LoadLocal(index_reg, DataType::Type::kInt32);
2514 index = new (allocator_) HBoundsCheck(index, length, dex_pc);
2515 AppendInstruction(index);
2516 if (is_put) {
2517 HInstruction* value = LoadLocal(source_or_dest_reg, anticipated_type);
2518 // TODO: Insert a type check node if the type is Object.
2519 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2520 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2521 AppendInstruction(aset);
2522 } else {
2523 HArrayGet* aget = new (allocator_) HArrayGet(object, index, anticipated_type, dex_pc);
2524 ssa_builder_->MaybeAddAmbiguousArrayGet(aget);
2525 AppendInstruction(aget);
2526 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2527 }
2528 graph_->SetHasBoundsChecks(true);
2529 }
2530
BuildNewArray(uint32_t dex_pc,dex::TypeIndex type_index,HInstruction * length)2531 HNewArray* HInstructionBuilder::BuildNewArray(uint32_t dex_pc,
2532 dex::TypeIndex type_index,
2533 HInstruction* length) {
2534 HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
2535
2536 const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(type_index));
2537 DCHECK_EQ(descriptor[0], '[');
2538 size_t component_type_shift = Primitive::ComponentSizeShift(Primitive::GetType(descriptor[1]));
2539
2540 HNewArray* new_array = new (allocator_) HNewArray(cls, length, dex_pc, component_type_shift);
2541 AppendInstruction(new_array);
2542 return new_array;
2543 }
2544
BuildFilledNewArray(uint32_t dex_pc,dex::TypeIndex type_index,const InstructionOperands & operands)2545 bool HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
2546 dex::TypeIndex type_index,
2547 const InstructionOperands& operands) {
2548 const size_t number_of_operands = operands.GetNumberOfOperands();
2549 HInstruction* length = graph_->GetIntConstant(number_of_operands);
2550
2551 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
2552 const char* descriptor = dex_file_->GetTypeDescriptor(type_index);
2553 DCHECK_EQ(descriptor[0], '[') << descriptor;
2554 char primitive = descriptor[1];
2555 if (primitive != 'I' && primitive != 'L' && primitive != '[') {
2556 DCHECK(primitive != 'J' && primitive != 'D'); // Rejected by the verifier.
2557 MaybeRecordStat(compilation_stats_, MethodCompilationStat::kNotCompiledMalformedOpcode);
2558 return false;
2559 }
2560 bool is_reference_array = (primitive == 'L') || (primitive == '[');
2561 DataType::Type type = is_reference_array ? DataType::Type::kReference : DataType::Type::kInt32;
2562
2563 for (size_t i = 0; i < number_of_operands; ++i) {
2564 HInstruction* value = LoadLocal(operands.GetOperand(i), type);
2565 HInstruction* index = graph_->GetIntConstant(i);
2566 HArraySet* aset = new (allocator_) HArraySet(new_array, index, value, type, dex_pc);
2567 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2568 AppendInstruction(aset);
2569 }
2570 latest_result_ = new_array;
2571
2572 BuildConstructorFenceForAllocation(new_array);
2573 return true;
2574 }
2575
2576 template <typename T>
BuildFillArrayData(HInstruction * object,const T * data,uint32_t element_count,DataType::Type anticipated_type,uint32_t dex_pc)2577 void HInstructionBuilder::BuildFillArrayData(HInstruction* object,
2578 const T* data,
2579 uint32_t element_count,
2580 DataType::Type anticipated_type,
2581 uint32_t dex_pc) {
2582 for (uint32_t i = 0; i < element_count; ++i) {
2583 HInstruction* index = graph_->GetIntConstant(i);
2584 HInstruction* value = graph_->GetIntConstant(data[i]);
2585 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2586 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2587 AppendInstruction(aset);
2588 }
2589 }
2590
// Lowers fill-array-data (format 31t): decodes the array-data payload embedded
// in the method's code item and emits per-element stores. A single bounds check
// on the last index covers all stores, matching the instruction's semantics of
// checking bounds before writing anything.
void HInstructionBuilder::BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc) {
  HInstruction* array = LoadNullCheckedLocal(instruction.VRegA_31t(), dex_pc);

  // The payload offset in VRegB is relative to the instruction's own dex pc.
  int32_t payload_offset = instruction.VRegB_31t() + dex_pc;
  const Instruction::ArrayDataPayload* payload =
      reinterpret_cast<const Instruction::ArrayDataPayload*>(
          code_item_accessor_.Insns() + payload_offset);
  const uint8_t* data = payload->data;
  uint32_t element_count = payload->element_count;

  if (element_count == 0u) {
    // For empty payload we emit only the null check above.
    return;
  }

  HInstruction* length = new (allocator_) HArrayLength(array, dex_pc);
  AppendInstruction(length);

  // Implementation of this DEX instruction seems to be that the bounds check is
  // done before doing any stores.
  HInstruction* last_index = graph_->GetIntConstant(payload->element_count - 1);
  AppendInstruction(new (allocator_) HBoundsCheck(last_index, length, dex_pc));

  // Dispatch on the payload's element width; the raw payload bytes are
  // reinterpreted as the corresponding signed element type.
  switch (payload->element_width) {
    case 1:
      BuildFillArrayData(array,
                         reinterpret_cast<const int8_t*>(data),
                         element_count,
                         DataType::Type::kInt8,
                         dex_pc);
      break;
    case 2:
      BuildFillArrayData(array,
                         reinterpret_cast<const int16_t*>(data),
                         element_count,
                         DataType::Type::kInt16,
                         dex_pc);
      break;
    case 4:
      BuildFillArrayData(array,
                         reinterpret_cast<const int32_t*>(data),
                         element_count,
                         DataType::Type::kInt32,
                         dex_pc);
      break;
    case 8:
      // 64-bit elements need long constants, handled by a dedicated helper.
      BuildFillWideArrayData(array,
                             reinterpret_cast<const int64_t*>(data),
                             element_count,
                             dex_pc);
      break;
    default:
      LOG(FATAL) << "Unknown element width for " << payload->element_width;
  }
  graph_->SetHasBoundsChecks(true);
}
2647
BuildFillWideArrayData(HInstruction * object,const int64_t * data,uint32_t element_count,uint32_t dex_pc)2648 void HInstructionBuilder::BuildFillWideArrayData(HInstruction* object,
2649 const int64_t* data,
2650 uint32_t element_count,
2651 uint32_t dex_pc) {
2652 for (uint32_t i = 0; i < element_count; ++i) {
2653 HInstruction* index = graph_->GetIntConstant(i);
2654 HInstruction* value = graph_->GetLongConstant(data[i]);
2655 HArraySet* aset =
2656 new (allocator_) HArraySet(object, index, value, DataType::Type::kInt64, dex_pc);
2657 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2658 AppendInstruction(aset);
2659 }
2660 }
2661
BuildLoadString(dex::StringIndex string_index,uint32_t dex_pc)2662 void HInstructionBuilder::BuildLoadString(dex::StringIndex string_index, uint32_t dex_pc) {
2663 HLoadString* load_string =
2664 new (allocator_) HLoadString(graph_->GetCurrentMethod(), string_index, *dex_file_, dex_pc);
2665 HSharpening::ProcessLoadString(load_string,
2666 code_generator_,
2667 *dex_compilation_unit_,
2668 graph_->GetHandleCache()->GetHandles());
2669 AppendInstruction(load_string);
2670 }
2671
BuildLoadClass(dex::TypeIndex type_index,uint32_t dex_pc)2672 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index, uint32_t dex_pc) {
2673 ScopedObjectAccess soa(Thread::Current());
2674 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2675 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2676 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2677 return BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2678 }
2679
// Emits an HLoadClass for `type_index` from `dex_file`, preferring a reference
// in the compiling dex file when one exists, and lets sharpening choose the
// load kind. Returns null if the class cannot be referenced from the compiled
// code (sharpening yields kInvalid).
HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index,
                                                const DexFile& dex_file,
                                                Handle<mirror::Class> klass,
                                                uint32_t dex_pc,
                                                bool needs_access_check) {
  // Try to find a reference in the compiling dex file.
  const DexFile* actual_dex_file = &dex_file;
  if (!IsSameDexFile(dex_file, *dex_compilation_unit_->GetDexFile())) {
    dex::TypeIndex local_type_index =
        klass->FindTypeIndexInOtherDexFile(*dex_compilation_unit_->GetDexFile());
    if (local_type_index.IsValid()) {
      type_index = local_type_index;
      actual_dex_file = dex_compilation_unit_->GetDexFile();
    }
  }

  // We cannot use the referrer's class load kind if we need to do an access check.
  // If the `klass` is unresolved, we need access check with the exception of the referrer's
  // class, see LoadClassNeedsAccessCheck(), so the `!needs_access_check` check is enough.
  // Otherwise, also check if the `klass` is the same as the compiling class, which also
  // conveniently rejects the case of unresolved compiling class.
  bool is_referrers_class =
      !needs_access_check &&
      (klass == nullptr || outer_compilation_unit_->GetCompilingClass().Get() == klass.Get());
  // Note: `klass` must be from `graph_->GetHandleCache()`.
  HLoadClass* load_class = new (allocator_) HLoadClass(
      graph_->GetCurrentMethod(),
      type_index,
      *actual_dex_file,
      klass,
      is_referrers_class,
      dex_pc,
      needs_access_check);

  HLoadClass::LoadKind load_kind = HSharpening::ComputeLoadClassKind(load_class,
                                                                     code_generator_,
                                                                     *dex_compilation_unit_);

  if (load_kind == HLoadClass::LoadKind::kInvalid) {
    // We actually cannot reference this class, we're forced to bail.
    return nullptr;
  }
  // Load kind must be set before inserting the instruction into the graph.
  load_class->SetLoadKind(load_kind);
  AppendInstruction(load_class);
  return load_class;
}
2727
ResolveClass(ScopedObjectAccess & soa,dex::TypeIndex type_index)2728 Handle<mirror::Class> HInstructionBuilder::ResolveClass(ScopedObjectAccess& soa,
2729 dex::TypeIndex type_index) {
2730 auto it = class_cache_.find(type_index);
2731 if (it != class_cache_.end()) {
2732 return it->second;
2733 }
2734
2735 ObjPtr<mirror::Class> klass = dex_compilation_unit_->GetClassLinker()->ResolveType(
2736 type_index, dex_compilation_unit_->GetDexCache(), dex_compilation_unit_->GetClassLoader());
2737 DCHECK_EQ(klass == nullptr, soa.Self()->IsExceptionPending());
2738 soa.Self()->ClearException(); // Clean up the exception left by type resolution if any.
2739
2740 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
2741 class_cache_.Put(type_index, h_klass);
2742 return h_klass;
2743 }
2744
// Decides whether loading class `type_index` requires a runtime access check.
// For a resolved `klass`, standard access rules apply. For an unresolved
// `klass`, the check can be avoided only when the reference provably denotes
// the (also unresolved) compiling class itself.
bool HInstructionBuilder::LoadClassNeedsAccessCheck(dex::TypeIndex type_index,
                                                    ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    // If the class is unresolved, we can avoid access checks only for references to
    // the compiling class as determined by checking the descriptor and ClassLoader.
    if (outer_compilation_unit_->GetCompilingClass() != nullptr) {
      // Compiling class is resolved, so different from the unresolved class.
      return true;
    }
    if (dex_compilation_unit_->GetClassLoader().Get() !=
            outer_compilation_unit_->GetClassLoader().Get()) {
      // Resolving the same descriptor in a different ClassLoader than the
      // defining loader of the compiling class shall either fail to find
      // the class definition, or find a different one.
      // (Assuming no custom ClassLoader hierarchy with circular delegation.)
      return true;
    }
    // Check if the class is the outer method's class.
    // For the same dex file compare type indexes, otherwise descriptors.
    const DexFile* outer_dex_file = outer_compilation_unit_->GetDexFile();
    const DexFile* inner_dex_file = dex_compilation_unit_->GetDexFile();
    const dex::ClassDef& outer_class_def =
        outer_dex_file->GetClassDef(outer_compilation_unit_->GetClassDefIndex());
    if (IsSameDexFile(*inner_dex_file, *outer_dex_file)) {
      if (type_index != outer_class_def.class_idx_) {
        return true;
      }
    } else {
      // Different dex files: fall back to comparing type descriptors.
      const std::string_view outer_descriptor =
          outer_dex_file->GetTypeDescriptorView(outer_class_def.class_idx_);
      const std::string_view target_descriptor =
          inner_dex_file->GetTypeDescriptorView(type_index);
      if (outer_descriptor != target_descriptor) {
        return true;
      }
    }
    // For inlined methods we also need to check if the compiling class
    // is public or in the same package as the inlined method's class.
    if (dex_compilation_unit_ != outer_compilation_unit_ &&
        (outer_class_def.access_flags_ & kAccPublic) == 0) {
      DCHECK(dex_compilation_unit_->GetCompilingClass() != nullptr);
      SamePackageCompare same_package(*outer_compilation_unit_);
      if (!same_package(dex_compilation_unit_->GetCompilingClass().Get())) {
        return true;
      }
    }
    return false;
  } else if (klass->IsPublic()) {
    // Public classes are accessible from everywhere.
    return false;
  } else if (dex_compilation_unit_->GetCompilingClass() != nullptr) {
    return !dex_compilation_unit_->GetCompilingClass()->CanAccess(klass);
  } else {
    // Unresolved compiling class: a non-public class is accessible only from
    // within the same package.
    SamePackageCompare same_package(*dex_compilation_unit_);
    return !same_package(klass);
  }
}
2801
BuildLoadMethodHandle(uint16_t method_handle_index,uint32_t dex_pc)2802 void HInstructionBuilder::BuildLoadMethodHandle(uint16_t method_handle_index, uint32_t dex_pc) {
2803 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2804 HLoadMethodHandle* load_method_handle = new (allocator_) HLoadMethodHandle(
2805 graph_->GetCurrentMethod(), method_handle_index, dex_file, dex_pc);
2806 AppendInstruction(load_method_handle);
2807 }
2808
BuildLoadMethodType(dex::ProtoIndex proto_index,uint32_t dex_pc)2809 void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc) {
2810 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2811 HLoadMethodType* load_method_type =
2812 new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
2813 HSharpening::ProcessLoadMethodType(load_method_type,
2814 code_generator_,
2815 *dex_compilation_unit_,
2816 graph_->GetHandleCache()->GetHandles());
2817 AppendInstruction(load_method_type);
2818 }
2819
// Emits an instance-of or check-cast against `type_index`. When the sharpened
// check kind is a bitstring check, the class constant is replaced by null plus
// encoded path-to-root/mask constants; otherwise an HLoadClass is emitted.
// check-cast additionally appends an HBoundType to refine `object`'s type.
void HInstructionBuilder::BuildTypeCheck(bool is_instance_of,
                                         HInstruction* object,
                                         dex::TypeIndex type_index,
                                         uint32_t dex_pc) {
  ScopedObjectAccess soa(Thread::Current());
  const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
  Handle<mirror::Class> klass = ResolveClass(soa, type_index);
  bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
  TypeCheckKind check_kind = HSharpening::ComputeTypeCheckKind(
      klass.Get(), code_generator_, needs_access_check);

  HInstruction* class_or_null = nullptr;
  HIntConstant* bitstring_path_to_root = nullptr;
  HIntConstant* bitstring_mask = nullptr;
  if (check_kind == TypeCheckKind::kBitstringCheck) {
    // TODO: Allow using the bitstring check also if we need an access check.
    DCHECK(!needs_access_check);
    class_or_null = graph_->GetNullConstant();
    // The bitstring encoding must be read under the subtype check lock to get
    // a consistent path-to-root/mask pair.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    uint32_t path_to_root =
        SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass.Get());
    uint32_t mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass.Get());
    bitstring_path_to_root = graph_->GetIntConstant(static_cast<int32_t>(path_to_root));
    bitstring_mask = graph_->GetIntConstant(static_cast<int32_t>(mask));
  } else {
    class_or_null = BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
  }
  DCHECK(class_or_null != nullptr);

  if (is_instance_of) {
    AppendInstruction(new (allocator_) HInstanceOf(object,
                                                   class_or_null,
                                                   check_kind,
                                                   klass,
                                                   dex_pc,
                                                   allocator_,
                                                   bitstring_path_to_root,
                                                   bitstring_mask));
  } else {
    // We emit a CheckCast followed by a BoundType. CheckCast is a statement
    // which may throw. If it succeeds BoundType sets the new type of `object`
    // for all subsequent uses.
    AppendInstruction(
        new (allocator_) HCheckCast(object,
                                    class_or_null,
                                    check_kind,
                                    klass,
                                    dex_pc,
                                    allocator_,
                                    bitstring_path_to_root,
                                    bitstring_mask));
    AppendInstruction(new (allocator_) HBoundType(object, dex_pc));
  }
}
2874
BuildTypeCheck(const Instruction & instruction,uint8_t destination,uint8_t reference,dex::TypeIndex type_index,uint32_t dex_pc)2875 void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
2876 uint8_t destination,
2877 uint8_t reference,
2878 dex::TypeIndex type_index,
2879 uint32_t dex_pc) {
2880 HInstruction* object = LoadLocal(reference, DataType::Type::kReference);
2881 bool is_instance_of = instruction.Opcode() == Instruction::INSTANCE_OF;
2882
2883 BuildTypeCheck(is_instance_of, object, type_index, dex_pc);
2884
2885 if (is_instance_of) {
2886 UpdateLocal(destination, current_block_->GetLastInstruction());
2887 } else {
2888 DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
2889 UpdateLocal(reference, current_block_->GetLastInstruction());
2890 }
2891 }
2892
// Translates a single dex instruction at `dex_pc` into HIR appended to
// `current_block_`. Returns false when the method cannot be compiled
// (unhandled opcode, or a builder helper rejects the instruction), in which
// case compilation of the whole method is abandoned.
bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction, uint32_t dex_pc) {
  switch (instruction.Opcode()) {
    // --- Integer/long constants in their various encodings. ---
    case Instruction::CONST_4: {
      int32_t register_index = instruction.VRegA_11n();
      HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_11n());
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST_16: {
      int32_t register_index = instruction.VRegA_21s();
      HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21s());
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST: {
      int32_t register_index = instruction.VRegA_31i();
      HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_31i());
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST_HIGH16: {
      int32_t register_index = instruction.VRegA_21h();
      // The 16-bit payload occupies the high half of the 32-bit constant.
      HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21h() << 16);
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST_WIDE_16: {
      int32_t register_index = instruction.VRegA_21s();
      // Get 16 bits of constant value, sign extended to 64 bits.
      int64_t value = instruction.VRegB_21s();
      value <<= 48;
      value >>= 48;
      HLongConstant* constant = graph_->GetLongConstant(value);
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST_WIDE_32: {
      int32_t register_index = instruction.VRegA_31i();
      // Get 32 bits of constant value, sign extended to 64 bits.
      int64_t value = instruction.VRegB_31i();
      value <<= 32;
      value >>= 32;
      HLongConstant* constant = graph_->GetLongConstant(value);
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST_WIDE: {
      int32_t register_index = instruction.VRegA_51l();
      HLongConstant* constant = graph_->GetLongConstant(instruction.VRegB_51l());
      UpdateLocal(register_index, constant);
      break;
    }

    case Instruction::CONST_WIDE_HIGH16: {
      int32_t register_index = instruction.VRegA_21h();
      // The 16-bit payload occupies the high half of the 64-bit constant.
      int64_t value = static_cast<int64_t>(instruction.VRegB_21h()) << 48;
      HLongConstant* constant = graph_->GetLongConstant(value);
      UpdateLocal(register_index, constant);
      break;
    }

    // Note that the SSA building will refine the types for moves.

    case Instruction::MOVE: {
      BuildMove<DataType::Type::kInt32>(instruction.VRegA_12x(), instruction.VRegB_12x());
      break;
    }

    case Instruction::MOVE_FROM16: {
      BuildMove<DataType::Type::kInt32>(instruction.VRegA_22x(), instruction.VRegB_22x());
      break;
    }

    case Instruction::MOVE_16: {
      BuildMove<DataType::Type::kInt32>(instruction.VRegA_32x(), instruction.VRegB_32x());
      break;
    }

    case Instruction::MOVE_WIDE: {
      BuildMove<DataType::Type::kInt64>(instruction.VRegA_12x(), instruction.VRegB_12x());
      break;
    }

    case Instruction::MOVE_WIDE_FROM16: {
      BuildMove<DataType::Type::kInt64>(instruction.VRegA_22x(), instruction.VRegB_22x());
      break;
    }

    case Instruction::MOVE_WIDE_16: {
      BuildMove<DataType::Type::kInt64>(instruction.VRegA_32x(), instruction.VRegB_32x());
      break;
    }

    case Instruction::MOVE_OBJECT: {
      BuildMove<DataType::Type::kReference>(instruction.VRegA_12x(), instruction.VRegB_12x());
      break;
    }

    case Instruction::MOVE_OBJECT_FROM16: {
      BuildMove<DataType::Type::kReference>(instruction.VRegA_22x(), instruction.VRegB_22x());
      break;
    }

    case Instruction::MOVE_OBJECT_16: {
      BuildMove<DataType::Type::kReference>(instruction.VRegA_32x(), instruction.VRegB_32x());
      break;
    }

    case Instruction::RETURN_VOID: {
      BuildReturn(instruction, DataType::Type::kVoid, dex_pc);
      break;
    }

    // --- Conditional branches: IF_xx compares two registers, IF_xxZ
    // --- compares one register against zero. ---
#define IF_XX(comparison, cond) \
  case Instruction::IF_##cond: \
    If_21_22t<comparison, /* kCompareWithZero= */ false>(instruction, dex_pc); \
    break; \
  case Instruction::IF_##cond##Z: \
    If_21_22t<comparison, /* kCompareWithZero= */ true>(instruction, dex_pc); \
    break;

    IF_XX(HEqual, EQ);
    IF_XX(HNotEqual, NE);
    IF_XX(HLessThan, LT);
    IF_XX(HLessThanOrEqual, LE);
    IF_XX(HGreaterThan, GT);
    IF_XX(HGreaterThanOrEqual, GE);
#undef IF_XX

    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32: {
      AppendInstruction(new (allocator_) HGoto(dex_pc));
      // Unconditional branch terminates the block; clear `current_block_` so
      // no further instructions are appended to it.
      current_block_ = nullptr;
      break;
    }

    // The SSA builder refines `return_type_` as needed for all return kinds.
    case Instruction::RETURN: {
      BuildReturn(instruction, return_type_, dex_pc);
      break;
    }

    case Instruction::RETURN_OBJECT: {
      BuildReturn(instruction, return_type_, dex_pc);
      break;
    }

    case Instruction::RETURN_WIDE: {
      BuildReturn(instruction, return_type_, dex_pc);
      break;
    }

    // --- Invokes: var-args encodings pack up to 5 vregs; range encodings
    // --- address a contiguous run of registers. ---
    case Instruction::INVOKE_DIRECT:
    case Instruction::INVOKE_INTERFACE:
    case Instruction::INVOKE_STATIC:
    case Instruction::INVOKE_SUPER:
    case Instruction::INVOKE_VIRTUAL: {
      uint16_t method_idx = instruction.VRegB_35c();
      uint32_t args[5];
      uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
      VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
      if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
        return false;
      }
      break;
    }

    case Instruction::INVOKE_DIRECT_RANGE:
    case Instruction::INVOKE_INTERFACE_RANGE:
    case Instruction::INVOKE_STATIC_RANGE:
    case Instruction::INVOKE_SUPER_RANGE:
    case Instruction::INVOKE_VIRTUAL_RANGE: {
      uint16_t method_idx = instruction.VRegB_3rc();
      RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
      if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
        return false;
      }
      break;
    }

    case Instruction::INVOKE_POLYMORPHIC: {
      uint16_t method_idx = instruction.VRegB_45cc();
      dex::ProtoIndex proto_idx(instruction.VRegH_45cc());
      uint32_t args[5];
      uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
      VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
      return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
    }

    case Instruction::INVOKE_POLYMORPHIC_RANGE: {
      uint16_t method_idx = instruction.VRegB_4rcc();
      dex::ProtoIndex proto_idx(instruction.VRegH_4rcc());
      RangeInstructionOperands operands(instruction.VRegC_4rcc(), instruction.VRegA_4rcc());
      return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
    }

    case Instruction::INVOKE_CUSTOM: {
      uint16_t call_site_idx = instruction.VRegB_35c();
      uint32_t args[5];
      uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
      VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
      return BuildInvokeCustom(dex_pc, call_site_idx, operands);
    }

    case Instruction::INVOKE_CUSTOM_RANGE: {
      uint16_t call_site_idx = instruction.VRegB_3rc();
      RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
      return BuildInvokeCustom(dex_pc, call_site_idx, operands);
    }

    // --- Unary operations. ---
    case Instruction::NEG_INT: {
      Unop_12x<HNeg>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::NEG_LONG: {
      Unop_12x<HNeg>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::NEG_FLOAT: {
      Unop_12x<HNeg>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::NEG_DOUBLE: {
      Unop_12x<HNeg>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::NOT_INT: {
      Unop_12x<HNot>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::NOT_LONG: {
      Unop_12x<HNot>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    // --- Primitive conversions (Conversion_12x takes input, then result type). ---
    case Instruction::INT_TO_LONG: {
      Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::INT_TO_FLOAT: {
      Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::INT_TO_DOUBLE: {
      Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::LONG_TO_INT: {
      Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::LONG_TO_FLOAT: {
      Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::LONG_TO_DOUBLE: {
      Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::FLOAT_TO_INT: {
      Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::FLOAT_TO_LONG: {
      Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::FLOAT_TO_DOUBLE: {
      Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::DOUBLE_TO_INT: {
      Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::DOUBLE_TO_LONG: {
      Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::DOUBLE_TO_FLOAT: {
      Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::INT_TO_BYTE: {
      Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt8, dex_pc);
      break;
    }

    case Instruction::INT_TO_SHORT: {
      Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt16, dex_pc);
      break;
    }

    case Instruction::INT_TO_CHAR: {
      // `char` is the only unsigned 16-bit type in dex.
      Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kUint16, dex_pc);
      break;
    }

    // --- Three-register binary operations. ---
    case Instruction::ADD_INT: {
      Binop_23x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::ADD_LONG: {
      Binop_23x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::ADD_DOUBLE: {
      Binop_23x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::ADD_FLOAT: {
      Binop_23x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::SUB_INT: {
      Binop_23x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::SUB_LONG: {
      Binop_23x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::SUB_FLOAT: {
      Binop_23x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::SUB_DOUBLE: {
      Binop_23x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::ADD_INT_2ADDR: {
      Binop_12x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::MUL_INT: {
      Binop_23x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::MUL_LONG: {
      Binop_23x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::MUL_FLOAT: {
      Binop_23x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::MUL_DOUBLE: {
      Binop_23x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    // Integer div/rem go through BuildCheckedDivRem, which inserts the
    // divide-by-zero check; float/double div/rem need no check.
    case Instruction::DIV_INT: {
      BuildCheckedDivRem(instruction.VRegA_23x(),
                         instruction.VRegB_23x(),
                         instruction.VRegC_23x(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ false,
                         /* is_div= */ true);
      break;
    }

    case Instruction::DIV_LONG: {
      BuildCheckedDivRem(instruction.VRegA_23x(),
                         instruction.VRegB_23x(),
                         instruction.VRegC_23x(),
                         dex_pc,
                         DataType::Type::kInt64,
                         /* second_is_constant= */ false,
                         /* is_div= */ true);
      break;
    }

    case Instruction::DIV_FLOAT: {
      Binop_23x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::DIV_DOUBLE: {
      Binop_23x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::REM_INT: {
      BuildCheckedDivRem(instruction.VRegA_23x(),
                         instruction.VRegB_23x(),
                         instruction.VRegC_23x(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ false,
                         /* is_div= */ false);
      break;
    }

    case Instruction::REM_LONG: {
      BuildCheckedDivRem(instruction.VRegA_23x(),
                         instruction.VRegB_23x(),
                         instruction.VRegC_23x(),
                         dex_pc,
                         DataType::Type::kInt64,
                         /* second_is_constant= */ false,
                         /* is_div= */ false);
      break;
    }

    case Instruction::REM_FLOAT: {
      Binop_23x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::REM_DOUBLE: {
      Binop_23x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::AND_INT: {
      Binop_23x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::AND_LONG: {
      Binop_23x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    // Shifts use a dedicated helper: the distance operand is always kInt32.
    case Instruction::SHL_INT: {
      Binop_23x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::SHL_LONG: {
      Binop_23x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::SHR_INT: {
      Binop_23x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::SHR_LONG: {
      Binop_23x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::USHR_INT: {
      Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::USHR_LONG: {
      Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::OR_INT: {
      Binop_23x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::OR_LONG: {
      Binop_23x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::XOR_INT: {
      Binop_23x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::XOR_LONG: {
      Binop_23x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    // --- Two-address (in-place) binary operations. ---
    case Instruction::ADD_LONG_2ADDR: {
      Binop_12x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::ADD_DOUBLE_2ADDR: {
      Binop_12x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::ADD_FLOAT_2ADDR: {
      Binop_12x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::SUB_INT_2ADDR: {
      Binop_12x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::SUB_LONG_2ADDR: {
      Binop_12x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::SUB_FLOAT_2ADDR: {
      Binop_12x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::SUB_DOUBLE_2ADDR: {
      Binop_12x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::MUL_INT_2ADDR: {
      Binop_12x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::MUL_LONG_2ADDR: {
      Binop_12x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::MUL_FLOAT_2ADDR: {
      Binop_12x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::MUL_DOUBLE_2ADDR: {
      Binop_12x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    // For 2ADDR div/rem, vregA is both the first operand and the destination.
    case Instruction::DIV_INT_2ADDR: {
      BuildCheckedDivRem(instruction.VRegA_12x(),
                         instruction.VRegA_12x(),
                         instruction.VRegB_12x(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ false,
                         /* is_div= */ true);
      break;
    }

    case Instruction::DIV_LONG_2ADDR: {
      BuildCheckedDivRem(instruction.VRegA_12x(),
                         instruction.VRegA_12x(),
                         instruction.VRegB_12x(),
                         dex_pc,
                         DataType::Type::kInt64,
                         /* second_is_constant= */ false,
                         /* is_div= */ true);
      break;
    }

    case Instruction::REM_INT_2ADDR: {
      BuildCheckedDivRem(instruction.VRegA_12x(),
                         instruction.VRegA_12x(),
                         instruction.VRegB_12x(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ false,
                         /* is_div= */ false);
      break;
    }

    case Instruction::REM_LONG_2ADDR: {
      BuildCheckedDivRem(instruction.VRegA_12x(),
                         instruction.VRegA_12x(),
                         instruction.VRegB_12x(),
                         dex_pc,
                         DataType::Type::kInt64,
                         /* second_is_constant= */ false,
                         /* is_div= */ false);
      break;
    }

    case Instruction::REM_FLOAT_2ADDR: {
      Binop_12x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::REM_DOUBLE_2ADDR: {
      Binop_12x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::SHL_INT_2ADDR: {
      Binop_12x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::SHL_LONG_2ADDR: {
      Binop_12x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::SHR_INT_2ADDR: {
      Binop_12x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::SHR_LONG_2ADDR: {
      Binop_12x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::USHR_INT_2ADDR: {
      Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::USHR_LONG_2ADDR: {
      Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::DIV_FLOAT_2ADDR: {
      Binop_12x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
      break;
    }

    case Instruction::DIV_DOUBLE_2ADDR: {
      Binop_12x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
      break;
    }

    case Instruction::AND_INT_2ADDR: {
      Binop_12x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::AND_LONG_2ADDR: {
      Binop_12x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::OR_INT_2ADDR: {
      Binop_12x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::OR_LONG_2ADDR: {
      Binop_12x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    case Instruction::XOR_INT_2ADDR: {
      Binop_12x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
      break;
    }

    case Instruction::XOR_LONG_2ADDR: {
      Binop_12x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
      break;
    }

    // --- Binary operations with a literal second operand. The boolean
    // --- argument to Binop_22s/22b is `reverse` (literal becomes the first
    // --- operand) — true only for RSUB. ---
    case Instruction::ADD_INT_LIT16: {
      Binop_22s<HAdd>(instruction, false, dex_pc);
      break;
    }

    case Instruction::AND_INT_LIT16: {
      Binop_22s<HAnd>(instruction, false, dex_pc);
      break;
    }

    case Instruction::OR_INT_LIT16: {
      Binop_22s<HOr>(instruction, false, dex_pc);
      break;
    }

    case Instruction::XOR_INT_LIT16: {
      Binop_22s<HXor>(instruction, false, dex_pc);
      break;
    }

    case Instruction::RSUB_INT: {
      Binop_22s<HSub>(instruction, true, dex_pc);
      break;
    }

    case Instruction::MUL_INT_LIT16: {
      Binop_22s<HMul>(instruction, false, dex_pc);
      break;
    }

    case Instruction::ADD_INT_LIT8: {
      Binop_22b<HAdd>(instruction, false, dex_pc);
      break;
    }

    case Instruction::AND_INT_LIT8: {
      Binop_22b<HAnd>(instruction, false, dex_pc);
      break;
    }

    case Instruction::OR_INT_LIT8: {
      Binop_22b<HOr>(instruction, false, dex_pc);
      break;
    }

    case Instruction::XOR_INT_LIT8: {
      Binop_22b<HXor>(instruction, false, dex_pc);
      break;
    }

    case Instruction::RSUB_INT_LIT8: {
      Binop_22b<HSub>(instruction, true, dex_pc);
      break;
    }

    case Instruction::MUL_INT_LIT8: {
      Binop_22b<HMul>(instruction, false, dex_pc);
      break;
    }

    case Instruction::DIV_INT_LIT16: {
      BuildCheckedDivRem(instruction.VRegA_22s(),
                         instruction.VRegB_22s(),
                         instruction.VRegC_22s(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ true,
                         /* is_div= */ true);
      break;
    }

    case Instruction::DIV_INT_LIT8: {
      BuildCheckedDivRem(instruction.VRegA_22b(),
                         instruction.VRegB_22b(),
                         instruction.VRegC_22b(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ true,
                         /* is_div= */ true);
      break;
    }

    case Instruction::REM_INT_LIT16: {
      BuildCheckedDivRem(instruction.VRegA_22s(),
                         instruction.VRegB_22s(),
                         instruction.VRegC_22s(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ true,
                         /* is_div= */ false);
      break;
    }

    case Instruction::REM_INT_LIT8: {
      BuildCheckedDivRem(instruction.VRegA_22b(),
                         instruction.VRegB_22b(),
                         instruction.VRegC_22b(),
                         dex_pc,
                         DataType::Type::kInt32,
                         /* second_is_constant= */ true,
                         /* is_div= */ false);
      break;
    }

    case Instruction::SHL_INT_LIT8: {
      Binop_22b<HShl>(instruction, false, dex_pc);
      break;
    }

    case Instruction::SHR_INT_LIT8: {
      Binop_22b<HShr>(instruction, false, dex_pc);
      break;
    }

    case Instruction::USHR_INT_LIT8: {
      Binop_22b<HUShr>(instruction, false, dex_pc);
      break;
    }

    // --- Allocation. ---
    case Instruction::NEW_INSTANCE: {
      HNewInstance* new_instance =
          BuildNewInstance(dex::TypeIndex(instruction.VRegB_21c()), dex_pc);
      DCHECK(new_instance != nullptr);

      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
      // A constructor fence makes final-field publication safe.
      BuildConstructorFenceForAllocation(new_instance);
      break;
    }

    case Instruction::NEW_ARRAY: {
      dex::TypeIndex type_index(instruction.VRegC_22c());
      HInstruction* length = LoadLocal(instruction.VRegB_22c(), DataType::Type::kInt32);
      HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);

      UpdateLocal(instruction.VRegA_22c(), current_block_->GetLastInstruction());
      BuildConstructorFenceForAllocation(new_array);
      break;
    }

    case Instruction::FILLED_NEW_ARRAY: {
      dex::TypeIndex type_index(instruction.VRegB_35c());
      uint32_t args[5];
      uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
      VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
      if (!BuildFilledNewArray(dex_pc, type_index, operands)) {
        return false;
      }
      break;
    }

    case Instruction::FILLED_NEW_ARRAY_RANGE: {
      dex::TypeIndex type_index(instruction.VRegB_3rc());
      RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
      if (!BuildFilledNewArray(dex_pc, type_index, operands)) {
        return false;
      }
      break;
    }

    case Instruction::FILL_ARRAY_DATA: {
      BuildFillArrayData(instruction, dex_pc);
      break;
    }

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_WIDE:
    case Instruction::MOVE_RESULT_OBJECT: {
      // Consume the pending result of the preceding invoke/filled-new-array.
      DCHECK(latest_result_ != nullptr);
      UpdateLocal(instruction.VRegA_11x(), latest_result_);
      latest_result_ = nullptr;
      break;
    }

    // --- Comparisons producing -1/0/1; gt/lt bias selects the result for NaN. ---
    case Instruction::CMP_LONG: {
      Binop_23x_cmp(instruction, DataType::Type::kInt64, ComparisonBias::kNoBias, dex_pc);
      break;
    }

    case Instruction::CMPG_FLOAT: {
      Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kGtBias, dex_pc);
      break;
    }

    case Instruction::CMPG_DOUBLE: {
      Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kGtBias, dex_pc);
      break;
    }

    case Instruction::CMPL_FLOAT: {
      Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kLtBias, dex_pc);
      break;
    }

    case Instruction::CMPL_DOUBLE: {
      Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kLtBias, dex_pc);
      break;
    }

    case Instruction::NOP:
      break;

    // --- Field accesses. ---
    case Instruction::IGET:
    case Instruction::IGET_WIDE:
    case Instruction::IGET_OBJECT:
    case Instruction::IGET_BOOLEAN:
    case Instruction::IGET_BYTE:
    case Instruction::IGET_CHAR:
    case Instruction::IGET_SHORT: {
      if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ false)) {
        return false;
      }
      break;
    }

    case Instruction::IPUT:
    case Instruction::IPUT_WIDE:
    case Instruction::IPUT_OBJECT:
    case Instruction::IPUT_BOOLEAN:
    case Instruction::IPUT_BYTE:
    case Instruction::IPUT_CHAR:
    case Instruction::IPUT_SHORT: {
      if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ true)) {
        return false;
      }
      break;
    }

    case Instruction::SGET:
    case Instruction::SGET_WIDE:
    case Instruction::SGET_OBJECT:
    case Instruction::SGET_BOOLEAN:
    case Instruction::SGET_BYTE:
    case Instruction::SGET_CHAR:
    case Instruction::SGET_SHORT: {
      BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ false);
      break;
    }

    case Instruction::SPUT:
    case Instruction::SPUT_WIDE:
    case Instruction::SPUT_OBJECT:
    case Instruction::SPUT_BOOLEAN:
    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_CHAR:
    case Instruction::SPUT_SHORT: {
      BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ true);
      break;
    }

    // --- Array accesses (third BuildArrayAccess argument is `is_put`). ---
#define ARRAY_XX(kind, anticipated_type)                                \
  case Instruction::AGET##kind: {                                       \
    BuildArrayAccess(instruction, dex_pc, false, anticipated_type);     \
    break;                                                              \
  }                                                                     \
  case Instruction::APUT##kind: {                                       \
    BuildArrayAccess(instruction, dex_pc, true, anticipated_type);      \
    break;                                                              \
  }

    ARRAY_XX(, DataType::Type::kInt32);
    ARRAY_XX(_WIDE, DataType::Type::kInt64);
    ARRAY_XX(_OBJECT, DataType::Type::kReference);
    ARRAY_XX(_BOOLEAN, DataType::Type::kBool);
    ARRAY_XX(_BYTE, DataType::Type::kInt8);
    ARRAY_XX(_CHAR, DataType::Type::kUint16);
    ARRAY_XX(_SHORT, DataType::Type::kInt16);

    case Instruction::ARRAY_LENGTH: {
      // Null-check the array before reading its length.
      HInstruction* object = LoadNullCheckedLocal(instruction.VRegB_12x(), dex_pc);
      AppendInstruction(new (allocator_) HArrayLength(object, dex_pc));
      UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
      break;
    }

    // --- Loads of string / class / method-handle / method-type constants. ---
    case Instruction::CONST_STRING: {
      dex::StringIndex string_index(instruction.VRegB_21c());
      BuildLoadString(string_index, dex_pc);
      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
      break;
    }

    case Instruction::CONST_STRING_JUMBO: {
      dex::StringIndex string_index(instruction.VRegB_31c());
      BuildLoadString(string_index, dex_pc);
      UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
      break;
    }

    case Instruction::CONST_CLASS: {
      dex::TypeIndex type_index(instruction.VRegB_21c());
      BuildLoadClass(type_index, dex_pc);
      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
      break;
    }

    case Instruction::CONST_METHOD_HANDLE: {
      uint16_t method_handle_idx = instruction.VRegB_21c();
      BuildLoadMethodHandle(method_handle_idx, dex_pc);
      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
      break;
    }

    case Instruction::CONST_METHOD_TYPE: {
      dex::ProtoIndex proto_idx(instruction.VRegB_21c());
      BuildLoadMethodType(proto_idx, dex_pc);
      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
      break;
    }

    case Instruction::MOVE_EXCEPTION: {
      // Load the pending exception into the vreg, then clear it so further
      // code does not observe it as still pending.
      AppendInstruction(new (allocator_) HLoadException(dex_pc));
      UpdateLocal(instruction.VRegA_11x(), current_block_->GetLastInstruction());
      AppendInstruction(new (allocator_) HClearException(dex_pc));
      break;
    }

    case Instruction::THROW: {
      HInstruction* exception = LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference);
      AppendInstruction(new (allocator_) HThrow(exception, dex_pc));
      // We finished building this block. Set the current block to null to avoid
      // adding dead instructions to it.
      current_block_ = nullptr;
      break;
    }

    // --- Type checks. ---
    case Instruction::INSTANCE_OF: {
      uint8_t destination = instruction.VRegA_22c();
      uint8_t reference = instruction.VRegB_22c();
      dex::TypeIndex type_index(instruction.VRegC_22c());
      BuildTypeCheck(instruction, destination, reference, type_index, dex_pc);
      break;
    }

    case Instruction::CHECK_CAST: {
      uint8_t reference = instruction.VRegA_21c();
      dex::TypeIndex type_index(instruction.VRegB_21c());
      // check-cast has no destination register; -1 marks it unused.
      BuildTypeCheck(instruction, -1, reference, type_index, dex_pc);
      break;
    }

    // --- Monitors. ---
    case Instruction::MONITOR_ENTER: {
      AppendInstruction(new (allocator_) HMonitorOperation(
          LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
          HMonitorOperation::OperationKind::kEnter,
          dex_pc));
      graph_->SetHasMonitorOperations(true);
      break;
    }

    case Instruction::MONITOR_EXIT: {
      AppendInstruction(new (allocator_) HMonitorOperation(
          LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
          HMonitorOperation::OperationKind::kExit,
          dex_pc));
      graph_->SetHasMonitorOperations(true);
      break;
    }

    case Instruction::SPARSE_SWITCH:
    case Instruction::PACKED_SWITCH: {
      BuildSwitch(instruction, dex_pc);
      break;
    }

    // Unused opcode ranges: reject the method rather than miscompile.
    case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
    case Instruction::UNUSED_73:
    case Instruction::UNUSED_79:
    case Instruction::UNUSED_7A:
    case Instruction::UNUSED_E3 ... Instruction::UNUSED_F9: {
      VLOG(compiler) << "Did not compile "
                     << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
                     << " because of unhandled instruction "
                     << instruction.Name();
      MaybeRecordStat(compilation_stats_,
                      MethodCompilationStat::kNotCompiledUnhandledInstruction);
      return false;
    }
  }
  return true;
}  // NOLINT(readability/fn_size)
3962
LookupResolvedType(dex::TypeIndex type_index,const DexCompilationUnit & compilation_unit) const3963 ObjPtr<mirror::Class> HInstructionBuilder::LookupResolvedType(
3964 dex::TypeIndex type_index,
3965 const DexCompilationUnit& compilation_unit) const {
3966 return compilation_unit.GetClassLinker()->LookupResolvedType(
3967 type_index, compilation_unit.GetDexCache().Get(), compilation_unit.GetClassLoader().Get());
3968 }
3969
LookupReferrerClass() const3970 ObjPtr<mirror::Class> HInstructionBuilder::LookupReferrerClass() const {
3971 // TODO: Cache the result in a Handle<mirror::Class>.
3972 const dex::MethodId& method_id =
3973 dex_compilation_unit_->GetDexFile()->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
3974 return LookupResolvedType(method_id.class_idx_, *dex_compilation_unit_);
3975 }
3976
3977 } // namespace art
3978