/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instruction_builder.h"

#include "art_method-inl.h"
#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/logging.h"
#include "block_builder.h"
#include "class_linker-inl.h"
#include "code_generator.h"
#include "data_type-inl.h"
#include "dex/bytecode_utils.h"
#include "dex/dex_instruction-inl.h"
#include "driver/dex_compilation_unit.h"
#include "driver/compiler_options.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "imtable-inl.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "jit/jit.h"
#include "mirror/dex_cache.h"
#include "oat_file.h"
#include "optimizing_compiler_stats.h"
#include "reflective_handle_scope-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "sharpening.h"
#include "ssa_builder.h"
#include "well_known_classes.h"

namespace art HIDDEN {

namespace {

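// Functor that tells whether a class is in the same runtime package as the method being
// compiled: same class loader and same package part of the type descriptor. Used below for
// access checks when the referrer could not be resolved.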
class SamePackageCompare {
 public:
  explicit SamePackageCompare(const DexCompilationUnit& dex_compilation_unit)
      : dex_compilation_unit_(dex_compilation_unit) {}

  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (klass->GetClassLoader() != dex_compilation_unit_.GetClassLoader().Get()) {
      return false;
    }
    if (referrers_descriptor_ == nullptr) {
      const DexFile* dex_file = dex_compilation_unit_.GetDexFile();
      uint32_t referrers_method_idx = dex_compilation_unit_.GetDexMethodIndex();
      referrers_descriptor_ =
          dex_file->StringByTypeIdx(dex_file->GetMethodId(referrers_method_idx).class_idx_);
      referrers_package_length_ = PackageLength(referrers_descriptor_);
    }
    std::string temp;
    const char* klass_descriptor = klass->GetDescriptor(&temp);
    size_t klass_package_length = PackageLength(klass_descriptor);
    return (referrers_package_length_ == klass_package_length) &&
           memcmp(referrers_descriptor_, klass_descriptor, referrers_package_length_) == 0;
  }

 private:
  static size_t PackageLength(const char* descriptor) {
    const char* slash_pos = strrchr(descriptor, '/');
    return (slash_pos != nullptr) ? static_cast<size_t>(slash_pos - descriptor) : 0u;
  }

  const DexCompilationUnit& dex_compilation_unit_;
  const char* referrers_descriptor_ = nullptr;
  size_t referrers_package_length_ = 0u;
};

}  // anonymous namespace

HInstructionBuilder::HInstructionBuilder(HGraph* graph,
                                         HBasicBlockBuilder* block_builder,
                                         SsaBuilder* ssa_builder,
                                         const DexFile* dex_file,
                                         const CodeItemDebugInfoAccessor& accessor,
                                         DataType::Type return_type,
                                         const DexCompilationUnit* dex_compilation_unit,
                                         const DexCompilationUnit* outer_compilation_unit,
                                         CodeGenerator* code_generator,
                                         OptimizingCompilerStats* compiler_stats,
                                         ScopedArenaAllocator* local_allocator)
    : allocator_(graph->GetAllocator()),
      graph_(graph),
      dex_file_(dex_file),
      code_item_accessor_(accessor),
      return_type_(return_type),
      block_builder_(block_builder),
      ssa_builder_(ssa_builder),
      code_generator_(code_generator),
      dex_compilation_unit_(dex_compilation_unit),
      outer_compilation_unit_(outer_compilation_unit),
      compilation_stats_(compiler_stats),
      local_allocator_(local_allocator),
      locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
      current_block_(nullptr),
      current_locals_(nullptr),
      latest_result_(nullptr),
      current_this_parameter_(nullptr),
      loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
      class_cache_(std::less<dex::TypeIndex>(), local_allocator->Adapter(kArenaAllocGraphBuilder)) {
  loop_headers_.reserve(kDefaultNumberOfLoops);
}

HBasicBlock* HInstructionBuilder::FindBlockStartingAt(uint32_t dex_pc) const {
  return block_builder_->GetBlockAt(dex_pc);
}

inline ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsFor(HBasicBlock* block) {
  ScopedArenaVector<HInstruction*>* locals = &locals_for_[block->GetBlockId()];
  const size_t vregs = graph_->GetNumberOfVRegs();
  if (locals->size() == vregs) {
    return locals;
  }
  return GetLocalsForWithAllocation(block, locals, vregs);
}

ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsForWithAllocation(
    HBasicBlock* block,
    ScopedArenaVector<HInstruction*>* locals,
    const size_t vregs) {
  DCHECK_NE(locals->size(), vregs);
  locals->resize(vregs, nullptr);
  if (block->IsCatchBlock()) {
    // We record incoming inputs of catch phis at throwing instructions and
    // must therefore eagerly create the phis. Phis for undefined vregs will
    // be deleted when the first throwing instruction with the vreg undefined
    // is encountered. Unused phis will be removed by dead phi analysis.
    for (size_t i = 0; i < vregs; ++i) {
      // No point in creating the catch phi if it is already undefined at
      // the first throwing instruction.
      HInstruction* current_local_value = (*current_locals_)[i];
      if (current_local_value != nullptr) {
        HPhi* phi = new (allocator_) HPhi(
            allocator_,
            i,
            0,
            current_local_value->GetType());
        block->AddPhi(phi);
        (*locals)[i] = phi;
      }
    }
  }
  return locals;
}

inline HInstruction* HInstructionBuilder::ValueOfLocalAt(HBasicBlock* block, size_t local) {
  ScopedArenaVector<HInstruction*>* locals = GetLocalsFor(block);
  return (*locals)[local];
}

void HInstructionBuilder::InitializeBlockLocals() {
  current_locals_ = GetLocalsFor(current_block_);

  if (current_block_->IsCatchBlock()) {
    // Catch phis were already created and inputs collected from throwing sites.
    if (kIsDebugBuild) {
      // Make sure there was at least one throwing instruction which initialized
      // locals (guaranteed by HGraphBuilder) and that all try blocks have been
      // visited already (from HTryBoundary scoping and reverse post order).
      bool catch_block_visited = false;
      for (HBasicBlock* current : graph_->GetReversePostOrder()) {
        if (current == current_block_) {
          catch_block_visited = true;
        } else if (current->IsTryBlock()) {
          const HTryBoundary& try_entry = current->GetTryCatchInformation()->GetTryEntry();
          if (try_entry.HasExceptionHandler(*current_block_)) {
            DCHECK(!catch_block_visited) << "Catch block visited before its try block.";
          }
        }
      }
      DCHECK_EQ(current_locals_->size(), graph_->GetNumberOfVRegs())
          << "No instructions throwing into a live catch block.";
    }
  } else if (current_block_->IsLoopHeader()) {
    // If the block is a loop header, we know we only have visited the pre header
    // because we are visiting in reverse post order. We create phis for all initialized
    // locals from the pre header. Their inputs will be populated at the end of
    // the analysis.
    for (size_t local = 0; local < current_locals_->size(); ++local) {
      HInstruction* incoming =
          ValueOfLocalAt(current_block_->GetLoopInformation()->GetPreHeader(), local);
      if (incoming != nullptr) {
        HPhi* phi = new (allocator_) HPhi(
            allocator_,
            local,
            0,
            incoming->GetType());
        current_block_->AddPhi(phi);
        (*current_locals_)[local] = phi;
      }
    }

    // Save the loop header so that the last phase of the analysis knows which
    // blocks need to be updated.
    loop_headers_.push_back(current_block_);
  } else if (current_block_->GetPredecessors().size() > 0) {
    // All predecessors have already been visited because we are visiting in reverse post order.
    // We merge the values of all locals, creating phis if those values differ.
    for (size_t local = 0; local < current_locals_->size(); ++local) {
      bool one_predecessor_has_no_value = false;
      bool is_different = false;
      HInstruction* value = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);

      for (HBasicBlock* predecessor : current_block_->GetPredecessors()) {
        HInstruction* current = ValueOfLocalAt(predecessor, local);
        if (current == nullptr) {
          one_predecessor_has_no_value = true;
          break;
        } else if (current != value) {
          is_different = true;
        }
      }

      if (one_predecessor_has_no_value) {
        // If one predecessor has no value for this local, we trust the verifier has
        // successfully checked that there is a store dominating any read after this block.
        continue;
      }

      if (is_different) {
        HInstruction* first_input = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
        HPhi* phi = new (allocator_) HPhi(
            allocator_,
            local,
            current_block_->GetPredecessors().size(),
            first_input->GetType());
        for (size_t i = 0; i < current_block_->GetPredecessors().size(); i++) {
          HInstruction* pred_value = ValueOfLocalAt(current_block_->GetPredecessors()[i], local);
          phi->SetRawInputAt(i, pred_value);
        }
        current_block_->AddPhi(phi);
        value = phi;
      }
      (*current_locals_)[local] = value;
    }
  }
}

void HInstructionBuilder::PropagateLocalsToCatchBlocks() {
  const HTryBoundary& try_entry = current_block_->GetTryCatchInformation()->GetTryEntry();
  for (HBasicBlock* catch_block : try_entry.GetExceptionHandlers()) {
    ScopedArenaVector<HInstruction*>* handler_locals = GetLocalsFor(catch_block);
    DCHECK_EQ(handler_locals->size(), current_locals_->size());
    for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
      HInstruction* handler_value = (*handler_locals)[vreg];
      if (handler_value == nullptr) {
        // Vreg was undefined at a previously encountered throwing instruction
        // and the catch phi was deleted. Do not record the local value.
        continue;
      }
      DCHECK(handler_value->IsPhi());

      HInstruction* local_value = (*current_locals_)[vreg];
      if (local_value == nullptr) {
        // This is the first instruction throwing into `catch_block` where
        // `vreg` is undefined. Delete the catch phi.
        catch_block->RemovePhi(handler_value->AsPhi());
        (*handler_locals)[vreg] = nullptr;
      } else {
        // Vreg has been defined at all instructions throwing into `catch_block`
        // encountered so far. Record the local value in the catch phi.
        handler_value->AsPhi()->AddInput(local_value);
      }
    }
  }
}

void HInstructionBuilder::AppendInstruction(HInstruction* instruction) {
  current_block_->AddInstruction(instruction);
  InitializeInstruction(instruction);
}

void HInstructionBuilder::InsertInstructionAtTop(HInstruction* instruction) {
  if (current_block_->GetInstructions().IsEmpty()) {
    current_block_->AddInstruction(instruction);
  } else {
    current_block_->InsertInstructionBefore(instruction, current_block_->GetFirstInstruction());
  }
  InitializeInstruction(instruction);
}

void HInstructionBuilder::InitializeInstruction(HInstruction* instruction) {
  if (instruction->NeedsEnvironment()) {
    HEnvironment* environment = new (allocator_) HEnvironment(
        allocator_,
        current_locals_->size(),
        graph_->GetArtMethod(),
        instruction->GetDexPc(),
        instruction);
    environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals_));
    instruction->SetRawEnvironment(environment);
  }
}

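// Loads a reference from the given vreg and, if it may be null, emits an HNullCheck so that
// users of the returned value see a non-null reference.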
HInstruction* HInstructionBuilder::LoadNullCheckedLocal(uint32_t register_index, uint32_t dex_pc) {
  HInstruction* ref = LoadLocal(register_index, DataType::Type::kReference);
  if (!ref->CanBeNull()) {
    return ref;
  }

  HNullCheck* null_check = new (allocator_) HNullCheck(ref, dex_pc);
  AppendInstruction(null_check);
  return null_check;
}

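// After all blocks have been populated, add inputs to the loop header phis from each
// predecessor (pre-header and back edges). A phi whose vreg is undefined at some
// predecessor is marked dead instead.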
void HInstructionBuilder::SetLoopHeaderPhiInputs() {
  for (size_t i = loop_headers_.size(); i > 0; --i) {
    HBasicBlock* block = loop_headers_[i - 1];
    for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
      HPhi* phi = it.Current()->AsPhi();
      size_t vreg = phi->GetRegNumber();
      for (HBasicBlock* predecessor : block->GetPredecessors()) {
        HInstruction* value = ValueOfLocalAt(predecessor, vreg);
        if (value == nullptr) {
          // Vreg is undefined at this predecessor. Mark it dead and leave with
          // fewer inputs than predecessors. SsaChecker will fail if not removed.
          phi->SetDead();
          break;
        } else {
          phi->AddInput(value);
        }
      }
    }
  }
}

static bool IsBlockPopulated(HBasicBlock* block) {
  if (block->IsLoopHeader()) {
    // Suspend checks were inserted into loop headers during building of dominator tree.
    DCHECK(block->GetFirstInstruction()->IsSuspendCheck());
    return block->GetFirstInstruction() != block->GetLastInstruction();
  } else if (block->IsCatchBlock()) {
    // Nops were inserted into the beginning of catch blocks.
    DCHECK(block->GetFirstInstruction()->IsNop());
    return block->GetFirstInstruction() != block->GetLastInstruction();
  } else {
    return !block->GetInstructions().IsEmpty();
  }
}

bool HInstructionBuilder::Build() {
  DCHECK(code_item_accessor_.HasCodeItem());
  locals_for_.resize(
      graph_->GetBlocks().size(),
      ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));

  // Find locations where we want to generate extra stackmaps for native debugging.
  // This allows us to generate the info only at interesting points (for example,
  // at the start of a Java statement) rather than before every dex instruction.
  const bool native_debuggable = code_generator_ != nullptr &&
                                 code_generator_->GetCompilerOptions().GetNativeDebuggable();
  ArenaBitVector* native_debug_info_locations = nullptr;
  if (native_debuggable) {
    native_debug_info_locations = FindNativeDebugInfoLocations();
  }

  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    current_block_ = block;
    uint32_t block_dex_pc = current_block_->GetDexPc();

    InitializeBlockLocals();

    if (current_block_->IsEntryBlock()) {
      InitializeParameters();
      AppendInstruction(new (allocator_) HSuspendCheck(0u));
      if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
        AppendInstruction(new (allocator_) HMethodEntryHook(0u));
      }
      AppendInstruction(new (allocator_) HGoto(0u));
      continue;
    } else if (current_block_->IsExitBlock()) {
      AppendInstruction(new (allocator_) HExit());
      continue;
    } else if (current_block_->IsLoopHeader()) {
      HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(current_block_->GetDexPc());
      current_block_->GetLoopInformation()->SetSuspendCheck(suspend_check);
      // This is slightly odd because the loop header might not be empty (TryBoundary).
      // But we're still creating the environment with locals from the top of the block.
      InsertInstructionAtTop(suspend_check);
    } else if (current_block_->IsCatchBlock()) {
      // We add an environment emitting instruction at the beginning of each catch block, in order
      // to support try catch inlining.
      // This is slightly odd because the catch block might not be empty (TryBoundary).
      InsertInstructionAtTop(new (allocator_) HNop(block_dex_pc, /* needs_environment= */ true));
    }

    if (block_dex_pc == kNoDexPc || current_block_ != block_builder_->GetBlockAt(block_dex_pc)) {
      // Synthetic block that does not need to be populated.
      DCHECK(IsBlockPopulated(current_block_));
      continue;
    }

    DCHECK(!IsBlockPopulated(current_block_));

    for (const DexInstructionPcPair& pair : code_item_accessor_.InstructionsFrom(block_dex_pc)) {
      if (current_block_ == nullptr) {
        // The previous instruction ended this block.
        break;
      }

      const uint32_t dex_pc = pair.DexPc();
      if (dex_pc != block_dex_pc && FindBlockStartingAt(dex_pc) != nullptr) {
        // This dex_pc starts a new basic block.
        break;
      }

      if (current_block_->IsTryBlock() && IsThrowingDexInstruction(pair.Inst())) {
        PropagateLocalsToCatchBlocks();
      }

      if (native_debuggable && native_debug_info_locations->IsBitSet(dex_pc)) {
        AppendInstruction(new (allocator_) HNop(dex_pc, /* needs_environment= */ true));
      }

      // Note: There may be no Thread for gtests.
      DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
          << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
          << " " << pair.Inst().Name() << "@" << dex_pc;
      if (!ProcessDexInstruction(pair.Inst(), dex_pc)) {
        return false;
      }
      DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
          << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
          << " " << pair.Inst().Name() << "@" << dex_pc;
    }

    if (current_block_ != nullptr) {
      // Branching instructions clear current_block, so we know the last
      // instruction of the current block is not a branching instruction.
      // We add an unconditional Goto to the next block.
      DCHECK_EQ(current_block_->GetSuccessors().size(), 1u);
      AppendInstruction(new (allocator_) HGoto());
    }
  }

  SetLoopHeaderPhiInputs();

  return true;
}

void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
  DCHECK(!code_item_accessor_.HasCodeItem());
  DCHECK(method->IsIntrinsic());
  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CHECK(!method->IsSignaturePolymorphic());
  }

  locals_for_.resize(
      graph_->GetBlocks().size(),
      ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));

  // Fill the entry block. Do not add suspend check, we do not want a suspend
  // check in intrinsics; intrinsic methods are supposed to be fast.
  current_block_ = graph_->GetEntryBlock();
  InitializeBlockLocals();
  InitializeParameters();
  if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
    AppendInstruction(new (allocator_) HMethodEntryHook(0u));
  }
  AppendInstruction(new (allocator_) HGoto(0u));

  // Fill the body.
  current_block_ = current_block_->GetSingleSuccessor();
  InitializeBlockLocals();
  DCHECK(!IsBlockPopulated(current_block_));

  // Add the intermediate representation, if available, or invoke instruction.
  size_t in_vregs = graph_->GetNumberOfInVRegs();
  size_t number_of_arguments =
      in_vregs - std::count(current_locals_->end() - in_vregs, current_locals_->end(), nullptr);
  uint32_t method_idx = dex_compilation_unit_->GetDexMethodIndex();
  const char* shorty = dex_file_->GetMethodShorty(method_idx);
  RangeInstructionOperands operands(graph_->GetNumberOfVRegs() - in_vregs, in_vregs);
  if (!BuildSimpleIntrinsic(method, kNoDexPc, operands, shorty)) {
    // Some intrinsics without intermediate representation still yield a leaf method,
    // so build the invoke. Use HInvokeStaticOrDirect even for methods that would
    // normally use an HInvokeVirtual (sharpen the call).
    MethodReference target_method(dex_file_, method_idx);
    HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
        MethodLoadKind::kRuntimeCall,
        CodePtrLocation::kCallArtMethod,
        /* method_load_data= */ 0u
    };
    InvokeType invoke_type = dex_compilation_unit_->IsStatic() ? kStatic : kDirect;
    HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
        allocator_,
        number_of_arguments,
        return_type_,
        kNoDexPc,
        target_method,
        method,
        dispatch_info,
        invoke_type,
        target_method,
        HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
        !graph_->IsDebuggable());
    HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
  }

  // Add the return instruction.
  if (return_type_ == DataType::Type::kVoid) {
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), kNoDexPc));
    }
    AppendInstruction(new (allocator_) HReturnVoid());
  } else {
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      AppendInstruction(new (allocator_) HMethodExitHook(latest_result_, kNoDexPc));
    }
    AppendInstruction(new (allocator_) HReturn(latest_result_));
  }

  // Fill the exit block.
  DCHECK_EQ(current_block_->GetSingleSuccessor(), graph_->GetExitBlock());
  current_block_ = graph_->GetExitBlock();
  InitializeBlockLocals();
  AppendInstruction(new (allocator_) HExit());
}

ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() {
  ArenaBitVector* locations = ArenaBitVector::Create(local_allocator_,
                                                     code_item_accessor_.InsnsSizeInCodeUnits(),
                                                     /* expandable= */ false,
                                                     kArenaAllocGraphBuilder);
  locations->ClearAllBits();
  // The visitor gets called when the line number changes.
  // In other words, it marks the start of a new Java statement.
  code_item_accessor_.DecodeDebugPositionInfo([&](const DexFile::PositionInfo& entry) {
    locations->SetBit(entry.address_);
    return false;
  });
  // Instruction-specific tweaks.
  for (const DexInstructionPcPair& inst : code_item_accessor_) {
    switch (inst->Opcode()) {
      case Instruction::MOVE_EXCEPTION: {
        // Stop in native debugger after the exception has been moved.
        // The compiler also expects the move at the start of basic block so
        // we do not want to interfere by inserting native-debug-info before it.
        locations->ClearBit(inst.DexPc());
        DexInstructionIterator next = std::next(DexInstructionIterator(inst));
        DCHECK(next.DexPc() != inst.DexPc());
        if (next != code_item_accessor_.end()) {
          locations->SetBit(next.DexPc());
        }
        break;
      }
      default:
        break;
    }
  }
  return locations;
}

HInstruction* HInstructionBuilder::LoadLocal(uint32_t reg_number, DataType::Type type) const {
  HInstruction* value = (*current_locals_)[reg_number];
  DCHECK(value != nullptr);

  // If the operation requests a specific type, we make sure its input is of that type.
  if (type != value->GetType()) {
    if (DataType::IsFloatingPointType(type)) {
      value = ssa_builder_->GetFloatOrDoubleEquivalent(value, type);
    } else if (type == DataType::Type::kReference) {
      value = ssa_builder_->GetReferenceTypeEquivalent(value);
    }
    DCHECK(value != nullptr);
  }

  return value;
}

void HInstructionBuilder::UpdateLocal(uint32_t reg_number, HInstruction* stored_value) {
  DataType::Type stored_type = stored_value->GetType();
  DCHECK_NE(stored_type, DataType::Type::kVoid);

  // Storing into vreg `reg_number` may implicitly invalidate the surrounding
  // registers. Consider the following cases:
  // (1) Storing a wide value must overwrite previous values in both `reg_number`
  //     and `reg_number+1`. We store `nullptr` in `reg_number+1`.
  // (2) If vreg `reg_number-1` holds a wide value, writing into `reg_number`
  //     must invalidate it. We store `nullptr` in `reg_number-1`.
  // Consequently, storing a wide value into the high vreg of another wide value
  // will invalidate both `reg_number-1` and `reg_number+1`.

  if (reg_number != 0) {
    HInstruction* local_low = (*current_locals_)[reg_number - 1];
    if (local_low != nullptr && DataType::Is64BitType(local_low->GetType())) {
      // The vreg we are storing into was previously the high vreg of a pair.
      // We need to invalidate its low vreg.
      DCHECK((*current_locals_)[reg_number] == nullptr);
      (*current_locals_)[reg_number - 1] = nullptr;
    }
  }

  (*current_locals_)[reg_number] = stored_value;
  if (DataType::Is64BitType(stored_type)) {
    // We are storing a pair. Invalidate the instruction in the high vreg.
    (*current_locals_)[reg_number + 1] = nullptr;
  }
}

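// Creates HParameterValue instructions for the implicit 'this' (for instance methods) and for
// each declared parameter, and stores them in the locals that the dex code uses to read the
// incoming arguments.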
void HInstructionBuilder::InitializeParameters() {
  DCHECK(current_block_->IsEntryBlock());

  // outer_compilation_unit_ is null only when unit testing.
  if (outer_compilation_unit_ == nullptr) {
    return;
  }

  const char* shorty = dex_compilation_unit_->GetShorty();
  uint16_t number_of_parameters = graph_->GetNumberOfInVRegs();
  uint16_t locals_index = graph_->GetNumberOfLocalVRegs();
  uint16_t parameter_index = 0;

  const dex::MethodId& referrer_method_id =
      dex_file_->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
  if (!dex_compilation_unit_->IsStatic()) {
    // Add the implicit 'this' argument, not expressed in the signature.
    HParameterValue* parameter = new (allocator_) HParameterValue(*dex_file_,
                                                                  referrer_method_id.class_idx_,
                                                                  parameter_index++,
                                                                  DataType::Type::kReference,
                                                                  /* is_this= */ true);
    AppendInstruction(parameter);
    UpdateLocal(locals_index++, parameter);
    number_of_parameters--;
    current_this_parameter_ = parameter;
  } else {
    DCHECK(current_this_parameter_ == nullptr);
  }

  const dex::ProtoId& proto = dex_file_->GetMethodPrototype(referrer_method_id);
  const dex::TypeList* arg_types = dex_file_->GetProtoParameters(proto);
  for (int i = 0, shorty_pos = 1; i < number_of_parameters; i++) {
    HParameterValue* parameter = new (allocator_) HParameterValue(
        *dex_file_,
        arg_types->GetTypeItem(shorty_pos - 1).type_idx_,
        parameter_index++,
        DataType::FromShorty(shorty[shorty_pos]),
        /* is_this= */ false);
    ++shorty_pos;
    AppendInstruction(parameter);
    // Store the parameter value in the local that the dex code will use
    // to reference that parameter.
    UpdateLocal(locals_index++, parameter);
    if (DataType::Is64BitType(parameter->GetType())) {
      i++;
      locals_index++;
      parameter_index++;
    }
  }
}

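// Builds a compare-and-branch for the two-register if-<cond> instructions (dex format 22t):
// compares vA with vB and branches on the result.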
template<typename T>
void HInstructionBuilder::If_22t(const Instruction& instruction, uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
  HInstruction* second = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
  T* comparison = new (allocator_) T(first, second, dex_pc);
  AppendInstruction(comparison);
  AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
  current_block_ = nullptr;
}

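// Builds a compare-against-zero branch for the if-<cond>z instructions (dex format 21t).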
template<typename T>
void HInstructionBuilder::If_21t(const Instruction& instruction, uint32_t dex_pc) {
  HInstruction* value = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
  T* comparison = new (allocator_) T(value, graph_->GetIntConstant(0, dex_pc), dex_pc);
  AppendInstruction(comparison);
  AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
  current_block_ = nullptr;
}

template<typename T>
void HInstructionBuilder::Unop_12x(const Instruction& instruction,
                                   DataType::Type type,
                                   uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), type);
  AppendInstruction(new (allocator_) T(type, first, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

void HInstructionBuilder::Conversion_12x(const Instruction& instruction,
                                         DataType::Type input_type,
                                         DataType::Type result_type,
                                         uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), input_type);
  AppendInstruction(new (allocator_) HTypeConversion(result_type, first, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

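// The Binop_* helpers below build binary operations for the register-based dex formats:
// 23x takes three registers (vAA = vBB op vCC) and 12x operates in place (vA = vA op vB);
// the *_shift variants load the shift amount as an int32 regardless of the operand type.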
template<typename T>
void HInstructionBuilder::Binop_23x(const Instruction& instruction,
                                    DataType::Type type,
                                    uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), type);
  HInstruction* second = LoadLocal(instruction.VRegC(), type);
  AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

template<typename T>
void HInstructionBuilder::Binop_23x_shift(const Instruction& instruction,
                                          DataType::Type type,
                                          uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), type);
  HInstruction* second = LoadLocal(instruction.VRegC(), DataType::Type::kInt32);
  AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

void HInstructionBuilder::Binop_23x_cmp(const Instruction& instruction,
                                        DataType::Type type,
                                        ComparisonBias bias,
                                        uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), type);
  HInstruction* second = LoadLocal(instruction.VRegC(), type);
  AppendInstruction(new (allocator_) HCompare(type, first, second, bias, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

template<typename T>
void HInstructionBuilder::Binop_12x_shift(const Instruction& instruction,
                                          DataType::Type type,
                                          uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegA(), type);
  HInstruction* second = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
  AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

template<typename T>
void HInstructionBuilder::Binop_12x(const Instruction& instruction,
                                    DataType::Type type,
                                    uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegA(), type);
  HInstruction* second = LoadLocal(instruction.VRegB(), type);
  AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

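// The 22s and 22b variants combine a register with an inline constant (16-bit and 8-bit
// literals, respectively); `reverse` swaps the operands, e.g. for rsub-int.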
template<typename T>
void HInstructionBuilder::Binop_22s(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
  HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22s(), dex_pc);
  if (reverse) {
    std::swap(first, second);
  }
  AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

template<typename T>
void HInstructionBuilder::Binop_22b(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
  HInstruction* first = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
  HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22b(), dex_pc);
  if (reverse) {
    std::swap(first, second);
  }
  AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
  UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
}

// Does the method being compiled need any constructor barriers inserted?
// (Always 'false' for methods that aren't <init>.)
static bool RequiresConstructorBarrier(const DexCompilationUnit* cu) {
  // Can be null in unit tests only.
  if (UNLIKELY(cu == nullptr)) {
    return false;
  }

  // Constructor barriers are applicable only for <init> methods.
  if (LIKELY(!cu->IsConstructor() || cu->IsStatic())) {
    return false;
  }

  return cu->RequiresConstructorBarrier();
}

// Returns true if `block` has only one successor which starts at the next
// dex_pc after `instruction` at `dex_pc`.
static bool IsFallthroughInstruction(const Instruction& instruction,
                                     uint32_t dex_pc,
                                     HBasicBlock* block) {
  uint32_t next_dex_pc = dex_pc + instruction.SizeInCodeUnits();
  return block->GetSingleSuccessor()->GetDexPc() == next_dex_pc;
}

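// Lowers a packed or sparse switch: either a chain of HEqual + HIf comparisons (decision tree)
// or a single HPackedSwitch, depending on the switch table.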
void HInstructionBuilder::BuildSwitch(const Instruction& instruction, uint32_t dex_pc) {
  HInstruction* value = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
  DexSwitchTable table(instruction, dex_pc);

  if (table.GetNumEntries() == 0) {
    // Empty Switch. Code falls through to the next block.
    DCHECK(IsFallthroughInstruction(instruction, dex_pc, current_block_));
    AppendInstruction(new (allocator_) HGoto(dex_pc));
  } else if (table.ShouldBuildDecisionTree()) {
    for (DexSwitchTableIterator it(table); !it.Done(); it.Advance()) {
      HInstruction* case_value = graph_->GetIntConstant(it.CurrentKey(), dex_pc);
      HEqual* comparison = new (allocator_) HEqual(value, case_value, dex_pc);
      AppendInstruction(comparison);
      AppendInstruction(new (allocator_) HIf(comparison, dex_pc));

      if (!it.IsLast()) {
        current_block_ = FindBlockStartingAt(it.GetDexPcForCurrentIndex());
      }
    }
  } else {
    AppendInstruction(
        new (allocator_) HPackedSwitch(table.GetEntryAt(0), table.GetNumEntries(), value, dex_pc));
  }

  current_block_ = nullptr;
}

void HInstructionBuilder::BuildReturn(const Instruction& instruction,
                                      DataType::Type type,
                                      uint32_t dex_pc) {
  if (type == DataType::Type::kVoid) {
    // Only <init> (which is a return-void) could possibly have a constructor fence.
    // This may insert additional redundant constructor fences from the super constructors.
    // TODO: remove redundant constructor fences (b/36656456).
    if (RequiresConstructorBarrier(dex_compilation_unit_)) {
      // Compiling instance constructor.
      DCHECK_STREQ("<init>", graph_->GetMethodName());

      HInstruction* fence_target = current_this_parameter_;
      DCHECK(fence_target != nullptr);

      AppendInstruction(new (allocator_) HConstructorFence(fence_target, dex_pc, allocator_));
      MaybeRecordStat(
          compilation_stats_,
          MethodCompilationStat::kConstructorFenceGeneratedFinal);
    }
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      // Return value is not used for void functions. We pass NullConstant to
      // avoid special cases when generating code.
      AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), dex_pc));
    }
    AppendInstruction(new (allocator_) HReturnVoid(dex_pc));
  } else {
    DCHECK(!RequiresConstructorBarrier(dex_compilation_unit_));
    HInstruction* value = LoadLocal(instruction.VRegA(), type);
    if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
      AppendInstruction(new (allocator_) HMethodExitHook(value, dex_pc));
    }
    AppendInstruction(new (allocator_) HReturn(value, dex_pc));
  }
  current_block_ = nullptr;
}

static InvokeType GetInvokeTypeFromOpCode(Instruction::Code opcode) {
  switch (opcode) {
    case Instruction::INVOKE_STATIC:
    case Instruction::INVOKE_STATIC_RANGE:
      return kStatic;
    case Instruction::INVOKE_DIRECT:
    case Instruction::INVOKE_DIRECT_RANGE:
      return kDirect;
    case Instruction::INVOKE_VIRTUAL:
    case Instruction::INVOKE_VIRTUAL_RANGE:
      return kVirtual;
    case Instruction::INVOKE_INTERFACE:
    case Instruction::INVOKE_INTERFACE_RANGE:
      return kInterface;
    case Instruction::INVOKE_SUPER_RANGE:
    case Instruction::INVOKE_SUPER:
      return kSuper;
    default:
      LOG(FATAL) << "Unexpected invoke opcode: " << opcode;
      UNREACHABLE();
  }
}

// Try to resolve a method using the class linker. Return null if a method could
// not be resolved or the resolved method cannot be used for some reason.
// Also retrieve method data needed for creating the invoke intermediate
// representation while we hold the mutator lock here.
static ArtMethod* ResolveMethod(uint16_t method_idx,
                                ArtMethod* referrer,
                                const DexCompilationUnit& dex_compilation_unit,
                                /*inout*/InvokeType* invoke_type,
                                /*out*/MethodReference* resolved_method_info,
                                /*out*/uint16_t* imt_or_vtable_index,
                                /*out*/bool* is_string_constructor) {
  ScopedObjectAccess soa(Thread::Current());

  ClassLinker* class_linker = dex_compilation_unit.GetClassLinker();
  Handle<mirror::ClassLoader> class_loader = dex_compilation_unit.GetClassLoader();

  ArtMethod* resolved_method =
      class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
          method_idx,
          dex_compilation_unit.GetDexCache(),
          class_loader,
          referrer,
          *invoke_type);

  if (UNLIKELY(resolved_method == nullptr)) {
    // Clean up any exception left by type resolution.
    soa.Self()->ClearException();
    return nullptr;
  }
  DCHECK(!soa.Self()->IsExceptionPending());

  // The referrer may be unresolved for AOT if we're compiling a class that cannot be
  // resolved because, for example, we don't find a superclass in the classpath.
  if (referrer == nullptr) {
    // The class linker cannot check access without a referrer, so we have to do it.
    // Check if the declaring class or referencing class is accessible.
    SamePackageCompare same_package(dex_compilation_unit);
    ObjPtr<mirror::Class> declaring_class = resolved_method->GetDeclaringClass();
    bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
    if (!declaring_class_accessible) {
      // It is possible to access members from an inaccessible superclass
      // by referencing them through an accessible subclass.
      ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
          dex_compilation_unit.GetDexFile()->GetMethodId(method_idx).class_idx_,
          dex_compilation_unit.GetDexCache().Get(),
          class_loader.Get());
      DCHECK(referenced_class != nullptr);  // Must have been resolved when resolving the method.
      if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
        return nullptr;
      }
    }
    // Check whether the method itself is accessible.
    // Since the referrer is unresolved but the method is resolved, it cannot be
    // inside the same class, so a private method is known to be inaccessible.
    // And without a resolved referrer, we cannot check for protected member access
    // in a superclass, so we only handle access to public members or members within
    // the same package.
    if (resolved_method->IsPrivate() ||
        (!resolved_method->IsPublic() && !declaring_class_accessible)) {
      return nullptr;
    }
  }

  // We have to special case the invoke-super case, as ClassLinker::ResolveMethod does not.
  // We need to look at the referrer's super class vtable. We need to do this to know if we need to
  // make this an invoke-unresolved to handle cross-dex invokes or abstract super methods, both of
  // which require runtime handling.
  if (*invoke_type == kSuper) {
    if (referrer == nullptr) {
      // We could not determine the method's class, so we need to wait until runtime.
      DCHECK(Runtime::Current()->IsAotCompiler());
      return nullptr;
    }
    ArtMethod* actual_method = FindSuperMethodToCall</*access_check=*/true>(
        method_idx, resolved_method, referrer, soa.Self());
    if (actual_method == nullptr) {
      // Clean up any exception left by method resolution.
      soa.Self()->ClearException();
      return nullptr;
    }
    if (!actual_method->IsInvokable()) {
      // Fail if the actual method cannot be invoked. Otherwise, the runtime resolution stub
      // could resolve the callee to the wrong method.
      return nullptr;
    }
    // Call GetCanonicalMethod in case the resolved method is a copy: for super calls, the encoding
    // of ArtMethod in BSS relies on not having copies there.
    resolved_method = actual_method->GetCanonicalMethod(class_linker->GetImagePointerSize());
  }

  if (*invoke_type == kInterface) {
    if (resolved_method->GetDeclaringClass()->IsObjectClass()) {
      // If the resolved method is from j.l.Object, emit a virtual call instead.
      // The IMT conflict stub only handles interface methods.
      *invoke_type = kVirtual;
    } else {
      DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
    }
  }

  *resolved_method_info =
      MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
  if (*invoke_type == kVirtual) {
    // For HInvokeVirtual we need the vtable index.
    *imt_or_vtable_index = resolved_method->GetVtableIndex();
  } else if (*invoke_type == kInterface) {
    // For HInvokeInterface we need the IMT index.
    *imt_or_vtable_index = resolved_method->GetImtIndex();
    DCHECK_EQ(*imt_or_vtable_index, ImTable::GetImtIndex(resolved_method));
  }

  *is_string_constructor = resolved_method->IsStringConstructor();

  return resolved_method;
}

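// Builds the HIR for an invoke-* instruction: resolves the callee, replaces String.<init>
// calls with a StringFactory invoke, tries to emit a simple intrinsic, and otherwise creates
// the matching HInvokeStaticOrDirect, HInvokeVirtual or HInvokeInterface.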
bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
                                      uint32_t dex_pc,
                                      uint32_t method_idx,
                                      const InstructionOperands& operands) {
  InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode());
  const char* shorty = dex_file_->GetMethodShorty(method_idx);
  DataType::Type return_type = DataType::FromShorty(shorty[0]);

  // Remove the return type from the 'proto'.
  size_t number_of_arguments = strlen(shorty) - 1;
  if (invoke_type != kStatic) {  // instance call
    // One extra argument for 'this'.
    number_of_arguments++;
  }

  MethodReference resolved_method_reference(nullptr, 0u);
  bool is_string_constructor = false;
  uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
  ArtMethod* resolved_method = ResolveMethod(method_idx,
                                             graph_->GetArtMethod(),
                                             *dex_compilation_unit_,
                                             &invoke_type,
                                             &resolved_method_reference,
                                             &imt_or_vtable_index,
                                             &is_string_constructor);

  MethodReference method_reference(&graph_->GetDexFile(), method_idx);
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(!Thread::Current()->IsExceptionPending());
    MaybeRecordStat(compilation_stats_,
                    MethodCompilationStat::kUnresolvedMethod);
    HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
                                                         number_of_arguments,
                                                         return_type,
                                                         dex_pc,
                                                         method_reference,
                                                         invoke_type);
    return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ true);
  }

  // Replace calls to String.<init> with StringFactory.
  if (is_string_constructor) {
    uint32_t string_init_entry_point = WellKnownClasses::StringInitToEntryPoint(resolved_method);
    HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
        MethodLoadKind::kStringInit,
        CodePtrLocation::kCallArtMethod,
        dchecked_integral_cast<uint64_t>(string_init_entry_point)
    };
    // We pass null for the resolved_method to ensure optimizations
    // don't rely on it.
    HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
        allocator_,
        number_of_arguments - 1,
        /* return_type= */ DataType::Type::kReference,
        dex_pc,
        method_reference,
        /* resolved_method= */ nullptr,
        dispatch_info,
        invoke_type,
        resolved_method_reference,
        HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit,
        !graph_->IsDebuggable());
    return HandleStringInit(invoke, operands, shorty);
  }

  // Potential class initialization check, in the case of a static method call.
  HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement =
      HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
  HClinitCheck* clinit_check = nullptr;
  if (invoke_type == kStatic) {
    clinit_check = ProcessClinitCheckForInvoke(dex_pc, resolved_method, &clinit_check_requirement);
  }

  // Try to build an HIR replacement for the intrinsic.
  if (UNLIKELY(resolved_method->IsIntrinsic()) && !graph_->IsDebuggable()) {
    // All intrinsics are in the primary boot image, so their class can always be referenced
    // and we do not need to rely on the implicit class initialization check. The class should
    // be initialized but we do not require that here.
    DCHECK_NE(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
    if (BuildSimpleIntrinsic(resolved_method, dex_pc, operands, shorty)) {
      return true;
    }
  }

  HInvoke* invoke = nullptr;
  if (invoke_type == kDirect || invoke_type == kStatic || invoke_type == kSuper) {
    // For sharpening, we create another MethodReference, to account for the
    // kSuper case below where we cannot find a dex method index.
    bool has_method_id = true;
    if (invoke_type == kSuper) {
      uint32_t dex_method_index = method_reference.index;
      if (IsSameDexFile(*resolved_method_reference.dex_file,
                        *dex_compilation_unit_->GetDexFile())) {
        // Update the method index to the one resolved. Note that this may be a no-op if
        // we resolved to the method referenced by the instruction.
        dex_method_index = resolved_method_reference.index;
      } else {
        // Try to find a dex method index in this caller's dex file.
        ScopedObjectAccess soa(Thread::Current());
        dex_method_index = resolved_method->FindDexMethodIndexInOtherDexFile(
            *dex_compilation_unit_->GetDexFile(), method_idx);
      }
      if (dex_method_index == dex::kDexNoIndex) {
        has_method_id = false;
      } else {
        method_reference.index = dex_method_index;
      }
    }
    HInvokeStaticOrDirect::DispatchInfo dispatch_info =
        HSharpening::SharpenLoadMethod(resolved_method,
                                       has_method_id,
                                       /* for_interface_call= */ false,
                                       code_generator_);
    if (dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
      graph_->SetHasDirectCriticalNativeCall(true);
    }
    invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
                                                    number_of_arguments,
                                                    return_type,
                                                    dex_pc,
                                                    method_reference,
                                                    resolved_method,
                                                    dispatch_info,
                                                    invoke_type,
                                                    resolved_method_reference,
                                                    clinit_check_requirement,
                                                    !graph_->IsDebuggable());
    if (clinit_check != nullptr) {
      // Add the class initialization check as last input of `invoke`.
      DCHECK_EQ(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit);
      size_t clinit_check_index = invoke->InputCount() - 1u;
      DCHECK(invoke->InputAt(clinit_check_index) == nullptr);
      invoke->SetArgumentAt(clinit_check_index, clinit_check);
    }
  } else if (invoke_type == kVirtual) {
    invoke = new (allocator_) HInvokeVirtual(allocator_,
                                             number_of_arguments,
                                             return_type,
                                             dex_pc,
                                             method_reference,
                                             resolved_method,
                                             resolved_method_reference,
                                             /*vtable_index=*/ imt_or_vtable_index,
                                             !graph_->IsDebuggable());
  } else {
    DCHECK_EQ(invoke_type, kInterface);
    if (kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
    }
    MethodLoadKind load_kind = HSharpening::SharpenLoadMethod(
        resolved_method,
        /* has_method_id= */ true,
        /* for_interface_call= */ true,
        code_generator_)
            .method_load_kind;
    invoke = new (allocator_) HInvokeInterface(allocator_,
                                               number_of_arguments,
                                               return_type,
                                               dex_pc,
                                               method_reference,
                                               resolved_method,
                                               resolved_method_reference,
                                               /*imt_index=*/ imt_or_vtable_index,
                                               load_kind,
                                               !graph_->IsDebuggable());
  }
  return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
}

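// Returns true if the VarHandle accessor returns a reference whose type is not checked by the
// intrinsic itself, so the caller has to insert an explicit type check.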
static bool VarHandleAccessorNeedsReturnTypeCheck(HInvoke* invoke, DataType::Type return_type) {
  mirror::VarHandle::AccessModeTemplate access_mode_template =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());

  switch (access_mode_template) {
    case mirror::VarHandle::AccessModeTemplate::kGet:
    case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate:
    case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange:
      return return_type == DataType::Type::kReference;
    case mirror::VarHandle::AccessModeTemplate::kSet:
    case mirror::VarHandle::AccessModeTemplate::kCompareAndSet:
      return false;
  }
}

1188 // This function initializes `VarHandleOptimizations`, does a number of static checks and disables
1189 // the intrinsic if some of the checks fail. This is necessary for the code generator to work (for
1190 // both the baseline and the optimizing compiler).
DecideVarHandleIntrinsic(HInvoke * invoke)1191 static void DecideVarHandleIntrinsic(HInvoke* invoke) {
1192 switch (invoke->GetIntrinsic()) {
1193 case Intrinsics::kVarHandleCompareAndExchange:
1194 case Intrinsics::kVarHandleCompareAndExchangeAcquire:
1195 case Intrinsics::kVarHandleCompareAndExchangeRelease:
1196 case Intrinsics::kVarHandleCompareAndSet:
1197 case Intrinsics::kVarHandleGet:
1198 case Intrinsics::kVarHandleGetAcquire:
1199 case Intrinsics::kVarHandleGetAndAdd:
1200 case Intrinsics::kVarHandleGetAndAddAcquire:
1201 case Intrinsics::kVarHandleGetAndAddRelease:
1202 case Intrinsics::kVarHandleGetAndBitwiseAnd:
1203 case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
1204 case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
1205 case Intrinsics::kVarHandleGetAndBitwiseOr:
1206 case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
1207 case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
1208 case Intrinsics::kVarHandleGetAndBitwiseXor:
1209 case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
1210 case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
1211 case Intrinsics::kVarHandleGetAndSet:
1212 case Intrinsics::kVarHandleGetAndSetAcquire:
1213 case Intrinsics::kVarHandleGetAndSetRelease:
1214 case Intrinsics::kVarHandleGetOpaque:
1215 case Intrinsics::kVarHandleGetVolatile:
1216 case Intrinsics::kVarHandleSet:
1217 case Intrinsics::kVarHandleSetOpaque:
1218 case Intrinsics::kVarHandleSetRelease:
1219 case Intrinsics::kVarHandleSetVolatile:
1220 case Intrinsics::kVarHandleWeakCompareAndSet:
1221 case Intrinsics::kVarHandleWeakCompareAndSetAcquire:
1222 case Intrinsics::kVarHandleWeakCompareAndSetPlain:
1223 case Intrinsics::kVarHandleWeakCompareAndSetRelease:
1224 break;
1225 default:
1226 return; // Not a VarHandle intrinsic, skip.
1227 }
1228
1229 DCHECK(invoke->IsInvokePolymorphic());
1230 VarHandleOptimizations optimizations(invoke);
1231
1232 // Do only simple static checks here (those for which we have enough information). More complex
1233 // checks should be done in instruction simplifier, which runs after other optimization passes
1234 // that may provide useful information.
1235
1236 size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
1237 if (expected_coordinates_count > 2u) {
1238 optimizations.SetDoNotIntrinsify();
1239 return;
1240 }
1241 if (expected_coordinates_count != 0u) {
1242 // Except for static fields (no coordinates), the first coordinate must be a reference.
1243 // Do not intrinsify if the reference is null as we would always go to slow path anyway.
1244 HInstruction* object = invoke->InputAt(1);
1245 if (object->GetType() != DataType::Type::kReference || object->IsNullConstant()) {
1246 optimizations.SetDoNotIntrinsify();
1247 return;
1248 }
1249 }
1250 if (expected_coordinates_count == 2u) {
1251 // For arrays and views, the second coordinate must be convertible to `int`.
1252 // In this context, `boolean` is not convertible but we have to look at the shorty
1253 // as compiler transformations can give the invoke a valid boolean input.
1254 DataType::Type index_type = GetDataTypeFromShorty(invoke, 2);
1255 if (index_type == DataType::Type::kBool ||
1256 DataType::Kind(index_type) != DataType::Type::kInt32) {
1257 optimizations.SetDoNotIntrinsify();
1258 return;
1259 }
1260 }
1261
1262 uint32_t number_of_arguments = invoke->GetNumberOfArguments();
1263 DataType::Type return_type = invoke->GetType();
1264 mirror::VarHandle::AccessModeTemplate access_mode_template =
1265 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1266 switch (access_mode_template) {
1267 case mirror::VarHandle::AccessModeTemplate::kGet:
1268 // The return type should be the same as varType, so it shouldn't be void.
1269 if (return_type == DataType::Type::kVoid) {
1270 optimizations.SetDoNotIntrinsify();
1271 return;
1272 }
1273 break;
1274 case mirror::VarHandle::AccessModeTemplate::kSet:
1275 if (return_type != DataType::Type::kVoid) {
1276 optimizations.SetDoNotIntrinsify();
1277 return;
1278 }
1279 break;
1280 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet: {
1281 if (return_type != DataType::Type::kBool) {
1282 optimizations.SetDoNotIntrinsify();
1283 return;
1284 }
1285 uint32_t expected_value_index = number_of_arguments - 2;
1286 uint32_t new_value_index = number_of_arguments - 1;
1287 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1288 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1289 if (expected_value_type != new_value_type) {
1290 optimizations.SetDoNotIntrinsify();
1291 return;
1292 }
1293 break;
1294 }
1295 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange: {
1296 uint32_t expected_value_index = number_of_arguments - 2;
1297 uint32_t new_value_index = number_of_arguments - 1;
1298 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1299 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1300 if (expected_value_type != new_value_type || return_type != expected_value_type) {
1301 optimizations.SetDoNotIntrinsify();
1302 return;
1303 }
1304 break;
1305 }
1306 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate: {
1307 DataType::Type value_type = GetDataTypeFromShorty(invoke, number_of_arguments - 1);
1308 if (IsVarHandleGetAndAdd(invoke) &&
1309 (value_type == DataType::Type::kReference || value_type == DataType::Type::kBool)) {
1310 // We should only add numerical types.
1311 //
1312 // For byte array views, floating-point types are not allowed; see the javadoc comments for
1313 // java.lang.invoke.MethodHandles.byteArrayViewVarHandle(). But ART treats them as numeric
1314 // types in ByteArrayViewVarHandle::Access(). Consequently we do generate intrinsic code,
1315 // but it always fails the access mode check at runtime.
1316 optimizations.SetDoNotIntrinsify();
1317 return;
1318 } else if (IsVarHandleGetAndBitwiseOp(invoke) && !DataType::IsIntegralType(value_type)) {
1319 // We can only apply operators to bitwise integral types.
1320 // Note that bitwise VarHandle operations accept a non-integral boolean type and
1321 // perform the appropriate logical operation. However, the result is the same as
1322 // using the bitwise operation on our boolean representation and this fits well
1323 // with DataType::IsIntegralType() treating the compiler type kBool as integral.
1324 optimizations.SetDoNotIntrinsify();
1325 return;
1326 }
1327 if (value_type != return_type) {
1328 optimizations.SetDoNotIntrinsify();
1329 return;
1330 }
1331 break;
1332 }
1333 }
1334 }
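// Illustrative note on DecideVarHandleIntrinsic() above (a sketch added for clarity, not part
// of the original source): the static checks mirror the shapes of VarHandle accessor calls as
// written in Java. Assuming a VarHandle `VH` for an `int` field of a hypothetical class `Foo`:
//
//   int v = (int) VH.get(foo);                 // kGet: non-void return, one coordinate.
//   VH.set(foo, 42);                           // kSet: void return.
//   boolean ok = VH.compareAndSet(foo, 1, 2);  // kCompareAndSet: boolean return, expected and
//                                              // new values of the same type.
//   int old = (int) VH.getAndAdd(foo, 1);      // kGetAndUpdate: return type matches the value
//                                              // type; not applicable to references.
//
// Calls that do not fit these shapes (wrong return type, mismatched value types, more than two
// coordinates) are marked do-not-intrinsify and fall back to the regular invoke.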
1335
1336 bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
1337 uint32_t method_idx,
1338 dex::ProtoIndex proto_idx,
1339 const InstructionOperands& operands) {
1340 const char* shorty = dex_file_->GetShorty(proto_idx);
1341 DCHECK_EQ(1 + ArtMethod::NumArgRegisters(shorty), operands.GetNumberOfOperands());
1342 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1343 size_t number_of_arguments = strlen(shorty);
1344 // To avoid duplicating code, we use ResolveMethod, which is also used in
1345 // BuildInvoke. As such, we need to provide is_string_constructor even
1346 // though we don't need it afterwards.
1347 InvokeType invoke_type = InvokeType::kPolymorphic;
1348 bool is_string_constructor = false;
1349 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1350 MethodReference resolved_method_reference(nullptr, 0u);
1351 ArtMethod* resolved_method = ResolveMethod(method_idx,
1352 graph_->GetArtMethod(),
1353 *dex_compilation_unit_,
1354 &invoke_type,
1355 &resolved_method_reference,
1356 &imt_or_vtable_index,
1357 &is_string_constructor);
1358
1359 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1360 HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
1361 number_of_arguments,
1362 return_type,
1363 dex_pc,
1364 method_reference,
1365 resolved_method,
1366 resolved_method_reference,
1367 proto_idx,
1368 !graph_->IsDebuggable());
1369 if (!HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false)) {
1370 return false;
1371 }
1372
1373 if (invoke->GetIntrinsic() != Intrinsics::kNone &&
1374 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvoke &&
1375 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvokeExact &&
1376 VarHandleAccessorNeedsReturnTypeCheck(invoke, return_type)) {
1377 // Type check is needed because VarHandle intrinsics do not type check the retrieved reference.
1378 ScopedObjectAccess soa(Thread::Current());
1379 ArtMethod* referrer = graph_->GetArtMethod();
1380 dex::TypeIndex return_type_index =
1381 referrer->GetDexFile()->GetProtoId(proto_idx).return_type_idx_;
1382
1383 BuildTypeCheck(/* is_instance_of= */ false, invoke, return_type_index, dex_pc);
1384 latest_result_ = current_block_->GetLastInstruction();
1385 }
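// Illustrative example (an assumption for clarity, not from the original source), using a
// hypothetical `Holder` class with a `String name` field:
//
//   static final VarHandle VH = MethodHandles.lookup()
//       .findVarHandle(Holder.class, "name", String.class);
//   String s = (String) VH.getVolatile(holder);
//
// The intrinsic returns a raw reference without checking it against the call site's declared
// return type, so the type check built above is what enforces the cast to String.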
1386
1387 DecideVarHandleIntrinsic(invoke);
1388
1389 return true;
1390 }
1391
1392
1393 bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
1394 uint32_t call_site_idx,
1395 const InstructionOperands& operands) {
1396 dex::ProtoIndex proto_idx = dex_file_->GetProtoIndexForCallSite(call_site_idx);
1397 const char* shorty = dex_file_->GetShorty(proto_idx);
1398 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1399 size_t number_of_arguments = strlen(shorty) - 1;
1400 // HInvokeCustom takes a kDexNoIndex method reference.
1401 MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
1402 HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
1403 number_of_arguments,
1404 call_site_idx,
1405 return_type,
1406 dex_pc,
1407 method_reference,
1408 !graph_->IsDebuggable());
1409 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1410 }
1411
1412 HNewInstance* HInstructionBuilder::BuildNewInstance(dex::TypeIndex type_index, uint32_t dex_pc) {
1413 ScopedObjectAccess soa(Thread::Current());
1414
1415 HLoadClass* load_class = BuildLoadClass(type_index, dex_pc);
1416
1417 HInstruction* cls = load_class;
1418 Handle<mirror::Class> klass = load_class->GetClass();
1419
1420 if (!IsInitialized(klass.Get())) {
1421 cls = new (allocator_) HClinitCheck(load_class, dex_pc);
1422 AppendInstruction(cls);
1423 }
1424
1425 // Only the access check entrypoint handles the finalizable class case. If we
1426 // need access checks, then we haven't resolved the method and the class may
1427 // again be finalizable.
1428 QuickEntrypointEnum entrypoint = kQuickAllocObjectInitialized;
1429 if (load_class->NeedsAccessCheck() ||
1430 klass == nullptr || // Finalizable/instantiable is unknown.
1431 klass->IsFinalizable() ||
1432 klass.Get() == klass->GetClass() || // Classes cannot be allocated in code
1433 !klass->IsInstantiable()) {
1434 entrypoint = kQuickAllocObjectWithChecks;
1435 }
1436 // We will always be able to resolve the string class since it is in the BCP.
1437 if (!klass.IsNull() && klass->IsStringClass()) {
1438 entrypoint = kQuickAllocStringObject;
1439 }
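// Illustrative summary (an assumption for clarity, not part of the original comments); at the
// Java level the entrypoint choice above roughly corresponds to:
//
//   new Object()         // resolved, not finalizable       -> kQuickAllocObjectInitialized
//   new HasFinalizer()   // hypothetical class overriding   -> kQuickAllocObjectWithChecks
//                        // Object.finalize()
//   new String(chars)    // java.lang.String                -> kQuickAllocStringObject
//
// with any unresolved or access-checked class also taking the checked entrypoint.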
1440
1441 // Consider classes we haven't resolved as potentially finalizable.
1442 bool finalizable = (klass == nullptr) || klass->IsFinalizable();
1443
1444 HNewInstance* new_instance = new (allocator_) HNewInstance(
1445 cls,
1446 dex_pc,
1447 type_index,
1448 *dex_compilation_unit_->GetDexFile(),
1449 finalizable,
1450 entrypoint);
1451 AppendInstruction(new_instance);
1452
1453 return new_instance;
1454 }
1455
1456 void HInstructionBuilder::BuildConstructorFenceForAllocation(HInstruction* allocation) {
1457 DCHECK(allocation != nullptr &&
1458 (allocation->IsNewInstance() ||
1459 allocation->IsNewArray())); // corresponding to "new" keyword in JLS.
1460
1461 if (allocation->IsNewInstance()) {
1462 // STRING SPECIAL HANDLING:
1463 // -------------------------------
1464 // Strings have a real HNewInstance node, but it always ends up with 0 uses:
1465 // every use of a String HNewInstance is transformed to take the result of the
1466 // invoke to StringFactory as its input instead.
1467 //
1468 // Do not emit an HConstructorFence here since it can inhibit some String new-instance
1469 // optimizations (to pass checker tests that rely on those optimizations).
1470 HNewInstance* new_inst = allocation->AsNewInstance();
1471 HLoadClass* load_class = new_inst->GetLoadClass();
1472
1473 Thread* self = Thread::Current();
1474 ScopedObjectAccess soa(self);
1475 StackHandleScope<1> hs(self);
1476 Handle<mirror::Class> klass = load_class->GetClass();
1477 if (klass != nullptr && klass->IsStringClass()) {
1478 return;
1479 // Note: Do not use allocation->IsStringAlloc, which requires valid
1480 // ReferenceTypeInfo; that is not available until after reference type
1481 // propagation (and the instruction builder runs too early for that).
1482 }
1483 // (In terms of correctness, the StringFactory needs to provide its own
1484 // default initialization barrier, see below.)
1485 }
1486
1487 // JLS 17.4.5 "Happens-before Order" describes:
1488 //
1489 // The default initialization of any object happens-before any other actions (other than
1490 // default-writes) of a program.
1491 //
1492 // In our implementation the default initialization of an object to type T means
1493 // setting all of its initial data (object[0..size)) to 0, and setting the
1494 // object's class header (i.e. object.getClass() == T.class).
1495 //
1496 // In practice this fence ensures that the writes to the object header
1497 // are visible to other threads if this object escapes the current thread.
1498 // (and in theory the zero-initialization, but that happens automatically
1499 // when new memory pages are mapped in by the OS).
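// A minimal example of the guarantee (an illustration added here, not part of the original
// comments), assuming a hypothetical `Holder` class and a static field `shared`:
//
//   // Thread A:
//   shared = new Holder();          // The fence keeps the header/zero-initializing stores
//                                   // ordered before the store that publishes `shared`.
//   // Thread B:
//   Holder h = shared;
//   if (h != null) h.getClass();    // Must observe a fully default-initialized object.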
1500 HConstructorFence* ctor_fence =
1501 new (allocator_) HConstructorFence(allocation, allocation->GetDexPc(), allocator_);
1502 AppendInstruction(ctor_fence);
1503 MaybeRecordStat(
1504 compilation_stats_,
1505 MethodCompilationStat::kConstructorFenceGeneratedNew);
1506 }
1507
1508 static bool IsInBootImage(ObjPtr<mirror::Class> cls, const CompilerOptions& compiler_options)
1509 REQUIRES_SHARED(Locks::mutator_lock_) {
1510 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(cls)) {
1511 return true;
1512 }
1513 if (compiler_options.IsBootImage() || compiler_options.IsBootImageExtension()) {
1514 std::string temp;
1515 const char* descriptor = cls->GetDescriptor(&temp);
1516 return compiler_options.IsImageClass(descriptor);
1517 } else {
1518 return false;
1519 }
1520 }
1521
1522 static bool IsSubClass(ObjPtr<mirror::Class> to_test, ObjPtr<mirror::Class> super_class)
1523 REQUIRES_SHARED(Locks::mutator_lock_) {
1524 return to_test != nullptr && !to_test->IsInterface() && to_test->IsSubClass(super_class);
1525 }
1526
1527 static bool HasTrivialClinit(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
1528 REQUIRES_SHARED(Locks::mutator_lock_) {
1529 // Check if the class has encoded fields that trigger bytecode execution.
1530 // (Encoded fields are just a different representation of <clinit>.)
1531 if (klass->NumStaticFields() != 0u) {
1532 DCHECK(klass->GetClassDef() != nullptr);
1533 EncodedStaticFieldValueIterator it(klass->GetDexFile(), *klass->GetClassDef());
1534 for (; it.HasNext(); it.Next()) {
1535 switch (it.GetValueType()) {
1536 case EncodedArrayValueIterator::ValueType::kBoolean:
1537 case EncodedArrayValueIterator::ValueType::kByte:
1538 case EncodedArrayValueIterator::ValueType::kShort:
1539 case EncodedArrayValueIterator::ValueType::kChar:
1540 case EncodedArrayValueIterator::ValueType::kInt:
1541 case EncodedArrayValueIterator::ValueType::kLong:
1542 case EncodedArrayValueIterator::ValueType::kFloat:
1543 case EncodedArrayValueIterator::ValueType::kDouble:
1544 case EncodedArrayValueIterator::ValueType::kNull:
1545 case EncodedArrayValueIterator::ValueType::kString:
1546 // Primitive, null or j.l.String initialization is permitted.
1547 break;
1548 case EncodedArrayValueIterator::ValueType::kType:
1549 // Type initialization can load classes and execute bytecode through a class loader
1550 // which can execute arbitrary bytecode. We do not optimize for known class loaders;
1551 // kType is rarely used (if ever).
1552 return false;
1553 default:
1554 // Other types in the encoded static field list are rejected by the DexFileVerifier.
1555 LOG(FATAL) << "Unexpected type " << it.GetValueType();
1556 UNREACHABLE();
1557 }
1558 }
1559 }
1560 // Check if the class has <clinit> that executes arbitrary code.
1561 // Initialization of static fields of the class itself with constants is allowed.
1562 ArtMethod* clinit = klass->FindClassInitializer(pointer_size);
1563 if (clinit != nullptr) {
1564 const DexFile& dex_file = *clinit->GetDexFile();
1565 CodeItemInstructionAccessor accessor(dex_file, clinit->GetCodeItem());
1566 for (DexInstructionPcPair it : accessor) {
1567 switch (it->Opcode()) {
1568 case Instruction::CONST_4:
1569 case Instruction::CONST_16:
1570 case Instruction::CONST:
1571 case Instruction::CONST_HIGH16:
1572 case Instruction::CONST_WIDE_16:
1573 case Instruction::CONST_WIDE_32:
1574 case Instruction::CONST_WIDE:
1575 case Instruction::CONST_WIDE_HIGH16:
1576 case Instruction::CONST_STRING:
1577 case Instruction::CONST_STRING_JUMBO:
1578 // Primitive, null or j.l.String initialization is permitted.
1579 break;
1580 case Instruction::RETURN_VOID:
1581 break;
1582 case Instruction::SPUT:
1583 case Instruction::SPUT_WIDE:
1584 case Instruction::SPUT_OBJECT:
1585 case Instruction::SPUT_BOOLEAN:
1586 case Instruction::SPUT_BYTE:
1587 case Instruction::SPUT_CHAR:
1588 case Instruction::SPUT_SHORT:
1589 // Only initialization of a static field of the same class is permitted.
1590 if (dex_file.GetFieldId(it->VRegB_21c()).class_idx_ != klass->GetDexTypeIndex()) {
1591 return false;
1592 }
1593 break;
1594 case Instruction::NEW_ARRAY:
1595 // Only primitive arrays are permitted.
1596 if (Primitive::GetType(dex_file.GetTypeDescriptor(dex_file.GetTypeId(
1597 dex::TypeIndex(it->VRegC_22c())))[1]) == Primitive::kPrimNot) {
1598 return false;
1599 }
1600 break;
1601 case Instruction::APUT:
1602 case Instruction::APUT_WIDE:
1603 case Instruction::APUT_BOOLEAN:
1604 case Instruction::APUT_BYTE:
1605 case Instruction::APUT_CHAR:
1606 case Instruction::APUT_SHORT:
1607 case Instruction::FILL_ARRAY_DATA:
1608 case Instruction::NOP:
1609 // Allow initialization of primitive arrays (only constants can be stored).
1610 // Note: We expect NOPs used for fill-array-data-payload but accept all NOPs
1611 // (even unreferenced switch payloads if they make it through the verifier).
1612 break;
1613 default:
1614 return false;
1615 }
1616 }
1617 }
1618 return true;
1619 }
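// Illustrative examples for HasTrivialClinit() above (hypothetical classes added for clarity,
// not part of the original source):
//
//   class Trivial {                        // <clinit> only uses const/sput, new-array of a
//     static int COUNT = 42;               // primitive type and fill-array-data, so it is
//     static int[] TABLE = {1, 2, 3};      // considered trivial.
//     static String NAME = "trivial";
//   }
//
//   class NotTrivial {
//     static int VALUE = compute();        // <clinit> contains an invoke, so initialization
//     static int compute() { return 1; }   // may run arbitrary code and is not trivial.
//   }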
1620
1621 static bool HasTrivialInitialization(ObjPtr<mirror::Class> cls,
1622 const CompilerOptions& compiler_options)
1623 REQUIRES_SHARED(Locks::mutator_lock_) {
1624 Runtime* runtime = Runtime::Current();
1625 PointerSize pointer_size = runtime->GetClassLinker()->GetImagePointerSize();
1626
1627 // Check the superclass chain.
1628 for (ObjPtr<mirror::Class> klass = cls; klass != nullptr; klass = klass->GetSuperClass()) {
1629 if (klass->IsInitialized() && IsInBootImage(klass, compiler_options)) {
1630 break; // `klass` and its superclasses are already initialized in the boot image.
1631 }
1632 if (!HasTrivialClinit(klass, pointer_size)) {
1633 return false;
1634 }
1635 }
1636
1637 // Also check interfaces with default methods as they need to be initialized as well.
1638 ObjPtr<mirror::IfTable> iftable = cls->GetIfTable();
1639 DCHECK(iftable != nullptr);
1640 for (int32_t i = 0, count = iftable->Count(); i != count; ++i) {
1641 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
1642 if (!iface->HasDefaultMethods()) {
1643 continue; // Initializing `cls` does not initialize this interface.
1644 }
1645 if (iface->IsInitialized() && IsInBootImage(iface, compiler_options)) {
1646 continue; // This interface is already initialized in the boot image.
1647 }
1648 if (!HasTrivialClinit(iface, pointer_size)) {
1649 return false;
1650 }
1651 }
1652 return true;
1653 }
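// Illustrative example for the interface check above (hypothetical types, not part of the
// original source): per JLS 12.4.1, initializing a class also initializes its superinterfaces
// that declare default methods, so their <clinit> must be trivial as well.
//
//   interface WithDefault {
//     int SIZE = Helper.sideEffect();      // Not a constant expression: goes to <clinit>.
//     default int size() { return SIZE; }  // Default method => interface is initialized
//   }                                      // together with any implementing class.
//   class Impl implements WithDefault {}   // Initializing Impl runs WithDefault.<clinit>.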
1654
1655 bool HInstructionBuilder::IsInitialized(ObjPtr<mirror::Class> cls) const {
1656 if (cls == nullptr) {
1657 return false;
1658 }
1659
1660 // Check if the class will be initialized at runtime.
1661 if (cls->IsInitialized()) {
1662 const CompilerOptions& compiler_options = code_generator_->GetCompilerOptions();
1663 if (compiler_options.IsAotCompiler()) {
1664 // Assume loaded only if klass is in the boot image. App classes cannot be assumed
1665 // loaded because we don't even know what class loader will be used to load them.
1666 if (IsInBootImage(cls, compiler_options)) {
1667 return true;
1668 }
1669 } else {
1670 DCHECK(compiler_options.IsJitCompiler());
1671 if (Runtime::Current()->GetJit()->CanAssumeInitialized(
1672 cls,
1673 compiler_options.IsJitCompilerForSharedCode())) {
1674 // For JIT, the class cannot revert to an uninitialized state.
1675 return true;
1676 }
1677 }
1678 }
1679
1680 // We can avoid the class initialization check for `cls` in static methods and constructors
1681 // in the very same class; invoking a static method involves a class initialization check
1682 // and so does the instance allocation that must be executed before invoking a constructor.
1683 // Other instance methods of the same class can run on an escaped instance
1684 // of an erroneous class. Even a superclass may need to be checked as the subclass
1685 // can be completely initialized while the superclass is initializing and the subclass
1686 // remains initialized when the superclass initializer throws afterwards. b/62478025
1687 // Note: The HClinitCheck+HInvokeStaticOrDirect merging can still apply.
1688 auto is_static_method_or_constructor_of_cls = [cls](const DexCompilationUnit& compilation_unit)
1689 REQUIRES_SHARED(Locks::mutator_lock_) {
1690 return (compilation_unit.GetAccessFlags() & (kAccStatic | kAccConstructor)) != 0u &&
1691 compilation_unit.GetCompilingClass().Get() == cls;
1692 };
1693 if (is_static_method_or_constructor_of_cls(*outer_compilation_unit_) ||
1694 // Check also the innermost method. Though excessive copies of ClinitCheck can be
1695 // eliminated by GVN, that happens only after the decision whether to inline the
1696 // graph or not and that may depend on the presence of the ClinitCheck.
1697 // TODO: We should walk over the entire inlined method chain, but we don't pass that
1698 // information to the builder.
1699 is_static_method_or_constructor_of_cls(*dex_compilation_unit_)) {
1700 return true;
1701 }
1702
1703 // Otherwise, we may be able to avoid the check if `cls` is a superclass of a method being
1704 // compiled here (anywhere in the inlining chain) as the `cls` must have started initializing
1705 // before calling any `cls` or subclass methods. Static methods require a clinit check and
1706 // instance methods require an instance which cannot be created before doing a clinit check.
1707 // When a subclass of `cls` starts initializing, it starts initializing its superclass
1708 // chain up to `cls` without running any bytecode, i.e. without any opportunity for circular
1709 // initialization weirdness.
1710 //
1711 // If the initialization of `cls` is trivial (`cls` and its superclasses and superinterfaces
1712 // with default methods initialize only their own static fields using constant values), it must
1713 // complete, either successfully or by throwing and marking `cls` erroneous, without allocating
1714 // any instances of `cls` or subclasses (or any other class) and without calling any methods.
1715 // If it completes by throwing, no instances of `cls` shall be created and no subclass method
1716 // bytecode shall execute (see above), therefore the instruction we're building shall be
1717 // unreachable. By reaching the instruction, we know that `cls` was initialized successfully.
1718 //
1719 // TODO: We should walk over the entire inlined methods chain, but we don't pass that
1720 // information to the builder. (We could also check if we're guaranteed a non-null instance
1721 // of `cls` at this location but that's outside the scope of the instruction builder.)
1722 bool is_subclass = IsSubClass(outer_compilation_unit_->GetCompilingClass().Get(), cls);
1723 if (dex_compilation_unit_ != outer_compilation_unit_) {
1724 is_subclass = is_subclass ||
1725 IsSubClass(dex_compilation_unit_->GetCompilingClass().Get(), cls);
1726 }
1727 if (is_subclass && HasTrivialInitialization(cls, code_generator_->GetCompilerOptions())) {
1728 return true;
1729 }
1730
1731 return false;
1732 }
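// Illustrative scenario for the subclass rule above (hypothetical classes, not from the
// original comments): if the method being compiled belongs to `Sub` and `cls` is its
// superclass `Base` with trivial initialization, then by the time any `Sub` code runs,
// `Base.<clinit>` must already have completed; had it thrown, no `Sub` instance or static
// call could have reached this point.
//
//   class Base { static int F = 1; }   // Trivial initialization.
//   class Sub extends Base {
//     int read() { return Base.F; }    // No HClinitCheck needed for Base here.
//   }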
1733
1734 HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
1735 uint32_t dex_pc,
1736 ArtMethod* resolved_method,
1737 HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) {
1738 ScopedObjectAccess soa(Thread::Current());
1739 ObjPtr<mirror::Class> klass = resolved_method->GetDeclaringClass();
1740
1741 HClinitCheck* clinit_check = nullptr;
1742 if (IsInitialized(klass)) {
1743 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1744 } else {
1745 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
1746 HLoadClass* cls = BuildLoadClass(h_klass->GetDexTypeIndex(),
1747 h_klass->GetDexFile(),
1748 h_klass,
1749 dex_pc,
1750 /* needs_access_check= */ false);
1751 if (cls != nullptr) {
1752 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
1753 clinit_check = new (allocator_) HClinitCheck(cls, dex_pc);
1754 AppendInstruction(clinit_check);
1755 } else {
1756 // Let the invoke handle this with an implicit class initialization check.
1757 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit;
1758 }
1759 }
1760 return clinit_check;
1761 }
1762
1763 bool HInstructionBuilder::SetupInvokeArguments(HInstruction* invoke,
1764 const InstructionOperands& operands,
1765 const char* shorty,
1766 ReceiverArg receiver_arg) {
1767 // Note: The `invoke` can be an intrinsic replacement, so it is not necessarily an HInvoke.
1768 // In that case, do not log errors; they shall be reported when we try to build the HInvoke.
1769 uint32_t shorty_index = 1; // Skip the return type.
1770 const size_t number_of_operands = operands.GetNumberOfOperands();
1771 bool argument_length_error = false;
1772
1773 size_t start_index = 0u;
1774 size_t argument_index = 0u;
1775 if (receiver_arg != ReceiverArg::kNone) {
1776 if (number_of_operands == 0u) {
1777 argument_length_error = true;
1778 } else {
1779 start_index = 1u;
1780 if (receiver_arg != ReceiverArg::kIgnored) {
1781 uint32_t obj_reg = operands.GetOperand(0u);
1782 HInstruction* arg = (receiver_arg == ReceiverArg::kPlainArg)
1783 ? LoadLocal(obj_reg, DataType::Type::kReference)
1784 : LoadNullCheckedLocal(obj_reg, invoke->GetDexPc());
1785 if (receiver_arg != ReceiverArg::kNullCheckedOnly) {
1786 invoke->SetRawInputAt(0u, arg);
1787 argument_index = 1u;
1788 }
1789 }
1790 }
1791 }
1792
1793 for (size_t i = start_index; i < number_of_operands; ++i, ++argument_index) {
1794 // Make sure we don't go over the expected arguments or over the number of
1795 // dex registers given. If the instruction was seen as dead by the verifier,
1796 // it hasn't been properly checked.
1797 if (UNLIKELY(shorty[shorty_index] == 0)) {
1798 argument_length_error = true;
1799 break;
1800 }
1801 DataType::Type type = DataType::FromShorty(shorty[shorty_index++]);
1802 bool is_wide = (type == DataType::Type::kInt64) || (type == DataType::Type::kFloat64);
1803 if (is_wide && ((i + 1 == number_of_operands) ||
1804 (operands.GetOperand(i) + 1 != operands.GetOperand(i + 1)))) {
1805 if (invoke->IsInvoke()) {
1806 // Longs and doubles should be in pairs, that is, sequential registers. The verifier should
1807 // reject any class where this is violated. However, the verifier only does these checks
1808 // on non-trivially-dead instructions, so we just bail out of the compilation.
1809 VLOG(compiler) << "Did not compile "
1810 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1811 << " because of non-sequential dex register pair in wide argument";
1812 MaybeRecordStat(compilation_stats_,
1813 MethodCompilationStat::kNotCompiledMalformedOpcode);
1814 }
1815 return false;
1816 }
1817 HInstruction* arg = LoadLocal(operands.GetOperand(i), type);
1818 DCHECK(invoke->InputAt(argument_index) == nullptr);
1819 invoke->SetRawInputAt(argument_index, arg);
1820 if (is_wide) {
1821 ++i;
1822 }
1823 }
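// Illustrative note on the wide-argument handling above (an assumption for clarity, not from
// the original comments): a long or double occupies a sequential dex register pair. For a
// hypothetical `static long pass(long x)` method, a valid call site looks like
//
//   invoke-static {v2, v3}, LUtil;->pass(J)J   // v2/v3 form the pair holding `x`.
//
// A call site naming non-adjacent registers for the same wide argument is malformed; the
// verifier only flags this for live code, so the loop above bails out when it sees it.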
1824
1825 argument_length_error = argument_length_error || shorty[shorty_index] != 0;
1826 if (argument_length_error) {
1827 if (invoke->IsInvoke()) {
1828 VLOG(compiler) << "Did not compile "
1829 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1830 << " because of wrong number of arguments in invoke instruction";
1831 MaybeRecordStat(compilation_stats_,
1832 MethodCompilationStat::kNotCompiledMalformedOpcode);
1833 }
1834 return false;
1835 }
1836
1837 if (invoke->IsInvokeStaticOrDirect() &&
1838 HInvokeStaticOrDirect::NeedsCurrentMethodInput(
1839 invoke->AsInvokeStaticOrDirect()->GetDispatchInfo())) {
1840 DCHECK_EQ(argument_index, invoke->AsInvokeStaticOrDirect()->GetCurrentMethodIndex());
1841 DCHECK(invoke->InputAt(argument_index) == nullptr);
1842 invoke->SetRawInputAt(argument_index, graph_->GetCurrentMethod());
1843 }
1844
1845 if (invoke->IsInvokeInterface() &&
1846 (invoke->AsInvokeInterface()->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive)) {
1847 invoke->SetRawInputAt(invoke->AsInvokeInterface()->GetNumberOfArguments() - 1,
1848 graph_->GetCurrentMethod());
1849 }
1850
1851 return true;
1852 }
1853
1854 bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
1855 const InstructionOperands& operands,
1856 const char* shorty,
1857 bool is_unresolved) {
1858 DCHECK_IMPLIES(invoke->IsInvokeStaticOrDirect(),
1859 !invoke->AsInvokeStaticOrDirect()->IsStringInit());
1860
1861 ReceiverArg receiver_arg = (invoke->GetInvokeType() == InvokeType::kStatic)
1862 ? ReceiverArg::kNone
1863 : (is_unresolved ? ReceiverArg::kPlainArg : ReceiverArg::kNullCheckedArg);
1864 if (!SetupInvokeArguments(invoke, operands, shorty, receiver_arg)) {
1865 return false;
1866 }
1867
1868 AppendInstruction(invoke);
1869 latest_result_ = invoke;
1870
1871 return true;
1872 }
1873
1874 bool HInstructionBuilder::BuildSimpleIntrinsic(ArtMethod* method,
1875 uint32_t dex_pc,
1876 const InstructionOperands& operands,
1877 const char* shorty) {
1878 Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
1879 DCHECK_NE(intrinsic, Intrinsics::kNone);
1880 constexpr DataType::Type kInt32 = DataType::Type::kInt32;
1881 constexpr DataType::Type kInt64 = DataType::Type::kInt64;
1882 constexpr DataType::Type kFloat32 = DataType::Type::kFloat32;
1883 constexpr DataType::Type kFloat64 = DataType::Type::kFloat64;
1884 ReceiverArg receiver_arg = method->IsStatic() ? ReceiverArg::kNone : ReceiverArg::kNullCheckedArg;
1885 HInstruction* instruction = nullptr;
1886 switch (intrinsic) {
1887 case Intrinsics::kIntegerRotateRight:
1888 case Intrinsics::kIntegerRotateLeft:
1889 // For rotate left, we negate the distance below.
1890 instruction = new (allocator_) HRor(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
1891 break;
1892 case Intrinsics::kLongRotateRight:
1893 case Intrinsics::kLongRotateLeft:
1894 // For rotate left, we negate the distance below.
1895 instruction = new (allocator_) HRor(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
1896 break;
1897 case Intrinsics::kIntegerCompare:
1898 instruction = new (allocator_) HCompare(
1899 kInt32, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1900 break;
1901 case Intrinsics::kLongCompare:
1902 instruction = new (allocator_) HCompare(
1903 kInt64, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1904 break;
1905 case Intrinsics::kIntegerSignum:
1906 instruction = new (allocator_) HCompare(
1907 kInt32, /*first=*/ nullptr, graph_->GetIntConstant(0), ComparisonBias::kNoBias, dex_pc);
1908 break;
1909 case Intrinsics::kLongSignum:
1910 instruction = new (allocator_) HCompare(
1911 kInt64, /*first=*/ nullptr, graph_->GetLongConstant(0), ComparisonBias::kNoBias, dex_pc);
1912 break;
1913 case Intrinsics::kFloatIsNaN:
1914 case Intrinsics::kDoubleIsNaN: {
1915 // IsNaN(x) is the same as x != x.
1916 instruction = new (allocator_) HNotEqual(/*first=*/ nullptr, /*second=*/ nullptr, dex_pc);
1917 instruction->AsCondition()->SetBias(ComparisonBias::kLtBias);
1918 break;
1919 }
1920 case Intrinsics::kStringCharAt:
1921 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
1922 instruction = new (allocator_) HArrayGet(/*array=*/ nullptr,
1923 /*index=*/ nullptr,
1924 DataType::Type::kUint16,
1925 SideEffects::None(), // Strings are immutable.
1926 dex_pc,
1927 /*is_string_char_at=*/ true);
1928 break;
1929 case Intrinsics::kStringIsEmpty:
1930 case Intrinsics::kStringLength:
1931 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
1932 // For String.isEmpty(), we add a comparison with 0 below.
1933 instruction =
1934 new (allocator_) HArrayLength(/*array=*/ nullptr, dex_pc, /* is_string_length= */ true);
1935 break;
1936 case Intrinsics::kUnsafeLoadFence:
1937 case Intrinsics::kJdkUnsafeLoadFence:
1938 receiver_arg = ReceiverArg::kNullCheckedOnly;
1939 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1940 break;
1941 case Intrinsics::kUnsafeStoreFence:
1942 case Intrinsics::kJdkUnsafeStoreFence:
1943 receiver_arg = ReceiverArg::kNullCheckedOnly;
1944 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
1945 break;
1946 case Intrinsics::kUnsafeFullFence:
1947 case Intrinsics::kJdkUnsafeFullFence:
1948 receiver_arg = ReceiverArg::kNullCheckedOnly;
1949 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
1950 break;
1951 case Intrinsics::kVarHandleFullFence:
1952 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
1953 break;
1954 case Intrinsics::kVarHandleAcquireFence:
1955 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1956 break;
1957 case Intrinsics::kVarHandleReleaseFence:
1958 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
1959 break;
1960 case Intrinsics::kVarHandleLoadLoadFence:
1961 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1962 break;
1963 case Intrinsics::kVarHandleStoreStoreFence:
1964 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kStoreStore, dex_pc);
1965 break;
1966 case Intrinsics::kMathMinIntInt:
1967 instruction = new (allocator_) HMin(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1968 break;
1969 case Intrinsics::kMathMinLongLong:
1970 instruction = new (allocator_) HMin(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1971 break;
1972 case Intrinsics::kMathMinFloatFloat:
1973 instruction = new (allocator_) HMin(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1974 break;
1975 case Intrinsics::kMathMinDoubleDouble:
1976 instruction = new (allocator_) HMin(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1977 break;
1978 case Intrinsics::kMathMaxIntInt:
1979 instruction = new (allocator_) HMax(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1980 break;
1981 case Intrinsics::kMathMaxLongLong:
1982 instruction = new (allocator_) HMax(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1983 break;
1984 case Intrinsics::kMathMaxFloatFloat:
1985 instruction = new (allocator_) HMax(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1986 break;
1987 case Intrinsics::kMathMaxDoubleDouble:
1988 instruction = new (allocator_) HMax(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1989 break;
1990 case Intrinsics::kMathAbsInt:
1991 instruction = new (allocator_) HAbs(kInt32, /*input=*/ nullptr, dex_pc);
1992 break;
1993 case Intrinsics::kMathAbsLong:
1994 instruction = new (allocator_) HAbs(kInt64, /*input=*/ nullptr, dex_pc);
1995 break;
1996 case Intrinsics::kMathAbsFloat:
1997 instruction = new (allocator_) HAbs(kFloat32, /*input=*/ nullptr, dex_pc);
1998 break;
1999 case Intrinsics::kMathAbsDouble:
2000 instruction = new (allocator_) HAbs(kFloat64, /*input=*/ nullptr, dex_pc);
2001 break;
2002 default:
2003 // We do not have intermediate representation for other intrinsics.
2004 return false;
2005 }
2006 DCHECK(instruction != nullptr);
2007 if (!SetupInvokeArguments(instruction, operands, shorty, receiver_arg)) {
2008 return false;
2009 }
2010
2011 switch (intrinsic) {
2012 case Intrinsics::kIntegerRotateLeft:
2013 case Intrinsics::kLongRotateLeft: {
2014 // Negate the distance value for rotate left.
2015 DCHECK(instruction->IsRor());
2016 HNeg* neg = new (allocator_) HNeg(kInt32, instruction->InputAt(1u));
2017 AppendInstruction(neg);
2018 instruction->SetRawInputAt(1u, neg);
2019 break;
2020 }
2021 case Intrinsics::kFloatIsNaN:
2022 case Intrinsics::kDoubleIsNaN:
2023 // Set the second input to be the same as first.
2024 DCHECK(instruction->IsNotEqual());
2025 DCHECK(instruction->InputAt(1u) == nullptr);
2026 instruction->SetRawInputAt(1u, instruction->InputAt(0u));
2027 break;
2028 case Intrinsics::kStringCharAt: {
2029 // Add bounds check.
2030 HInstruction* array = instruction->InputAt(0u);
2031 HInstruction* index = instruction->InputAt(1u);
2032 HInstruction* length =
2033 new (allocator_) HArrayLength(array, dex_pc, /*is_string_length=*/ true);
2034 AppendInstruction(length);
2035 HBoundsCheck* bounds_check =
2036 new (allocator_) HBoundsCheck(index, length, dex_pc, /*is_string_char_at=*/ true);
2037 AppendInstruction(bounds_check);
2038 graph_->SetHasBoundsChecks(true);
2039 instruction->SetRawInputAt(1u, bounds_check);
2040 break;
2041 }
2042 case Intrinsics::kStringIsEmpty: {
2043 // Compare the length with 0.
2044 DCHECK(instruction->IsArrayLength());
2045 AppendInstruction(instruction);
2046 HEqual* equal = new (allocator_) HEqual(instruction, graph_->GetIntConstant(0), dex_pc);
2047 instruction = equal;
2048 break;
2049 }
2050 default:
2051 break;
2052 }
2053
2054 AppendInstruction(instruction);
2055 latest_result_ = instruction;
2056
2057 return true;
2058 }
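// Illustrative note on BuildSimpleIntrinsic() above (added for clarity, not part of the
// original comments): the fix-ups rely on simple identities so that these intrinsics map
// onto existing HIR nodes, e.g.
//
//   Integer.rotateLeft(x, d)  ==  Integer.rotateRight(x, -d)   // HRor with negated distance.
//   Float.isNaN(x)            ==  (x != x)                     // HNotEqual with lt-bias.
//   s.isEmpty()               ==  (s.length() == 0)            // HArrayLength + HEqual.
//
// which lets later passes (DCE, BCE, GVN) treat them like ordinary arithmetic and array
// operations.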
2059
2060 bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
2061 const InstructionOperands& operands,
2062 const char* shorty) {
2063 DCHECK(invoke->IsInvokeStaticOrDirect());
2064 DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit());
2065
2066 if (!SetupInvokeArguments(invoke, operands, shorty, ReceiverArg::kIgnored)) {
2067 return false;
2068 }
2069
2070 AppendInstruction(invoke);
2071
2072 // This is a StringFactory call, not an actual String constructor. Its result
2073 // replaces the empty String pre-allocated by NewInstance.
2074 uint32_t orig_this_reg = operands.GetOperand(0);
2075 HInstruction* arg_this = LoadLocal(orig_this_reg, DataType::Type::kReference);
2076
2077 // Replacing the NewInstance might render it redundant. Keep a list of these
2078 // to be visited once it is clear whether they have remaining uses.
2079 if (arg_this->IsNewInstance()) {
2080 ssa_builder_->AddUninitializedString(arg_this->AsNewInstance());
2081 } else {
2082 DCHECK(arg_this->IsPhi());
2083 // We can get a phi as input of a String.<init> if there is a loop between the
2084 // allocation and the String.<init> call. As we don't know which other phis might alias
2085 // with `arg_this`, we keep a record of those invocations so we can later replace
2086 // the allocation with the invocation.
2087 // Add the actual 'this' input so the analysis knows which instruction is the allocation.
2088 // The input will be removed during the analysis.
2089 invoke->AddInput(arg_this);
2090 ssa_builder_->AddUninitializedStringPhi(invoke);
2091 }
2092 // Walk over all vregs and replace any occurrence of `arg_this` with `invoke`.
2093 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
2094 if ((*current_locals_)[vreg] == arg_this) {
2095 (*current_locals_)[vreg] = invoke;
2096 }
2097 }
2098 return true;
2099 }
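// Illustrative example for HandleStringInit() above (an assumption for clarity, not part of
// the original comments): Java code such as
//
//   String s = new String(chars);
//
// compiles to a `new-instance` of String followed by `invoke-direct` on String.<init>(char[]).
// The code above turns that constructor call into a StringFactory call whose result replaces
// every vreg still holding the pre-allocated empty String, so the original HNewInstance
// typically ends up with no uses.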
2100
2101 static DataType::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) {
2102 const dex::FieldId& field_id = dex_file.GetFieldId(field_index);
2103 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
2104 return DataType::FromShorty(type[0]);
2105 }
2106
2107 bool HInstructionBuilder::BuildInstanceFieldAccess(const Instruction& instruction,
2108 uint32_t dex_pc,
2109 bool is_put) {
2110 uint32_t source_or_dest_reg = instruction.VRegA_22c();
2111 uint32_t obj_reg = instruction.VRegB_22c();
2112 uint16_t field_index = instruction.VRegC_22c();
2113
2114 ScopedObjectAccess soa(Thread::Current());
2115 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ false, is_put);
2116
2117 // Generate an explicit null check on the reference, unless the field access
2118 // is unresolved. In that case, we rely on the runtime to perform various
2119 // checks first, followed by a null check.
2120 HInstruction* object = (resolved_field == nullptr)
2121 ? LoadLocal(obj_reg, DataType::Type::kReference)
2122 : LoadNullCheckedLocal(obj_reg, dex_pc);
2123
2124 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2125 if (is_put) {
2126 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2127 HInstruction* field_set = nullptr;
2128 if (resolved_field == nullptr) {
2129 MaybeRecordStat(compilation_stats_,
2130 MethodCompilationStat::kUnresolvedField);
2131 field_set = new (allocator_) HUnresolvedInstanceFieldSet(object,
2132 value,
2133 field_type,
2134 field_index,
2135 dex_pc);
2136 } else {
2137 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2138 field_set = new (allocator_) HInstanceFieldSet(object,
2139 value,
2140 resolved_field,
2141 field_type,
2142 resolved_field->GetOffset(),
2143 resolved_field->IsVolatile(),
2144 field_index,
2145 class_def_index,
2146 *dex_file_,
2147 dex_pc);
2148 }
2149 AppendInstruction(field_set);
2150 } else {
2151 HInstruction* field_get = nullptr;
2152 if (resolved_field == nullptr) {
2153 MaybeRecordStat(compilation_stats_,
2154 MethodCompilationStat::kUnresolvedField);
2155 field_get = new (allocator_) HUnresolvedInstanceFieldGet(object,
2156 field_type,
2157 field_index,
2158 dex_pc);
2159 } else {
2160 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2161 field_get = new (allocator_) HInstanceFieldGet(object,
2162 resolved_field,
2163 field_type,
2164 resolved_field->GetOffset(),
2165 resolved_field->IsVolatile(),
2166 field_index,
2167 class_def_index,
2168 *dex_file_,
2169 dex_pc);
2170 }
2171 AppendInstruction(field_get);
2172 UpdateLocal(source_or_dest_reg, field_get);
2173 }
2174
2175 return true;
2176 }
2177
2178 void HInstructionBuilder::BuildUnresolvedStaticFieldAccess(const Instruction& instruction,
2179 uint32_t dex_pc,
2180 bool is_put,
2181 DataType::Type field_type) {
2182 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2183 uint16_t field_index = instruction.VRegB_21c();
2184
2185 if (is_put) {
2186 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2187 AppendInstruction(
2188 new (allocator_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc));
2189 } else {
2190 AppendInstruction(new (allocator_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc));
2191 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2192 }
2193 }
2194
2195 ArtField* HInstructionBuilder::ResolveField(uint16_t field_idx, bool is_static, bool is_put) {
2196 ScopedObjectAccess soa(Thread::Current());
2197
2198 ClassLinker* class_linker = dex_compilation_unit_->GetClassLinker();
2199 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();
2200
2201 ArtField* resolved_field = class_linker->ResolveFieldJLS(field_idx,
2202 dex_compilation_unit_->GetDexCache(),
2203 class_loader);
2204 DCHECK_EQ(resolved_field == nullptr, soa.Self()->IsExceptionPending())
2205 << "field="
2206 << ((resolved_field == nullptr) ? "null" : resolved_field->PrettyField())
2207 << ", exception="
2208 << (soa.Self()->IsExceptionPending() ? soa.Self()->GetException()->Dump() : "null");
2209 if (UNLIKELY(resolved_field == nullptr)) {
2210 // Clean up any exception left by field resolution.
2211 soa.Self()->ClearException();
2212 return nullptr;
2213 }
2214
2215 if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
2216 return nullptr;
2217 }
2218
2219 // Check access.
2220 Handle<mirror::Class> compiling_class = dex_compilation_unit_->GetCompilingClass();
2221 if (compiling_class == nullptr) {
2222 // Check if the declaring class or referencing class is accessible.
2223 SamePackageCompare same_package(*dex_compilation_unit_);
2224 ObjPtr<mirror::Class> declaring_class = resolved_field->GetDeclaringClass();
2225 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
2226 if (!declaring_class_accessible) {
2227 // It is possible to access members from an inaccessible superclass
2228 // by referencing them through an accessible subclass.
2229 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
2230 dex_compilation_unit_->GetDexFile()->GetFieldId(field_idx).class_idx_,
2231 dex_compilation_unit_->GetDexCache().Get(),
2232 class_loader.Get());
2233 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the field.
2234 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
2235 return nullptr;
2236 }
2237 }
2238 // Check whether the field itself is accessible.
2239 // Since the referrer is unresolved but the field is resolved, it cannot be
2240 // inside the same class, so a private field is known to be inaccessible.
2241 // And without a resolved referrer, we cannot check for protected member access
2242 // in a superclass, so we only allow access to public members or members within the package.
2243 if (resolved_field->IsPrivate() ||
2244 (!resolved_field->IsPublic() && !declaring_class_accessible)) {
2245 return nullptr;
2246 }
2247 } else if (!compiling_class->CanAccessResolvedField(resolved_field->GetDeclaringClass(),
2248 resolved_field,
2249 dex_compilation_unit_->GetDexCache().Get(),
2250 field_idx)) {
2251 return nullptr;
2252 }
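// Illustrative example of the "accessible subclass" case above (hypothetical packages and
// classes, not from the original source):
//
//   package a;  class Base { public static int F; }         // Base is package-private.
//   package b;  public class Pub extends a.Base {}
//   package b;  class User { int get() { return Pub.F; } }  // Dex field reference names b.Pub.
//
// The declaring class a.Base is inaccessible from package b, but the reference goes through
// the public subclass b.Pub, so the checks above accept it because the resolved field itself
// is public.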
2253
2254 if (is_put) {
2255 if (resolved_field->IsFinal() &&
2256 (compiling_class.Get() != resolved_field->GetDeclaringClass())) {
2257 // Final fields can only be updated within their own class.
2258 // TODO: Only allow it in constructors. b/34966607.
2259 return nullptr;
2260 }
2261
2262 // Note: We do not need to resolve the field type for `get` opcodes.
2263 StackArtFieldHandleScope<1> rhs(soa.Self());
2264 ReflectiveHandle<ArtField> resolved_field_handle(rhs.NewHandle(resolved_field));
2265 if (resolved_field->ResolveType().IsNull()) {
2266 // ArtField::ResolveType() may fail as evidenced with a dexing bug (b/78788577).
2267 soa.Self()->ClearException();
2268 return nullptr; // Failure
2269 }
2270 resolved_field = resolved_field_handle.Get();
2271 }
2272
2273 return resolved_field;
2274 }
2275
2276 void HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
2277 uint32_t dex_pc,
2278 bool is_put) {
2279 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2280 uint16_t field_index = instruction.VRegB_21c();
2281
2282 ScopedObjectAccess soa(Thread::Current());
2283 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ true, is_put);
2284
2285 if (resolved_field == nullptr) {
2286 MaybeRecordStat(compilation_stats_,
2287 MethodCompilationStat::kUnresolvedField);
2288 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2289 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2290 return;
2291 }
2292
2293 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2294
2295 Handle<mirror::Class> klass =
2296 graph_->GetHandleCache()->NewHandle(resolved_field->GetDeclaringClass());
2297 HLoadClass* constant = BuildLoadClass(klass->GetDexTypeIndex(),
2298 klass->GetDexFile(),
2299 klass,
2300 dex_pc,
2301 /* needs_access_check= */ false);
2302
2303 if (constant == nullptr) {
2304 // The class cannot be referenced from this compiled code. Generate
2305 // an unresolved access.
2306 MaybeRecordStat(compilation_stats_,
2307 MethodCompilationStat::kUnresolvedFieldNotAFastAccess);
2308 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2309 return;
2310 }
2311
2312 HInstruction* cls = constant;
2313 if (!IsInitialized(klass.Get())) {
2314 cls = new (allocator_) HClinitCheck(constant, dex_pc);
2315 AppendInstruction(cls);
2316 }
2317
2318 uint16_t class_def_index = klass->GetDexClassDefIndex();
2319 if (is_put) {
2320 // We need to keep the class alive before loading the value.
2321 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2322 DCHECK_EQ(HPhi::ToPhiType(value->GetType()), HPhi::ToPhiType(field_type));
2323 AppendInstruction(new (allocator_) HStaticFieldSet(cls,
2324 value,
2325 resolved_field,
2326 field_type,
2327 resolved_field->GetOffset(),
2328 resolved_field->IsVolatile(),
2329 field_index,
2330 class_def_index,
2331 *dex_file_,
2332 dex_pc));
2333 } else {
2334 AppendInstruction(new (allocator_) HStaticFieldGet(cls,
2335 resolved_field,
2336 field_type,
2337 resolved_field->GetOffset(),
2338 resolved_field->IsVolatile(),
2339 field_index,
2340 class_def_index,
2341 *dex_file_,
2342 dex_pc));
2343 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2344 }
2345 }
2346
2347 void HInstructionBuilder::BuildCheckedDivRem(uint16_t out_vreg,
2348 uint16_t first_vreg,
2349 int64_t second_vreg_or_constant,
2350 uint32_t dex_pc,
2351 DataType::Type type,
2352 bool second_is_constant,
2353 bool isDiv) {
2354 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
2355
2356 HInstruction* first = LoadLocal(first_vreg, type);
2357 HInstruction* second = nullptr;
2358 if (second_is_constant) {
2359 if (type == DataType::Type::kInt32) {
2360 second = graph_->GetIntConstant(second_vreg_or_constant, dex_pc);
2361 } else {
2362 second = graph_->GetLongConstant(second_vreg_or_constant, dex_pc);
2363 }
2364 } else {
2365 second = LoadLocal(second_vreg_or_constant, type);
2366 }
2367
2368 if (!second_is_constant
2369 || (type == DataType::Type::kInt32 && second->AsIntConstant()->GetValue() == 0)
2370 || (type == DataType::Type::kInt64 && second->AsLongConstant()->GetValue() == 0)) {
2371 second = new (allocator_) HDivZeroCheck(second, dex_pc);
2372 AppendInstruction(second);
2373 }
2374
2375 if (isDiv) {
2376 AppendInstruction(new (allocator_) HDiv(type, first, second, dex_pc));
2377 } else {
2378 AppendInstruction(new (allocator_) HRem(type, first, second, dex_pc));
2379 }
2380 UpdateLocal(out_vreg, current_block_->GetLastInstruction());
2381 }
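// Illustrative note on BuildCheckedDivRem() above (added for clarity, not from the original
// comments): the zero check is emitted only when it could be needed, e.g. for `int` division:
//
//   x / 4   ->  HDiv(x, IntConstant(4))                  // No HDivZeroCheck.
//   x / y   ->  HDiv(x, HDivZeroCheck(y))                // Divisor unknown at compile time.
//   x / 0   ->  HDiv(x, HDivZeroCheck(IntConstant(0)))   // Always throws at runtime.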
2382
2383 void HInstructionBuilder::BuildArrayAccess(const Instruction& instruction,
2384 uint32_t dex_pc,
2385 bool is_put,
2386 DataType::Type anticipated_type) {
2387 uint8_t source_or_dest_reg = instruction.VRegA_23x();
2388 uint8_t array_reg = instruction.VRegB_23x();
2389 uint8_t index_reg = instruction.VRegC_23x();
2390
2391 HInstruction* object = LoadNullCheckedLocal(array_reg, dex_pc);
2392 HInstruction* length = new (allocator_) HArrayLength(object, dex_pc);
2393 AppendInstruction(length);
2394 HInstruction* index = LoadLocal(index_reg, DataType::Type::kInt32);
2395 index = new (allocator_) HBoundsCheck(index, length, dex_pc);
2396 AppendInstruction(index);
2397 if (is_put) {
2398 HInstruction* value = LoadLocal(source_or_dest_reg, anticipated_type);
2399 // TODO: Insert a type check node if the type is Object.
2400 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2401 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2402 AppendInstruction(aset);
2403 } else {
2404 HArrayGet* aget = new (allocator_) HArrayGet(object, index, anticipated_type, dex_pc);
2405 ssa_builder_->MaybeAddAmbiguousArrayGet(aget);
2406 AppendInstruction(aget);
2407 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2408 }
2409 graph_->SetHasBoundsChecks(true);
2410 }
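// Illustrative note on BuildArrayAccess() above (added for clarity, not from the original
// comments): for a Java read `a[i]` the emitted sequence is
//
//   NullCheck(a) -> ArrayLength -> BoundsCheck(i, length) -> ArrayGet
//
// and the analogous sequence ending in ArraySet for a write; later passes (GVN, BCE) remove
// redundant length loads and bounds checks.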
2411
2412 HNewArray* HInstructionBuilder::BuildNewArray(uint32_t dex_pc,
2413 dex::TypeIndex type_index,
2414 HInstruction* length) {
2415 HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
2416
2417 const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(type_index));
2418 DCHECK_EQ(descriptor[0], '[');
2419 size_t component_type_shift = Primitive::ComponentSizeShift(Primitive::GetType(descriptor[1]));
2420
2421 HNewArray* new_array = new (allocator_) HNewArray(cls, length, dex_pc, component_type_shift);
2422 AppendInstruction(new_array);
2423 return new_array;
2424 }
2425
2426 HNewArray* HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
2427 dex::TypeIndex type_index,
2428 const InstructionOperands& operands) {
2429 const size_t number_of_operands = operands.GetNumberOfOperands();
2430 HInstruction* length = graph_->GetIntConstant(number_of_operands, dex_pc);
2431
2432 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
2433 const char* descriptor = dex_file_->StringByTypeIdx(type_index);
2434 DCHECK_EQ(descriptor[0], '[') << descriptor;
2435 char primitive = descriptor[1];
2436 DCHECK(primitive == 'I'
2437 || primitive == 'L'
2438 || primitive == '[') << descriptor;
2439 bool is_reference_array = (primitive == 'L') || (primitive == '[');
2440 DataType::Type type = is_reference_array ? DataType::Type::kReference : DataType::Type::kInt32;
2441
2442 for (size_t i = 0; i < number_of_operands; ++i) {
2443 HInstruction* value = LoadLocal(operands.GetOperand(i), type);
2444 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2445 HArraySet* aset = new (allocator_) HArraySet(new_array, index, value, type, dex_pc);
2446 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2447 AppendInstruction(aset);
2448 }
2449 latest_result_ = new_array;
2450
2451 return new_array;
2452 }
2453
2454 template <typename T>
2455 void HInstructionBuilder::BuildFillArrayData(HInstruction* object,
2456 const T* data,
2457 uint32_t element_count,
2458 DataType::Type anticipated_type,
2459 uint32_t dex_pc) {
2460 for (uint32_t i = 0; i < element_count; ++i) {
2461 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2462 HInstruction* value = graph_->GetIntConstant(data[i], dex_pc);
2463 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2464 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2465 AppendInstruction(aset);
2466 }
2467 }
2468
2469 void HInstructionBuilder::BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc) {
2470 HInstruction* array = LoadNullCheckedLocal(instruction.VRegA_31t(), dex_pc);
2471
2472 int32_t payload_offset = instruction.VRegB_31t() + dex_pc;
2473 const Instruction::ArrayDataPayload* payload =
2474 reinterpret_cast<const Instruction::ArrayDataPayload*>(
2475 code_item_accessor_.Insns() + payload_offset);
2476 const uint8_t* data = payload->data;
2477 uint32_t element_count = payload->element_count;
2478
2479 if (element_count == 0u) {
2480 // For empty payload we emit only the null check above.
2481 return;
2482 }
2483
2484 HInstruction* length = new (allocator_) HArrayLength(array, dex_pc);
2485 AppendInstruction(length);
2486
2487 // The implementation of this DEX instruction appears to perform the bounds check
2488 // before doing any stores.
2489 HInstruction* last_index = graph_->GetIntConstant(payload->element_count - 1, dex_pc);
2490 AppendInstruction(new (allocator_) HBoundsCheck(last_index, length, dex_pc));
2491
2492 switch (payload->element_width) {
2493 case 1:
2494 BuildFillArrayData(array,
2495 reinterpret_cast<const int8_t*>(data),
2496 element_count,
2497 DataType::Type::kInt8,
2498 dex_pc);
2499 break;
2500 case 2:
2501 BuildFillArrayData(array,
2502 reinterpret_cast<const int16_t*>(data),
2503 element_count,
2504 DataType::Type::kInt16,
2505 dex_pc);
2506 break;
2507 case 4:
2508 BuildFillArrayData(array,
2509 reinterpret_cast<const int32_t*>(data),
2510 element_count,
2511 DataType::Type::kInt32,
2512 dex_pc);
2513 break;
2514 case 8:
2515 BuildFillWideArrayData(array,
2516 reinterpret_cast<const int64_t*>(data),
2517 element_count,
2518 dex_pc);
2519 break;
2520 default:
2521 LOG(FATAL) << "Unknown element width for " << payload->element_width;
2522 }
2523 graph_->SetHasBoundsChecks(true);
2524 }
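// Illustrative note on BuildFillArrayData() above (an assumption based on the dex format
// documentation, not part of the original comments): the referenced fill-array-data-payload
// holds a 0x0300 ident, a ushort element width, a uint element count and the packed data,
// which is why the code simply reinterprets `payload->data` at the element width. For an
// initializer such as
//
//   int[] a = {1, 2, 3};
//
// (when the dexer chooses fill-array-data for it) this emits one HArraySet with a constant
// index and value per element.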
2525
2526 void HInstructionBuilder::BuildFillWideArrayData(HInstruction* object,
2527 const int64_t* data,
2528 uint32_t element_count,
2529 uint32_t dex_pc) {
2530 for (uint32_t i = 0; i < element_count; ++i) {
2531 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2532 HInstruction* value = graph_->GetLongConstant(data[i], dex_pc);
2533 HArraySet* aset =
2534 new (allocator_) HArraySet(object, index, value, DataType::Type::kInt64, dex_pc);
2535 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2536 AppendInstruction(aset);
2537 }
2538 }
2539
2540 void HInstructionBuilder::BuildLoadString(dex::StringIndex string_index, uint32_t dex_pc) {
2541 HLoadString* load_string =
2542 new (allocator_) HLoadString(graph_->GetCurrentMethod(), string_index, *dex_file_, dex_pc);
2543 HSharpening::ProcessLoadString(load_string,
2544 code_generator_,
2545 *dex_compilation_unit_,
2546 graph_->GetHandleCache()->GetHandles());
2547 AppendInstruction(load_string);
2548 }
2549
2550 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index, uint32_t dex_pc) {
2551 ScopedObjectAccess soa(Thread::Current());
2552 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2553 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2554 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2555 return BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2556 }
2557
2558 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index,
2559 const DexFile& dex_file,
2560 Handle<mirror::Class> klass,
2561 uint32_t dex_pc,
2562 bool needs_access_check) {
2563 // Try to find a reference in the compiling dex file.
2564 const DexFile* actual_dex_file = &dex_file;
2565 if (!IsSameDexFile(dex_file, *dex_compilation_unit_->GetDexFile())) {
2566 dex::TypeIndex local_type_index =
2567 klass->FindTypeIndexInOtherDexFile(*dex_compilation_unit_->GetDexFile());
2568 if (local_type_index.IsValid()) {
2569 type_index = local_type_index;
2570 actual_dex_file = dex_compilation_unit_->GetDexFile();
2571 }
2572 }
2573
2574 // We cannot use the referrer's class load kind if we need to do an access check.
2575 // If the `klass` is unresolved, we need an access check except for the referrer's
2576 // class, see LoadClassNeedsAccessCheck(), so the `!needs_access_check` check is enough.
2577 // Otherwise, also check if the `klass` is the same as the compiling class, which also
2578 // conveniently rejects the case of an unresolved compiling class.
2579 bool is_referrers_class =
2580 !needs_access_check &&
2581 (klass == nullptr || outer_compilation_unit_->GetCompilingClass().Get() == klass.Get());
2582 // Note: `klass` must be from `graph_->GetHandleCache()`.
2583 HLoadClass* load_class = new (allocator_) HLoadClass(
2584 graph_->GetCurrentMethod(),
2585 type_index,
2586 *actual_dex_file,
2587 klass,
2588 is_referrers_class,
2589 dex_pc,
2590 needs_access_check);
2591
2592 HLoadClass::LoadKind load_kind = HSharpening::ComputeLoadClassKind(load_class,
2593 code_generator_,
2594 *dex_compilation_unit_);
2595
2596 if (load_kind == HLoadClass::LoadKind::kInvalid) {
2597 // We actually cannot reference this class, so we are forced to bail.
2598 return nullptr;
2599 }
2600 // Load kind must be set before inserting the instruction into the graph.
2601 load_class->SetLoadKind(load_kind);
2602 AppendInstruction(load_class);
2603 return load_class;
2604 }
2605
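// ResolveClass() caches resolution results per type index, including failed resolutions
// stored as null handles, so repeated references in the same method do not re-resolve.
// Any exception left by a failed resolution is cleared here; a null class is handled by
// the callers (e.g. LoadClassNeedsAccessCheck()).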
2606 Handle<mirror::Class> HInstructionBuilder::ResolveClass(ScopedObjectAccess& soa,
2607 dex::TypeIndex type_index) {
2608 auto it = class_cache_.find(type_index);
2609 if (it != class_cache_.end()) {
2610 return it->second;
2611 }
2612
2613 ObjPtr<mirror::Class> klass = dex_compilation_unit_->GetClassLinker()->ResolveType(
2614 type_index, dex_compilation_unit_->GetDexCache(), dex_compilation_unit_->GetClassLoader());
2615 DCHECK_EQ(klass == nullptr, soa.Self()->IsExceptionPending());
2616 soa.Self()->ClearException(); // Clean up the exception left by type resolution if any.
2617
2618 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
2619 class_cache_.Put(type_index, h_klass);
2620 return h_klass;
2621 }
2622
2623 bool HInstructionBuilder::LoadClassNeedsAccessCheck(dex::TypeIndex type_index,
2624 ObjPtr<mirror::Class> klass) {
2625 if (klass == nullptr) {
2626 // If the class is unresolved, we can avoid access checks only for references to
2627 // the compiling class, as determined by checking the descriptor and ClassLoader.
2628 if (outer_compilation_unit_->GetCompilingClass() != nullptr) {
2629 // The compiling class is resolved, so it differs from the unresolved class.
2630 return true;
2631 }
2632 if (dex_compilation_unit_->GetClassLoader().Get() !=
2633 outer_compilation_unit_->GetClassLoader().Get()) {
2634 // Resolving the same descriptor in a different ClassLoader than the
2635 // defining loader of the compiling class shall either fail to find
2636 // the class definition, or find a different one.
2637 // (Assuming no custom ClassLoader hierarchy with circular delegation.)
2638 return true;
2639 }
2640 // Check if the class is the outer method's class.
2641 // For the same dex file, compare type indexes; otherwise, compare descriptors.
2642 const DexFile* outer_dex_file = outer_compilation_unit_->GetDexFile();
2643 const DexFile* inner_dex_file = dex_compilation_unit_->GetDexFile();
2644 const dex::ClassDef& outer_class_def =
2645 outer_dex_file->GetClassDef(outer_compilation_unit_->GetClassDefIndex());
2646 if (IsSameDexFile(*inner_dex_file, *outer_dex_file)) {
2647 if (type_index != outer_class_def.class_idx_) {
2648 return true;
2649 }
2650 } else {
2651 uint32_t outer_utf16_length;
2652 const char* outer_descriptor =
2653 outer_dex_file->StringByTypeIdx(outer_class_def.class_idx_, &outer_utf16_length);
2654 uint32_t target_utf16_length;
2655 const char* target_descriptor =
2656 inner_dex_file->StringByTypeIdx(type_index, &target_utf16_length);
2657 if (outer_utf16_length != target_utf16_length ||
2658 strcmp(outer_descriptor, target_descriptor) != 0) {
2659 return true;
2660 }
2661 }
2662 // For inlined methods we also need to check that the outer compiling class (the class
2663 // referenced here) is public or in the same package as the inlined method's class.
2664 if (dex_compilation_unit_ != outer_compilation_unit_ &&
2665 (outer_class_def.access_flags_ & kAccPublic) == 0) {
2666 DCHECK(dex_compilation_unit_->GetCompilingClass() != nullptr);
2667 SamePackageCompare same_package(*outer_compilation_unit_);
2668 if (!same_package(dex_compilation_unit_->GetCompilingClass().Get())) {
2669 return true;
2670 }
2671 }
2672 return false;
2673 } else if (klass->IsPublic()) {
2674 return false;
2675 } else if (dex_compilation_unit_->GetCompilingClass() != nullptr) {
2676 return !dex_compilation_unit_->GetCompilingClass()->CanAccess(klass);
2677 } else {
2678 SamePackageCompare same_package(*dex_compilation_unit_);
2679 return !same_package(klass);
2680 }
2681 }
2682
2683 void HInstructionBuilder::BuildLoadMethodHandle(uint16_t method_handle_index, uint32_t dex_pc) {
2684 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2685 HLoadMethodHandle* load_method_handle = new (allocator_) HLoadMethodHandle(
2686 graph_->GetCurrentMethod(), method_handle_index, dex_file, dex_pc);
2687 AppendInstruction(load_method_handle);
2688 }
2689
2690 void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc) {
2691 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2692 HLoadMethodType* load_method_type =
2693 new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
2694 AppendInstruction(load_method_type);
2695 }
2696
2697 void HInstructionBuilder::BuildTypeCheck(bool is_instance_of,
2698 HInstruction* object,
2699 dex::TypeIndex type_index,
2700 uint32_t dex_pc) {
2701 ScopedObjectAccess soa(Thread::Current());
2702 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2703 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2704 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2705 TypeCheckKind check_kind = HSharpening::ComputeTypeCheckKind(
2706 klass.Get(), code_generator_, needs_access_check);
2707
2708 HInstruction* class_or_null = nullptr;
2709 HIntConstant* bitstring_path_to_root = nullptr;
2710 HIntConstant* bitstring_mask = nullptr;
2711 if (check_kind == TypeCheckKind::kBitstringCheck) {
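// The bitstring check encodes the target class's position in the class hierarchy, so the
// generated code can roughly test (object's class SubtypeCheck bits & mask) == path_to_root
// instead of loading and comparing class pointers.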
2712 // TODO: Allow using the bitstring check also if we need an access check.
2713 DCHECK(!needs_access_check);
2714 class_or_null = graph_->GetNullConstant(dex_pc);
2715 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2716 uint32_t path_to_root =
2717 SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass.Get());
2718 uint32_t mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass.Get());
2719 bitstring_path_to_root = graph_->GetIntConstant(static_cast<int32_t>(path_to_root), dex_pc);
2720 bitstring_mask = graph_->GetIntConstant(static_cast<int32_t>(mask), dex_pc);
2721 } else {
2722 class_or_null = BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2723 }
2724 DCHECK(class_or_null != nullptr);
2725
2726 if (is_instance_of) {
2727 AppendInstruction(new (allocator_) HInstanceOf(object,
2728 class_or_null,
2729 check_kind,
2730 klass,
2731 dex_pc,
2732 allocator_,
2733 bitstring_path_to_root,
2734 bitstring_mask));
2735 } else {
2736 // We emit a CheckCast followed by a BoundType. CheckCast is a statement
2737 // that may throw. If it succeeds, BoundType sets the new type of `object`
2738 // for all subsequent uses.
2739 AppendInstruction(
2740 new (allocator_) HCheckCast(object,
2741 class_or_null,
2742 check_kind,
2743 klass,
2744 dex_pc,
2745 allocator_,
2746 bitstring_path_to_root,
2747 bitstring_mask));
2748 AppendInstruction(new (allocator_) HBoundType(object, dex_pc));
2749 }
2750 }
2751
2752 void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
2753 uint8_t destination,
2754 uint8_t reference,
2755 dex::TypeIndex type_index,
2756 uint32_t dex_pc) {
2757 HInstruction* object = LoadLocal(reference, DataType::Type::kReference);
2758 bool is_instance_of = instruction.Opcode() == Instruction::INSTANCE_OF;
2759
2760 BuildTypeCheck(is_instance_of, object, type_index, dex_pc);
2761
2762 if (is_instance_of) {
2763 UpdateLocal(destination, current_block_->GetLastInstruction());
2764 } else {
2765 DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
2766 UpdateLocal(reference, current_block_->GetLastInstruction());
2767 }
2768 }
2769
2770 bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction, uint32_t dex_pc) {
2771 switch (instruction.Opcode()) {
2772 case Instruction::CONST_4: {
2773 int32_t register_index = instruction.VRegA();
2774 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_11n(), dex_pc);
2775 UpdateLocal(register_index, constant);
2776 break;
2777 }
2778
2779 case Instruction::CONST_16: {
2780 int32_t register_index = instruction.VRegA();
2781 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21s(), dex_pc);
2782 UpdateLocal(register_index, constant);
2783 break;
2784 }
2785
2786 case Instruction::CONST: {
2787 int32_t register_index = instruction.VRegA();
2788 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_31i(), dex_pc);
2789 UpdateLocal(register_index, constant);
2790 break;
2791 }
2792
2793 case Instruction::CONST_HIGH16: {
2794 int32_t register_index = instruction.VRegA();
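// The 16-bit literal fills the high half of the constant; e.g. const/high16 with 0x4120
// yields 0x41200000 (the bit pattern of 10.0f).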
2795 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21h() << 16, dex_pc);
2796 UpdateLocal(register_index, constant);
2797 break;
2798 }
2799
2800 case Instruction::CONST_WIDE_16: {
2801 int32_t register_index = instruction.VRegA();
2802 // Get 16 bits of constant value, sign extended to 64 bits.
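// For example, an encoded 0x8000 becomes 0xFFFFFFFFFFFF8000, i.e. -32768.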
2803 int64_t value = instruction.VRegB_21s();
2804 value <<= 48;
2805 value >>= 48;
2806 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2807 UpdateLocal(register_index, constant);
2808 break;
2809 }
2810
2811 case Instruction::CONST_WIDE_32: {
2812 int32_t register_index = instruction.VRegA();
2813 // Get 32 bits of constant value, sign extended to 64 bits.
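// For example, an encoded 0x80000000 becomes 0xFFFFFFFF80000000, i.e. INT32_MIN.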
2814 int64_t value = instruction.VRegB_31i();
2815 value <<= 32;
2816 value >>= 32;
2817 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2818 UpdateLocal(register_index, constant);
2819 break;
2820 }
2821
2822 case Instruction::CONST_WIDE: {
2823 int32_t register_index = instruction.VRegA();
2824 HLongConstant* constant = graph_->GetLongConstant(instruction.VRegB_51l(), dex_pc);
2825 UpdateLocal(register_index, constant);
2826 break;
2827 }
2828
2829 case Instruction::CONST_WIDE_HIGH16: {
2830 int32_t register_index = instruction.VRegA();
2831 int64_t value = static_cast<int64_t>(instruction.VRegB_21h()) << 48;
2832 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2833 UpdateLocal(register_index, constant);
2834 break;
2835 }
2836
2837 // Note that the SSA building will refine the types.
2838 case Instruction::MOVE:
2839 case Instruction::MOVE_FROM16:
2840 case Instruction::MOVE_16: {
2841 HInstruction* value = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
2842 UpdateLocal(instruction.VRegA(), value);
2843 break;
2844 }
2845
2846 // Note that the SSA building will refine the types.
2847 case Instruction::MOVE_WIDE:
2848 case Instruction::MOVE_WIDE_FROM16:
2849 case Instruction::MOVE_WIDE_16: {
2850 HInstruction* value = LoadLocal(instruction.VRegB(), DataType::Type::kInt64);
2851 UpdateLocal(instruction.VRegA(), value);
2852 break;
2853 }
2854
2855 case Instruction::MOVE_OBJECT:
2856 case Instruction::MOVE_OBJECT_16:
2857 case Instruction::MOVE_OBJECT_FROM16: {
2858 // The verifier has no notion of a null type, so a move-object of constant 0
2859 // will lead to the same constant 0 in the destination register. To mimic
2860 // this behavior, we just pretend we haven't seen a type change (int to reference)
2861 // for the 0 constant and phis. We rely on our type propagation to eventually get the
2862 // types correct.
2863 uint32_t reg_number = instruction.VRegB();
2864 HInstruction* value = (*current_locals_)[reg_number];
2865 if (value->IsIntConstant()) {
2866 DCHECK_EQ(value->AsIntConstant()->GetValue(), 0);
2867 } else if (value->IsPhi()) {
2868 DCHECK(value->GetType() == DataType::Type::kInt32 ||
2869 value->GetType() == DataType::Type::kReference);
2870 } else {
2871 value = LoadLocal(reg_number, DataType::Type::kReference);
2872 }
2873 UpdateLocal(instruction.VRegA(), value);
2874 break;
2875 }
2876
2877 case Instruction::RETURN_VOID: {
2878 BuildReturn(instruction, DataType::Type::kVoid, dex_pc);
2879 break;
2880 }
2881
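// Each IF_XX expansion covers both the two-register form (if-<cond>, 22t) and the
// compare-against-zero form (if-<cond>z, 21t).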
2882 #define IF_XX(comparison, cond) \
2883 case Instruction::IF_##cond: If_22t<comparison>(instruction, dex_pc); break; \
2884 case Instruction::IF_##cond##Z: If_21t<comparison>(instruction, dex_pc); break
2885
2886 IF_XX(HEqual, EQ);
2887 IF_XX(HNotEqual, NE);
2888 IF_XX(HLessThan, LT);
2889 IF_XX(HLessThanOrEqual, LE);
2890 IF_XX(HGreaterThan, GT);
2891 IF_XX(HGreaterThanOrEqual, GE);
2892
2893 case Instruction::GOTO:
2894 case Instruction::GOTO_16:
2895 case Instruction::GOTO_32: {
2896 AppendInstruction(new (allocator_) HGoto(dex_pc));
2897 current_block_ = nullptr;
2898 break;
2899 }
2900
2901 case Instruction::RETURN: {
2902 BuildReturn(instruction, return_type_, dex_pc);
2903 break;
2904 }
2905
2906 case Instruction::RETURN_OBJECT: {
2907 BuildReturn(instruction, return_type_, dex_pc);
2908 break;
2909 }
2910
2911 case Instruction::RETURN_WIDE: {
2912 BuildReturn(instruction, return_type_, dex_pc);
2913 break;
2914 }
2915
2916 case Instruction::INVOKE_DIRECT:
2917 case Instruction::INVOKE_INTERFACE:
2918 case Instruction::INVOKE_STATIC:
2919 case Instruction::INVOKE_SUPER:
2920 case Instruction::INVOKE_VIRTUAL: {
2921 uint16_t method_idx = instruction.VRegB_35c();
2922 uint32_t args[5];
2923 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2924 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2925 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
2926 return false;
2927 }
2928 break;
2929 }
2930
2931 case Instruction::INVOKE_DIRECT_RANGE:
2932 case Instruction::INVOKE_INTERFACE_RANGE:
2933 case Instruction::INVOKE_STATIC_RANGE:
2934 case Instruction::INVOKE_SUPER_RANGE:
2935 case Instruction::INVOKE_VIRTUAL_RANGE: {
2936 uint16_t method_idx = instruction.VRegB_3rc();
2937 RangeInstructionOperands operands(instruction.VRegC(), instruction.VRegA_3rc());
2938 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
2939 return false;
2940 }
2941 break;
2942 }
2943
2944 case Instruction::INVOKE_POLYMORPHIC: {
2945 uint16_t method_idx = instruction.VRegB_45cc();
2946 dex::ProtoIndex proto_idx(instruction.VRegH_45cc());
2947 uint32_t args[5];
2948 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2949 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2950 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
2951 }
2952
2953 case Instruction::INVOKE_POLYMORPHIC_RANGE: {
2954 uint16_t method_idx = instruction.VRegB_4rcc();
2955 dex::ProtoIndex proto_idx(instruction.VRegH_4rcc());
2956 RangeInstructionOperands operands(instruction.VRegC_4rcc(), instruction.VRegA_4rcc());
2957 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
2958 }
2959
2960 case Instruction::INVOKE_CUSTOM: {
2961 uint16_t call_site_idx = instruction.VRegB_35c();
2962 uint32_t args[5];
2963 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2964 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2965 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
2966 }
2967
2968 case Instruction::INVOKE_CUSTOM_RANGE: {
2969 uint16_t call_site_idx = instruction.VRegB_3rc();
2970 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
2971 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
2972 }
2973
2974 case Instruction::NEG_INT: {
2975 Unop_12x<HNeg>(instruction, DataType::Type::kInt32, dex_pc);
2976 break;
2977 }
2978
2979 case Instruction::NEG_LONG: {
2980 Unop_12x<HNeg>(instruction, DataType::Type::kInt64, dex_pc);
2981 break;
2982 }
2983
2984 case Instruction::NEG_FLOAT: {
2985 Unop_12x<HNeg>(instruction, DataType::Type::kFloat32, dex_pc);
2986 break;
2987 }
2988
2989 case Instruction::NEG_DOUBLE: {
2990 Unop_12x<HNeg>(instruction, DataType::Type::kFloat64, dex_pc);
2991 break;
2992 }
2993
2994 case Instruction::NOT_INT: {
2995 Unop_12x<HNot>(instruction, DataType::Type::kInt32, dex_pc);
2996 break;
2997 }
2998
2999 case Instruction::NOT_LONG: {
3000 Unop_12x<HNot>(instruction, DataType::Type::kInt64, dex_pc);
3001 break;
3002 }
3003
3004 case Instruction::INT_TO_LONG: {
3005 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt64, dex_pc);
3006 break;
3007 }
3008
3009 case Instruction::INT_TO_FLOAT: {
3010 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat32, dex_pc);
3011 break;
3012 }
3013
3014 case Instruction::INT_TO_DOUBLE: {
3015 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat64, dex_pc);
3016 break;
3017 }
3018
3019 case Instruction::LONG_TO_INT: {
3020 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kInt32, dex_pc);
3021 break;
3022 }
3023
3024 case Instruction::LONG_TO_FLOAT: {
3025 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat32, dex_pc);
3026 break;
3027 }
3028
3029 case Instruction::LONG_TO_DOUBLE: {
3030 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat64, dex_pc);
3031 break;
3032 }
3033
3034 case Instruction::FLOAT_TO_INT: {
3035 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt32, dex_pc);
3036 break;
3037 }
3038
3039 case Instruction::FLOAT_TO_LONG: {
3040 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt64, dex_pc);
3041 break;
3042 }
3043
3044 case Instruction::FLOAT_TO_DOUBLE: {
3045 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kFloat64, dex_pc);
3046 break;
3047 }
3048
3049 case Instruction::DOUBLE_TO_INT: {
3050 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt32, dex_pc);
3051 break;
3052 }
3053
3054 case Instruction::DOUBLE_TO_LONG: {
3055 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt64, dex_pc);
3056 break;
3057 }
3058
3059 case Instruction::DOUBLE_TO_FLOAT: {
3060 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kFloat32, dex_pc);
3061 break;
3062 }
3063
3064 case Instruction::INT_TO_BYTE: {
3065 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt8, dex_pc);
3066 break;
3067 }
3068
3069 case Instruction::INT_TO_SHORT: {
3070 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt16, dex_pc);
3071 break;
3072 }
3073
3074 case Instruction::INT_TO_CHAR: {
3075 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kUint16, dex_pc);
3076 break;
3077 }
3078
3079 case Instruction::ADD_INT: {
3080 Binop_23x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3081 break;
3082 }
3083
3084 case Instruction::ADD_LONG: {
3085 Binop_23x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3086 break;
3087 }
3088
3089 case Instruction::ADD_DOUBLE: {
3090 Binop_23x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3091 break;
3092 }
3093
3094 case Instruction::ADD_FLOAT: {
3095 Binop_23x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3096 break;
3097 }
3098
3099 case Instruction::SUB_INT: {
3100 Binop_23x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3101 break;
3102 }
3103
3104 case Instruction::SUB_LONG: {
3105 Binop_23x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3106 break;
3107 }
3108
3109 case Instruction::SUB_FLOAT: {
3110 Binop_23x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3111 break;
3112 }
3113
3114 case Instruction::SUB_DOUBLE: {
3115 Binop_23x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3116 break;
3117 }
3118
3119 case Instruction::ADD_INT_2ADDR: {
3120 Binop_12x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3121 break;
3122 }
3123
3124 case Instruction::MUL_INT: {
3125 Binop_23x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3126 break;
3127 }
3128
3129 case Instruction::MUL_LONG: {
3130 Binop_23x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3131 break;
3132 }
3133
3134 case Instruction::MUL_FLOAT: {
3135 Binop_23x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3136 break;
3137 }
3138
3139 case Instruction::MUL_DOUBLE: {
3140 Binop_23x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3141 break;
3142 }
3143
3144 case Instruction::DIV_INT: {
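// The last two arguments of BuildCheckedDivRem indicate whether the divisor is a literal
// (false here) and whether this is a division rather than a remainder (true here).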
3145 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3146 dex_pc, DataType::Type::kInt32, false, true);
3147 break;
3148 }
3149
3150 case Instruction::DIV_LONG: {
3151 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3152 dex_pc, DataType::Type::kInt64, false, true);
3153 break;
3154 }
3155
3156 case Instruction::DIV_FLOAT: {
3157 Binop_23x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3158 break;
3159 }
3160
3161 case Instruction::DIV_DOUBLE: {
3162 Binop_23x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3163 break;
3164 }
3165
3166 case Instruction::REM_INT: {
3167 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3168 dex_pc, DataType::Type::kInt32, false, false);
3169 break;
3170 }
3171
3172 case Instruction::REM_LONG: {
3173 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3174 dex_pc, DataType::Type::kInt64, false, false);
3175 break;
3176 }
3177
3178 case Instruction::REM_FLOAT: {
3179 Binop_23x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3180 break;
3181 }
3182
3183 case Instruction::REM_DOUBLE: {
3184 Binop_23x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3185 break;
3186 }
3187
3188 case Instruction::AND_INT: {
3189 Binop_23x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3190 break;
3191 }
3192
3193 case Instruction::AND_LONG: {
3194 Binop_23x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3195 break;
3196 }
3197
3198 case Instruction::SHL_INT: {
3199 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3200 break;
3201 }
3202
3203 case Instruction::SHL_LONG: {
3204 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3205 break;
3206 }
3207
3208 case Instruction::SHR_INT: {
3209 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3210 break;
3211 }
3212
3213 case Instruction::SHR_LONG: {
3214 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3215 break;
3216 }
3217
3218 case Instruction::USHR_INT: {
3219 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3220 break;
3221 }
3222
3223 case Instruction::USHR_LONG: {
3224 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3225 break;
3226 }
3227
3228 case Instruction::OR_INT: {
3229 Binop_23x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3230 break;
3231 }
3232
3233 case Instruction::OR_LONG: {
3234 Binop_23x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3235 break;
3236 }
3237
3238 case Instruction::XOR_INT: {
3239 Binop_23x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3240 break;
3241 }
3242
3243 case Instruction::XOR_LONG: {
3244 Binop_23x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3245 break;
3246 }
3247
3248 case Instruction::ADD_LONG_2ADDR: {
3249 Binop_12x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3250 break;
3251 }
3252
3253 case Instruction::ADD_DOUBLE_2ADDR: {
3254 Binop_12x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3255 break;
3256 }
3257
3258 case Instruction::ADD_FLOAT_2ADDR: {
3259 Binop_12x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3260 break;
3261 }
3262
3263 case Instruction::SUB_INT_2ADDR: {
3264 Binop_12x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3265 break;
3266 }
3267
3268 case Instruction::SUB_LONG_2ADDR: {
3269 Binop_12x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3270 break;
3271 }
3272
3273 case Instruction::SUB_FLOAT_2ADDR: {
3274 Binop_12x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3275 break;
3276 }
3277
3278 case Instruction::SUB_DOUBLE_2ADDR: {
3279 Binop_12x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3280 break;
3281 }
3282
3283 case Instruction::MUL_INT_2ADDR: {
3284 Binop_12x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3285 break;
3286 }
3287
3288 case Instruction::MUL_LONG_2ADDR: {
3289 Binop_12x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3290 break;
3291 }
3292
3293 case Instruction::MUL_FLOAT_2ADDR: {
3294 Binop_12x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3295 break;
3296 }
3297
3298 case Instruction::MUL_DOUBLE_2ADDR: {
3299 Binop_12x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3300 break;
3301 }
3302
3303 case Instruction::DIV_INT_2ADDR: {
3304 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3305 dex_pc, DataType::Type::kInt32, false, true);
3306 break;
3307 }
3308
3309 case Instruction::DIV_LONG_2ADDR: {
3310 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3311 dex_pc, DataType::Type::kInt64, false, true);
3312 break;
3313 }
3314
3315 case Instruction::REM_INT_2ADDR: {
3316 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3317 dex_pc, DataType::Type::kInt32, false, false);
3318 break;
3319 }
3320
3321 case Instruction::REM_LONG_2ADDR: {
3322 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3323 dex_pc, DataType::Type::kInt64, false, false);
3324 break;
3325 }
3326
3327 case Instruction::REM_FLOAT_2ADDR: {
3328 Binop_12x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3329 break;
3330 }
3331
3332 case Instruction::REM_DOUBLE_2ADDR: {
3333 Binop_12x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3334 break;
3335 }
3336
3337 case Instruction::SHL_INT_2ADDR: {
3338 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3339 break;
3340 }
3341
3342 case Instruction::SHL_LONG_2ADDR: {
3343 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3344 break;
3345 }
3346
3347 case Instruction::SHR_INT_2ADDR: {
3348 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3349 break;
3350 }
3351
3352 case Instruction::SHR_LONG_2ADDR: {
3353 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3354 break;
3355 }
3356
3357 case Instruction::USHR_INT_2ADDR: {
3358 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3359 break;
3360 }
3361
3362 case Instruction::USHR_LONG_2ADDR: {
3363 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3364 break;
3365 }
3366
3367 case Instruction::DIV_FLOAT_2ADDR: {
3368 Binop_12x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3369 break;
3370 }
3371
3372 case Instruction::DIV_DOUBLE_2ADDR: {
3373 Binop_12x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3374 break;
3375 }
3376
3377 case Instruction::AND_INT_2ADDR: {
3378 Binop_12x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3379 break;
3380 }
3381
3382 case Instruction::AND_LONG_2ADDR: {
3383 Binop_12x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3384 break;
3385 }
3386
3387 case Instruction::OR_INT_2ADDR: {
3388 Binop_12x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3389 break;
3390 }
3391
3392 case Instruction::OR_LONG_2ADDR: {
3393 Binop_12x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3394 break;
3395 }
3396
3397 case Instruction::XOR_INT_2ADDR: {
3398 Binop_12x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3399 break;
3400 }
3401
3402 case Instruction::XOR_LONG_2ADDR: {
3403 Binop_12x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3404 break;
3405 }
3406
3407 case Instruction::ADD_INT_LIT16: {
3408 Binop_22s<HAdd>(instruction, false, dex_pc);
3409 break;
3410 }
3411
3412 case Instruction::AND_INT_LIT16: {
3413 Binop_22s<HAnd>(instruction, false, dex_pc);
3414 break;
3415 }
3416
3417 case Instruction::OR_INT_LIT16: {
3418 Binop_22s<HOr>(instruction, false, dex_pc);
3419 break;
3420 }
3421
3422 case Instruction::XOR_INT_LIT16: {
3423 Binop_22s<HXor>(instruction, false, dex_pc);
3424 break;
3425 }
3426
3427 case Instruction::RSUB_INT: {
3428 Binop_22s<HSub>(instruction, true, dex_pc);
3429 break;
3430 }
3431
3432 case Instruction::MUL_INT_LIT16: {
3433 Binop_22s<HMul>(instruction, false, dex_pc);
3434 break;
3435 }
3436
3437 case Instruction::ADD_INT_LIT8: {
3438 Binop_22b<HAdd>(instruction, false, dex_pc);
3439 break;
3440 }
3441
3442 case Instruction::AND_INT_LIT8: {
3443 Binop_22b<HAnd>(instruction, false, dex_pc);
3444 break;
3445 }
3446
3447 case Instruction::OR_INT_LIT8: {
3448 Binop_22b<HOr>(instruction, false, dex_pc);
3449 break;
3450 }
3451
3452 case Instruction::XOR_INT_LIT8: {
3453 Binop_22b<HXor>(instruction, false, dex_pc);
3454 break;
3455 }
3456
3457 case Instruction::RSUB_INT_LIT8: {
3458 Binop_22b<HSub>(instruction, true, dex_pc);
3459 break;
3460 }
3461
3462 case Instruction::MUL_INT_LIT8: {
3463 Binop_22b<HMul>(instruction, false, dex_pc);
3464 break;
3465 }
3466
3467 case Instruction::DIV_INT_LIT16:
3468 case Instruction::DIV_INT_LIT8: {
3469 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3470 dex_pc, DataType::Type::kInt32, true, true);
3471 break;
3472 }
3473
3474 case Instruction::REM_INT_LIT16:
3475 case Instruction::REM_INT_LIT8: {
3476 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3477 dex_pc, DataType::Type::kInt32, true, false);
3478 break;
3479 }
3480
3481 case Instruction::SHL_INT_LIT8: {
3482 Binop_22b<HShl>(instruction, false, dex_pc);
3483 break;
3484 }
3485
3486 case Instruction::SHR_INT_LIT8: {
3487 Binop_22b<HShr>(instruction, false, dex_pc);
3488 break;
3489 }
3490
3491 case Instruction::USHR_INT_LIT8: {
3492 Binop_22b<HUShr>(instruction, false, dex_pc);
3493 break;
3494 }
3495
3496 case Instruction::NEW_INSTANCE: {
3497 HNewInstance* new_instance =
3498 BuildNewInstance(dex::TypeIndex(instruction.VRegB_21c()), dex_pc);
3499 DCHECK(new_instance != nullptr);
3500
3501 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
3502 BuildConstructorFenceForAllocation(new_instance);
3503 break;
3504 }
3505
3506 case Instruction::NEW_ARRAY: {
3507 dex::TypeIndex type_index(instruction.VRegC_22c());
3508 HInstruction* length = LoadLocal(instruction.VRegB_22c(), DataType::Type::kInt32);
3509 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
3510
3511 UpdateLocal(instruction.VRegA_22c(), current_block_->GetLastInstruction());
3512 BuildConstructorFenceForAllocation(new_array);
3513 break;
3514 }
3515
3516 case Instruction::FILLED_NEW_ARRAY: {
3517 dex::TypeIndex type_index(instruction.VRegB_35c());
3518 uint32_t args[5];
3519 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3520 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3521 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3522 BuildConstructorFenceForAllocation(new_array);
3523 break;
3524 }
3525
3526 case Instruction::FILLED_NEW_ARRAY_RANGE: {
3527 dex::TypeIndex type_index(instruction.VRegB_3rc());
3528 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
3529 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3530 BuildConstructorFenceForAllocation(new_array);
3531 break;
3532 }
3533
3534 case Instruction::FILL_ARRAY_DATA: {
3535 BuildFillArrayData(instruction, dex_pc);
3536 break;
3537 }
3538
3539 case Instruction::MOVE_RESULT:
3540 case Instruction::MOVE_RESULT_WIDE:
3541 case Instruction::MOVE_RESULT_OBJECT: {
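// `latest_result_` was recorded by the preceding invoke or filled-new-array instruction
// and is consumed exactly once here.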
3542 DCHECK(latest_result_ != nullptr);
3543 UpdateLocal(instruction.VRegA(), latest_result_);
3544 latest_result_ = nullptr;
3545 break;
3546 }
3547
3548 case Instruction::CMP_LONG: {
3549 Binop_23x_cmp(instruction, DataType::Type::kInt64, ComparisonBias::kNoBias, dex_pc);
3550 break;
3551 }
3552
3553 case Instruction::CMPG_FLOAT: {
3554 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kGtBias, dex_pc);
3555 break;
3556 }
3557
3558 case Instruction::CMPG_DOUBLE: {
3559 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kGtBias, dex_pc);
3560 break;
3561 }
3562
3563 case Instruction::CMPL_FLOAT: {
3564 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kLtBias, dex_pc);
3565 break;
3566 }
3567
3568 case Instruction::CMPL_DOUBLE: {
3569 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kLtBias, dex_pc);
3570 break;
3571 }
3572
3573 case Instruction::NOP:
3574 break;
3575
3576 case Instruction::IGET:
3577 case Instruction::IGET_WIDE:
3578 case Instruction::IGET_OBJECT:
3579 case Instruction::IGET_BOOLEAN:
3580 case Instruction::IGET_BYTE:
3581 case Instruction::IGET_CHAR:
3582 case Instruction::IGET_SHORT: {
3583 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ false)) {
3584 return false;
3585 }
3586 break;
3587 }
3588
3589 case Instruction::IPUT:
3590 case Instruction::IPUT_WIDE:
3591 case Instruction::IPUT_OBJECT:
3592 case Instruction::IPUT_BOOLEAN:
3593 case Instruction::IPUT_BYTE:
3594 case Instruction::IPUT_CHAR:
3595 case Instruction::IPUT_SHORT: {
3596 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ true)) {
3597 return false;
3598 }
3599 break;
3600 }
3601
3602 case Instruction::SGET:
3603 case Instruction::SGET_WIDE:
3604 case Instruction::SGET_OBJECT:
3605 case Instruction::SGET_BOOLEAN:
3606 case Instruction::SGET_BYTE:
3607 case Instruction::SGET_CHAR:
3608 case Instruction::SGET_SHORT: {
3609 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ false);
3610 break;
3611 }
3612
3613 case Instruction::SPUT:
3614 case Instruction::SPUT_WIDE:
3615 case Instruction::SPUT_OBJECT:
3616 case Instruction::SPUT_BOOLEAN:
3617 case Instruction::SPUT_BYTE:
3618 case Instruction::SPUT_CHAR:
3619 case Instruction::SPUT_SHORT: {
3620 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ true);
3621 break;
3622 }
3623
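// ARRAY_XX expands to the matching aget/aput cases for each element type, passing the
// anticipated element type to BuildArrayAccess.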
3624 #define ARRAY_XX(kind, anticipated_type) \
3625 case Instruction::AGET##kind: { \
3626 BuildArrayAccess(instruction, dex_pc, false, anticipated_type); \
3627 break; \
3628 } \
3629 case Instruction::APUT##kind: { \
3630 BuildArrayAccess(instruction, dex_pc, true, anticipated_type); \
3631 break; \
3632 }
3633
3634 ARRAY_XX(, DataType::Type::kInt32);
3635 ARRAY_XX(_WIDE, DataType::Type::kInt64);
3636 ARRAY_XX(_OBJECT, DataType::Type::kReference);
3637 ARRAY_XX(_BOOLEAN, DataType::Type::kBool);
3638 ARRAY_XX(_BYTE, DataType::Type::kInt8);
3639 ARRAY_XX(_CHAR, DataType::Type::kUint16);
3640 ARRAY_XX(_SHORT, DataType::Type::kInt16);
3641
3642 case Instruction::ARRAY_LENGTH: {
3643 HInstruction* object = LoadNullCheckedLocal(instruction.VRegB_12x(), dex_pc);
3644 AppendInstruction(new (allocator_) HArrayLength(object, dex_pc));
3645 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
3646 break;
3647 }
3648
3649 case Instruction::CONST_STRING: {
3650 dex::StringIndex string_index(instruction.VRegB_21c());
3651 BuildLoadString(string_index, dex_pc);
3652 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3653 break;
3654 }
3655
3656 case Instruction::CONST_STRING_JUMBO: {
3657 dex::StringIndex string_index(instruction.VRegB_31c());
3658 BuildLoadString(string_index, dex_pc);
3659 UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
3660 break;
3661 }
3662
3663 case Instruction::CONST_CLASS: {
3664 dex::TypeIndex type_index(instruction.VRegB_21c());
3665 BuildLoadClass(type_index, dex_pc);
3666 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3667 break;
3668 }
3669
3670 case Instruction::CONST_METHOD_HANDLE: {
3671 uint16_t method_handle_idx = instruction.VRegB_21c();
3672 BuildLoadMethodHandle(method_handle_idx, dex_pc);
3673 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3674 break;
3675 }
3676
3677 case Instruction::CONST_METHOD_TYPE: {
3678 dex::ProtoIndex proto_idx(instruction.VRegB_21c());
3679 BuildLoadMethodType(proto_idx, dex_pc);
3680 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3681 break;
3682 }
3683
3684 case Instruction::MOVE_EXCEPTION: {
3685 AppendInstruction(new (allocator_) HLoadException(dex_pc));
3686 UpdateLocal(instruction.VRegA_11x(), current_block_->GetLastInstruction());
3687 AppendInstruction(new (allocator_) HClearException(dex_pc));
3688 break;
3689 }
3690
3691 case Instruction::THROW: {
3692 HInstruction* exception = LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference);
3693 AppendInstruction(new (allocator_) HThrow(exception, dex_pc));
3694 // We finished building this block. Set the current block to null to avoid
3695 // adding dead instructions to it.
3696 current_block_ = nullptr;
3697 break;
3698 }
3699
3700 case Instruction::INSTANCE_OF: {
3701 uint8_t destination = instruction.VRegA_22c();
3702 uint8_t reference = instruction.VRegB_22c();
3703 dex::TypeIndex type_index(instruction.VRegC_22c());
3704 BuildTypeCheck(instruction, destination, reference, type_index, dex_pc);
3705 break;
3706 }
3707
3708 case Instruction::CHECK_CAST: {
3709 uint8_t reference = instruction.VRegA_21c();
3710 dex::TypeIndex type_index(instruction.VRegB_21c());
3711 BuildTypeCheck(instruction, /* destination= */ -1, reference, type_index, dex_pc);
3712 break;
3713 }
3714
3715 case Instruction::MONITOR_ENTER: {
3716 AppendInstruction(new (allocator_) HMonitorOperation(
3717 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3718 HMonitorOperation::OperationKind::kEnter,
3719 dex_pc));
3720 graph_->SetHasMonitorOperations(true);
3721 break;
3722 }
3723
3724 case Instruction::MONITOR_EXIT: {
3725 AppendInstruction(new (allocator_) HMonitorOperation(
3726 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3727 HMonitorOperation::OperationKind::kExit,
3728 dex_pc));
3729 graph_->SetHasMonitorOperations(true);
3730 break;
3731 }
3732
3733 case Instruction::SPARSE_SWITCH:
3734 case Instruction::PACKED_SWITCH: {
3735 BuildSwitch(instruction, dex_pc);
3736 break;
3737 }
3738
3739 case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
3740 case Instruction::UNUSED_73:
3741 case Instruction::UNUSED_79:
3742 case Instruction::UNUSED_7A:
3743 case Instruction::UNUSED_E3 ... Instruction::UNUSED_F9: {
3744 VLOG(compiler) << "Did not compile "
3745 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
3746 << " because of unhandled instruction "
3747 << instruction.Name();
3748 MaybeRecordStat(compilation_stats_,
3749 MethodCompilationStat::kNotCompiledUnhandledInstruction);
3750 return false;
3751 }
3752 }
3753 return true;
3754 } // NOLINT(readability/fn_size)
3755
3756 ObjPtr<mirror::Class> HInstructionBuilder::LookupResolvedType(
3757 dex::TypeIndex type_index,
3758 const DexCompilationUnit& compilation_unit) const {
3759 return compilation_unit.GetClassLinker()->LookupResolvedType(
3760 type_index, compilation_unit.GetDexCache().Get(), compilation_unit.GetClassLoader().Get());
3761 }
3762
3763 ObjPtr<mirror::Class> HInstructionBuilder::LookupReferrerClass() const {
3764 // TODO: Cache the result in a Handle<mirror::Class>.
3765 const dex::MethodId& method_id =
3766 dex_compilation_unit_->GetDexFile()->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
3767 return LookupResolvedType(method_id.class_idx_, *dex_compilation_unit_);
3768 }
3769
3770 } // namespace art
3771