1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instruction_builder.h"
18
19 #include "art_method-inl.h"
20 #include "base/arena_bit_vector.h"
21 #include "base/bit_vector-inl.h"
22 #include "base/logging.h"
23 #include "block_builder.h"
24 #include "class_linker-inl.h"
25 #include "code_generator.h"
26 #include "data_type-inl.h"
27 #include "dex/bytecode_utils.h"
28 #include "dex/dex_instruction-inl.h"
29 #include "driver/dex_compilation_unit.h"
30 #include "driver/compiler_options.h"
31 #include "entrypoints/entrypoint_utils-inl.h"
32 #include "imtable-inl.h"
33 #include "intrinsics.h"
34 #include "intrinsics_utils.h"
35 #include "jit/jit.h"
36 #include "mirror/dex_cache.h"
37 #include "oat_file.h"
38 #include "optimizing_compiler_stats.h"
39 #include "reflective_handle_scope-inl.h"
40 #include "scoped_thread_state_change-inl.h"
41 #include "sharpening.h"
42 #include "ssa_builder.h"
43 #include "well_known_classes.h"
44
45 namespace art {
46
47 namespace {
48
49 class SamePackageCompare {
50 public:
51 explicit SamePackageCompare(const DexCompilationUnit& dex_compilation_unit)
52 : dex_compilation_unit_(dex_compilation_unit) {}
53
54 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
55 if (klass->GetClassLoader() != dex_compilation_unit_.GetClassLoader().Get()) {
56 return false;
57 }
58 if (referrers_descriptor_ == nullptr) {
59 const DexFile* dex_file = dex_compilation_unit_.GetDexFile();
60 uint32_t referrers_method_idx = dex_compilation_unit_.GetDexMethodIndex();
61 referrers_descriptor_ =
62 dex_file->StringByTypeIdx(dex_file->GetMethodId(referrers_method_idx).class_idx_);
63 referrers_package_length_ = PackageLength(referrers_descriptor_);
64 }
65 std::string temp;
66 const char* klass_descriptor = klass->GetDescriptor(&temp);
67 size_t klass_package_length = PackageLength(klass_descriptor);
68 return (referrers_package_length_ == klass_package_length) &&
69 memcmp(referrers_descriptor_, klass_descriptor, referrers_package_length_) == 0;
70 };
71
72 private:
73 static size_t PackageLength(const char* descriptor) {
74 const char* slash_pos = strrchr(descriptor, '/');
75 return (slash_pos != nullptr) ? static_cast<size_t>(slash_pos - descriptor) : 0u;
76 }
77
78 const DexCompilationUnit& dex_compilation_unit_;
79 const char* referrers_descriptor_ = nullptr;
80 size_t referrers_package_length_ = 0u;
81 };
82
83 } // anonymous namespace
84
85 HInstructionBuilder::HInstructionBuilder(HGraph* graph,
86 HBasicBlockBuilder* block_builder,
87 SsaBuilder* ssa_builder,
88 const DexFile* dex_file,
89 const CodeItemDebugInfoAccessor& accessor,
90 DataType::Type return_type,
91 const DexCompilationUnit* dex_compilation_unit,
92 const DexCompilationUnit* outer_compilation_unit,
93 CodeGenerator* code_generator,
94 OptimizingCompilerStats* compiler_stats,
95 ScopedArenaAllocator* local_allocator)
96 : allocator_(graph->GetAllocator()),
97 graph_(graph),
98 dex_file_(dex_file),
99 code_item_accessor_(accessor),
100 return_type_(return_type),
101 block_builder_(block_builder),
102 ssa_builder_(ssa_builder),
103 code_generator_(code_generator),
104 dex_compilation_unit_(dex_compilation_unit),
105 outer_compilation_unit_(outer_compilation_unit),
106 compilation_stats_(compiler_stats),
107 local_allocator_(local_allocator),
108 locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
109 current_block_(nullptr),
110 current_locals_(nullptr),
111 latest_result_(nullptr),
112 current_this_parameter_(nullptr),
113 loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
114 class_cache_(std::less<dex::TypeIndex>(), local_allocator->Adapter(kArenaAllocGraphBuilder)) {
115 loop_headers_.reserve(kDefaultNumberOfLoops);
116 }
117
118 HBasicBlock* HInstructionBuilder::FindBlockStartingAt(uint32_t dex_pc) const {
119 return block_builder_->GetBlockAt(dex_pc);
120 }
121
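// Returns the vreg-to-value table for `block`, lazily resizing it (and, for catch
// blocks, pre-creating catch phis) via GetLocalsForWithAllocation() on first use.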
122 inline ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsFor(HBasicBlock* block) {
123 ScopedArenaVector<HInstruction*>* locals = &locals_for_[block->GetBlockId()];
124 const size_t vregs = graph_->GetNumberOfVRegs();
125 if (locals->size() == vregs) {
126 return locals;
127 }
128 return GetLocalsForWithAllocation(block, locals, vregs);
129 }
130
131 ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsForWithAllocation(
132 HBasicBlock* block,
133 ScopedArenaVector<HInstruction*>* locals,
134 const size_t vregs) {
135 DCHECK_NE(locals->size(), vregs);
136 locals->resize(vregs, nullptr);
137 if (block->IsCatchBlock()) {
138 // We record incoming inputs of catch phis at throwing instructions and
139 // must therefore eagerly create the phis. Phis for undefined vregs will
140 // be deleted when the first throwing instruction with the vreg undefined
141 // is encountered. Unused phis will be removed by dead phi analysis.
142 for (size_t i = 0; i < vregs; ++i) {
143 // No point in creating the catch phi if it is already undefined at
144 // the first throwing instruction.
145 HInstruction* current_local_value = (*current_locals_)[i];
146 if (current_local_value != nullptr) {
147 HPhi* phi = new (allocator_) HPhi(
148 allocator_,
149 i,
150 0,
151 current_local_value->GetType());
152 block->AddPhi(phi);
153 (*locals)[i] = phi;
154 }
155 }
156 }
157 return locals;
158 }
159
160 inline HInstruction* HInstructionBuilder::ValueOfLocalAt(HBasicBlock* block, size_t local) {
161 ScopedArenaVector<HInstruction*>* locals = GetLocalsFor(block);
162 return (*locals)[local];
163 }
164
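// Sets up `current_locals_` for `current_block_`: catch blocks reuse the phis already
// created at throwing sites, loop headers get a phi for every local defined in the
// pre-header, and other blocks merge predecessor values, creating phis where they differ.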
165 void HInstructionBuilder::InitializeBlockLocals() {
166 current_locals_ = GetLocalsFor(current_block_);
167
168 if (current_block_->IsCatchBlock()) {
169 // Catch phis were already created and inputs collected from throwing sites.
170 if (kIsDebugBuild) {
171 // Make sure there was at least one throwing instruction which initialized
172 // locals (guaranteed by HGraphBuilder) and that all try blocks have been
173 // visited already (from HTryBoundary scoping and reverse post order).
174 bool catch_block_visited = false;
175 for (HBasicBlock* current : graph_->GetReversePostOrder()) {
176 if (current == current_block_) {
177 catch_block_visited = true;
178 } else if (current->IsTryBlock()) {
179 const HTryBoundary& try_entry = current->GetTryCatchInformation()->GetTryEntry();
180 if (try_entry.HasExceptionHandler(*current_block_)) {
181 DCHECK(!catch_block_visited) << "Catch block visited before its try block.";
182 }
183 }
184 }
185 DCHECK_EQ(current_locals_->size(), graph_->GetNumberOfVRegs())
186 << "No instructions throwing into a live catch block.";
187 }
188 } else if (current_block_->IsLoopHeader()) {
189 // If the block is a loop header, we know we only have visited the pre header
190 // because we are visiting in reverse post order. We create phis for all initialized
191 // locals from the pre header. Their inputs will be populated at the end of
192 // the analysis.
193 for (size_t local = 0; local < current_locals_->size(); ++local) {
194 HInstruction* incoming =
195 ValueOfLocalAt(current_block_->GetLoopInformation()->GetPreHeader(), local);
196 if (incoming != nullptr) {
197 HPhi* phi = new (allocator_) HPhi(
198 allocator_,
199 local,
200 0,
201 incoming->GetType());
202 current_block_->AddPhi(phi);
203 (*current_locals_)[local] = phi;
204 }
205 }
206
207 // Save the loop header so that the last phase of the analysis knows which
208 // blocks need to be updated.
209 loop_headers_.push_back(current_block_);
210 } else if (current_block_->GetPredecessors().size() > 0) {
211 // All predecessors have already been visited because we are visiting in reverse post order.
212 // We merge the values of all locals, creating phis if those values differ.
213 for (size_t local = 0; local < current_locals_->size(); ++local) {
214 bool one_predecessor_has_no_value = false;
215 bool is_different = false;
216 HInstruction* value = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
217
218 for (HBasicBlock* predecessor : current_block_->GetPredecessors()) {
219 HInstruction* current = ValueOfLocalAt(predecessor, local);
220 if (current == nullptr) {
221 one_predecessor_has_no_value = true;
222 break;
223 } else if (current != value) {
224 is_different = true;
225 }
226 }
227
228 if (one_predecessor_has_no_value) {
229 // If one predecessor has no value for this local, we trust the verifier has
230 // successfully checked that there is a store dominating any read after this block.
231 continue;
232 }
233
234 if (is_different) {
235 HInstruction* first_input = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
236 HPhi* phi = new (allocator_) HPhi(
237 allocator_,
238 local,
239 current_block_->GetPredecessors().size(),
240 first_input->GetType());
241 for (size_t i = 0; i < current_block_->GetPredecessors().size(); i++) {
242 HInstruction* pred_value = ValueOfLocalAt(current_block_->GetPredecessors()[i], local);
243 phi->SetRawInputAt(i, pred_value);
244 }
245 current_block_->AddPhi(phi);
246 value = phi;
247 }
248 (*current_locals_)[local] = value;
249 }
250 }
251 }
252
253 void HInstructionBuilder::PropagateLocalsToCatchBlocks() {
254 const HTryBoundary& try_entry = current_block_->GetTryCatchInformation()->GetTryEntry();
255 for (HBasicBlock* catch_block : try_entry.GetExceptionHandlers()) {
256 ScopedArenaVector<HInstruction*>* handler_locals = GetLocalsFor(catch_block);
257 DCHECK_EQ(handler_locals->size(), current_locals_->size());
258 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
259 HInstruction* handler_value = (*handler_locals)[vreg];
260 if (handler_value == nullptr) {
261 // Vreg was undefined at a previously encountered throwing instruction
262 // and the catch phi was deleted. Do not record the local value.
263 continue;
264 }
265 DCHECK(handler_value->IsPhi());
266
267 HInstruction* local_value = (*current_locals_)[vreg];
268 if (local_value == nullptr) {
269 // This is the first instruction throwing into `catch_block` where
270 // `vreg` is undefined. Delete the catch phi.
271 catch_block->RemovePhi(handler_value->AsPhi());
272 (*handler_locals)[vreg] = nullptr;
273 } else {
274 // Vreg has been defined at all instructions throwing into `catch_block`
275 // encountered so far. Record the local value in the catch phi.
276 handler_value->AsPhi()->AddInput(local_value);
277 }
278 }
279 }
280 }
281
282 void HInstructionBuilder::AppendInstruction(HInstruction* instruction) {
283 current_block_->AddInstruction(instruction);
284 InitializeInstruction(instruction);
285 }
286
287 void HInstructionBuilder::InsertInstructionAtTop(HInstruction* instruction) {
288 if (current_block_->GetInstructions().IsEmpty()) {
289 current_block_->AddInstruction(instruction);
290 } else {
291 current_block_->InsertInstructionBefore(instruction, current_block_->GetFirstInstruction());
292 }
293 InitializeInstruction(instruction);
294 }
295
296 void HInstructionBuilder::InitializeInstruction(HInstruction* instruction) {
297 if (instruction->NeedsEnvironment()) {
298 HEnvironment* environment = new (allocator_) HEnvironment(
299 allocator_,
300 current_locals_->size(),
301 graph_->GetArtMethod(),
302 instruction->GetDexPc(),
303 instruction);
304 environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals_));
305 instruction->SetRawEnvironment(environment);
306 }
307 }
308
309 HInstruction* HInstructionBuilder::LoadNullCheckedLocal(uint32_t register_index, uint32_t dex_pc) {
310 HInstruction* ref = LoadLocal(register_index, DataType::Type::kReference);
311 if (!ref->CanBeNull()) {
312 return ref;
313 }
314
315 HNullCheck* null_check = new (allocator_) HNullCheck(ref, dex_pc);
316 AppendInstruction(null_check);
317 return null_check;
318 }
319
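// Final pass: now that every back edge has been visited, populate the inputs of the
// phis created for loop headers in InitializeBlockLocals().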
320 void HInstructionBuilder::SetLoopHeaderPhiInputs() {
321 for (size_t i = loop_headers_.size(); i > 0; --i) {
322 HBasicBlock* block = loop_headers_[i - 1];
323 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
324 HPhi* phi = it.Current()->AsPhi();
325 size_t vreg = phi->GetRegNumber();
326 for (HBasicBlock* predecessor : block->GetPredecessors()) {
327 HInstruction* value = ValueOfLocalAt(predecessor, vreg);
328 if (value == nullptr) {
329 // Vreg is undefined at this predecessor. Mark it dead and leave with
330 // fewer inputs than predecessors. SsaChecker will fail if not removed.
331 phi->SetDead();
332 break;
333 } else {
334 phi->AddInput(value);
335 }
336 }
337 }
338 }
339 }
340
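// Returns whether `block` already contains instructions, ignoring the suspend check
// that loop headers receive while the dominator tree is built.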
341 static bool IsBlockPopulated(HBasicBlock* block) {
342 if (block->IsLoopHeader()) {
343 // Suspend checks were inserted into loop headers during building of dominator tree.
344 DCHECK(block->GetFirstInstruction()->IsSuspendCheck());
345 return block->GetFirstInstruction() != block->GetLastInstruction();
346 } else {
347 return !block->GetInstructions().IsEmpty();
348 }
349 }
350
351 bool HInstructionBuilder::Build() {
352 DCHECK(code_item_accessor_.HasCodeItem());
353 locals_for_.resize(
354 graph_->GetBlocks().size(),
355 ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));
356
357 // Find locations where we want to generate extra stackmaps for native debugging.
358 // This allows us to generate the info only at interesting points (for example,
359 // at the start of a Java statement) rather than before every dex instruction.
360 const bool native_debuggable = code_generator_ != nullptr &&
361 code_generator_->GetCompilerOptions().GetNativeDebuggable();
362 ArenaBitVector* native_debug_info_locations = nullptr;
363 if (native_debuggable) {
364 native_debug_info_locations = FindNativeDebugInfoLocations();
365 }
366
367 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
368 current_block_ = block;
369 uint32_t block_dex_pc = current_block_->GetDexPc();
370
371 InitializeBlockLocals();
372
373 if (current_block_->IsEntryBlock()) {
374 InitializeParameters();
375 AppendInstruction(new (allocator_) HSuspendCheck(0u));
376 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
377 AppendInstruction(new (allocator_) HMethodEntryHook(0u));
378 }
379 AppendInstruction(new (allocator_) HGoto(0u));
380 continue;
381 } else if (current_block_->IsExitBlock()) {
382 AppendInstruction(new (allocator_) HExit());
383 continue;
384 } else if (current_block_->IsLoopHeader()) {
385 HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(current_block_->GetDexPc());
386 current_block_->GetLoopInformation()->SetSuspendCheck(suspend_check);
387 // This is slightly odd because the loop header might not be empty (TryBoundary).
388 // But we're still creating the environment with locals from the top of the block.
389 InsertInstructionAtTop(suspend_check);
390 }
391
392 if (block_dex_pc == kNoDexPc || current_block_ != block_builder_->GetBlockAt(block_dex_pc)) {
393 // Synthetic block that does not need to be populated.
394 DCHECK(IsBlockPopulated(current_block_));
395 continue;
396 }
397
398 DCHECK(!IsBlockPopulated(current_block_));
399
400 for (const DexInstructionPcPair& pair : code_item_accessor_.InstructionsFrom(block_dex_pc)) {
401 if (current_block_ == nullptr) {
402 // The previous instruction ended this block.
403 break;
404 }
405
406 const uint32_t dex_pc = pair.DexPc();
407 if (dex_pc != block_dex_pc && FindBlockStartingAt(dex_pc) != nullptr) {
408 // This dex_pc starts a new basic block.
409 break;
410 }
411
412 if (current_block_->IsTryBlock() && IsThrowingDexInstruction(pair.Inst())) {
413 PropagateLocalsToCatchBlocks();
414 }
415
416 if (native_debuggable && native_debug_info_locations->IsBitSet(dex_pc)) {
417 AppendInstruction(new (allocator_) HNativeDebugInfo(dex_pc));
418 }
419
420 // Note: There may be no Thread for gtests.
421 DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
422 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
423 << " " << pair.Inst().Name() << "@" << dex_pc;
424 if (!ProcessDexInstruction(pair.Inst(), dex_pc)) {
425 return false;
426 }
427 DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
428 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
429 << " " << pair.Inst().Name() << "@" << dex_pc;
430 }
431
432 if (current_block_ != nullptr) {
433 // Branching instructions clear current_block, so we know the last
434 // instruction of the current block is not a branching instruction.
435 // We add an unconditional Goto to the next block.
436 DCHECK_EQ(current_block_->GetSuccessors().size(), 1u);
437 AppendInstruction(new (allocator_) HGoto());
438 }
439 }
440
441 SetLoopHeaderPhiInputs();
442
443 return true;
444 }
445
446 void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
447 DCHECK(!code_item_accessor_.HasCodeItem());
448 DCHECK(method->IsIntrinsic());
449 if (kIsDebugBuild) {
450 ScopedObjectAccess soa(Thread::Current());
451 CHECK(!method->IsSignaturePolymorphic());
452 }
453
454 locals_for_.resize(
455 graph_->GetBlocks().size(),
456 ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));
457
458 // Fill the entry block. Do not add a suspend check: we do not want suspend
459 // checks in intrinsics; intrinsic methods are supposed to be fast.
460 current_block_ = graph_->GetEntryBlock();
461 InitializeBlockLocals();
462 InitializeParameters();
463 AppendInstruction(new (allocator_) HGoto(0u));
464
465 // Fill the body.
466 current_block_ = current_block_->GetSingleSuccessor();
467 InitializeBlockLocals();
468 DCHECK(!IsBlockPopulated(current_block_));
469
470 // Add the intermediate representation, if available, or invoke instruction.
471 size_t in_vregs = graph_->GetNumberOfInVRegs();
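// Wide (long/double) arguments occupy two vregs but only the low vreg holds a value,
// so the non-null entries among the last `in_vregs` locals give the argument count.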
472 size_t number_of_arguments =
473 in_vregs - std::count(current_locals_->end() - in_vregs, current_locals_->end(), nullptr);
474 uint32_t method_idx = dex_compilation_unit_->GetDexMethodIndex();
475 const char* shorty = dex_file_->GetMethodShorty(method_idx);
476 RangeInstructionOperands operands(graph_->GetNumberOfVRegs() - in_vregs, in_vregs);
477 if (!BuildSimpleIntrinsic(method, kNoDexPc, operands, shorty)) {
478 // Some intrinsics without intermediate representation still yield a leaf method,
479 // so build the invoke. Use HInvokeStaticOrDirect even for methods that would
480 // normally use an HInvokeVirtual (sharpen the call).
481 MethodReference target_method(dex_file_, method_idx);
482 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
483 MethodLoadKind::kRuntimeCall,
484 CodePtrLocation::kCallArtMethod,
485 /* method_load_data= */ 0u
486 };
487 InvokeType invoke_type = dex_compilation_unit_->IsStatic() ? kStatic : kDirect;
488 HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
489 allocator_,
490 number_of_arguments,
491 return_type_,
492 kNoDexPc,
493 target_method,
494 method,
495 dispatch_info,
496 invoke_type,
497 target_method,
498 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
499 HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
500 }
501
502 // Add the return instruction.
503 if (return_type_ == DataType::Type::kVoid) {
504 AppendInstruction(new (allocator_) HReturnVoid());
505 } else {
506 AppendInstruction(new (allocator_) HReturn(latest_result_));
507 }
508
509 // Fill the exit block.
510 DCHECK_EQ(current_block_->GetSingleSuccessor(), graph_->GetExitBlock());
511 current_block_ = graph_->GetExitBlock();
512 InitializeBlockLocals();
513 AppendInstruction(new (allocator_) HExit());
514 }
515
516 ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() {
517 ArenaBitVector* locations = ArenaBitVector::Create(local_allocator_,
518 code_item_accessor_.InsnsSizeInCodeUnits(),
519 /* expandable= */ false,
520 kArenaAllocGraphBuilder);
521 locations->ClearAllBits();
522 // The visitor gets called when the line number changes.
523 // In other words, it marks the start of a new Java statement.
524 code_item_accessor_.DecodeDebugPositionInfo([&](const DexFile::PositionInfo& entry) {
525 locations->SetBit(entry.address_);
526 return false;
527 });
528 // Instruction-specific tweaks.
529 for (const DexInstructionPcPair& inst : code_item_accessor_) {
530 switch (inst->Opcode()) {
531 case Instruction::MOVE_EXCEPTION: {
532 // Stop in native debugger after the exception has been moved.
533 // The compiler also expects the move at the start of basic block so
534 // we do not want to interfere by inserting native-debug-info before it.
535 locations->ClearBit(inst.DexPc());
536 DexInstructionIterator next = std::next(DexInstructionIterator(inst));
537 DCHECK(next.DexPc() != inst.DexPc());
538 if (next != code_item_accessor_.end()) {
539 locations->SetBit(next.DexPc());
540 }
541 break;
542 }
543 default:
544 break;
545 }
546 }
547 return locations;
548 }
549
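// Reads vreg `reg_number`, replacing the recorded value with its float/double or
// reference equivalent when the requested type differs from the stored one.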
550 HInstruction* HInstructionBuilder::LoadLocal(uint32_t reg_number, DataType::Type type) const {
551 HInstruction* value = (*current_locals_)[reg_number];
552 DCHECK(value != nullptr);
553
554 // If the operation requests a specific type, we make sure its input is of that type.
555 if (type != value->GetType()) {
556 if (DataType::IsFloatingPointType(type)) {
557 value = ssa_builder_->GetFloatOrDoubleEquivalent(value, type);
558 } else if (type == DataType::Type::kReference) {
559 value = ssa_builder_->GetReferenceTypeEquivalent(value);
560 }
561 DCHECK(value != nullptr);
562 }
563
564 return value;
565 }
566
567 void HInstructionBuilder::UpdateLocal(uint32_t reg_number, HInstruction* stored_value) {
568 DataType::Type stored_type = stored_value->GetType();
569 DCHECK_NE(stored_type, DataType::Type::kVoid);
570
571 // Storing into vreg `reg_number` may implicitly invalidate the surrounding
572 // registers. Consider the following cases:
573 // (1) Storing a wide value must overwrite previous values in both `reg_number`
574 // and `reg_number+1`. We store `nullptr` in `reg_number+1`.
575 // (2) If vreg `reg_number-1` holds a wide value, writing into `reg_number`
576 // must invalidate it. We store `nullptr` in `reg_number-1`.
577 // Consequently, storing a wide value into the high vreg of another wide value
578 // will invalidate both `reg_number-1` and `reg_number+1`.
579
580 if (reg_number != 0) {
581 HInstruction* local_low = (*current_locals_)[reg_number - 1];
582 if (local_low != nullptr && DataType::Is64BitType(local_low->GetType())) {
583 // The vreg we are storing into was previously the high vreg of a pair.
584 // We need to invalidate its low vreg.
585 DCHECK((*current_locals_)[reg_number] == nullptr);
586 (*current_locals_)[reg_number - 1] = nullptr;
587 }
588 }
589
590 (*current_locals_)[reg_number] = stored_value;
591 if (DataType::Is64BitType(stored_type)) {
592 // We are storing a pair. Invalidate the instruction in the high vreg.
593 (*current_locals_)[reg_number + 1] = nullptr;
594 }
595 }
596
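// Creates an HParameterValue for the implicit 'this' (if any) and for each declared
// parameter, and stores each one in the local vreg the dex code uses for that argument.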
597 void HInstructionBuilder::InitializeParameters() {
598 DCHECK(current_block_->IsEntryBlock());
599
600 // outer_compilation_unit_ is null only when unit testing.
601 if (outer_compilation_unit_ == nullptr) {
602 return;
603 }
604
605 const char* shorty = dex_compilation_unit_->GetShorty();
606 uint16_t number_of_parameters = graph_->GetNumberOfInVRegs();
607 uint16_t locals_index = graph_->GetNumberOfLocalVRegs();
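// Incoming arguments are passed in the vregs that follow the purely local ones.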
608 uint16_t parameter_index = 0;
609
610 const dex::MethodId& referrer_method_id =
611 dex_file_->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
612 if (!dex_compilation_unit_->IsStatic()) {
613 // Add the implicit 'this' argument, not expressed in the signature.
614 HParameterValue* parameter = new (allocator_) HParameterValue(*dex_file_,
615 referrer_method_id.class_idx_,
616 parameter_index++,
617 DataType::Type::kReference,
618 /* is_this= */ true);
619 AppendInstruction(parameter);
620 UpdateLocal(locals_index++, parameter);
621 number_of_parameters--;
622 current_this_parameter_ = parameter;
623 } else {
624 DCHECK(current_this_parameter_ == nullptr);
625 }
626
627 const dex::ProtoId& proto = dex_file_->GetMethodPrototype(referrer_method_id);
628 const dex::TypeList* arg_types = dex_file_->GetProtoParameters(proto);
629 for (int i = 0, shorty_pos = 1; i < number_of_parameters; i++) {
630 HParameterValue* parameter = new (allocator_) HParameterValue(
631 *dex_file_,
632 arg_types->GetTypeItem(shorty_pos - 1).type_idx_,
633 parameter_index++,
634 DataType::FromShorty(shorty[shorty_pos]),
635 /* is_this= */ false);
636 ++shorty_pos;
637 AppendInstruction(parameter);
638 // Store the parameter value in the local that the dex code will use
639 // to reference that parameter.
640 UpdateLocal(locals_index++, parameter);
641 if (DataType::Is64BitType(parameter->GetType())) {
642 i++;
643 locals_index++;
644 parameter_index++;
645 }
646 }
647 }
648
649 template<typename T>
650 void HInstructionBuilder::If_22t(const Instruction& instruction, uint32_t dex_pc) {
651 HInstruction* first = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
652 HInstruction* second = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
653 T* comparison = new (allocator_) T(first, second, dex_pc);
654 AppendInstruction(comparison);
655 AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
656 current_block_ = nullptr;
657 }
658
659 template<typename T>
660 void HInstructionBuilder::If_21t(const Instruction& instruction, uint32_t dex_pc) {
661 HInstruction* value = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
662 T* comparison = new (allocator_) T(value, graph_->GetIntConstant(0, dex_pc), dex_pc);
663 AppendInstruction(comparison);
664 AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
665 current_block_ = nullptr;
666 }
667
668 template<typename T>
669 void HInstructionBuilder::Unop_12x(const Instruction& instruction,
670 DataType::Type type,
671 uint32_t dex_pc) {
672 HInstruction* first = LoadLocal(instruction.VRegB(), type);
673 AppendInstruction(new (allocator_) T(type, first, dex_pc));
674 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
675 }
676
677 void HInstructionBuilder::Conversion_12x(const Instruction& instruction,
678 DataType::Type input_type,
679 DataType::Type result_type,
680 uint32_t dex_pc) {
681 HInstruction* first = LoadLocal(instruction.VRegB(), input_type);
682 AppendInstruction(new (allocator_) HTypeConversion(result_type, first, dex_pc));
683 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
684 }
685
686 template<typename T>
687 void HInstructionBuilder::Binop_23x(const Instruction& instruction,
688 DataType::Type type,
689 uint32_t dex_pc) {
690 HInstruction* first = LoadLocal(instruction.VRegB(), type);
691 HInstruction* second = LoadLocal(instruction.VRegC(), type);
692 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
693 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
694 }
695
696 template<typename T>
697 void HInstructionBuilder::Binop_23x_shift(const Instruction& instruction,
698 DataType::Type type,
699 uint32_t dex_pc) {
700 HInstruction* first = LoadLocal(instruction.VRegB(), type);
701 HInstruction* second = LoadLocal(instruction.VRegC(), DataType::Type::kInt32);
702 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
703 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
704 }
705
706 void HInstructionBuilder::Binop_23x_cmp(const Instruction& instruction,
707 DataType::Type type,
708 ComparisonBias bias,
709 uint32_t dex_pc) {
710 HInstruction* first = LoadLocal(instruction.VRegB(), type);
711 HInstruction* second = LoadLocal(instruction.VRegC(), type);
712 AppendInstruction(new (allocator_) HCompare(type, first, second, bias, dex_pc));
713 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
714 }
715
716 template<typename T>
717 void HInstructionBuilder::Binop_12x_shift(const Instruction& instruction,
718 DataType::Type type,
719 uint32_t dex_pc) {
720 HInstruction* first = LoadLocal(instruction.VRegA(), type);
721 HInstruction* second = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
722 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
723 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
724 }
725
726 template<typename T>
727 void HInstructionBuilder::Binop_12x(const Instruction& instruction,
728 DataType::Type type,
729 uint32_t dex_pc) {
730 HInstruction* first = LoadLocal(instruction.VRegA(), type);
731 HInstruction* second = LoadLocal(instruction.VRegB(), type);
732 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
733 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
734 }
735
736 template<typename T>
737 void HInstructionBuilder::Binop_22s(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
738 HInstruction* first = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
739 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22s(), dex_pc);
740 if (reverse) {
741 std::swap(first, second);
742 }
743 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
744 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
745 }
746
747 template<typename T>
748 void HInstructionBuilder::Binop_22b(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
749 HInstruction* first = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
750 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22b(), dex_pc);
751 if (reverse) {
752 std::swap(first, second);
753 }
754 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
755 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
756 }
757
758 // Does the method being compiled need any constructor barriers inserted?
759 // (Always 'false' for methods that aren't <init>.)
760 static bool RequiresConstructorBarrier(const DexCompilationUnit* cu) {
761 // Can be null in unit tests only.
762 if (UNLIKELY(cu == nullptr)) {
763 return false;
764 }
765
766 // Constructor barriers are applicable only for <init> methods.
767 if (LIKELY(!cu->IsConstructor() || cu->IsStatic())) {
768 return false;
769 }
770
771 return cu->RequiresConstructorBarrier();
772 }
773
774 // Returns true if `block`'s single successor starts at the dex_pc immediately
775 // following `instruction` located at `dex_pc`.
776 static bool IsFallthroughInstruction(const Instruction& instruction,
777 uint32_t dex_pc,
778 HBasicBlock* block) {
779 uint32_t next_dex_pc = dex_pc + instruction.SizeInCodeUnits();
780 return block->GetSingleSuccessor()->GetDexPc() == next_dex_pc;
781 }
782
783 void HInstructionBuilder::BuildSwitch(const Instruction& instruction, uint32_t dex_pc) {
784 HInstruction* value = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
785 DexSwitchTable table(instruction, dex_pc);
786
787 if (table.GetNumEntries() == 0) {
788 // Empty Switch. Code falls through to the next block.
789 DCHECK(IsFallthroughInstruction(instruction, dex_pc, current_block_));
790 AppendInstruction(new (allocator_) HGoto(dex_pc));
791 } else if (table.ShouldBuildDecisionTree()) {
792 for (DexSwitchTableIterator it(table); !it.Done(); it.Advance()) {
793 HInstruction* case_value = graph_->GetIntConstant(it.CurrentKey(), dex_pc);
794 HEqual* comparison = new (allocator_) HEqual(value, case_value, dex_pc);
795 AppendInstruction(comparison);
796 AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
797
798 if (!it.IsLast()) {
799 current_block_ = FindBlockStartingAt(it.GetDexPcForCurrentIndex());
800 }
801 }
802 } else {
803 AppendInstruction(
804 new (allocator_) HPackedSwitch(table.GetEntryAt(0), table.GetNumEntries(), value, dex_pc));
805 }
806
807 current_block_ = nullptr;
808 }
809
810 void HInstructionBuilder::BuildReturn(const Instruction& instruction,
811 DataType::Type type,
812 uint32_t dex_pc) {
813 if (type == DataType::Type::kVoid) {
814 // Only <init> (which is a return-void) could possibly have a constructor fence.
815 // This may insert additional redundant constructor fences from the super constructors.
816 // TODO: remove redundant constructor fences (b/36656456).
817 if (RequiresConstructorBarrier(dex_compilation_unit_)) {
818 // Compiling instance constructor.
819 DCHECK_STREQ("<init>", graph_->GetMethodName());
820
821 HInstruction* fence_target = current_this_parameter_;
822 DCHECK(fence_target != nullptr);
823
824 AppendInstruction(new (allocator_) HConstructorFence(fence_target, dex_pc, allocator_));
825 MaybeRecordStat(
826 compilation_stats_,
827 MethodCompilationStat::kConstructorFenceGeneratedFinal);
828 }
829 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
830 // Return value is not used for void functions. We pass NullConstant to
831 // avoid special cases when generating code.
832 AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), dex_pc));
833 }
834 AppendInstruction(new (allocator_) HReturnVoid(dex_pc));
835 } else {
836 DCHECK(!RequiresConstructorBarrier(dex_compilation_unit_));
837 HInstruction* value = LoadLocal(instruction.VRegA(), type);
838 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
839 AppendInstruction(new (allocator_) HMethodExitHook(value, dex_pc));
840 }
841 AppendInstruction(new (allocator_) HReturn(value, dex_pc));
842 }
843 current_block_ = nullptr;
844 }
845
846 static InvokeType GetInvokeTypeFromOpCode(Instruction::Code opcode) {
847 switch (opcode) {
848 case Instruction::INVOKE_STATIC:
849 case Instruction::INVOKE_STATIC_RANGE:
850 return kStatic;
851 case Instruction::INVOKE_DIRECT:
852 case Instruction::INVOKE_DIRECT_RANGE:
853 return kDirect;
854 case Instruction::INVOKE_VIRTUAL:
855 case Instruction::INVOKE_VIRTUAL_RANGE:
856 return kVirtual;
857 case Instruction::INVOKE_INTERFACE:
858 case Instruction::INVOKE_INTERFACE_RANGE:
859 return kInterface;
860 case Instruction::INVOKE_SUPER_RANGE:
861 case Instruction::INVOKE_SUPER:
862 return kSuper;
863 default:
864 LOG(FATAL) << "Unexpected invoke opcode: " << opcode;
865 UNREACHABLE();
866 }
867 }
868
869 // Try to resolve a method using the class linker. Return null if a method could
870 // not be resolved or the resolved method cannot be used for some reason.
871 // Also retrieve method data needed for creating the invoke intermediate
872 // representation while we hold the mutator lock here.
873 static ArtMethod* ResolveMethod(uint16_t method_idx,
874 ArtMethod* referrer,
875 const DexCompilationUnit& dex_compilation_unit,
876 /*inout*/InvokeType* invoke_type,
877 /*out*/MethodReference* resolved_method_info,
878 /*out*/uint16_t* imt_or_vtable_index,
879 /*out*/bool* is_string_constructor) {
880 ScopedObjectAccess soa(Thread::Current());
881
882 ClassLinker* class_linker = dex_compilation_unit.GetClassLinker();
883 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit.GetClassLoader();
884
885 ArtMethod* resolved_method =
886 class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
887 method_idx,
888 dex_compilation_unit.GetDexCache(),
889 class_loader,
890 referrer,
891 *invoke_type);
892
893 if (UNLIKELY(resolved_method == nullptr)) {
894 // Clean up any exception left by type resolution.
895 soa.Self()->ClearException();
896 return nullptr;
897 }
898 DCHECK(!soa.Self()->IsExceptionPending());
899
900 // The referrer may be unresolved for AOT if we're compiling a class that cannot be
901 // resolved because, for example, we don't find a superclass in the classpath.
902 if (referrer == nullptr) {
903 // The class linker cannot check access without a referrer, so we have to do it.
904 // Check if the declaring class or referencing class is accessible.
905 SamePackageCompare same_package(dex_compilation_unit);
906 ObjPtr<mirror::Class> declaring_class = resolved_method->GetDeclaringClass();
907 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
908 if (!declaring_class_accessible) {
909 // It is possible to access members from an inaccessible superclass
910 // by referencing them through an accessible subclass.
911 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
912 dex_compilation_unit.GetDexFile()->GetMethodId(method_idx).class_idx_,
913 dex_compilation_unit.GetDexCache().Get(),
914 class_loader.Get());
915 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the method.
916 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
917 return nullptr;
918 }
919 }
920 // Check whether the method itself is accessible.
921 // Since the referrer is unresolved but the method is resolved, it cannot be
922 // inside the same class, so a private method is known to be inaccessible.
923 // And without a resolved referrer, we cannot check for protected member access
924 // in a superclass, so we handle only access to public members or within the same package.
925 if (resolved_method->IsPrivate() ||
926 (!resolved_method->IsPublic() && !declaring_class_accessible)) {
927 return nullptr;
928 }
929 }
930
931 // We have to special case the invoke-super case, as ClassLinker::ResolveMethod does not.
932 // We need to look at the referrer's super class vtable. We need to do this to know if we need to
933 // make this an invoke-unresolved to handle cross-dex invokes or abstract super methods, both of
934 // which require runtime handling.
935 if (*invoke_type == kSuper) {
936 if (referrer == nullptr) {
937 // We could not determine the method's class, so we need to wait until runtime.
938 DCHECK(Runtime::Current()->IsAotCompiler());
939 return nullptr;
940 }
941 ArtMethod* actual_method = FindSuperMethodToCall</*access_check=*/true>(
942 method_idx, resolved_method, referrer, soa.Self());
943 if (actual_method == nullptr) {
944 // Clean up any exception left by method resolution.
945 soa.Self()->ClearException();
946 return nullptr;
947 }
948 if (!actual_method->IsInvokable()) {
949 // Fail if the actual method cannot be invoked. Otherwise, the runtime resolution stub
950 // could resolve the callee to the wrong method.
951 return nullptr;
952 }
953 // Call GetCanonicalMethod in case the resolved method is a copy: for super calls, the encoding
954 // of ArtMethod in BSS relies on not having copies there.
955 resolved_method = actual_method->GetCanonicalMethod(class_linker->GetImagePointerSize());
956 }
957
958 if (*invoke_type == kInterface) {
959 if (resolved_method->GetDeclaringClass()->IsObjectClass()) {
960 // If the resolved method is from j.l.Object, emit a virtual call instead.
961 // The IMT conflict stub only handles interface methods.
962 *invoke_type = kVirtual;
963 } else {
964 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
965 }
966 }
967
968 *resolved_method_info =
969 MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
970 if (*invoke_type == kVirtual) {
971 // For HInvokeVirtual we need the vtable index.
972 *imt_or_vtable_index = resolved_method->GetVtableIndex();
973 } else if (*invoke_type == kInterface) {
974 // For HInvokeInterface we need the IMT index.
975 *imt_or_vtable_index = ImTable::GetImtIndex(resolved_method);
976 }
977
978 *is_string_constructor =
979 resolved_method->IsConstructor() && resolved_method->GetDeclaringClass()->IsStringClass();
980
981 return resolved_method;
982 }
983
984 bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
985 uint32_t dex_pc,
986 uint32_t method_idx,
987 const InstructionOperands& operands) {
988 InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode());
989 const char* shorty = dex_file_->GetMethodShorty(method_idx);
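// The first shorty character encodes the return type; the rest describe the arguments.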
990 DataType::Type return_type = DataType::FromShorty(shorty[0]);
991
992 // Remove the return type from the 'proto'.
993 size_t number_of_arguments = strlen(shorty) - 1;
994 if (invoke_type != kStatic) { // instance call
995 // One extra argument for 'this'.
996 number_of_arguments++;
997 }
998
999 MethodReference resolved_method_reference(nullptr, 0u);
1000 bool is_string_constructor = false;
1001 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1002 ArtMethod* resolved_method = ResolveMethod(method_idx,
1003 graph_->GetArtMethod(),
1004 *dex_compilation_unit_,
1005 &invoke_type,
1006 &resolved_method_reference,
1007 &imt_or_vtable_index,
1008 &is_string_constructor);
1009
1010 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1011 if (UNLIKELY(resolved_method == nullptr)) {
1012 DCHECK(!Thread::Current()->IsExceptionPending());
1013 MaybeRecordStat(compilation_stats_,
1014 MethodCompilationStat::kUnresolvedMethod);
1015 HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
1016 number_of_arguments,
1017 return_type,
1018 dex_pc,
1019 method_reference,
1020 invoke_type);
1021 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ true);
1022 }
1023
1024 // Replace calls to String.<init> with StringFactory.
1025 if (is_string_constructor) {
1026 uint32_t string_init_entry_point = WellKnownClasses::StringInitToEntryPoint(resolved_method);
1027 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
1028 MethodLoadKind::kStringInit,
1029 CodePtrLocation::kCallArtMethod,
1030 dchecked_integral_cast<uint64_t>(string_init_entry_point)
1031 };
1032 // We pass null for the resolved_method to ensure optimizations
1033 // don't rely on it.
1034 HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
1035 allocator_,
1036 number_of_arguments - 1,
1037 /* return_type= */ DataType::Type::kReference,
1038 dex_pc,
1039 method_reference,
1040 /* resolved_method= */ nullptr,
1041 dispatch_info,
1042 invoke_type,
1043 resolved_method_reference,
1044 HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
1045 return HandleStringInit(invoke, operands, shorty);
1046 }
1047
1048 // Potential class initialization check, in the case of a static method call.
1049 HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement =
1050 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1051 HClinitCheck* clinit_check = nullptr;
1052 if (invoke_type == kStatic) {
1053 clinit_check = ProcessClinitCheckForInvoke(dex_pc, resolved_method, &clinit_check_requirement);
1054 }
1055
1056 // Try to build an HIR replacement for the intrinsic.
1057 if (UNLIKELY(resolved_method->IsIntrinsic())) {
1058 // All intrinsics are in the primary boot image, so their class can always be referenced
1059 // and we do not need to rely on the implicit class initialization check. The class should
1060 // be initialized but we do not require that here.
1061 DCHECK_NE(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
1062 if (BuildSimpleIntrinsic(resolved_method, dex_pc, operands, shorty)) {
1063 return true;
1064 }
1065 }
1066
1067 HInvoke* invoke = nullptr;
1068 if (invoke_type == kDirect || invoke_type == kStatic || invoke_type == kSuper) {
1069 // For sharpening, we create another MethodReference, to account for the
1070 // kSuper case below where we cannot find a dex method index.
1071 bool has_method_id = true;
1072 if (invoke_type == kSuper) {
1073 uint32_t dex_method_index = method_reference.index;
1074 if (IsSameDexFile(*resolved_method_reference.dex_file,
1075 *dex_compilation_unit_->GetDexFile())) {
1076 // Update the method index to the one resolved. Note that this may be a no-op if
1077 // we resolved to the method referenced by the instruction.
1078 dex_method_index = resolved_method_reference.index;
1079 } else {
1080 // Try to find a dex method index in this caller's dex file.
1081 ScopedObjectAccess soa(Thread::Current());
1082 dex_method_index = resolved_method->FindDexMethodIndexInOtherDexFile(
1083 *dex_compilation_unit_->GetDexFile(), method_idx);
1084 }
1085 if (dex_method_index == dex::kDexNoIndex) {
1086 has_method_id = false;
1087 } else {
1088 method_reference.index = dex_method_index;
1089 }
1090 }
1091 HInvokeStaticOrDirect::DispatchInfo dispatch_info =
1092 HSharpening::SharpenLoadMethod(resolved_method,
1093 has_method_id,
1094 /* for_interface_call= */ false,
1095 code_generator_);
1096 if (dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
1097 graph_->SetHasDirectCriticalNativeCall(true);
1098 }
1099 invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
1100 number_of_arguments,
1101 return_type,
1102 dex_pc,
1103 method_reference,
1104 resolved_method,
1105 dispatch_info,
1106 invoke_type,
1107 resolved_method_reference,
1108 clinit_check_requirement);
1109 if (clinit_check != nullptr) {
1110 // Add the class initialization check as last input of `invoke`.
1111 DCHECK_EQ(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit);
1112 size_t clinit_check_index = invoke->InputCount() - 1u;
1113 DCHECK(invoke->InputAt(clinit_check_index) == nullptr);
1114 invoke->SetArgumentAt(clinit_check_index, clinit_check);
1115 }
1116 } else if (invoke_type == kVirtual) {
1117 invoke = new (allocator_) HInvokeVirtual(allocator_,
1118 number_of_arguments,
1119 return_type,
1120 dex_pc,
1121 method_reference,
1122 resolved_method,
1123 resolved_method_reference,
1124 /*vtable_index=*/ imt_or_vtable_index);
1125 } else {
1126 DCHECK_EQ(invoke_type, kInterface);
1127 if (kIsDebugBuild) {
1128 ScopedObjectAccess soa(Thread::Current());
1129 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
1130 }
1131 MethodLoadKind load_kind = HSharpening::SharpenLoadMethod(
1132 resolved_method,
1133 /* has_method_id= */ true,
1134 /* for_interface_call= */ true,
1135 code_generator_)
1136 .method_load_kind;
1137 invoke = new (allocator_) HInvokeInterface(allocator_,
1138 number_of_arguments,
1139 return_type,
1140 dex_pc,
1141 method_reference,
1142 resolved_method,
1143 resolved_method_reference,
1144 /*imt_index=*/ imt_or_vtable_index,
1145 load_kind);
1146 }
1147 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1148 }
1149
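// Returns whether a VarHandle accessor returns a reference loaded through the VarHandle;
// such a result needs a type check since the intrinsic does not verify the stored type.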
1150 static bool VarHandleAccessorNeedsReturnTypeCheck(HInvoke* invoke, DataType::Type return_type) {
1151 mirror::VarHandle::AccessModeTemplate access_mode_template =
1152 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1153
1154 switch (access_mode_template) {
1155 case mirror::VarHandle::AccessModeTemplate::kGet:
1156 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate:
1157 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange:
1158 return return_type == DataType::Type::kReference;
1159 case mirror::VarHandle::AccessModeTemplate::kSet:
1160 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet:
1161 return false;
1162 }
1163 }
1164
1165 // This function initializes `VarHandleOptimizations`, does a number of static checks and disables
1166 // the intrinsic if some of the checks fail. This is necessary for the code generator to work (for
1167 // both the baseline and the optimizing compiler).
1168 static void DecideVarHandleIntrinsic(HInvoke* invoke) {
1169 switch (invoke->GetIntrinsic()) {
1170 case Intrinsics::kVarHandleCompareAndExchange:
1171 case Intrinsics::kVarHandleCompareAndExchangeAcquire:
1172 case Intrinsics::kVarHandleCompareAndExchangeRelease:
1173 case Intrinsics::kVarHandleCompareAndSet:
1174 case Intrinsics::kVarHandleGet:
1175 case Intrinsics::kVarHandleGetAcquire:
1176 case Intrinsics::kVarHandleGetAndAdd:
1177 case Intrinsics::kVarHandleGetAndAddAcquire:
1178 case Intrinsics::kVarHandleGetAndAddRelease:
1179 case Intrinsics::kVarHandleGetAndBitwiseAnd:
1180 case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
1181 case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
1182 case Intrinsics::kVarHandleGetAndBitwiseOr:
1183 case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
1184 case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
1185 case Intrinsics::kVarHandleGetAndBitwiseXor:
1186 case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
1187 case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
1188 case Intrinsics::kVarHandleGetAndSet:
1189 case Intrinsics::kVarHandleGetAndSetAcquire:
1190 case Intrinsics::kVarHandleGetAndSetRelease:
1191 case Intrinsics::kVarHandleGetOpaque:
1192 case Intrinsics::kVarHandleGetVolatile:
1193 case Intrinsics::kVarHandleSet:
1194 case Intrinsics::kVarHandleSetOpaque:
1195 case Intrinsics::kVarHandleSetRelease:
1196 case Intrinsics::kVarHandleSetVolatile:
1197 case Intrinsics::kVarHandleWeakCompareAndSet:
1198 case Intrinsics::kVarHandleWeakCompareAndSetAcquire:
1199 case Intrinsics::kVarHandleWeakCompareAndSetPlain:
1200 case Intrinsics::kVarHandleWeakCompareAndSetRelease:
1201 break;
1202 default:
1203 return; // Not a VarHandle intrinsic, skip.
1204 }
1205
1206 DCHECK(invoke->IsInvokePolymorphic());
1207 VarHandleOptimizations optimizations(invoke);
1208
1209 // Do only simple static checks here (those for which we have enough information). More complex
1210 // checks should be done in instruction simplifier, which runs after other optimization passes
1211 // that may provide useful information.
1212
1213 size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
1214 if (expected_coordinates_count > 2u) {
1215 optimizations.SetDoNotIntrinsify();
1216 return;
1217 }
1218 if (expected_coordinates_count != 0u) {
1219 // Except for static fields (no coordinates), the first coordinate must be a reference.
1220 // Do not intrinsify if the reference is null as we would always go to slow path anyway.
1221 HInstruction* object = invoke->InputAt(1);
1222 if (object->GetType() != DataType::Type::kReference || object->IsNullConstant()) {
1223 optimizations.SetDoNotIntrinsify();
1224 return;
1225 }
1226 }
1227 if (expected_coordinates_count == 2u) {
1228 // For arrays and views, the second coordinate must be convertible to `int`.
1229 // In this context, `boolean` is not convertible but we have to look at the shorty
1230 // as compiler transformations can give the invoke a valid boolean input.
1231 DataType::Type index_type = GetDataTypeFromShorty(invoke, 2);
1232 if (index_type == DataType::Type::kBool ||
1233 DataType::Kind(index_type) != DataType::Type::kInt32) {
1234 optimizations.SetDoNotIntrinsify();
1235 return;
1236 }
1237 }
1238
1239 uint32_t number_of_arguments = invoke->GetNumberOfArguments();
1240 DataType::Type return_type = invoke->GetType();
1241 mirror::VarHandle::AccessModeTemplate access_mode_template =
1242 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1243 switch (access_mode_template) {
1244 case mirror::VarHandle::AccessModeTemplate::kGet:
1245 // The return type should be the same as varType, so it shouldn't be void.
1246 if (return_type == DataType::Type::kVoid) {
1247 optimizations.SetDoNotIntrinsify();
1248 return;
1249 }
1250 break;
1251 case mirror::VarHandle::AccessModeTemplate::kSet:
1252 if (return_type != DataType::Type::kVoid) {
1253 optimizations.SetDoNotIntrinsify();
1254 return;
1255 }
1256 break;
1257 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet: {
1258 if (return_type != DataType::Type::kBool) {
1259 optimizations.SetDoNotIntrinsify();
1260 return;
1261 }
1262 uint32_t expected_value_index = number_of_arguments - 2;
1263 uint32_t new_value_index = number_of_arguments - 1;
1264 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1265 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1266 if (expected_value_type != new_value_type) {
1267 optimizations.SetDoNotIntrinsify();
1268 return;
1269 }
1270 break;
1271 }
1272 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange: {
1273 uint32_t expected_value_index = number_of_arguments - 2;
1274 uint32_t new_value_index = number_of_arguments - 1;
1275 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1276 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1277 if (expected_value_type != new_value_type || return_type != expected_value_type) {
1278 optimizations.SetDoNotIntrinsify();
1279 return;
1280 }
1281 break;
1282 }
1283 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate: {
1284 DataType::Type value_type = GetDataTypeFromShorty(invoke, number_of_arguments - 1);
1285 if (IsVarHandleGetAndAdd(invoke) &&
1286 (value_type == DataType::Type::kReference || value_type == DataType::Type::kBool)) {
1287 // We should only add numerical types.
1288 //
1289 // For byte array views floating-point types are not allowed, see javadoc comments for
1290 // java.lang.invoke.MethodHandles.byteArrayViewVarHandle(). But ART treats them as numeric
1291 // types in ByteArrayViewVarHandle::Access(). Consequently we do generate intrinsic code,
1292 // but it always fails the access mode check at runtime.
1293 optimizations.SetDoNotIntrinsify();
1294 return;
1295 } else if (IsVarHandleGetAndBitwiseOp(invoke) && !DataType::IsIntegralType(value_type)) {
1296 // We can only apply operators to bitwise integral types.
1297 // Note that bitwise VarHandle operations accept a non-integral boolean type and
1298 // perform the appropriate logical operation. However, the result is the same as
1299 // using the bitwise operation on our boolean representation and this fits well
1300 // with DataType::IsIntegralType() treating the compiler type kBool as integral.
1301 optimizations.SetDoNotIntrinsify();
1302 return;
1303 }
1304 if (value_type != return_type) {
1305 optimizations.SetDoNotIntrinsify();
1306 return;
1307 }
1308 break;
1309 }
1310 }
1311 }
1312
1313 bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
1314 uint32_t method_idx,
1315 dex::ProtoIndex proto_idx,
1316 const InstructionOperands& operands) {
1317 const char* shorty = dex_file_->GetShorty(proto_idx);
1318 DCHECK_EQ(1 + ArtMethod::NumArgRegisters(shorty), operands.GetNumberOfOperands());
1319 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1320 size_t number_of_arguments = strlen(shorty);
1321 // We use ResolveMethod which is also used in BuildInvoke in order to
1322 // not duplicate code. As such, we need to provide is_string_constructor
1323 // even if we don't need it afterwards.
1324 InvokeType invoke_type = InvokeType::kPolymorphic;
1325 bool is_string_constructor = false;
1326 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1327 MethodReference resolved_method_reference(nullptr, 0u);
1328 ArtMethod* resolved_method = ResolveMethod(method_idx,
1329 graph_->GetArtMethod(),
1330 *dex_compilation_unit_,
1331 &invoke_type,
1332 &resolved_method_reference,
1333 &imt_or_vtable_index,
1334 &is_string_constructor);
1335
1336 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1337 HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
1338 number_of_arguments,
1339 return_type,
1340 dex_pc,
1341 method_reference,
1342 resolved_method,
1343 resolved_method_reference,
1344 proto_idx);
1345 if (!HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false)) {
1346 return false;
1347 }
1348
1349 if (invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvoke &&
1350 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvokeExact &&
1351 VarHandleAccessorNeedsReturnTypeCheck(invoke, return_type)) {
1352 // Type check is needed because VarHandle intrinsics do not type check the retrieved reference.
1353 ScopedObjectAccess soa(Thread::Current());
1354 ArtMethod* referrer = graph_->GetArtMethod();
1355 dex::TypeIndex return_type_index =
1356 referrer->GetDexFile()->GetProtoId(proto_idx).return_type_idx_;
1357
1358 BuildTypeCheck(/* is_instance_of= */ false, invoke, return_type_index, dex_pc);
1359 latest_result_ = current_block_->GetLastInstruction();
1360 }
1361
1362 DecideVarHandleIntrinsic(invoke);
1363
1364 return true;
1365 }
1366
1367
1368 bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
1369 uint32_t call_site_idx,
1370 const InstructionOperands& operands) {
1371 dex::ProtoIndex proto_idx = dex_file_->GetProtoIndexForCallSite(call_site_idx);
1372 const char* shorty = dex_file_->GetShorty(proto_idx);
1373 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1374 size_t number_of_arguments = strlen(shorty) - 1;
1375 // HInvokeCustom takes a kDexNoIndex method reference.
1376 MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
1377 HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
1378 number_of_arguments,
1379 call_site_idx,
1380 return_type,
1381 dex_pc,
1382 method_reference);
1383 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1384 }
1385
1386 HNewInstance* HInstructionBuilder::BuildNewInstance(dex::TypeIndex type_index, uint32_t dex_pc) {
1387 ScopedObjectAccess soa(Thread::Current());
1388
1389 HLoadClass* load_class = BuildLoadClass(type_index, dex_pc);
1390
1391 HInstruction* cls = load_class;
1392 Handle<mirror::Class> klass = load_class->GetClass();
1393
1394 if (!IsInitialized(klass.Get())) {
1395 cls = new (allocator_) HClinitCheck(load_class, dex_pc);
1396 AppendInstruction(cls);
1397 }
1398
1399 // Only the access check entrypoint handles the finalizable class case. If we
1400 // need access checks, then we haven't resolved the method and the class may
1401 // again be finalizable.
1402 QuickEntrypointEnum entrypoint = kQuickAllocObjectInitialized;
1403 if (load_class->NeedsAccessCheck() ||
1404 klass == nullptr || // Finalizable/instantiable is unknown.
1405 klass->IsFinalizable() ||
1406 klass.Get() == klass->GetClass() || // Classes cannot be allocated in code
1407 !klass->IsInstantiable()) {
1408 entrypoint = kQuickAllocObjectWithChecks;
1409 }
1410 // We will always be able to resolve the string class since it is in the BCP.
1411 if (!klass.IsNull() && klass->IsStringClass()) {
1412 entrypoint = kQuickAllocStringObject;
1413 }
1414
1415 // Consider classes we haven't resolved as potentially finalizable.
1416 bool finalizable = (klass == nullptr) || klass->IsFinalizable();
1417
1418 HNewInstance* new_instance = new (allocator_) HNewInstance(
1419 cls,
1420 dex_pc,
1421 type_index,
1422 *dex_compilation_unit_->GetDexFile(),
1423 finalizable,
1424 entrypoint);
1425 AppendInstruction(new_instance);
1426
1427 return new_instance;
1428 }
1429
1430 void HInstructionBuilder::BuildConstructorFenceForAllocation(HInstruction* allocation) {
1431 DCHECK(allocation != nullptr &&
1432 (allocation->IsNewInstance() ||
1433 allocation->IsNewArray())); // corresponding to "new" keyword in JLS.
1434
1435 if (allocation->IsNewInstance()) {
1436 // STRING SPECIAL HANDLING:
1437 // -------------------------------
1438 // Strings have a real HNewInstance node but they end up always having 0 uses.
1439 // All uses of a String HNewInstance are always transformed to replace their input
1440 // of the HNewInstance with an input of the invoke to StringFactory.
1441 //
1442 // Do not emit an HConstructorFence here since it can inhibit some String new-instance
1443 // optimizations (to pass checker tests that rely on those optimizations).
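// Illustrative sketch (hypothetical registers) of the dex pattern referred to above:
//   new-instance v0, Ljava/lang/String;
//   ...
//   invoke-direct {v0, v1}, Ljava/lang/String;-><init>(...)V
// The <init> call is modeled as a StringFactory invoke whose result supersedes v0,
// so the HNewInstance is left with no uses and needs no fence here.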
1444 HNewInstance* new_inst = allocation->AsNewInstance();
1445 HLoadClass* load_class = new_inst->GetLoadClass();
1446
1447 Thread* self = Thread::Current();
1448 ScopedObjectAccess soa(self);
1449 StackHandleScope<1> hs(self);
1450 Handle<mirror::Class> klass = load_class->GetClass();
1451 if (klass != nullptr && klass->IsStringClass()) {
1452 return;
1453 // Note: Do not use allocation->IsStringAlloc which requires
1454 // a valid ReferenceTypeInfo, but that doesn't get made until after reference type
1455 // propagation (and instruction builder is too early).
1456 }
1457 // (In terms of correctness, the StringFactory needs to provide its own
1458 // default initialization barrier, see below.)
1459 }
1460
1461 // JLS 17.4.5 "Happens-before Order" describes:
1462 //
1463 // The default initialization of any object happens-before any other actions (other than
1464 // default-writes) of a program.
1465 //
1466 // In our implementation the default initialization of an object to type T means
1467 // setting all of its initial data (object[0..size)) to 0, and setting the
1468 // object's class header (i.e. object.getClass() == T.class).
1469 //
1470 // In practice this fence ensures that the writes to the object header
1471 // are visible to other threads if this object escapes the current thread.
1472 // (and in theory the 0-initializing, but that happens automatically
1473 // when new memory pages are mapped in by the OS).
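// For example (a sketch in Java terms, hypothetical names): if one thread runs
// `sField = new Foo();` and another thread later reads `sField` and observes the
// new object, the fence emitted below ensures it also observes the object's class
// header (and, per the argument above, its zero-initialized contents).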
1474 HConstructorFence* ctor_fence =
1475 new (allocator_) HConstructorFence(allocation, allocation->GetDexPc(), allocator_);
1476 AppendInstruction(ctor_fence);
1477 MaybeRecordStat(
1478 compilation_stats_,
1479 MethodCompilationStat::kConstructorFenceGeneratedNew);
1480 }
1481
1482 static bool IsInBootImage(ObjPtr<mirror::Class> cls, const CompilerOptions& compiler_options)
1483 REQUIRES_SHARED(Locks::mutator_lock_) {
1484 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(cls)) {
1485 return true;
1486 }
1487 if (compiler_options.IsBootImage() || compiler_options.IsBootImageExtension()) {
1488 std::string temp;
1489 const char* descriptor = cls->GetDescriptor(&temp);
1490 return compiler_options.IsImageClass(descriptor);
1491 } else {
1492 return false;
1493 }
1494 }
1495
1496 static bool IsSubClass(ObjPtr<mirror::Class> to_test, ObjPtr<mirror::Class> super_class)
1497 REQUIRES_SHARED(Locks::mutator_lock_) {
1498 return to_test != nullptr && !to_test->IsInterface() && to_test->IsSubClass(super_class);
1499 }
1500
1501 static bool HasTrivialClinit(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
1502 REQUIRES_SHARED(Locks::mutator_lock_) {
1503 // Check if the class has encoded fields that trigger bytecode execution.
1504 // (Encoded fields are just a different representation of <clinit>.)
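// Rough examples (hypothetical Java fields) of what this function treats as trivial:
//   static int kAnswer = 42;          // constant value: trivial
//   static int[] kTable = {1, 2, 3};  // primitive array fill: trivial
//   static Foo kFoo = new Foo();      // allocation / method call: not trivial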
1505 if (klass->NumStaticFields() != 0u) {
1506 DCHECK(klass->GetClassDef() != nullptr);
1507 EncodedStaticFieldValueIterator it(klass->GetDexFile(), *klass->GetClassDef());
1508 for (; it.HasNext(); it.Next()) {
1509 switch (it.GetValueType()) {
1510 case EncodedArrayValueIterator::ValueType::kBoolean:
1511 case EncodedArrayValueIterator::ValueType::kByte:
1512 case EncodedArrayValueIterator::ValueType::kShort:
1513 case EncodedArrayValueIterator::ValueType::kChar:
1514 case EncodedArrayValueIterator::ValueType::kInt:
1515 case EncodedArrayValueIterator::ValueType::kLong:
1516 case EncodedArrayValueIterator::ValueType::kFloat:
1517 case EncodedArrayValueIterator::ValueType::kDouble:
1518 case EncodedArrayValueIterator::ValueType::kNull:
1519 case EncodedArrayValueIterator::ValueType::kString:
1520 // Primitive, null or j.l.String initialization is permitted.
1521 break;
1522 case EncodedArrayValueIterator::ValueType::kType:
1523 // Type initialization can load classes and execute bytecode through a class loader
1524 // which can execute arbitrary bytecode. We do not optimize for known class loaders;
1525 // kType is rarely used (if ever).
1526 return false;
1527 default:
1528 // Other types in the encoded static field list are rejected by the DexFileVerifier.
1529 LOG(FATAL) << "Unexpected type " << it.GetValueType();
1530 UNREACHABLE();
1531 }
1532 }
1533 }
1534 // Check if the class has <clinit> that executes arbitrary code.
1535 // Initialization of static fields of the class itself with constants is allowed.
1536 ArtMethod* clinit = klass->FindClassInitializer(pointer_size);
1537 if (clinit != nullptr) {
1538 const DexFile& dex_file = *clinit->GetDexFile();
1539 CodeItemInstructionAccessor accessor(dex_file, clinit->GetCodeItem());
1540 for (DexInstructionPcPair it : accessor) {
1541 switch (it->Opcode()) {
1542 case Instruction::CONST_4:
1543 case Instruction::CONST_16:
1544 case Instruction::CONST:
1545 case Instruction::CONST_HIGH16:
1546 case Instruction::CONST_WIDE_16:
1547 case Instruction::CONST_WIDE_32:
1548 case Instruction::CONST_WIDE:
1549 case Instruction::CONST_WIDE_HIGH16:
1550 case Instruction::CONST_STRING:
1551 case Instruction::CONST_STRING_JUMBO:
1552 // Primitive, null or j.l.String initialization is permitted.
1553 break;
1554 case Instruction::RETURN_VOID:
1555 break;
1556 case Instruction::SPUT:
1557 case Instruction::SPUT_WIDE:
1558 case Instruction::SPUT_OBJECT:
1559 case Instruction::SPUT_BOOLEAN:
1560 case Instruction::SPUT_BYTE:
1561 case Instruction::SPUT_CHAR:
1562 case Instruction::SPUT_SHORT:
1563 // Only initialization of a static field of the same class is permitted.
1564 if (dex_file.GetFieldId(it->VRegB_21c()).class_idx_ != klass->GetDexTypeIndex()) {
1565 return false;
1566 }
1567 break;
1568 case Instruction::NEW_ARRAY:
1569 // Only primitive arrays are permitted.
1570 if (Primitive::GetType(dex_file.GetTypeDescriptor(dex_file.GetTypeId(
1571 dex::TypeIndex(it->VRegC_22c())))[1]) == Primitive::kPrimNot) {
1572 return false;
1573 }
1574 break;
1575 case Instruction::APUT:
1576 case Instruction::APUT_WIDE:
1577 case Instruction::APUT_BOOLEAN:
1578 case Instruction::APUT_BYTE:
1579 case Instruction::APUT_CHAR:
1580 case Instruction::APUT_SHORT:
1581 case Instruction::FILL_ARRAY_DATA:
1582 case Instruction::NOP:
1583 // Allow initialization of primitive arrays (only constants can be stored).
1584 // Note: We expect NOPs used for fill-array-data-payload but accept all NOPs
1585 // (even unreferenced switch payloads if they make it through the verifier).
1586 break;
1587 default:
1588 return false;
1589 }
1590 }
1591 }
1592 return true;
1593 }
1594
1595 static bool HasTrivialInitialization(ObjPtr<mirror::Class> cls,
1596 const CompilerOptions& compiler_options)
1597 REQUIRES_SHARED(Locks::mutator_lock_) {
1598 Runtime* runtime = Runtime::Current();
1599 PointerSize pointer_size = runtime->GetClassLinker()->GetImagePointerSize();
1600
1601 // Check the superclass chain.
1602 for (ObjPtr<mirror::Class> klass = cls; klass != nullptr; klass = klass->GetSuperClass()) {
1603 if (klass->IsInitialized() && IsInBootImage(klass, compiler_options)) {
1604 break; // `klass` and its superclasses are already initialized in the boot image.
1605 }
1606 if (!HasTrivialClinit(klass, pointer_size)) {
1607 return false;
1608 }
1609 }
1610
1611 // Also check interfaces with default methods as they need to be initialized as well.
1612 ObjPtr<mirror::IfTable> iftable = cls->GetIfTable();
1613 DCHECK(iftable != nullptr);
1614 for (int32_t i = 0, count = iftable->Count(); i != count; ++i) {
1615 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
1616 if (!iface->HasDefaultMethods()) {
1617 continue; // Initializing `cls` does not initialize this interface.
1618 }
1619 if (iface->IsInitialized() && IsInBootImage(iface, compiler_options)) {
1620 continue; // This interface is already initialized in the boot image.
1621 }
1622 if (!HasTrivialClinit(iface, pointer_size)) {
1623 return false;
1624 }
1625 }
1626 return true;
1627 }
1628
1629 bool HInstructionBuilder::IsInitialized(ObjPtr<mirror::Class> cls) const {
1630 if (cls == nullptr) {
1631 return false;
1632 }
1633
1634 // Check if the class will be initialized at runtime.
1635 if (cls->IsInitialized()) {
1636 const CompilerOptions& compiler_options = code_generator_->GetCompilerOptions();
1637 if (compiler_options.IsAotCompiler()) {
1638 // Assume loaded only if klass is in the boot image. App classes cannot be assumed
1639 // loaded because we don't even know what class loader will be used to load them.
1640 if (IsInBootImage(cls, compiler_options)) {
1641 return true;
1642 }
1643 } else {
1644 DCHECK(compiler_options.IsJitCompiler());
1645 if (Runtime::Current()->GetJit()->CanAssumeInitialized(
1646 cls,
1647 compiler_options.IsJitCompilerForSharedCode())) {
1648 // For JIT, the class cannot revert to an uninitialized state.
1649 return true;
1650 }
1651 }
1652 }
1653
1654 // We can avoid the class initialization check for `cls` in static methods and constructors
1655 // in the very same class; invoking a static method involves a class initialization check
1656 // and so does the instance allocation that must be executed before invoking a constructor.
1657 // Other instance methods of the same class can run on an escaped instance
1658 // of an erroneous class. Even a superclass may need to be checked as the subclass
1659 // can be completely initialized while the superclass is initializing and the subclass
1660 // remains initialized when the superclass initializer throws afterwards. b/62478025
1661 // Note: The HClinitCheck+HInvokeStaticOrDirect merging can still apply.
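// Concrete example (hypothetical class Foo): in a static method of Foo the runtime has
// already performed Foo's initialization check when dispatching the call, and in
// Foo.<init> the preceding instance allocation did, so no extra check is needed for Foo.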
1662 auto is_static_method_or_constructor_of_cls = [cls](const DexCompilationUnit& compilation_unit)
1663 REQUIRES_SHARED(Locks::mutator_lock_) {
1664 return (compilation_unit.GetAccessFlags() & (kAccStatic | kAccConstructor)) != 0u &&
1665 compilation_unit.GetCompilingClass().Get() == cls;
1666 };
1667 if (is_static_method_or_constructor_of_cls(*outer_compilation_unit_) ||
1668 // Check also the innermost method. Though excessive copies of ClinitCheck can be
1669 // eliminated by GVN, that happens only after the decision whether to inline the
1670 // graph or not and that may depend on the presence of the ClinitCheck.
1671 // TODO: We should walk over the entire inlined method chain, but we don't pass that
1672 // information to the builder.
1673 is_static_method_or_constructor_of_cls(*dex_compilation_unit_)) {
1674 return true;
1675 }
1676
1677 // Otherwise, we may be able to avoid the check if `cls` is a superclass of a method being
1678 // compiled here (anywhere in the inlining chain) as the `cls` must have started initializing
1679 // before calling any `cls` or subclass methods. Static methods require a clinit check and
1680 // instance methods require an instance which cannot be created before doing a clinit check.
1681 // When a subclass of `cls` starts initializing, it starts initializing its superclass
1682 // chain up to `cls` without running any bytecode, i.e. without any opportunity for circular
1683 // initialization weirdness.
1684 //
1685 // If the initialization of `cls` is trivial (`cls` and its superclasses and superinterfaces
1686 // with default methods initialize only their own static fields using constant values), it must
1687 // complete, either successfully or by throwing and marking `cls` erroneous, without allocating
1688 // any instances of `cls` or subclasses (or any other class) and without calling any methods.
1689 // If it completes by throwing, no instances of `cls` shall be created and no subclass method
1690 // bytecode shall execute (see above), therefore the instruction we're building shall be
1691 // unreachable. By reaching the instruction, we know that `cls` was initialized successfully.
1692 //
1693 // TODO: We should walk over the entire inlined methods chain, but we don't pass that
1694 // information to the builder. (We could also check if we're guaranteed a non-null instance
1695 // of `cls` at this location but that's outside the scope of the instruction builder.)
1696 bool is_subclass = IsSubClass(outer_compilation_unit_->GetCompilingClass().Get(), cls);
1697 if (dex_compilation_unit_ != outer_compilation_unit_) {
1698 is_subclass = is_subclass ||
1699 IsSubClass(dex_compilation_unit_->GetCompilingClass().Get(), cls);
1700 }
1701 if (is_subclass && HasTrivialInitialization(cls, code_generator_->GetCompilerOptions())) {
1702 return true;
1703 }
1704
1705 return false;
1706 }
1707
1708 HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
1709 uint32_t dex_pc,
1710 ArtMethod* resolved_method,
1711 HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) {
1712 ScopedObjectAccess soa(Thread::Current());
1713 ObjPtr<mirror::Class> klass = resolved_method->GetDeclaringClass();
1714
1715 HClinitCheck* clinit_check = nullptr;
1716 if (IsInitialized(klass)) {
1717 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1718 } else {
1719 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
1720 HLoadClass* cls = BuildLoadClass(h_klass->GetDexTypeIndex(),
1721 h_klass->GetDexFile(),
1722 h_klass,
1723 dex_pc,
1724 /* needs_access_check= */ false);
1725 if (cls != nullptr) {
1726 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
1727 clinit_check = new (allocator_) HClinitCheck(cls, dex_pc);
1728 AppendInstruction(clinit_check);
1729 } else {
1730 // Let the invoke handle this with an implicit class initialization check.
1731 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit;
1732 }
1733 }
1734 return clinit_check;
1735 }
1736
1737 bool HInstructionBuilder::SetupInvokeArguments(HInstruction* invoke,
1738 const InstructionOperands& operands,
1739 const char* shorty,
1740 ReceiverArg receiver_arg) {
1741 // Note: The `invoke` can be an intrinsic replacement, so it is not necessarily an HInvoke.
1742 // In that case, do not log errors; they shall be reported when we try to build the HInvoke.
1743 uint32_t shorty_index = 1; // Skip the return type.
1744 const size_t number_of_operands = operands.GetNumberOfOperands();
1745 bool argument_length_error = false;
1746
1747 size_t start_index = 0u;
1748 size_t argument_index = 0u;
1749 if (receiver_arg != ReceiverArg::kNone) {
1750 if (number_of_operands == 0u) {
1751 argument_length_error = true;
1752 } else {
1753 start_index = 1u;
1754 if (receiver_arg != ReceiverArg::kIgnored) {
1755 uint32_t obj_reg = operands.GetOperand(0u);
1756 HInstruction* arg = (receiver_arg == ReceiverArg::kPlainArg)
1757 ? LoadLocal(obj_reg, DataType::Type::kReference)
1758 : LoadNullCheckedLocal(obj_reg, invoke->GetDexPc());
1759 if (receiver_arg != ReceiverArg::kNullCheckedOnly) {
1760 invoke->SetRawInputAt(0u, arg);
1761 argument_index = 1u;
1762 }
1763 }
1764 }
1765 }
1766
1767 for (size_t i = start_index; i < number_of_operands; ++i, ++argument_index) {
1768 // Make sure we don't go over the expected arguments or over the number of
1769 // dex registers given. If the instruction was seen as dead by the verifier,
1770 // it hasn't been properly checked.
1771 if (UNLIKELY(shorty[shorty_index] == 0)) {
1772 argument_length_error = true;
1773 break;
1774 }
1775 DataType::Type type = DataType::FromShorty(shorty[shorty_index++]);
1776 bool is_wide = (type == DataType::Type::kInt64) || (type == DataType::Type::kFloat64);
1777 if (is_wide && ((i + 1 == number_of_operands) ||
1778 (operands.GetOperand(i) + 1 != operands.GetOperand(i + 1)))) {
1779 if (invoke->IsInvoke()) {
1780 // Longs and doubles should be in pairs, that is, sequential registers. The verifier should
1781 // reject any class where this is violated. However, the verifier only does these checks
1782 // on non-trivially dead instructions, so we just bail out of the compilation.
1783 VLOG(compiler) << "Did not compile "
1784 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1785 << " because of non-sequential dex register pair in wide argument";
1786 MaybeRecordStat(compilation_stats_,
1787 MethodCompilationStat::kNotCompiledMalformedOpcode);
1788 }
1789 return false;
1790 }
1791 HInstruction* arg = LoadLocal(operands.GetOperand(i), type);
1792 DCHECK(invoke->InputAt(argument_index) == nullptr);
1793 invoke->SetRawInputAt(argument_index, arg);
1794 if (is_wide) {
1795 ++i;
1796 }
1797 }
1798
1799 argument_length_error = argument_length_error || shorty[shorty_index] != 0;
1800 if (argument_length_error) {
1801 if (invoke->IsInvoke()) {
1802 VLOG(compiler) << "Did not compile "
1803 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1804 << " because of wrong number of arguments in invoke instruction";
1805 MaybeRecordStat(compilation_stats_,
1806 MethodCompilationStat::kNotCompiledMalformedOpcode);
1807 }
1808 return false;
1809 }
1810
1811 if (invoke->IsInvokeStaticOrDirect() &&
1812 HInvokeStaticOrDirect::NeedsCurrentMethodInput(
1813 invoke->AsInvokeStaticOrDirect()->GetDispatchInfo())) {
1814 DCHECK_EQ(argument_index, invoke->AsInvokeStaticOrDirect()->GetCurrentMethodIndex());
1815 DCHECK(invoke->InputAt(argument_index) == nullptr);
1816 invoke->SetRawInputAt(argument_index, graph_->GetCurrentMethod());
1817 }
1818
1819 if (invoke->IsInvokeInterface() &&
1820 (invoke->AsInvokeInterface()->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive)) {
1821 invoke->SetRawInputAt(invoke->AsInvokeInterface()->GetNumberOfArguments() - 1,
1822 graph_->GetCurrentMethod());
1823 }
1824
1825 return true;
1826 }
1827
1828 bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
1829 const InstructionOperands& operands,
1830 const char* shorty,
1831 bool is_unresolved) {
1832 DCHECK_IMPLIES(invoke->IsInvokeStaticOrDirect(),
1833 !invoke->AsInvokeStaticOrDirect()->IsStringInit());
1834
1835 ReceiverArg receiver_arg = (invoke->GetInvokeType() == InvokeType::kStatic)
1836 ? ReceiverArg::kNone
1837 : (is_unresolved ? ReceiverArg::kPlainArg : ReceiverArg::kNullCheckedArg);
1838 if (!SetupInvokeArguments(invoke, operands, shorty, receiver_arg)) {
1839 return false;
1840 }
1841
1842 AppendInstruction(invoke);
1843 latest_result_ = invoke;
1844
1845 return true;
1846 }
1847
1848 bool HInstructionBuilder::BuildSimpleIntrinsic(ArtMethod* method,
1849 uint32_t dex_pc,
1850 const InstructionOperands& operands,
1851 const char* shorty) {
1852 Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
1853 DCHECK_NE(intrinsic, Intrinsics::kNone);
1854 constexpr DataType::Type kInt32 = DataType::Type::kInt32;
1855 constexpr DataType::Type kInt64 = DataType::Type::kInt64;
1856 constexpr DataType::Type kFloat32 = DataType::Type::kFloat32;
1857 constexpr DataType::Type kFloat64 = DataType::Type::kFloat64;
1858 ReceiverArg receiver_arg = method->IsStatic() ? ReceiverArg::kNone : ReceiverArg::kNullCheckedArg;
1859 HInstruction* instruction = nullptr;
1860 switch (intrinsic) {
1861 case Intrinsics::kIntegerRotateRight:
1862 case Intrinsics::kIntegerRotateLeft:
1863 // For rotate left, we negate the distance below.
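// (rotateLeft(x, d) == rotateRight(x, -d), since the rotation distance is taken
// modulo the operand's bit width.)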
1864 instruction = new (allocator_) HRor(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
1865 break;
1866 case Intrinsics::kLongRotateRight:
1867 case Intrinsics::kLongRotateLeft:
1868 // For rotate left, we negate the distance below.
1869 instruction = new (allocator_) HRor(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
1870 break;
1871 case Intrinsics::kIntegerCompare:
1872 instruction = new (allocator_) HCompare(
1873 kInt32, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1874 break;
1875 case Intrinsics::kLongCompare:
1876 instruction = new (allocator_) HCompare(
1877 kInt64, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1878 break;
1879 case Intrinsics::kIntegerSignum:
1880 instruction = new (allocator_) HCompare(
1881 kInt32, /*first=*/ nullptr, graph_->GetIntConstant(0), ComparisonBias::kNoBias, dex_pc);
1882 break;
1883 case Intrinsics::kLongSignum:
1884 instruction = new (allocator_) HCompare(
1885 kInt64, /*first=*/ nullptr, graph_->GetLongConstant(0), ComparisonBias::kNoBias, dex_pc);
1886 break;
1887 case Intrinsics::kFloatIsNaN:
1888 case Intrinsics::kDoubleIsNaN: {
1889 // IsNaN(x) is the same as x != x.
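// (Per IEEE 754, NaN is the only value that compares unequal to itself, so the
// HNotEqual below yields true exactly for NaN inputs.)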
1890 instruction = new (allocator_) HNotEqual(/*first=*/ nullptr, /*second=*/ nullptr, dex_pc);
1891 instruction->AsCondition()->SetBias(ComparisonBias::kLtBias);
1892 break;
1893 }
1894 case Intrinsics::kStringCharAt:
1895 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
1896 instruction = new (allocator_) HArrayGet(/*array=*/ nullptr,
1897 /*index=*/ nullptr,
1898 DataType::Type::kUint16,
1899 SideEffects::None(), // Strings are immutable.
1900 dex_pc,
1901 /*is_string_char_at=*/ true);
1902 break;
1903 case Intrinsics::kStringIsEmpty:
1904 case Intrinsics::kStringLength:
1905 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
1906 // For String.isEmpty(), we add a comparison with 0 below.
1907 instruction =
1908 new (allocator_) HArrayLength(/*array=*/ nullptr, dex_pc, /* is_string_length= */ true);
1909 break;
1910 case Intrinsics::kUnsafeLoadFence:
1911 case Intrinsics::kJdkUnsafeLoadFence:
1912 receiver_arg = ReceiverArg::kNullCheckedOnly;
1913 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1914 break;
1915 case Intrinsics::kUnsafeStoreFence:
1916 case Intrinsics::kJdkUnsafeStoreFence:
1917 receiver_arg = ReceiverArg::kNullCheckedOnly;
1918 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
1919 break;
1920 case Intrinsics::kUnsafeFullFence:
1921 case Intrinsics::kJdkUnsafeFullFence:
1922 receiver_arg = ReceiverArg::kNullCheckedOnly;
1923 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
1924 break;
1925 case Intrinsics::kVarHandleFullFence:
1926 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
1927 break;
1928 case Intrinsics::kVarHandleAcquireFence:
1929 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1930 break;
1931 case Intrinsics::kVarHandleReleaseFence:
1932 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
1933 break;
1934 case Intrinsics::kVarHandleLoadLoadFence:
1935 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1936 break;
1937 case Intrinsics::kVarHandleStoreStoreFence:
1938 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kStoreStore, dex_pc);
1939 break;
1940 case Intrinsics::kMathMinIntInt:
1941 instruction = new (allocator_) HMin(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1942 break;
1943 case Intrinsics::kMathMinLongLong:
1944 instruction = new (allocator_) HMin(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1945 break;
1946 case Intrinsics::kMathMinFloatFloat:
1947 instruction = new (allocator_) HMin(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1948 break;
1949 case Intrinsics::kMathMinDoubleDouble:
1950 instruction = new (allocator_) HMin(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1951 break;
1952 case Intrinsics::kMathMaxIntInt:
1953 instruction = new (allocator_) HMax(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1954 break;
1955 case Intrinsics::kMathMaxLongLong:
1956 instruction = new (allocator_) HMax(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1957 break;
1958 case Intrinsics::kMathMaxFloatFloat:
1959 instruction = new (allocator_) HMax(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1960 break;
1961 case Intrinsics::kMathMaxDoubleDouble:
1962 instruction = new (allocator_) HMax(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1963 break;
1964 case Intrinsics::kMathAbsInt:
1965 instruction = new (allocator_) HAbs(kInt32, /*input=*/ nullptr, dex_pc);
1966 break;
1967 case Intrinsics::kMathAbsLong:
1968 instruction = new (allocator_) HAbs(kInt64, /*input=*/ nullptr, dex_pc);
1969 break;
1970 case Intrinsics::kMathAbsFloat:
1971 instruction = new (allocator_) HAbs(kFloat32, /*input=*/ nullptr, dex_pc);
1972 break;
1973 case Intrinsics::kMathAbsDouble:
1974 instruction = new (allocator_) HAbs(kFloat64, /*input=*/ nullptr, dex_pc);
1975 break;
1976 default:
1977 // We do not have intermediate representation for other intrinsics.
1978 return false;
1979 }
1980 DCHECK(instruction != nullptr);
1981 if (!SetupInvokeArguments(instruction, operands, shorty, receiver_arg)) {
1982 return false;
1983 }
1984
1985 switch (intrinsic) {
1986 case Intrinsics::kIntegerRotateLeft:
1987 case Intrinsics::kLongRotateLeft: {
1988 // Negate the distance value for rotate left.
1989 DCHECK(instruction->IsRor());
1990 HNeg* neg = new (allocator_) HNeg(kInt32, instruction->InputAt(1u));
1991 AppendInstruction(neg);
1992 instruction->SetRawInputAt(1u, neg);
1993 break;
1994 }
1995 case Intrinsics::kFloatIsNaN:
1996 case Intrinsics::kDoubleIsNaN:
1997 // Set the second input to be the same as first.
1998 DCHECK(instruction->IsNotEqual());
1999 DCHECK(instruction->InputAt(1u) == nullptr);
2000 instruction->SetRawInputAt(1u, instruction->InputAt(0u));
2001 break;
2002 case Intrinsics::kStringCharAt: {
2003 // Add bounds check.
2004 HInstruction* array = instruction->InputAt(0u);
2005 HInstruction* index = instruction->InputAt(1u);
2006 HInstruction* length =
2007 new (allocator_) HArrayLength(array, dex_pc, /*is_string_length=*/ true);
2008 AppendInstruction(length);
2009 HBoundsCheck* bounds_check =
2010 new (allocator_) HBoundsCheck(index, length, dex_pc, /*is_string_char_at=*/ true);
2011 AppendInstruction(bounds_check);
2012 graph_->SetHasBoundsChecks(true);
2013 instruction->SetRawInputAt(1u, bounds_check);
2014 break;
2015 }
2016 case Intrinsics::kStringIsEmpty: {
2017 // Compare the length with 0.
2018 DCHECK(instruction->IsArrayLength());
2019 AppendInstruction(instruction);
2020 HEqual* equal = new (allocator_) HEqual(instruction, graph_->GetIntConstant(0), dex_pc);
2021 instruction = equal;
2022 break;
2023 }
2024 default:
2025 break;
2026 }
2027
2028 AppendInstruction(instruction);
2029 latest_result_ = instruction;
2030
2031 return true;
2032 }
2033
2034 bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
2035 const InstructionOperands& operands,
2036 const char* shorty) {
2037 DCHECK(invoke->IsInvokeStaticOrDirect());
2038 DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit());
2039
2040 if (!SetupInvokeArguments(invoke, operands, shorty, ReceiverArg::kIgnored)) {
2041 return false;
2042 }
2043
2044 AppendInstruction(invoke);
2045
2046 // This is a StringFactory call, not an actual String constructor. Its result
2047 // replaces the empty String pre-allocated by NewInstance.
2048 uint32_t orig_this_reg = operands.GetOperand(0);
2049 HInstruction* arg_this = LoadLocal(orig_this_reg, DataType::Type::kReference);
2050
2051 // Replacing the NewInstance might render it redundant. Keep a list of these
2052 // to be visited once it is clear whether it has remaining uses.
2053 if (arg_this->IsNewInstance()) {
2054 ssa_builder_->AddUninitializedString(arg_this->AsNewInstance());
2055 } else {
2056 DCHECK(arg_this->IsPhi());
2057 // We can get a phi as input of a String.<init> if there is a loop between the
2058 // allocation and the String.<init> call. As we don't know which other phis might alias
2059 // with `arg_this`, we keep a record of those invocations so we can later replace
2060 // the allocation with the invocation.
2061 // Add the actual 'this' input so the analysis knows which instruction is the allocation.
2062 // The input will be removed during the analysis.
2063 invoke->AddInput(arg_this);
2064 ssa_builder_->AddUninitializedStringPhi(invoke);
2065 }
2066 // Walk over all vregs and replace any occurrence of `arg_this` with `invoke`.
2067 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
2068 if ((*current_locals_)[vreg] == arg_this) {
2069 (*current_locals_)[vreg] = invoke;
2070 }
2071 }
2072 return true;
2073 }
2074
2075 static DataType::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) {
2076 const dex::FieldId& field_id = dex_file.GetFieldId(field_index);
2077 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
2078 return DataType::FromShorty(type[0]);
2079 }
2080
2081 bool HInstructionBuilder::BuildInstanceFieldAccess(const Instruction& instruction,
2082 uint32_t dex_pc,
2083 bool is_put) {
2084 uint32_t source_or_dest_reg = instruction.VRegA_22c();
2085 uint32_t obj_reg = instruction.VRegB_22c();
2086 uint16_t field_index = instruction.VRegC_22c();
2087
2088 ScopedObjectAccess soa(Thread::Current());
2089 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ false, is_put);
2090
2091 // Generate an explicit null check on the reference, unless the field access
2092 // is unresolved. In that case, we rely on the runtime to perform various
2093 // checks first, followed by a null check.
2094 HInstruction* object = (resolved_field == nullptr)
2095 ? LoadLocal(obj_reg, DataType::Type::kReference)
2096 : LoadNullCheckedLocal(obj_reg, dex_pc);
2097
2098 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2099 if (is_put) {
2100 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2101 HInstruction* field_set = nullptr;
2102 if (resolved_field == nullptr) {
2103 MaybeRecordStat(compilation_stats_,
2104 MethodCompilationStat::kUnresolvedField);
2105 field_set = new (allocator_) HUnresolvedInstanceFieldSet(object,
2106 value,
2107 field_type,
2108 field_index,
2109 dex_pc);
2110 } else {
2111 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2112 field_set = new (allocator_) HInstanceFieldSet(object,
2113 value,
2114 resolved_field,
2115 field_type,
2116 resolved_field->GetOffset(),
2117 resolved_field->IsVolatile(),
2118 field_index,
2119 class_def_index,
2120 *dex_file_,
2121 dex_pc);
2122 }
2123 AppendInstruction(field_set);
2124 } else {
2125 HInstruction* field_get = nullptr;
2126 if (resolved_field == nullptr) {
2127 MaybeRecordStat(compilation_stats_,
2128 MethodCompilationStat::kUnresolvedField);
2129 field_get = new (allocator_) HUnresolvedInstanceFieldGet(object,
2130 field_type,
2131 field_index,
2132 dex_pc);
2133 } else {
2134 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2135 field_get = new (allocator_) HInstanceFieldGet(object,
2136 resolved_field,
2137 field_type,
2138 resolved_field->GetOffset(),
2139 resolved_field->IsVolatile(),
2140 field_index,
2141 class_def_index,
2142 *dex_file_,
2143 dex_pc);
2144 }
2145 AppendInstruction(field_get);
2146 UpdateLocal(source_or_dest_reg, field_get);
2147 }
2148
2149 return true;
2150 }
2151
2152 void HInstructionBuilder::BuildUnresolvedStaticFieldAccess(const Instruction& instruction,
2153 uint32_t dex_pc,
2154 bool is_put,
2155 DataType::Type field_type) {
2156 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2157 uint16_t field_index = instruction.VRegB_21c();
2158
2159 if (is_put) {
2160 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2161 AppendInstruction(
2162 new (allocator_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc));
2163 } else {
2164 AppendInstruction(new (allocator_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc));
2165 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2166 }
2167 }
2168
2169 ArtField* HInstructionBuilder::ResolveField(uint16_t field_idx, bool is_static, bool is_put) {
2170 ScopedObjectAccess soa(Thread::Current());
2171
2172 ClassLinker* class_linker = dex_compilation_unit_->GetClassLinker();
2173 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();
2174
2175 ArtField* resolved_field = class_linker->ResolveFieldJLS(field_idx,
2176 dex_compilation_unit_->GetDexCache(),
2177 class_loader);
2178 DCHECK_EQ(resolved_field == nullptr, soa.Self()->IsExceptionPending())
2179 << "field="
2180 << ((resolved_field == nullptr) ? "null" : resolved_field->PrettyField())
2181 << ", exception="
2182 << (soa.Self()->IsExceptionPending() ? soa.Self()->GetException()->Dump() : "null");
2183 if (UNLIKELY(resolved_field == nullptr)) {
2184 // Clean up any exception left by field resolution.
2185 soa.Self()->ClearException();
2186 return nullptr;
2187 }
2188
2189 if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
2190 return nullptr;
2191 }
2192
2193 // Check access.
2194 Handle<mirror::Class> compiling_class = dex_compilation_unit_->GetCompilingClass();
2195 if (compiling_class == nullptr) {
2196 // Check if the declaring class or referencing class is accessible.
2197 SamePackageCompare same_package(*dex_compilation_unit_);
2198 ObjPtr<mirror::Class> declaring_class = resolved_field->GetDeclaringClass();
2199 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
2200 if (!declaring_class_accessible) {
2201 // It is possible to access members from an inaccessible superclass
2202 // by referencing them through an accessible subclass.
2203 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
2204 dex_compilation_unit_->GetDexFile()->GetFieldId(field_idx).class_idx_,
2205 dex_compilation_unit_->GetDexCache().Get(),
2206 class_loader.Get());
2207 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the field.
2208 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
2209 return nullptr;
2210 }
2211 }
2212 // Check whether the field itself is accessible.
2213 // Since the referrer is unresolved but the field is resolved, it cannot be
2214 // inside the same class, so a private field is known to be inaccessible.
2215 // And without a resolved referrer, we cannot check for protected member access
2216 // in a superclass, so we allow only access to public members or members within the same package.
2217 if (resolved_field->IsPrivate() ||
2218 (!resolved_field->IsPublic() && !declaring_class_accessible)) {
2219 return nullptr;
2220 }
2221 } else if (!compiling_class->CanAccessResolvedField(resolved_field->GetDeclaringClass(),
2222 resolved_field,
2223 dex_compilation_unit_->GetDexCache().Get(),
2224 field_idx)) {
2225 return nullptr;
2226 }
2227
2228 if (is_put) {
2229 if (resolved_field->IsFinal() &&
2230 (compiling_class.Get() != resolved_field->GetDeclaringClass())) {
2231 // Final fields can only be updated within their own class.
2232 // TODO: Only allow it in constructors. b/34966607.
2233 return nullptr;
2234 }
2235
2236 // Note: We do not need to resolve the field type for `get` opcodes.
2237 StackArtFieldHandleScope<1> rhs(soa.Self());
2238 ReflectiveHandle<ArtField> resolved_field_handle(rhs.NewHandle(resolved_field));
2239 if (resolved_field->ResolveType().IsNull()) {
2240 // ArtField::ResolveType() may fail as evidenced with a dexing bug (b/78788577).
2241 soa.Self()->ClearException();
2242 return nullptr; // Failure
2243 }
2244 resolved_field = resolved_field_handle.Get();
2245 }
2246
2247 return resolved_field;
2248 }
2249
2250 void HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
2251 uint32_t dex_pc,
2252 bool is_put) {
2253 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2254 uint16_t field_index = instruction.VRegB_21c();
2255
2256 ScopedObjectAccess soa(Thread::Current());
2257 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ true, is_put);
2258
2259 if (resolved_field == nullptr) {
2260 MaybeRecordStat(compilation_stats_,
2261 MethodCompilationStat::kUnresolvedField);
2262 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2263 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2264 return;
2265 }
2266
2267 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2268
2269 Handle<mirror::Class> klass =
2270 graph_->GetHandleCache()->NewHandle(resolved_field->GetDeclaringClass());
2271 HLoadClass* constant = BuildLoadClass(klass->GetDexTypeIndex(),
2272 klass->GetDexFile(),
2273 klass,
2274 dex_pc,
2275 /* needs_access_check= */ false);
2276
2277 if (constant == nullptr) {
2278 // The class cannot be referenced from this compiled code. Generate
2279 // an unresolved access.
2280 MaybeRecordStat(compilation_stats_,
2281 MethodCompilationStat::kUnresolvedFieldNotAFastAccess);
2282 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2283 return;
2284 }
2285
2286 HInstruction* cls = constant;
2287 if (!IsInitialized(klass.Get())) {
2288 cls = new (allocator_) HClinitCheck(constant, dex_pc);
2289 AppendInstruction(cls);
2290 }
2291
2292 uint16_t class_def_index = klass->GetDexClassDefIndex();
2293 if (is_put) {
2294 // We need to keep the class alive before loading the value.
2295 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2296 DCHECK_EQ(HPhi::ToPhiType(value->GetType()), HPhi::ToPhiType(field_type));
2297 AppendInstruction(new (allocator_) HStaticFieldSet(cls,
2298 value,
2299 resolved_field,
2300 field_type,
2301 resolved_field->GetOffset(),
2302 resolved_field->IsVolatile(),
2303 field_index,
2304 class_def_index,
2305 *dex_file_,
2306 dex_pc));
2307 } else {
2308 AppendInstruction(new (allocator_) HStaticFieldGet(cls,
2309 resolved_field,
2310 field_type,
2311 resolved_field->GetOffset(),
2312 resolved_field->IsVolatile(),
2313 field_index,
2314 class_def_index,
2315 *dex_file_,
2316 dex_pc));
2317 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2318 }
2319 }
2320
2321 void HInstructionBuilder::BuildCheckedDivRem(uint16_t out_vreg,
2322 uint16_t first_vreg,
2323 int64_t second_vreg_or_constant,
2324 uint32_t dex_pc,
2325 DataType::Type type,
2326 bool second_is_constant,
2327 bool isDiv) {
2328 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
2329
2330 HInstruction* first = LoadLocal(first_vreg, type);
2331 HInstruction* second = nullptr;
2332 if (second_is_constant) {
2333 if (type == DataType::Type::kInt32) {
2334 second = graph_->GetIntConstant(second_vreg_or_constant, dex_pc);
2335 } else {
2336 second = graph_->GetLongConstant(second_vreg_or_constant, dex_pc);
2337 }
2338 } else {
2339 second = LoadLocal(second_vreg_or_constant, type);
2340 }
2341
2342 if (!second_is_constant
2343 || (type == DataType::Type::kInt32 && second->AsIntConstant()->GetValue() == 0)
2344 || (type == DataType::Type::kInt64 && second->AsLongConstant()->GetValue() == 0)) {
2345 second = new (allocator_) HDivZeroCheck(second, dex_pc);
2346 AppendInstruction(second);
2347 }
2348
2349 if (isDiv) {
2350 AppendInstruction(new (allocator_) HDiv(type, first, second, dex_pc));
2351 } else {
2352 AppendInstruction(new (allocator_) HRem(type, first, second, dex_pc));
2353 }
2354 UpdateLocal(out_vreg, current_block_->GetLastInstruction());
2355 }
2356
2357 void HInstructionBuilder::BuildArrayAccess(const Instruction& instruction,
2358 uint32_t dex_pc,
2359 bool is_put,
2360 DataType::Type anticipated_type) {
2361 uint8_t source_or_dest_reg = instruction.VRegA_23x();
2362 uint8_t array_reg = instruction.VRegB_23x();
2363 uint8_t index_reg = instruction.VRegC_23x();
2364
2365 HInstruction* object = LoadNullCheckedLocal(array_reg, dex_pc);
2366 HInstruction* length = new (allocator_) HArrayLength(object, dex_pc);
2367 AppendInstruction(length);
2368 HInstruction* index = LoadLocal(index_reg, DataType::Type::kInt32);
2369 index = new (allocator_) HBoundsCheck(index, length, dex_pc);
2370 AppendInstruction(index);
2371 if (is_put) {
2372 HInstruction* value = LoadLocal(source_or_dest_reg, anticipated_type);
2373 // TODO: Insert a type check node if the type is Object.
2374 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2375 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2376 AppendInstruction(aset);
2377 } else {
2378 HArrayGet* aget = new (allocator_) HArrayGet(object, index, anticipated_type, dex_pc);
2379 ssa_builder_->MaybeAddAmbiguousArrayGet(aget);
2380 AppendInstruction(aget);
2381 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2382 }
2383 graph_->SetHasBoundsChecks(true);
2384 }
2385
2386 HNewArray* HInstructionBuilder::BuildNewArray(uint32_t dex_pc,
2387 dex::TypeIndex type_index,
2388 HInstruction* length) {
2389 HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
2390
2391 const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(type_index));
2392 DCHECK_EQ(descriptor[0], '[');
2393 size_t component_type_shift = Primitive::ComponentSizeShift(Primitive::GetType(descriptor[1]));
2394
2395 HNewArray* new_array = new (allocator_) HNewArray(cls, length, dex_pc, component_type_shift);
2396 AppendInstruction(new_array);
2397 return new_array;
2398 }
2399
2400 HNewArray* HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
2401 dex::TypeIndex type_index,
2402 const InstructionOperands& operands) {
2403 const size_t number_of_operands = operands.GetNumberOfOperands();
2404 HInstruction* length = graph_->GetIntConstant(number_of_operands, dex_pc);
2405
2406 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
2407 const char* descriptor = dex_file_->StringByTypeIdx(type_index);
2408 DCHECK_EQ(descriptor[0], '[') << descriptor;
2409 char primitive = descriptor[1];
2410 DCHECK(primitive == 'I'
2411 || primitive == 'L'
2412 || primitive == '[') << descriptor;
2413 bool is_reference_array = (primitive == 'L') || (primitive == '[');
2414 DataType::Type type = is_reference_array ? DataType::Type::kReference : DataType::Type::kInt32;
2415
2416 for (size_t i = 0; i < number_of_operands; ++i) {
2417 HInstruction* value = LoadLocal(operands.GetOperand(i), type);
2418 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2419 HArraySet* aset = new (allocator_) HArraySet(new_array, index, value, type, dex_pc);
2420 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2421 AppendInstruction(aset);
2422 }
2423 latest_result_ = new_array;
2424
2425 return new_array;
2426 }
2427
2428 template <typename T>
2429 void HInstructionBuilder::BuildFillArrayData(HInstruction* object,
2430 const T* data,
2431 uint32_t element_count,
2432 DataType::Type anticipated_type,
2433 uint32_t dex_pc) {
2434 for (uint32_t i = 0; i < element_count; ++i) {
2435 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2436 HInstruction* value = graph_->GetIntConstant(data[i], dex_pc);
2437 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2438 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2439 AppendInstruction(aset);
2440 }
2441 }
2442
2443 void HInstructionBuilder::BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc) {
2444 HInstruction* array = LoadNullCheckedLocal(instruction.VRegA_31t(), dex_pc);
2445
2446 int32_t payload_offset = instruction.VRegB_31t() + dex_pc;
2447 const Instruction::ArrayDataPayload* payload =
2448 reinterpret_cast<const Instruction::ArrayDataPayload*>(
2449 code_item_accessor_.Insns() + payload_offset);
2450 const uint8_t* data = payload->data;
2451 uint32_t element_count = payload->element_count;
2452
2453 if (element_count == 0u) {
2454 // For an empty payload we emit only the null check above.
2455 return;
2456 }
2457
2458 HInstruction* length = new (allocator_) HArrayLength(array, dex_pc);
2459 AppendInstruction(length);
2460
2461 // The implementation of this DEX instruction appears to perform the bounds check
2462 // before doing any stores.
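// In other words, the single HBoundsCheck on index `element_count - 1` below
// covers all of the constant-index stores that follow.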
2463 HInstruction* last_index = graph_->GetIntConstant(payload->element_count - 1, dex_pc);
2464 AppendInstruction(new (allocator_) HBoundsCheck(last_index, length, dex_pc));
2465
2466 switch (payload->element_width) {
2467 case 1:
2468 BuildFillArrayData(array,
2469 reinterpret_cast<const int8_t*>(data),
2470 element_count,
2471 DataType::Type::kInt8,
2472 dex_pc);
2473 break;
2474 case 2:
2475 BuildFillArrayData(array,
2476 reinterpret_cast<const int16_t*>(data),
2477 element_count,
2478 DataType::Type::kInt16,
2479 dex_pc);
2480 break;
2481 case 4:
2482 BuildFillArrayData(array,
2483 reinterpret_cast<const int32_t*>(data),
2484 element_count,
2485 DataType::Type::kInt32,
2486 dex_pc);
2487 break;
2488 case 8:
2489 BuildFillWideArrayData(array,
2490 reinterpret_cast<const int64_t*>(data),
2491 element_count,
2492 dex_pc);
2493 break;
2494 default:
2495 LOG(FATAL) << "Unknown element width: " << payload->element_width;
2496 }
2497 graph_->SetHasBoundsChecks(true);
2498 }
2499
2500 void HInstructionBuilder::BuildFillWideArrayData(HInstruction* object,
2501 const int64_t* data,
2502 uint32_t element_count,
2503 uint32_t dex_pc) {
2504 for (uint32_t i = 0; i < element_count; ++i) {
2505 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2506 HInstruction* value = graph_->GetLongConstant(data[i], dex_pc);
2507 HArraySet* aset =
2508 new (allocator_) HArraySet(object, index, value, DataType::Type::kInt64, dex_pc);
2509 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2510 AppendInstruction(aset);
2511 }
2512 }
2513
2514 void HInstructionBuilder::BuildLoadString(dex::StringIndex string_index, uint32_t dex_pc) {
2515 HLoadString* load_string =
2516 new (allocator_) HLoadString(graph_->GetCurrentMethod(), string_index, *dex_file_, dex_pc);
2517 HSharpening::ProcessLoadString(load_string,
2518 code_generator_,
2519 *dex_compilation_unit_,
2520 graph_->GetHandleCache()->GetHandles());
2521 AppendInstruction(load_string);
2522 }
2523
2524 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index, uint32_t dex_pc) {
2525 ScopedObjectAccess soa(Thread::Current());
2526 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2527 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2528 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2529 return BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2530 }
2531
2532 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index,
2533 const DexFile& dex_file,
2534 Handle<mirror::Class> klass,
2535 uint32_t dex_pc,
2536 bool needs_access_check) {
2537 // Try to find a reference in the compiling dex file.
2538 const DexFile* actual_dex_file = &dex_file;
2539 if (!IsSameDexFile(dex_file, *dex_compilation_unit_->GetDexFile())) {
2540 dex::TypeIndex local_type_index =
2541 klass->FindTypeIndexInOtherDexFile(*dex_compilation_unit_->GetDexFile());
2542 if (local_type_index.IsValid()) {
2543 type_index = local_type_index;
2544 actual_dex_file = dex_compilation_unit_->GetDexFile();
2545 }
2546 }
2547
2548 // We cannot use the referrer's class load kind if we need to do an access check.
2549 // If the `klass` is unresolved, we need access check with the exception of the referrer's
2550 // class, see LoadClassNeedsAccessCheck(), so the `!needs_access_check` check is enough.
2551 // Otherwise, also check if the `klass` is the same as the compiling class, which also
2552 // conveniently rejects the case of unresolved compiling class.
2553 bool is_referrers_class =
2554 !needs_access_check &&
2555 (klass == nullptr || outer_compilation_unit_->GetCompilingClass().Get() == klass.Get());
2556 // Note: `klass` must be from `graph_->GetHandleCache()`.
2557 HLoadClass* load_class = new (allocator_) HLoadClass(
2558 graph_->GetCurrentMethod(),
2559 type_index,
2560 *actual_dex_file,
2561 klass,
2562 is_referrers_class,
2563 dex_pc,
2564 needs_access_check);
2565
2566 HLoadClass::LoadKind load_kind = HSharpening::ComputeLoadClassKind(load_class,
2567 code_generator_,
2568 *dex_compilation_unit_);
2569
2570 if (load_kind == HLoadClass::LoadKind::kInvalid) {
2571 // We actually cannot reference this class; we're forced to bail.
2572 return nullptr;
2573 }
2574 // Load kind must be set before inserting the instruction into the graph.
2575 load_class->SetLoadKind(load_kind);
2576 AppendInstruction(load_class);
2577 return load_class;
2578 }
2579
2580 Handle<mirror::Class> HInstructionBuilder::ResolveClass(ScopedObjectAccess& soa,
2581 dex::TypeIndex type_index) {
2582 auto it = class_cache_.find(type_index);
2583 if (it != class_cache_.end()) {
2584 return it->second;
2585 }
2586
2587 ObjPtr<mirror::Class> klass = dex_compilation_unit_->GetClassLinker()->ResolveType(
2588 type_index, dex_compilation_unit_->GetDexCache(), dex_compilation_unit_->GetClassLoader());
2589 DCHECK_EQ(klass == nullptr, soa.Self()->IsExceptionPending());
2590 soa.Self()->ClearException(); // Clean up the exception left by type resolution if any.
2591
2592 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
2593 class_cache_.Put(type_index, h_klass);
2594 return h_klass;
2595 }
2596
2597 bool HInstructionBuilder::LoadClassNeedsAccessCheck(dex::TypeIndex type_index,
2598 ObjPtr<mirror::Class> klass) {
2599 if (klass == nullptr) {
2600 // If the class is unresolved, we can avoid access checks only for references to
2601 // the compiling class as determined by checking the descriptor and ClassLoader.
2602 if (outer_compilation_unit_->GetCompilingClass() != nullptr) {
2603 // The compiling class is resolved, so it is different from the unresolved class.
2604 return true;
2605 }
2606 if (dex_compilation_unit_->GetClassLoader().Get() !=
2607 outer_compilation_unit_->GetClassLoader().Get()) {
2608 // Resolving the same descriptor in a different ClassLoader than the
2609 // defining loader of the compiling class shall either fail to find
2610 // the class definition, or find a different one.
2611 // (Assuming no custom ClassLoader hierarchy with circular delegation.)
2612 return true;
2613 }
2614 // Check if the class is the outer method's class.
2615 // For the same dex file compare type indexes, otherwise descriptors.
2616 const DexFile* outer_dex_file = outer_compilation_unit_->GetDexFile();
2617 const DexFile* inner_dex_file = dex_compilation_unit_->GetDexFile();
2618 const dex::ClassDef& outer_class_def =
2619 outer_dex_file->GetClassDef(outer_compilation_unit_->GetClassDefIndex());
2620 if (IsSameDexFile(*inner_dex_file, *outer_dex_file)) {
2621 if (type_index != outer_class_def.class_idx_) {
2622 return true;
2623 }
2624 } else {
2625 uint32_t outer_utf16_length;
2626 const char* outer_descriptor =
2627 outer_dex_file->StringByTypeIdx(outer_class_def.class_idx_, &outer_utf16_length);
2628 uint32_t target_utf16_length;
2629 const char* target_descriptor =
2630 inner_dex_file->StringByTypeIdx(type_index, &target_utf16_length);
2631 if (outer_utf16_length != target_utf16_length ||
2632 strcmp(outer_descriptor, target_descriptor) != 0) {
2633 return true;
2634 }
2635 }
2636 // For inlined methods we also need to check if the compiling class
2637 // is public or in the same package as the inlined method's class.
2638 if (dex_compilation_unit_ != outer_compilation_unit_ &&
2639 (outer_class_def.access_flags_ & kAccPublic) == 0) {
2640 DCHECK(dex_compilation_unit_->GetCompilingClass() != nullptr);
2641 SamePackageCompare same_package(*outer_compilation_unit_);
2642 if (!same_package(dex_compilation_unit_->GetCompilingClass().Get())) {
2643 return true;
2644 }
2645 }
2646 return false;
2647 } else if (klass->IsPublic()) {
2648 return false;
2649 } else if (dex_compilation_unit_->GetCompilingClass() != nullptr) {
2650 return !dex_compilation_unit_->GetCompilingClass()->CanAccess(klass);
2651 } else {
2652 SamePackageCompare same_package(*dex_compilation_unit_);
2653 return !same_package(klass);
2654 }
2655 }
2656
2657 void HInstructionBuilder::BuildLoadMethodHandle(uint16_t method_handle_index, uint32_t dex_pc) {
2658 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2659 HLoadMethodHandle* load_method_handle = new (allocator_) HLoadMethodHandle(
2660 graph_->GetCurrentMethod(), method_handle_index, dex_file, dex_pc);
2661 AppendInstruction(load_method_handle);
2662 }
2663
2664 void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc) {
2665 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2666 HLoadMethodType* load_method_type =
2667 new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
2668 AppendInstruction(load_method_type);
2669 }
2670
2671 void HInstructionBuilder::BuildTypeCheck(bool is_instance_of,
2672 HInstruction* object,
2673 dex::TypeIndex type_index,
2674 uint32_t dex_pc) {
2675 ScopedObjectAccess soa(Thread::Current());
2676 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2677 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2678 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2679 TypeCheckKind check_kind = HSharpening::ComputeTypeCheckKind(
2680 klass.Get(), code_generator_, needs_access_check);
2681
2682 HInstruction* class_or_null = nullptr;
2683 HIntConstant* bitstring_path_to_root = nullptr;
2684 HIntConstant* bitstring_mask = nullptr;
2685 if (check_kind == TypeCheckKind::kBitstringCheck) {
2686 // TODO: Allow using the bitstring check also if we need an access check.
2687 DCHECK(!needs_access_check);
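// In the bitstring path no class needs to be loaded: the generated check compares masked
// bits of the object's class against `path_to_root` (roughly `(bits & mask) == path_to_root`).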
2688 class_or_null = graph_->GetNullConstant(dex_pc);
2689 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2690 uint32_t path_to_root =
2691 SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass.Get());
2692 uint32_t mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass.Get());
2693 bitstring_path_to_root = graph_->GetIntConstant(static_cast<int32_t>(path_to_root), dex_pc);
2694 bitstring_mask = graph_->GetIntConstant(static_cast<int32_t>(mask), dex_pc);
2695 } else {
2696 class_or_null = BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2697 }
2698 DCHECK(class_or_null != nullptr);
2699
2700 if (is_instance_of) {
2701 AppendInstruction(new (allocator_) HInstanceOf(object,
2702 class_or_null,
2703 check_kind,
2704 klass,
2705 dex_pc,
2706 allocator_,
2707 bitstring_path_to_root,
2708 bitstring_mask));
2709 } else {
2710 // We emit a CheckCast followed by a BoundType. CheckCast is a statement
2711 // which may throw. If it succeeds, BoundType sets the new type of `object`
2712 // for all subsequent uses.
2713 AppendInstruction(
2714 new (allocator_) HCheckCast(object,
2715 class_or_null,
2716 check_kind,
2717 klass,
2718 dex_pc,
2719 allocator_,
2720 bitstring_path_to_root,
2721 bitstring_mask));
2722 AppendInstruction(new (allocator_) HBoundType(object, dex_pc));
2723 }
2724 }
2725
2726 void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
2727 uint8_t destination,
2728 uint8_t reference,
2729 dex::TypeIndex type_index,
2730 uint32_t dex_pc) {
2731 HInstruction* object = LoadLocal(reference, DataType::Type::kReference);
2732 bool is_instance_of = instruction.Opcode() == Instruction::INSTANCE_OF;
2733
2734 BuildTypeCheck(is_instance_of, object, type_index, dex_pc);
2735
2736 if (is_instance_of) {
2737 UpdateLocal(destination, current_block_->GetLastInstruction());
2738 } else {
2739 DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
2740 UpdateLocal(reference, current_block_->GetLastInstruction());
2741 }
2742 }
2743
2744 bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction, uint32_t dex_pc) {
2745 switch (instruction.Opcode()) {
2746 case Instruction::CONST_4: {
2747 int32_t register_index = instruction.VRegA();
2748 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_11n(), dex_pc);
2749 UpdateLocal(register_index, constant);
2750 break;
2751 }
2752
2753 case Instruction::CONST_16: {
2754 int32_t register_index = instruction.VRegA();
2755 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21s(), dex_pc);
2756 UpdateLocal(register_index, constant);
2757 break;
2758 }
2759
2760 case Instruction::CONST: {
2761 int32_t register_index = instruction.VRegA();
2762 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_31i(), dex_pc);
2763 UpdateLocal(register_index, constant);
2764 break;
2765 }
2766
2767 case Instruction::CONST_HIGH16: {
2768 int32_t register_index = instruction.VRegA();
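// const/high16 supplies only the upper 16 bits; e.g. a literal of 0x3F80 yields the
// constant 0x3F800000, which is the bit pattern of 1.0f (a common use of this opcode).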
2769 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21h() << 16, dex_pc);
2770 UpdateLocal(register_index, constant);
2771 break;
2772 }
2773
2774 case Instruction::CONST_WIDE_16: {
2775 int32_t register_index = instruction.VRegA();
2776 // Get 16 bits of constant value, sign extended to 64 bits.
2777 int64_t value = instruction.VRegB_21s();
2778 value <<= 48;
2779 value >>= 48;
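// E.g. an encoded 0x8000 (-32768) ends up as 0xFFFFFFFFFFFF8000 after the shifts.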
2780 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2781 UpdateLocal(register_index, constant);
2782 break;
2783 }
2784
2785 case Instruction::CONST_WIDE_32: {
2786 int32_t register_index = instruction.VRegA();
2787 // Get 32 bits of constant value, sign extended to 64 bits.
2788 int64_t value = instruction.VRegB_31i();
2789 value <<= 32;
2790 value >>= 32;
2791 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2792 UpdateLocal(register_index, constant);
2793 break;
2794 }
2795
2796 case Instruction::CONST_WIDE: {
2797 int32_t register_index = instruction.VRegA();
2798 HLongConstant* constant = graph_->GetLongConstant(instruction.VRegB_51l(), dex_pc);
2799 UpdateLocal(register_index, constant);
2800 break;
2801 }
2802
2803 case Instruction::CONST_WIDE_HIGH16: {
2804 int32_t register_index = instruction.VRegA();
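// const-wide/high16 supplies only the upper 16 bits; e.g. a literal of 0x3FF0 yields
// 0x3FF0000000000000, which is the bit pattern of 1.0 as a double.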
2805 int64_t value = static_cast<int64_t>(instruction.VRegB_21h()) << 48;
2806 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2807 UpdateLocal(register_index, constant);
2808 break;
2809 }
2810
2811 // Note that the SSA building will refine the types.
2812 case Instruction::MOVE:
2813 case Instruction::MOVE_FROM16:
2814 case Instruction::MOVE_16: {
2815 HInstruction* value = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
2816 UpdateLocal(instruction.VRegA(), value);
2817 break;
2818 }
2819
2820 // Note that the SSA building will refine the types.
2821 case Instruction::MOVE_WIDE:
2822 case Instruction::MOVE_WIDE_FROM16:
2823 case Instruction::MOVE_WIDE_16: {
2824 HInstruction* value = LoadLocal(instruction.VRegB(), DataType::Type::kInt64);
2825 UpdateLocal(instruction.VRegA(), value);
2826 break;
2827 }
2828
2829 case Instruction::MOVE_OBJECT:
2830 case Instruction::MOVE_OBJECT_16:
2831 case Instruction::MOVE_OBJECT_FROM16: {
2832 // The verifier has no notion of a null type, so a move-object of constant 0
2833 // will lead to the same constant 0 in the destination register. To mimic
2834 // this behavior, we just pretend we haven't seen a type change (int to reference)
2835 // for the 0 constant and phis. We rely on our type propagation to eventually get the
2836 // types correct.
2837 uint32_t reg_number = instruction.VRegB();
2838 HInstruction* value = (*current_locals_)[reg_number];
2839 if (value->IsIntConstant()) {
2840 DCHECK_EQ(value->AsIntConstant()->GetValue(), 0);
2841 } else if (value->IsPhi()) {
2842 DCHECK(value->GetType() == DataType::Type::kInt32 ||
2843 value->GetType() == DataType::Type::kReference);
2844 } else {
2845 value = LoadLocal(reg_number, DataType::Type::kReference);
2846 }
2847 UpdateLocal(instruction.VRegA(), value);
2848 break;
2849 }
2850
2851 case Instruction::RETURN_VOID: {
2852 BuildReturn(instruction, DataType::Type::kVoid, dex_pc);
2853 break;
2854 }
2855
2856 #define IF_XX(comparison, cond) \
2857 case Instruction::IF_##cond: If_22t<comparison>(instruction, dex_pc); break; \
2858 case Instruction::IF_##cond##Z: If_21t<comparison>(instruction, dex_pc); break
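// Illustrative expansion: IF_XX(HEqual, EQ) produces the IF_EQ case (two registers,
// via If_22t<HEqual>) and the IF_EQZ case (register compared against zero, via
// If_21t<HEqual>).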
2859
2860 IF_XX(HEqual, EQ);
2861 IF_XX(HNotEqual, NE);
2862 IF_XX(HLessThan, LT);
2863 IF_XX(HLessThanOrEqual, LE);
2864 IF_XX(HGreaterThan, GT);
2865 IF_XX(HGreaterThanOrEqual, GE);
2866
2867 case Instruction::GOTO:
2868 case Instruction::GOTO_16:
2869 case Instruction::GOTO_32: {
2870 AppendInstruction(new (allocator_) HGoto(dex_pc));
2871 current_block_ = nullptr;
2872 break;
2873 }
2874
2875 case Instruction::RETURN: {
2876 BuildReturn(instruction, return_type_, dex_pc);
2877 break;
2878 }
2879
2880 case Instruction::RETURN_OBJECT: {
2881 BuildReturn(instruction, return_type_, dex_pc);
2882 break;
2883 }
2884
2885 case Instruction::RETURN_WIDE: {
2886 BuildReturn(instruction, return_type_, dex_pc);
2887 break;
2888 }
2889
2890 case Instruction::INVOKE_DIRECT:
2891 case Instruction::INVOKE_INTERFACE:
2892 case Instruction::INVOKE_STATIC:
2893 case Instruction::INVOKE_SUPER:
2894 case Instruction::INVOKE_VIRTUAL: {
2895 uint16_t method_idx = instruction.VRegB_35c();
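// The 35c format encodes at most five argument registers inline; GetVarArgs() copies
// them into `args` and returns how many are actually used.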
2896 uint32_t args[5];
2897 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2898 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2899 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
2900 return false;
2901 }
2902 break;
2903 }
2904
2905 case Instruction::INVOKE_DIRECT_RANGE:
2906 case Instruction::INVOKE_INTERFACE_RANGE:
2907 case Instruction::INVOKE_STATIC_RANGE:
2908 case Instruction::INVOKE_SUPER_RANGE:
2909 case Instruction::INVOKE_VIRTUAL_RANGE: {
2910 uint16_t method_idx = instruction.VRegB_3rc();
2911 RangeInstructionOperands operands(instruction.VRegC(), instruction.VRegA_3rc());
2912 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
2913 return false;
2914 }
2915 break;
2916 }
2917
2918 case Instruction::INVOKE_POLYMORPHIC: {
2919 uint16_t method_idx = instruction.VRegB_45cc();
2920 dex::ProtoIndex proto_idx(instruction.VRegH_45cc());
2921 uint32_t args[5];
2922 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2923 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2924 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
2925 }
2926
2927 case Instruction::INVOKE_POLYMORPHIC_RANGE: {
2928 uint16_t method_idx = instruction.VRegB_4rcc();
2929 dex::ProtoIndex proto_idx(instruction.VRegH_4rcc());
2930 RangeInstructionOperands operands(instruction.VRegC_4rcc(), instruction.VRegA_4rcc());
2931 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
2932 }
2933
2934 case Instruction::INVOKE_CUSTOM: {
2935 uint16_t call_site_idx = instruction.VRegB_35c();
2936 uint32_t args[5];
2937 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2938 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2939 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
2940 }
2941
2942 case Instruction::INVOKE_CUSTOM_RANGE: {
2943 uint16_t call_site_idx = instruction.VRegB_3rc();
2944 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
2945 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
2946 }
2947
2948 case Instruction::NEG_INT: {
2949 Unop_12x<HNeg>(instruction, DataType::Type::kInt32, dex_pc);
2950 break;
2951 }
2952
2953 case Instruction::NEG_LONG: {
2954 Unop_12x<HNeg>(instruction, DataType::Type::kInt64, dex_pc);
2955 break;
2956 }
2957
2958 case Instruction::NEG_FLOAT: {
2959 Unop_12x<HNeg>(instruction, DataType::Type::kFloat32, dex_pc);
2960 break;
2961 }
2962
2963 case Instruction::NEG_DOUBLE: {
2964 Unop_12x<HNeg>(instruction, DataType::Type::kFloat64, dex_pc);
2965 break;
2966 }
2967
2968 case Instruction::NOT_INT: {
2969 Unop_12x<HNot>(instruction, DataType::Type::kInt32, dex_pc);
2970 break;
2971 }
2972
2973 case Instruction::NOT_LONG: {
2974 Unop_12x<HNot>(instruction, DataType::Type::kInt64, dex_pc);
2975 break;
2976 }
2977
2978 case Instruction::INT_TO_LONG: {
2979 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt64, dex_pc);
2980 break;
2981 }
2982
2983 case Instruction::INT_TO_FLOAT: {
2984 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat32, dex_pc);
2985 break;
2986 }
2987
2988 case Instruction::INT_TO_DOUBLE: {
2989 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat64, dex_pc);
2990 break;
2991 }
2992
2993 case Instruction::LONG_TO_INT: {
2994 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kInt32, dex_pc);
2995 break;
2996 }
2997
2998 case Instruction::LONG_TO_FLOAT: {
2999 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat32, dex_pc);
3000 break;
3001 }
3002
3003 case Instruction::LONG_TO_DOUBLE: {
3004 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat64, dex_pc);
3005 break;
3006 }
3007
3008 case Instruction::FLOAT_TO_INT: {
3009 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt32, dex_pc);
3010 break;
3011 }
3012
3013 case Instruction::FLOAT_TO_LONG: {
3014 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt64, dex_pc);
3015 break;
3016 }
3017
3018 case Instruction::FLOAT_TO_DOUBLE: {
3019 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kFloat64, dex_pc);
3020 break;
3021 }
3022
3023 case Instruction::DOUBLE_TO_INT: {
3024 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt32, dex_pc);
3025 break;
3026 }
3027
3028 case Instruction::DOUBLE_TO_LONG: {
3029 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt64, dex_pc);
3030 break;
3031 }
3032
3033 case Instruction::DOUBLE_TO_FLOAT: {
3034 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kFloat32, dex_pc);
3035 break;
3036 }
3037
3038 case Instruction::INT_TO_BYTE: {
3039 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt8, dex_pc);
3040 break;
3041 }
3042
3043 case Instruction::INT_TO_SHORT: {
3044 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt16, dex_pc);
3045 break;
3046 }
3047
3048 case Instruction::INT_TO_CHAR: {
3049 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kUint16, dex_pc);
3050 break;
3051 }
3052
3053 case Instruction::ADD_INT: {
3054 Binop_23x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3055 break;
3056 }
3057
3058 case Instruction::ADD_LONG: {
3059 Binop_23x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3060 break;
3061 }
3062
3063 case Instruction::ADD_DOUBLE: {
3064 Binop_23x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3065 break;
3066 }
3067
3068 case Instruction::ADD_FLOAT: {
3069 Binop_23x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3070 break;
3071 }
3072
3073 case Instruction::SUB_INT: {
3074 Binop_23x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3075 break;
3076 }
3077
3078 case Instruction::SUB_LONG: {
3079 Binop_23x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3080 break;
3081 }
3082
3083 case Instruction::SUB_FLOAT: {
3084 Binop_23x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3085 break;
3086 }
3087
3088 case Instruction::SUB_DOUBLE: {
3089 Binop_23x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3090 break;
3091 }
3092
3093 case Instruction::ADD_INT_2ADDR: {
3094 Binop_12x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3095 break;
3096 }
3097
3098 case Instruction::MUL_INT: {
3099 Binop_23x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3100 break;
3101 }
3102
3103 case Instruction::MUL_LONG: {
3104 Binop_23x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3105 break;
3106 }
3107
3108 case Instruction::MUL_FLOAT: {
3109 Binop_23x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3110 break;
3111 }
3112
3113 case Instruction::MUL_DOUBLE: {
3114 Binop_23x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3115 break;
3116 }
3117
3118 case Instruction::DIV_INT: {
3119 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3120 dex_pc, DataType::Type::kInt32, false, true);
3121 break;
3122 }
3123
3124 case Instruction::DIV_LONG: {
3125 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3126 dex_pc, DataType::Type::kInt64, false, true);
3127 break;
3128 }
3129
3130 case Instruction::DIV_FLOAT: {
3131 Binop_23x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3132 break;
3133 }
3134
3135 case Instruction::DIV_DOUBLE: {
3136 Binop_23x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3137 break;
3138 }
3139
3140 case Instruction::REM_INT: {
3141 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3142 dex_pc, DataType::Type::kInt32, false, false);
3143 break;
3144 }
3145
3146 case Instruction::REM_LONG: {
3147 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3148 dex_pc, DataType::Type::kInt64, false, false);
3149 break;
3150 }
3151
3152 case Instruction::REM_FLOAT: {
3153 Binop_23x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3154 break;
3155 }
3156
3157 case Instruction::REM_DOUBLE: {
3158 Binop_23x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3159 break;
3160 }
3161
3162 case Instruction::AND_INT: {
3163 Binop_23x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3164 break;
3165 }
3166
3167 case Instruction::AND_LONG: {
3168 Binop_23x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3169 break;
3170 }
3171
3172 case Instruction::SHL_INT: {
3173 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3174 break;
3175 }
3176
3177 case Instruction::SHL_LONG: {
3178 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3179 break;
3180 }
3181
3182 case Instruction::SHR_INT: {
3183 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3184 break;
3185 }
3186
3187 case Instruction::SHR_LONG: {
3188 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3189 break;
3190 }
3191
3192 case Instruction::USHR_INT: {
3193 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3194 break;
3195 }
3196
3197 case Instruction::USHR_LONG: {
3198 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3199 break;
3200 }
3201
3202 case Instruction::OR_INT: {
3203 Binop_23x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3204 break;
3205 }
3206
3207 case Instruction::OR_LONG: {
3208 Binop_23x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3209 break;
3210 }
3211
3212 case Instruction::XOR_INT: {
3213 Binop_23x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3214 break;
3215 }
3216
3217 case Instruction::XOR_LONG: {
3218 Binop_23x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3219 break;
3220 }
3221
3222 case Instruction::ADD_LONG_2ADDR: {
3223 Binop_12x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3224 break;
3225 }
3226
3227 case Instruction::ADD_DOUBLE_2ADDR: {
3228 Binop_12x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3229 break;
3230 }
3231
3232 case Instruction::ADD_FLOAT_2ADDR: {
3233 Binop_12x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3234 break;
3235 }
3236
3237 case Instruction::SUB_INT_2ADDR: {
3238 Binop_12x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3239 break;
3240 }
3241
3242 case Instruction::SUB_LONG_2ADDR: {
3243 Binop_12x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3244 break;
3245 }
3246
3247 case Instruction::SUB_FLOAT_2ADDR: {
3248 Binop_12x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3249 break;
3250 }
3251
3252 case Instruction::SUB_DOUBLE_2ADDR: {
3253 Binop_12x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3254 break;
3255 }
3256
3257 case Instruction::MUL_INT_2ADDR: {
3258 Binop_12x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3259 break;
3260 }
3261
3262 case Instruction::MUL_LONG_2ADDR: {
3263 Binop_12x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3264 break;
3265 }
3266
3267 case Instruction::MUL_FLOAT_2ADDR: {
3268 Binop_12x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3269 break;
3270 }
3271
3272 case Instruction::MUL_DOUBLE_2ADDR: {
3273 Binop_12x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3274 break;
3275 }
3276
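// For the *_2ADDR div/rem forms below, vA is both the destination and the first
// operand, which is why VRegA() is passed twice to BuildCheckedDivRem().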
3277 case Instruction::DIV_INT_2ADDR: {
3278 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3279 dex_pc, DataType::Type::kInt32, false, true);
3280 break;
3281 }
3282
3283 case Instruction::DIV_LONG_2ADDR: {
3284 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3285 dex_pc, DataType::Type::kInt64, false, true);
3286 break;
3287 }
3288
3289 case Instruction::REM_INT_2ADDR: {
3290 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3291 dex_pc, DataType::Type::kInt32, false, false);
3292 break;
3293 }
3294
3295 case Instruction::REM_LONG_2ADDR: {
3296 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3297 dex_pc, DataType::Type::kInt64, false, false);
3298 break;
3299 }
3300
3301 case Instruction::REM_FLOAT_2ADDR: {
3302 Binop_12x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3303 break;
3304 }
3305
3306 case Instruction::REM_DOUBLE_2ADDR: {
3307 Binop_12x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3308 break;
3309 }
3310
3311 case Instruction::SHL_INT_2ADDR: {
3312 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3313 break;
3314 }
3315
3316 case Instruction::SHL_LONG_2ADDR: {
3317 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3318 break;
3319 }
3320
3321 case Instruction::SHR_INT_2ADDR: {
3322 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3323 break;
3324 }
3325
3326 case Instruction::SHR_LONG_2ADDR: {
3327 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3328 break;
3329 }
3330
3331 case Instruction::USHR_INT_2ADDR: {
3332 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3333 break;
3334 }
3335
3336 case Instruction::USHR_LONG_2ADDR: {
3337 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3338 break;
3339 }
3340
3341 case Instruction::DIV_FLOAT_2ADDR: {
3342 Binop_12x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3343 break;
3344 }
3345
3346 case Instruction::DIV_DOUBLE_2ADDR: {
3347 Binop_12x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3348 break;
3349 }
3350
3351 case Instruction::AND_INT_2ADDR: {
3352 Binop_12x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3353 break;
3354 }
3355
3356 case Instruction::AND_LONG_2ADDR: {
3357 Binop_12x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3358 break;
3359 }
3360
3361 case Instruction::OR_INT_2ADDR: {
3362 Binop_12x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3363 break;
3364 }
3365
3366 case Instruction::OR_LONG_2ADDR: {
3367 Binop_12x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3368 break;
3369 }
3370
3371 case Instruction::XOR_INT_2ADDR: {
3372 Binop_12x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3373 break;
3374 }
3375
3376 case Instruction::XOR_LONG_2ADDR: {
3377 Binop_12x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3378 break;
3379 }
3380
3381 case Instruction::ADD_INT_LIT16: {
3382 Binop_22s<HAdd>(instruction, false, dex_pc);
3383 break;
3384 }
3385
3386 case Instruction::AND_INT_LIT16: {
3387 Binop_22s<HAnd>(instruction, false, dex_pc);
3388 break;
3389 }
3390
3391 case Instruction::OR_INT_LIT16: {
3392 Binop_22s<HOr>(instruction, false, dex_pc);
3393 break;
3394 }
3395
3396 case Instruction::XOR_INT_LIT16: {
3397 Binop_22s<HXor>(instruction, false, dex_pc);
3398 break;
3399 }
3400
3401 case Instruction::RSUB_INT: {
3402 Binop_22s<HSub>(instruction, true, dex_pc);
3403 break;
3404 }
3405
3406 case Instruction::MUL_INT_LIT16: {
3407 Binop_22s<HMul>(instruction, false, dex_pc);
3408 break;
3409 }
3410
3411 case Instruction::ADD_INT_LIT8: {
3412 Binop_22b<HAdd>(instruction, false, dex_pc);
3413 break;
3414 }
3415
3416 case Instruction::AND_INT_LIT8: {
3417 Binop_22b<HAnd>(instruction, false, dex_pc);
3418 break;
3419 }
3420
3421 case Instruction::OR_INT_LIT8: {
3422 Binop_22b<HOr>(instruction, false, dex_pc);
3423 break;
3424 }
3425
3426 case Instruction::XOR_INT_LIT8: {
3427 Binop_22b<HXor>(instruction, false, dex_pc);
3428 break;
3429 }
3430
3431 case Instruction::RSUB_INT_LIT8: {
3432 Binop_22b<HSub>(instruction, true, dex_pc);
3433 break;
3434 }
3435
3436 case Instruction::MUL_INT_LIT8: {
3437 Binop_22b<HMul>(instruction, false, dex_pc);
3438 break;
3439 }
3440
3441 case Instruction::DIV_INT_LIT16:
3442 case Instruction::DIV_INT_LIT8: {
3443 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3444 dex_pc, DataType::Type::kInt32, true, true);
3445 break;
3446 }
3447
3448 case Instruction::REM_INT_LIT16:
3449 case Instruction::REM_INT_LIT8: {
3450 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3451 dex_pc, DataType::Type::kInt32, true, false);
3452 break;
3453 }
3454
3455 case Instruction::SHL_INT_LIT8: {
3456 Binop_22b<HShl>(instruction, false, dex_pc);
3457 break;
3458 }
3459
3460 case Instruction::SHR_INT_LIT8: {
3461 Binop_22b<HShr>(instruction, false, dex_pc);
3462 break;
3463 }
3464
3465 case Instruction::USHR_INT_LIT8: {
3466 Binop_22b<HUShr>(instruction, false, dex_pc);
3467 break;
3468 }
3469
3470 case Instruction::NEW_INSTANCE: {
3471 HNewInstance* new_instance =
3472 BuildNewInstance(dex::TypeIndex(instruction.VRegB_21c()), dex_pc);
3473 DCHECK(new_instance != nullptr);
3474
3475 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
3476 BuildConstructorFenceForAllocation(new_instance);
3477 break;
3478 }
3479
3480 case Instruction::NEW_ARRAY: {
3481 dex::TypeIndex type_index(instruction.VRegC_22c());
3482 HInstruction* length = LoadLocal(instruction.VRegB_22c(), DataType::Type::kInt32);
3483 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
3484
3485 UpdateLocal(instruction.VRegA_22c(), current_block_->GetLastInstruction());
3486 BuildConstructorFenceForAllocation(new_array);
3487 break;
3488 }
3489
3490 case Instruction::FILLED_NEW_ARRAY: {
3491 dex::TypeIndex type_index(instruction.VRegB_35c());
3492 uint32_t args[5];
3493 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3494 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3495 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3496 BuildConstructorFenceForAllocation(new_array);
3497 break;
3498 }
3499
3500 case Instruction::FILLED_NEW_ARRAY_RANGE: {
3501 dex::TypeIndex type_index(instruction.VRegB_3rc());
3502 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
3503 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3504 BuildConstructorFenceForAllocation(new_array);
3505 break;
3506 }
3507
3508 case Instruction::FILL_ARRAY_DATA: {
3509 BuildFillArrayData(instruction, dex_pc);
3510 break;
3511 }
3512
3513 case Instruction::MOVE_RESULT:
3514 case Instruction::MOVE_RESULT_WIDE:
3515 case Instruction::MOVE_RESULT_OBJECT: {
3516 DCHECK(latest_result_ != nullptr);
3517 UpdateLocal(instruction.VRegA(), latest_result_);
3518 latest_result_ = nullptr;
3519 break;
3520 }
3521
3522 case Instruction::CMP_LONG: {
3523 Binop_23x_cmp(instruction, DataType::Type::kInt64, ComparisonBias::kNoBias, dex_pc);
3524 break;
3525 }
3526
3527 case Instruction::CMPG_FLOAT: {
3528 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kGtBias, dex_pc);
3529 break;
3530 }
3531
3532 case Instruction::CMPG_DOUBLE: {
3533 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kGtBias, dex_pc);
3534 break;
3535 }
3536
3537 case Instruction::CMPL_FLOAT: {
3538 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kLtBias, dex_pc);
3539 break;
3540 }
3541
3542 case Instruction::CMPL_DOUBLE: {
3543 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kLtBias, dex_pc);
3544 break;
3545 }
3546
3547 case Instruction::NOP:
3548 break;
3549
3550 case Instruction::IGET:
3551 case Instruction::IGET_WIDE:
3552 case Instruction::IGET_OBJECT:
3553 case Instruction::IGET_BOOLEAN:
3554 case Instruction::IGET_BYTE:
3555 case Instruction::IGET_CHAR:
3556 case Instruction::IGET_SHORT: {
3557 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ false)) {
3558 return false;
3559 }
3560 break;
3561 }
3562
3563 case Instruction::IPUT:
3564 case Instruction::IPUT_WIDE:
3565 case Instruction::IPUT_OBJECT:
3566 case Instruction::IPUT_BOOLEAN:
3567 case Instruction::IPUT_BYTE:
3568 case Instruction::IPUT_CHAR:
3569 case Instruction::IPUT_SHORT: {
3570 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ true)) {
3571 return false;
3572 }
3573 break;
3574 }
3575
3576 case Instruction::SGET:
3577 case Instruction::SGET_WIDE:
3578 case Instruction::SGET_OBJECT:
3579 case Instruction::SGET_BOOLEAN:
3580 case Instruction::SGET_BYTE:
3581 case Instruction::SGET_CHAR:
3582 case Instruction::SGET_SHORT: {
3583 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ false);
3584 break;
3585 }
3586
3587 case Instruction::SPUT:
3588 case Instruction::SPUT_WIDE:
3589 case Instruction::SPUT_OBJECT:
3590 case Instruction::SPUT_BOOLEAN:
3591 case Instruction::SPUT_BYTE:
3592 case Instruction::SPUT_CHAR:
3593 case Instruction::SPUT_SHORT: {
3594 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ true);
3595 break;
3596 }
3597
3598 #define ARRAY_XX(kind, anticipated_type) \
3599 case Instruction::AGET##kind: { \
3600 BuildArrayAccess(instruction, dex_pc, false, anticipated_type); \
3601 break; \
3602 } \
3603 case Instruction::APUT##kind: { \
3604 BuildArrayAccess(instruction, dex_pc, true, anticipated_type); \
3605 break; \
3606 }
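// Illustrative expansion: ARRAY_XX(_WIDE, DataType::Type::kInt64) produces the
// AGET_WIDE and APUT_WIDE cases, differing only in the boolean (load vs. store)
// passed to BuildArrayAccess().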
3607
3608 ARRAY_XX(, DataType::Type::kInt32);
3609 ARRAY_XX(_WIDE, DataType::Type::kInt64);
3610 ARRAY_XX(_OBJECT, DataType::Type::kReference);
3611 ARRAY_XX(_BOOLEAN, DataType::Type::kBool);
3612 ARRAY_XX(_BYTE, DataType::Type::kInt8);
3613 ARRAY_XX(_CHAR, DataType::Type::kUint16);
3614 ARRAY_XX(_SHORT, DataType::Type::kInt16);
3615
3616 case Instruction::ARRAY_LENGTH: {
3617 HInstruction* object = LoadNullCheckedLocal(instruction.VRegB_12x(), dex_pc);
3618 AppendInstruction(new (allocator_) HArrayLength(object, dex_pc));
3619 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
3620 break;
3621 }
3622
3623 case Instruction::CONST_STRING: {
3624 dex::StringIndex string_index(instruction.VRegB_21c());
3625 BuildLoadString(string_index, dex_pc);
3626 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3627 break;
3628 }
3629
3630 case Instruction::CONST_STRING_JUMBO: {
3631 dex::StringIndex string_index(instruction.VRegB_31c());
3632 BuildLoadString(string_index, dex_pc);
3633 UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
3634 break;
3635 }
3636
3637 case Instruction::CONST_CLASS: {
3638 dex::TypeIndex type_index(instruction.VRegB_21c());
3639 BuildLoadClass(type_index, dex_pc);
3640 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3641 break;
3642 }
3643
3644 case Instruction::CONST_METHOD_HANDLE: {
3645 uint16_t method_handle_idx = instruction.VRegB_21c();
3646 BuildLoadMethodHandle(method_handle_idx, dex_pc);
3647 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3648 break;
3649 }
3650
3651 case Instruction::CONST_METHOD_TYPE: {
3652 dex::ProtoIndex proto_idx(instruction.VRegB_21c());
3653 BuildLoadMethodType(proto_idx, dex_pc);
3654 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3655 break;
3656 }
3657
3658 case Instruction::MOVE_EXCEPTION: {
3659 AppendInstruction(new (allocator_) HLoadException(dex_pc));
3660 UpdateLocal(instruction.VRegA_11x(), current_block_->GetLastInstruction());
3661 AppendInstruction(new (allocator_) HClearException(dex_pc));
3662 break;
3663 }
3664
3665 case Instruction::THROW: {
3666 HInstruction* exception = LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference);
3667 AppendInstruction(new (allocator_) HThrow(exception, dex_pc));
3668 // We finished building this block. Set the current block to null to avoid
3669 // adding dead instructions to it.
3670 current_block_ = nullptr;
3671 break;
3672 }
3673
3674 case Instruction::INSTANCE_OF: {
3675 uint8_t destination = instruction.VRegA_22c();
3676 uint8_t reference = instruction.VRegB_22c();
3677 dex::TypeIndex type_index(instruction.VRegC_22c());
3678 BuildTypeCheck(instruction, destination, reference, type_index, dex_pc);
3679 break;
3680 }
3681
3682 case Instruction::CHECK_CAST: {
3683 uint8_t reference = instruction.VRegA_21c();
3684 dex::TypeIndex type_index(instruction.VRegB_21c());
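// CHECK_CAST has no destination register; the -1 below is a dummy value that
// BuildTypeCheck() ignores on this path.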
3685 BuildTypeCheck(instruction, -1, reference, type_index, dex_pc);
3686 break;
3687 }
3688
3689 case Instruction::MONITOR_ENTER: {
3690 AppendInstruction(new (allocator_) HMonitorOperation(
3691 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3692 HMonitorOperation::OperationKind::kEnter,
3693 dex_pc));
3694 graph_->SetHasMonitorOperations(true);
3695 break;
3696 }
3697
3698 case Instruction::MONITOR_EXIT: {
3699 AppendInstruction(new (allocator_) HMonitorOperation(
3700 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3701 HMonitorOperation::OperationKind::kExit,
3702 dex_pc));
3703 graph_->SetHasMonitorOperations(true);
3704 break;
3705 }
3706
3707 case Instruction::SPARSE_SWITCH:
3708 case Instruction::PACKED_SWITCH: {
3709 BuildSwitch(instruction, dex_pc);
3710 break;
3711 }
3712
3713 case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
3714 case Instruction::UNUSED_73:
3715 case Instruction::UNUSED_79:
3716 case Instruction::UNUSED_7A:
3717 case Instruction::UNUSED_E3 ... Instruction::UNUSED_F9: {
3718 VLOG(compiler) << "Did not compile "
3719 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
3720 << " because of unhandled instruction "
3721 << instruction.Name();
3722 MaybeRecordStat(compilation_stats_,
3723 MethodCompilationStat::kNotCompiledUnhandledInstruction);
3724 return false;
3725 }
3726 }
3727 return true;
3728 } // NOLINT(readability/fn_size)
3729
3730 ObjPtr<mirror::Class> HInstructionBuilder::LookupResolvedType(
3731 dex::TypeIndex type_index,
3732 const DexCompilationUnit& compilation_unit) const {
3733 return compilation_unit.GetClassLinker()->LookupResolvedType(
3734 type_index, compilation_unit.GetDexCache().Get(), compilation_unit.GetClassLoader().Get());
3735 }
3736
3737 ObjPtr<mirror::Class> HInstructionBuilder::LookupReferrerClass() const {
3738 // TODO: Cache the result in a Handle<mirror::Class>.
3739 const dex::MethodId& method_id =
3740 dex_compilation_unit_->GetDexFile()->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
3741 return LookupResolvedType(method_id.class_idx_, *dex_compilation_unit_);
3742 }
3743
3744 } // namespace art
3745