1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 #include "nodes.h"
17 
18 #include <algorithm>
19 #include <cfloat>
20 #include <functional>
21 
22 #include "art_method-inl.h"
23 #include "base/arena_allocator.h"
24 #include "base/arena_bit_vector.h"
25 #include "base/bit_utils.h"
26 #include "base/bit_vector-inl.h"
27 #include "base/bit_vector.h"
28 #include "base/iteration_range.h"
29 #include "base/logging.h"
30 #include "base/malloc_arena_pool.h"
31 #include "base/scoped_arena_allocator.h"
32 #include "base/scoped_arena_containers.h"
33 #include "base/stl_util.h"
34 #include "class_linker-inl.h"
35 #include "class_root-inl.h"
36 #include "code_generator.h"
37 #include "common_dominator.h"
38 #include "intrinsics.h"
39 #include "mirror/class-inl.h"
40 #include "scoped_thread_state_change-inl.h"
41 #include "ssa_builder.h"
42 
43 namespace art HIDDEN {
44 
45 // Enable floating-point static evaluation during constant folding
46 // only if all floating-point operations and constants evaluate in the
47 // range and precision of the type used (i.e., 32-bit float, 64-bit
48 // double).
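// (FLT_EVAL_METHOD == 0 means the implementation evaluates float expressions in the range
// and precision of float and double expressions in those of double, so values folded at
// compile time match what the generated code computes at run time.)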
49 static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0);
50 
51 ReferenceTypeInfo::TypeHandle HandleCache::CreateRootHandle(VariableSizedHandleScope* handles,
52                                                             ClassRoot class_root) {
53   // Mutator lock is required for NewHandle and GetClassRoot().
54   ScopedObjectAccess soa(Thread::Current());
55   return handles->NewHandle(GetClassRoot(class_root));
56 }
57 
58 void HGraph::AddBlock(HBasicBlock* block) {
59   block->SetBlockId(blocks_.size());
60   blocks_.push_back(block);
61 }
62 
63 void HGraph::FindBackEdges(ArenaBitVector* visited) {
64   // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks.
65   DCHECK_EQ(visited->GetHighestBitSet(), -1);
66 
67   // Allocate memory from local ScopedArenaAllocator.
68   ScopedArenaAllocator allocator(GetArenaStack());
69   // Nodes that we're currently visiting, indexed by block id.
70   ArenaBitVector visiting(
71       &allocator, blocks_.size(), /* expandable= */ false, kArenaAllocGraphBuilder);
72   visiting.ClearAllBits();
73   // Number of successors visited from a given node, indexed by block id.
74   ScopedArenaVector<size_t> successors_visited(blocks_.size(),
75                                                0u,
76                                                allocator.Adapter(kArenaAllocGraphBuilder));
77   // Stack of nodes that we're currently visiting (same as marked in "visiting" above).
78   ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
79   constexpr size_t kDefaultWorklistSize = 8;
80   worklist.reserve(kDefaultWorklistSize);
81   visited->SetBit(entry_block_->GetBlockId());
82   visiting.SetBit(entry_block_->GetBlockId());
83   worklist.push_back(entry_block_);
84 
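  // Iterative DFS: an edge to a block that is still marked in `visiting` (i.e. still on the
  // DFS stack) closes a cycle, so it is recorded as a back edge on its target block.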
85   while (!worklist.empty()) {
86     HBasicBlock* current = worklist.back();
87     uint32_t current_id = current->GetBlockId();
88     if (successors_visited[current_id] == current->GetSuccessors().size()) {
89       visiting.ClearBit(current_id);
90       worklist.pop_back();
91     } else {
92       HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
93       uint32_t successor_id = successor->GetBlockId();
94       if (visiting.IsBitSet(successor_id)) {
95         DCHECK(ContainsElement(worklist, successor));
96         successor->AddBackEdge(current);
97       } else if (!visited->IsBitSet(successor_id)) {
98         visited->SetBit(successor_id);
99         visiting.SetBit(successor_id);
100         worklist.push_back(successor);
101       }
102     }
103   }
104 }
105 
106 // Remove the use records that the instruction's environments hold on their inputs.
107 void RemoveEnvironmentUses(HInstruction* instruction) {
108   for (HEnvironment* environment = instruction->GetEnvironment();
109        environment != nullptr;
110        environment = environment->GetParent()) {
111     for (size_t i = 0, e = environment->Size(); i < e; ++i) {
112       if (environment->GetInstructionAt(i) != nullptr) {
113         environment->RemoveAsUserOfInput(i);
114       }
115     }
116   }
117 }
118 
119 // Return whether the instruction has an environment and it's used by others.
120 bool HasEnvironmentUsedByOthers(HInstruction* instruction) {
121   for (HEnvironment* environment = instruction->GetEnvironment();
122        environment != nullptr;
123        environment = environment->GetParent()) {
124     for (size_t i = 0, e = environment->Size(); i < e; ++i) {
125       HInstruction* user = environment->GetInstructionAt(i);
126       if (user != nullptr) {
127         return true;
128       }
129     }
130   }
131   return false;
132 }
133 
134 // Reset environment records of the instruction itself.
135 void ResetEnvironmentInputRecords(HInstruction* instruction) {
136   for (HEnvironment* environment = instruction->GetEnvironment();
137        environment != nullptr;
138        environment = environment->GetParent()) {
139     for (size_t i = 0, e = environment->Size(); i < e; ++i) {
140       DCHECK(environment->GetHolder() == instruction);
141       if (environment->GetInstructionAt(i) != nullptr) {
142         environment->SetRawEnvAt(i, nullptr);
143       }
144     }
145   }
146 }
147 
148 static void RemoveAsUser(HInstruction* instruction) {
149   instruction->RemoveAsUserOfAllInputs();
150   RemoveEnvironmentUses(instruction);
151 }
152 
153 void HGraph::RemoveDeadBlocksInstructionsAsUsersAndDisconnect(const ArenaBitVector& visited) const {
154   for (size_t i = 0; i < blocks_.size(); ++i) {
155     if (!visited.IsBitSet(i)) {
156       HBasicBlock* block = blocks_[i];
157       if (block == nullptr) continue;
158 
159       // Remove as user.
160       for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
161         RemoveAsUser(it.Current());
162       }
163       for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
164         RemoveAsUser(it.Current());
165       }
166 
167       // Remove non-catch phi uses, and disconnect the block.
168       block->DisconnectFromSuccessors(&visited);
169     }
170   }
171 }
172 
173 // This method assumes `insn` has been removed from all users with the exception of catch
174 // phis because of missing exceptional edges in the graph. It removes the
175 // instruction from catch phi uses, together with inputs of other catch phis in
176 // the catch block at the same index, as these must be dead too.
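// (Inputs at the same index of the other phis in that catch block come from the same
// control-flow source, so they are removed together to keep all the phis consistent.)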
177 static void RemoveCatchPhiUsesOfDeadInstruction(HInstruction* insn) {
178   DCHECK(!insn->HasEnvironmentUses());
179   while (insn->HasNonEnvironmentUses()) {
180     const HUseListNode<HInstruction*>& use = insn->GetUses().front();
181     size_t use_index = use.GetIndex();
182     HBasicBlock* user_block = use.GetUser()->GetBlock();
183     DCHECK(use.GetUser()->IsPhi());
184     DCHECK(user_block->IsCatchBlock());
185     for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
186       phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
187     }
188   }
189 }
190 
191 void HGraph::RemoveDeadBlocks(const ArenaBitVector& visited) {
192   DCHECK(reverse_post_order_.empty()) << "We shouldn't have dominance information.";
193   for (size_t i = 0; i < blocks_.size(); ++i) {
194     if (!visited.IsBitSet(i)) {
195       HBasicBlock* block = blocks_[i];
196       if (block == nullptr) continue;
197 
198       // Remove all remaining uses (which should be only catch phi uses), and the instructions.
199       block->RemoveCatchPhiUsesAndInstruction(/* building_dominator_tree = */ true);
200 
201       // Remove the block from the list of blocks, so that further analyses
202       // never see it.
203       blocks_[i] = nullptr;
204       if (block->IsExitBlock()) {
205         SetExitBlock(nullptr);
206       }
207       // Mark the block as removed. This is used by the HGraphBuilder to discard
208       // the block as a branch target.
209       block->SetGraph(nullptr);
210     }
211   }
212 }
213 
214 GraphAnalysisResult HGraph::BuildDominatorTree() {
215   // Allocate memory from local ScopedArenaAllocator.
216   ScopedArenaAllocator allocator(GetArenaStack());
217 
218   ArenaBitVector visited(&allocator, blocks_.size(), false, kArenaAllocGraphBuilder);
219   visited.ClearAllBits();
220 
221   // (1) Find the back edges in the graph doing a DFS traversal.
222   FindBackEdges(&visited);
223 
224   // (2) Unregister the instructions and phis of blocks not visited during
225   //     the initial DFS as users of other instructions, so that those
226   //     instructions can be safely removed later.
227   //     Also disconnect each such block from its successors, updating the successors' phis if needed.
228   RemoveDeadBlocksInstructionsAsUsersAndDisconnect(visited);
229 
230   // (3) Remove blocks not visited during the initial DFS.
231   //     Step (5) requires dead blocks to be removed from the
232   //     predecessors list of live blocks.
233   RemoveDeadBlocks(visited);
234 
235   // (4) Simplify the CFG now, so that we don't need to recompute
236   //     dominators and the reverse post order.
237   SimplifyCFG();
238 
239   // (5) Compute the dominance information and the reverse post order.
240   ComputeDominanceInformation();
241 
242   // (6) Analyze loops discovered through back edge analysis, and
243   //     set the loop information on each block.
244   GraphAnalysisResult result = AnalyzeLoops();
245   if (result != kAnalysisSuccess) {
246     return result;
247   }
248 
249   // (7) Precompute per-block try membership before entering the SSA builder,
250   //     which needs the information to build catch block phis from values of
251   //     locals at throwing instructions inside try blocks.
252   ComputeTryBlockInformation();
253 
254   return kAnalysisSuccess;
255 }
256 
257 void HGraph::ClearDominanceInformation() {
258   for (HBasicBlock* block : GetActiveBlocks()) {
259     block->ClearDominanceInformation();
260   }
261   reverse_post_order_.clear();
262 }
263 
264 void HGraph::ClearLoopInformation() {
265   SetHasLoops(false);
266   SetHasIrreducibleLoops(false);
267   for (HBasicBlock* block : GetActiveBlocks()) {
268     block->SetLoopInformation(nullptr);
269   }
270 }
271 
272 void HBasicBlock::ClearDominanceInformation() {
273   dominated_blocks_.clear();
274   dominator_ = nullptr;
275 }
276 
277 HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const {
278   HInstruction* instruction = GetFirstInstruction();
279   while (instruction->IsParallelMove()) {
280     instruction = instruction->GetNext();
281   }
282   return instruction;
283 }
284 
285 static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) {
286   DCHECK(ContainsElement(block->GetSuccessors(), successor));
287 
288   HBasicBlock* old_dominator = successor->GetDominator();
289   HBasicBlock* new_dominator =
290       (old_dominator == nullptr) ? block
291                                  : CommonDominator::ForPair(old_dominator, block);
292 
293   if (old_dominator == new_dominator) {
294     return false;
295   } else {
296     successor->SetDominator(new_dominator);
297     return true;
298   }
299 }
300 
301 // TODO Consider moving this entirely into LoadStoreAnalysis/Elimination.
302 bool HGraph::PathBetween(uint32_t source_idx, uint32_t dest_idx) const {
303   DCHECK_LT(source_idx, blocks_.size()) << "source not present in graph!";
304   DCHECK_LT(dest_idx, blocks_.size()) << "dest not present in graph!";
305   DCHECK(blocks_[source_idx] != nullptr);
306   DCHECK(blocks_[dest_idx] != nullptr);
307   return reachability_graph_.IsBitSet(source_idx, dest_idx);
308 }
309 
310 bool HGraph::PathBetween(const HBasicBlock* source, const HBasicBlock* dest) const {
311   if (source == nullptr || dest == nullptr) {
312     return false;
313   }
314   size_t source_idx = source->GetBlockId();
315   size_t dest_idx = dest->GetBlockId();
316   return PathBetween(source_idx, dest_idx);
317 }
318 
319 // This function/struct calculates the reachability of every node from every
320 // other node by iteratively using DFS to find reachability of each individual
321 // block.
322 //
323 // This is in practice faster than the simpler Floyd-Warshall approach: that is
324 // O(N**3) while this is O(N*(E + N)), where N is the number of blocks and E is the
325 // number of edges. Since in practice each block only has a few outgoing edges
326 // we can confidently say that E ~ B*N where B is a small number (~3). We also
327 // memoize the results as we go allowing us to (potentially) avoid walking the
328 // entire graph for every node. To make best use of this memoization we
329 // calculate the reachability of blocks in PostOrder. This means that
330 // (generally) blocks that are dominated by many other blocks and dominate few
331 // blocks themselves will be examined first. This makes it more likely we can
332 // use our memoized results.
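// For example, with N = 1000 blocks and B ~ 3 this is on the order of N*(B*N + N) = 4e6
// steps, versus roughly 1e9 for the O(N**3) Floyd-Warshall approach.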
333 class ReachabilityAnalysisHelper {
334  public:
335   ReachabilityAnalysisHelper(const HGraph* graph,
336                              ArenaBitVectorArray* reachability_graph,
337                              ArenaStack* arena_stack)
338       : graph_(graph),
339         reachability_graph_(reachability_graph),
340         arena_stack_(arena_stack),
341         temporaries_(arena_stack_),
342         block_size_(RoundUp(graph_->GetBlocks().size(), BitVector::kWordBits)),
343         all_visited_nodes_(
344             &temporaries_, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph),
345         not_post_order_visited_(
346             &temporaries_, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph) {
347     // We can't adjust the size of reachability graph any more without breaking
348     // our allocator invariants so it had better be large enough.
349     CHECK_GE(reachability_graph_->NumRows(), graph_->GetBlocks().size());
350     CHECK_GE(reachability_graph_->NumColumns(), graph_->GetBlocks().size());
351     not_post_order_visited_.SetInitialBits(graph_->GetBlocks().size());
352   }
353 
354   void CalculateReachability() {
355     // Calculate what blocks connect using repeated DFS
356     //
357     // Going in PostOrder should generally give memoization a good chance of
358     // hitting.
359     for (const HBasicBlock* blk : graph_->GetPostOrder()) {
360       if (blk == nullptr) {
361         continue;
362       }
363       not_post_order_visited_.ClearBit(blk->GetBlockId());
364       CalculateConnectednessOn(blk);
365       all_visited_nodes_.SetBit(blk->GetBlockId());
366     }
367     // Get all other bits
368     for (auto idx : not_post_order_visited_.Indexes()) {
369       const HBasicBlock* blk = graph_->GetBlocks()[idx];
370       if (blk == nullptr) {
371         continue;
372       }
373       CalculateConnectednessOn(blk);
374       all_visited_nodes_.SetBit(blk->GetBlockId());
375     }
376   }
377 
378  private:
379   void AddEdge(uint32_t source, const HBasicBlock* dest) {
380     reachability_graph_->SetBit(source, dest->GetBlockId());
381   }
382 
383   // Union the reachability of 'idx' into 'update_block_idx'. This is done to
384   // implement memoization. In order to improve performance we do this in 4-byte
385   // blocks. Clang should be able to optimize this to larger blocks if possible.
386   void UnionBlock(size_t update_block_idx, size_t idx) {
387     reachability_graph_->UnionRows(update_block_idx, idx);
388   }
389 
390   // Single DFS to get connectedness of a single block
391   void CalculateConnectednessOn(const HBasicBlock* const target_block) {
392     const uint32_t target_idx = target_block->GetBlockId();
393     ScopedArenaAllocator connectedness_temps(arena_stack_);
394     // What nodes we have already discovered and either have processed or are
395     // already on the queue.
396     ArenaBitVector discovered(
397         &connectedness_temps, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph);
398     // The work stack. What blocks we still need to process.
399     ScopedArenaVector<const HBasicBlock*> work_stack(
400         connectedness_temps.Adapter(kArenaAllocReachabilityGraph));
401     // Known max size since otherwise we'd have blocks multiple times. Avoids
402     // re-allocation
403     work_stack.reserve(graph_->GetBlocks().size());
404     discovered.SetBit(target_idx);
405     work_stack.push_back(target_block);
406     // Main DFS Loop.
407     while (!work_stack.empty()) {
408       const HBasicBlock* cur = work_stack.back();
409       work_stack.pop_back();
410       // Memoization of previous runs.
411       if (all_visited_nodes_.IsBitSet(cur->GetBlockId())) {
412         DCHECK_NE(target_block, cur);
413         // Already explored from here. Just use that data.
414         UnionBlock(target_idx, cur->GetBlockId());
415         continue;
416       }
417       for (const HBasicBlock* succ : cur->GetSuccessors()) {
418         AddEdge(target_idx, succ);
419         if (!discovered.IsBitSet(succ->GetBlockId())) {
420           work_stack.push_back(succ);
421           discovered.SetBit(succ->GetBlockId());
422         }
423       }
424     }
425   }
426 
427   const HGraph* graph_;
428   // The graph's reachability_graph_ on the main allocator.
429   ArenaBitVectorArray* reachability_graph_;
430   ArenaStack* arena_stack_;
431   // An allocator for temporary bit-vectors used by this algorithm. The
432   // 'SetBit,ClearBit' on reachability_graph_ prior to the construction of this
433   // object should be the only allocation on the main allocator so it's safe to
434   // make a sub-allocator here.
435   ScopedArenaAllocator temporaries_;
436   // number of columns
437   const size_t block_size_;
438   // Where we've already completely calculated connectedness.
439   ArenaBitVector all_visited_nodes_;
440   // What we never visited and need to do later
441   ArenaBitVector not_post_order_visited_;
442 
443   DISALLOW_COPY_AND_ASSIGN(ReachabilityAnalysisHelper);
444 };
445 
446 void HGraph::ComputeReachabilityInformation() {
447   DCHECK_EQ(reachability_graph_.GetRawData().NumSetBits(), 0u);
448   DCHECK(reachability_graph_.IsExpandable());
449   // Reserve all the bits we'll need. This is the only allocation on the
450   // standard allocator we do here, enabling us to create a new ScopedArena for
451   // use with temporaries.
452   //
453   // reachability_graph_ acts as |N| x |N| graph for PathBetween. Array is
454   // padded so each row starts on an BitVector::kWordBits-bit alignment for
455   // simplicity and performance, allowing us to union blocks together without
456   // going bit-by-bit.
457   reachability_graph_.Resize(blocks_.size(), blocks_.size(), /*clear=*/false);
458   ReachabilityAnalysisHelper helper(this, &reachability_graph_, GetArenaStack());
459   helper.CalculateReachability();
460 }
461 
462 void HGraph::ClearReachabilityInformation() {
463   reachability_graph_.Clear();
464 }
465 
466 void HGraph::ComputeDominanceInformation() {
467   DCHECK(reverse_post_order_.empty());
468   reverse_post_order_.reserve(blocks_.size());
469   reverse_post_order_.push_back(entry_block_);
470 
471   // Allocate memory from local ScopedArenaAllocator.
472   ScopedArenaAllocator allocator(GetArenaStack());
473   // Number of visits of a given node, indexed by block id.
474   ScopedArenaVector<size_t> visits(blocks_.size(), 0u, allocator.Adapter(kArenaAllocGraphBuilder));
475   // Number of successors visited from a given node, indexed by block id.
476   ScopedArenaVector<size_t> successors_visited(blocks_.size(),
477                                                0u,
478                                                allocator.Adapter(kArenaAllocGraphBuilder));
479   // Nodes for which we need to visit successors.
480   ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
481   constexpr size_t kDefaultWorklistSize = 8;
482   worklist.reserve(kDefaultWorklistSize);
483   worklist.push_back(entry_block_);
484 
485   while (!worklist.empty()) {
486     HBasicBlock* current = worklist.back();
487     uint32_t current_id = current->GetBlockId();
488     if (successors_visited[current_id] == current->GetSuccessors().size()) {
489       worklist.pop_back();
490     } else {
491       HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
492       UpdateDominatorOfSuccessor(current, successor);
493 
494       // Once all the forward edges have been visited, we know the immediate
495       // dominator of the block. We can then start visiting its successors.
496       if (++visits[successor->GetBlockId()] ==
497           successor->GetPredecessors().size() - successor->NumberOfBackEdges()) {
498         reverse_post_order_.push_back(successor);
499         worklist.push_back(successor);
500       }
501     }
502   }
503 
504   // Check if the graph has back edges not dominated by their respective headers.
505   // If so, we need to update the dominators of those headers and recursively of
506   // their successors. We do that with a fix-point iteration over all blocks.
507   // The algorithm is guaranteed to terminate because it loops only if the sum
508   // of all dominator chains has decreased in the current iteration.
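  // (A back edge whose source is not dominated by the loop header means the loop body can
  // be entered without passing through the header, i.e. the loop is irreducible.)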
509   bool must_run_fix_point = false;
510   for (HBasicBlock* block : blocks_) {
511     if (block != nullptr &&
512         block->IsLoopHeader() &&
513         block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) {
514       must_run_fix_point = true;
515       break;
516     }
517   }
518   if (must_run_fix_point) {
519     bool update_occurred = true;
520     while (update_occurred) {
521       update_occurred = false;
522       for (HBasicBlock* block : GetReversePostOrder()) {
523         for (HBasicBlock* successor : block->GetSuccessors()) {
524           update_occurred |= UpdateDominatorOfSuccessor(block, successor);
525         }
526       }
527     }
528   }
529 
530   // Make sure that there are no remaining blocks whose dominator information
531   // needs to be updated.
532   if (kIsDebugBuild) {
533     for (HBasicBlock* block : GetReversePostOrder()) {
534       for (HBasicBlock* successor : block->GetSuccessors()) {
535         DCHECK(!UpdateDominatorOfSuccessor(block, successor));
536       }
537     }
538   }
539 
540   // Populate `dominated_blocks_` information after computing all dominators.
541   // The potential presence of irreducible loops requires to do it after.
542   for (HBasicBlock* block : GetReversePostOrder()) {
543     if (!block->IsEntryBlock()) {
544       block->GetDominator()->AddDominatedBlock(block);
545     }
546   }
547 }
548 
549 HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
550   HBasicBlock* new_block = new (allocator_) HBasicBlock(this, successor->GetDexPc());
551   AddBlock(new_block);
552   // Use `InsertBetween` to ensure the predecessor index and successor index of
553   // `block` and `successor` are preserved.
554   new_block->InsertBetween(block, successor);
555   return new_block;
556 }
557 
558 void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) {
559   // Insert a new node between `block` and `successor` to split the
560   // critical edge.
561   HBasicBlock* new_block = SplitEdge(block, successor);
562   new_block->AddInstruction(new (allocator_) HGoto(successor->GetDexPc()));
563   if (successor->IsLoopHeader()) {
564     // If we split at a back edge boundary, make the new block the back edge.
565     HLoopInformation* info = successor->GetLoopInformation();
566     if (info->IsBackEdge(*block)) {
567       info->RemoveBackEdge(block);
568       info->AddBackEdge(new_block);
569     }
570   }
571 }
572 
573 HBasicBlock* HGraph::SplitEdgeAndUpdateRPO(HBasicBlock* block, HBasicBlock* successor) {
574   HBasicBlock* new_block = SplitEdge(block, successor);
575   // In the RPO we have {... , block, ... , successor}. We want to insert `new_block` right after
576   // `block` to have a consistent RPO without recomputing the whole graph's RPO.
577   reverse_post_order_.insert(
578       reverse_post_order_.begin() + IndexOfElement(reverse_post_order_, block) + 1, new_block);
579   return new_block;
580 }
581 
582 // Reorder phi inputs to match reordering of the block's predecessors.
583 static void FixPhisAfterPredecessorsReordering(HBasicBlock* block, size_t first, size_t second) {
584   for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
585     HPhi* phi = it.Current()->AsPhi();
586     HInstruction* first_instr = phi->InputAt(first);
587     HInstruction* second_instr = phi->InputAt(second);
588     phi->ReplaceInput(first_instr, second);
589     phi->ReplaceInput(second_instr, first);
590   }
591 }
592 
593 // Make sure that the first predecessor of a loop header is the incoming block.
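// (Other code, e.g. HLoopInformation::GetPreHeader(), relies on predecessor 0 of a loop
// header being the single incoming block rather than a back edge.)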
594 void HGraph::OrderLoopHeaderPredecessors(HBasicBlock* header) {
595   DCHECK(header->IsLoopHeader());
596   HLoopInformation* info = header->GetLoopInformation();
597   if (info->IsBackEdge(*header->GetPredecessors()[0])) {
598     HBasicBlock* to_swap = header->GetPredecessors()[0];
599     for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) {
600       HBasicBlock* predecessor = header->GetPredecessors()[pred];
601       if (!info->IsBackEdge(*predecessor)) {
602         header->predecessors_[pred] = to_swap;
603         header->predecessors_[0] = predecessor;
604         FixPhisAfterPredecessorsReordering(header, 0, pred);
605         break;
606       }
607     }
608   }
609 }
610 
611 // Transform control flow of the loop to a single preheader format (don't touch the data flow).
612 // The new preheader may already be among the header's predecessors; this situation is
613 // handled correctly.
614 static void FixControlForNewSinglePreheader(HBasicBlock* header, HBasicBlock* new_preheader) {
615   HLoopInformation* loop_info = header->GetLoopInformation();
616   for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
617     HBasicBlock* predecessor = header->GetPredecessors()[pred];
618     if (!loop_info->IsBackEdge(*predecessor) && predecessor != new_preheader) {
619       predecessor->ReplaceSuccessor(header, new_preheader);
620       pred--;
621     }
622   }
623 }
624 
625 //             == Before ==                                               == After ==
626 //      _________         _________                               _________         _________
627 //     | B0      |       | B1      |      (old preheaders)       | B0      |       | B1      |
628 //     |=========|       |=========|                             |=========|       |=========|
629 //     | i0 = .. |       | i1 = .. |                             | i0 = .. |       | i1 = .. |
630 //     |_________|       |_________|                             |_________|       |_________|
631 //           \               /                                         \              /
632 //            \             /                                        ___v____________v___
633 //             \           /               (new preheader)          | B20 <- B0, B1      |
634 //              |         |                                         |====================|
635 //              |         |                                         | i20 = phi(i0, i1)  |
636 //              |         |                                         |____________________|
637 //              |         |                                                   |
638 //    /\        |         |        /\                           /\            |              /\
639 //   /  v_______v_________v_______v  \                         /  v___________v_____________v  \
640 //  |  | B10 <- B0, B1, B2, B3     |  |                       |  | B10 <- B20, B2, B3        |  |
641 //  |  |===========================|  |       (header)        |  |===========================|  |
642 //  |  | i10 = phi(i0, i1, i2, i3) |  |                       |  | i10 = phi(i20, i2, i3)    |  |
643 //  |  |___________________________|  |                       |  |___________________________|  |
644 //  |        /               \        |                       |        /               \        |
645 //  |      ...              ...       |                       |      ...              ...       |
646 //  |   _________         _________   |                       |   _________         _________   |
647 //  |  | B2      |       | B3      |  |                       |  | B2      |       | B3      |  |
648 //  |  |=========|       |=========|  |     (back edges)      |  |=========|       |=========|  |
649 //  |  | i2 = .. |       | i3 = .. |  |                       |  | i2 = .. |       | i3 = .. |  |
650 //  |  |_________|       |_________|  |                       |  |_________|       |_________|  |
651 //   \     /                   \     /                         \     /                   \     /
652 //    \___/                     \___/                           \___/                     \___/
653 //
654 void HGraph::TransformLoopToSinglePreheaderFormat(HBasicBlock* header) {
655   HLoopInformation* loop_info = header->GetLoopInformation();
656 
657   HBasicBlock* preheader = new (allocator_) HBasicBlock(this, header->GetDexPc());
658   AddBlock(preheader);
659   preheader->AddInstruction(new (allocator_) HGoto(header->GetDexPc()));
660 
661   // If the old header has no Phis then we only need to fix the control flow.
662   if (header->GetPhis().IsEmpty()) {
663     FixControlForNewSinglePreheader(header, preheader);
664     preheader->AddSuccessor(header);
665     return;
666   }
667 
668   // Find the first non-back edge block in the header's predecessors list.
669   size_t first_nonbackedge_pred_pos = 0;
670   bool found = false;
671   for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
672     HBasicBlock* predecessor = header->GetPredecessors()[pred];
673     if (!loop_info->IsBackEdge(*predecessor)) {
674       first_nonbackedge_pred_pos = pred;
675       found = true;
676       break;
677     }
678   }
679 
680   DCHECK(found);
681 
682   // Fix the data-flow.
683   for (HInstructionIterator it(header->GetPhis()); !it.Done(); it.Advance()) {
684     HPhi* header_phi = it.Current()->AsPhi();
685 
686     HPhi* preheader_phi = new (GetAllocator()) HPhi(GetAllocator(),
687                                                     header_phi->GetRegNumber(),
688                                                     0,
689                                                     header_phi->GetType());
690     if (header_phi->GetType() == DataType::Type::kReference) {
691       preheader_phi->SetReferenceTypeInfoIfValid(header_phi->GetReferenceTypeInfo());
692     }
693     preheader->AddPhi(preheader_phi);
694 
695     HInstruction* orig_input = header_phi->InputAt(first_nonbackedge_pred_pos);
696     header_phi->ReplaceInput(preheader_phi, first_nonbackedge_pred_pos);
697     preheader_phi->AddInput(orig_input);
698 
699     for (size_t input_pos = first_nonbackedge_pred_pos + 1;
700          input_pos < header_phi->InputCount();
701          input_pos++) {
702       HInstruction* input = header_phi->InputAt(input_pos);
703       HBasicBlock* pred_block = header->GetPredecessors()[input_pos];
704 
705       if (loop_info->Contains(*pred_block)) {
706         DCHECK(loop_info->IsBackEdge(*pred_block));
707       } else {
708         preheader_phi->AddInput(input);
709         header_phi->RemoveInputAt(input_pos);
710         input_pos--;
711       }
712     }
713   }
714 
715   // Fix the control-flow.
716   HBasicBlock* first_pred = header->GetPredecessors()[first_nonbackedge_pred_pos];
717   preheader->InsertBetween(first_pred, header);
718 
719   FixControlForNewSinglePreheader(header, preheader);
720 }
721 
722 void HGraph::SimplifyLoop(HBasicBlock* header) {
723   HLoopInformation* info = header->GetLoopInformation();
724 
725   // Make sure the loop has only one pre header. This simplifies SSA building: only the
726   // pre header needs to be inspected to know which locals are initialized at entry of the
727   // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining
728   // this graph.
729   size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges();
730   if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) {
731     TransformLoopToSinglePreheaderFormat(header);
732   }
733 
734   OrderLoopHeaderPredecessors(header);
735 
736   HInstruction* first_instruction = header->GetFirstInstruction();
737   if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) {
738     // Called from DeadBlockElimination. Update SuspendCheck pointer.
739     info->SetSuspendCheck(first_instruction->AsSuspendCheck());
740   }
741 }
742 
743 void HGraph::ComputeTryBlockInformation() {
744   // Iterate in reverse post order to propagate try membership information from
745   // predecessors to their successors.
746   bool graph_has_try_catch = false;
747 
748   for (HBasicBlock* block : GetReversePostOrder()) {
749     if (block->IsEntryBlock() || block->IsCatchBlock()) {
750       // Catch blocks after simplification have only exceptional predecessors
751       // and hence are never in tries.
752       continue;
753     }
754 
755     // Infer try membership from the first predecessor. Having simplified loops,
756     // the first predecessor can never be a back edge and therefore it must have
757     // been visited already and had its try membership set.
758     HBasicBlock* first_predecessor = block->GetPredecessors()[0];
759     DCHECK_IMPLIES(block->IsLoopHeader(),
760                    !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
761     const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors();
762     graph_has_try_catch |= try_entry != nullptr;
763     if (try_entry != nullptr &&
764         (block->GetTryCatchInformation() == nullptr ||
765          try_entry != &block->GetTryCatchInformation()->GetTryEntry())) {
766       // We are either setting try block membership for the first time or it
767       // has changed.
768       block->SetTryCatchInformation(new (allocator_) TryCatchInformation(*try_entry));
769     }
770   }
771 
772   SetHasTryCatch(graph_has_try_catch);
773 }
774 
775 void HGraph::SimplifyCFG() {
776   // Simplify the CFG for future analysis, and code generation:
777   // (1): Split critical edges.
778   // (2): Simplify loops by having only one preheader.
779   // NOTE: We're appending new blocks inside the loop, so we need to use index because iterators
780   // can be invalidated. We remember the initial size to avoid iterating over the new blocks.
781   for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) {
782     HBasicBlock* block = blocks_[block_id];
783     if (block == nullptr) continue;
784     if (block->GetSuccessors().size() > 1) {
785       // Only split normal-flow edges. We cannot split exceptional edges as they
786       // are synthesized (approximate real control flow), and we do not need to
787       // anyway. Moves that would be inserted there are performed by the runtime.
788       ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors();
789       for (size_t j = 0, e = normal_successors.size(); j < e; ++j) {
790         HBasicBlock* successor = normal_successors[j];
791         DCHECK(!successor->IsCatchBlock());
792         if (successor == exit_block_) {
793           // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we
794           // do not want to split because Goto->Exit is not allowed.
795           DCHECK(block->IsSingleTryBoundary());
796         } else if (successor->GetPredecessors().size() > 1) {
797           SplitCriticalEdge(block, successor);
798           // SplitCriticalEdge could have invalidated the `normal_successors`
799           // ArrayRef. We must re-acquire it.
800           normal_successors = block->GetNormalSuccessors();
801           DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor);
802           DCHECK_EQ(e, normal_successors.size());
803         }
804       }
805     }
806     if (block->IsLoopHeader()) {
807       SimplifyLoop(block);
808     } else if (!block->IsEntryBlock() &&
809                block->GetFirstInstruction() != nullptr &&
810                block->GetFirstInstruction()->IsSuspendCheck()) {
811       // We are being called by the dead code elimination pass, and what used to be
812       // a loop got dismantled. Just remove the suspend check.
813       block->RemoveInstruction(block->GetFirstInstruction());
814     }
815   }
816 }
817 
818 GraphAnalysisResult HGraph::AnalyzeLoops() const {
819   // We iterate post order to ensure we visit inner loops before outer loops.
820   // `PopulateRecursive` needs this guarantee to know whether a natural loop
821   // contains an irreducible loop.
822   for (HBasicBlock* block : GetPostOrder()) {
823     if (block->IsLoopHeader()) {
824       if (block->IsCatchBlock()) {
825         // TODO: Dealing with exceptional back edges could be tricky because
826         //       they only approximate the real control flow. Bail out for now.
827         VLOG(compiler) << "Not compiled: Exceptional back edges";
828         return kAnalysisFailThrowCatchLoop;
829       }
830       block->GetLoopInformation()->Populate();
831     }
832   }
833   return kAnalysisSuccess;
834 }
835 
Dump(std::ostream & os)836 void HLoopInformation::Dump(std::ostream& os) {
837   os << "header: " << header_->GetBlockId() << std::endl;
838   os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl;
839   for (HBasicBlock* block : back_edges_) {
840     os << "back edge: " << block->GetBlockId() << std::endl;
841   }
842   for (HBasicBlock* block : header_->GetPredecessors()) {
843     os << "predecessor: " << block->GetBlockId() << std::endl;
844   }
845   for (uint32_t idx : blocks_.Indexes()) {
846     os << "  in loop: " << idx << std::endl;
847   }
848 }
849 
850 void HGraph::InsertConstant(HConstant* constant) {
851   // New constants are inserted before the SuspendCheck at the bottom of the
852   // entry block. Note that this method can be called from the graph builder and
853   // the entry block therefore may not end with SuspendCheck->Goto yet.
854   HInstruction* insert_before = nullptr;
855 
856   HInstruction* gota = entry_block_->GetLastInstruction();
857   if (gota != nullptr && gota->IsGoto()) {
858     HInstruction* suspend_check = gota->GetPrevious();
859     if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) {
860       insert_before = suspend_check;
861     } else {
862       insert_before = gota;
863     }
864   }
865 
866   if (insert_before == nullptr) {
867     entry_block_->AddInstruction(constant);
868   } else {
869     entry_block_->InsertInstructionBefore(constant, insert_before);
870   }
871 }
872 
873 HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) {
874   // For simplicity, don't bother reviving the cached null constant if it is
875   // not null and not in a block. Otherwise, we need to clear the instruction
876   // id and/or any invariants the graph is assuming when adding new instructions.
877   if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
878     cached_null_constant_ = new (allocator_) HNullConstant(dex_pc);
879     cached_null_constant_->SetReferenceTypeInfo(GetInexactObjectRti());
880     InsertConstant(cached_null_constant_);
881   }
882   if (kIsDebugBuild) {
883     ScopedObjectAccess soa(Thread::Current());
884     DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
885   }
886   return cached_null_constant_;
887 }
888 
889 HCurrentMethod* HGraph::GetCurrentMethod() {
890   // For simplicity, don't bother reviving the cached current method if it is
891   // not null and not in a block. Otherwise, we need to clear the instruction
892   // id and/or any invariants the graph is assuming when adding new instructions.
893   if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) {
894     cached_current_method_ = new (allocator_) HCurrentMethod(
895         Is64BitInstructionSet(instruction_set_) ? DataType::Type::kInt64 : DataType::Type::kInt32,
896         entry_block_->GetDexPc());
897     if (entry_block_->GetFirstInstruction() == nullptr) {
898       entry_block_->AddInstruction(cached_current_method_);
899     } else {
900       entry_block_->InsertInstructionBefore(
901           cached_current_method_, entry_block_->GetFirstInstruction());
902     }
903   }
904   return cached_current_method_;
905 }
906 
907 const char* HGraph::GetMethodName() const {
908   const dex::MethodId& method_id = dex_file_.GetMethodId(method_idx_);
909   return dex_file_.GetMethodName(method_id);
910 }
911 
912 std::string HGraph::PrettyMethod(bool with_signature) const {
913   return dex_file_.PrettyMethod(method_idx_, with_signature);
914 }
915 
916 HConstant* HGraph::GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc) {
917   switch (type) {
918     case DataType::Type::kBool:
919       DCHECK(IsUint<1>(value));
920       FALLTHROUGH_INTENDED;
921     case DataType::Type::kUint8:
922     case DataType::Type::kInt8:
923     case DataType::Type::kUint16:
924     case DataType::Type::kInt16:
925     case DataType::Type::kInt32:
926       DCHECK(IsInt(DataType::Size(type) * kBitsPerByte, value));
927       return GetIntConstant(static_cast<int32_t>(value), dex_pc);
928 
929     case DataType::Type::kInt64:
930       return GetLongConstant(value, dex_pc);
931 
932     default:
933       LOG(FATAL) << "Unsupported constant type";
934       UNREACHABLE();
935   }
936 }
937 
938 void HGraph::CacheFloatConstant(HFloatConstant* constant) {
939   int32_t value = bit_cast<int32_t, float>(constant->GetValue());
940   DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end());
941   cached_float_constants_.Overwrite(value, constant);
942 }
943 
944 void HGraph::CacheDoubleConstant(HDoubleConstant* constant) {
945   int64_t value = bit_cast<int64_t, double>(constant->GetValue());
946   DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end());
947   cached_double_constants_.Overwrite(value, constant);
948 }
949 
950 void HLoopInformation::Add(HBasicBlock* block) {
951   blocks_.SetBit(block->GetBlockId());
952 }
953 
954 void HLoopInformation::Remove(HBasicBlock* block) {
955   blocks_.ClearBit(block->GetBlockId());
956 }
957 
958 void HLoopInformation::PopulateRecursive(HBasicBlock* block) {
959   if (blocks_.IsBitSet(block->GetBlockId())) {
960     return;
961   }
962 
963   blocks_.SetBit(block->GetBlockId());
964   block->SetInLoop(this);
965   if (block->IsLoopHeader()) {
966     // We're visiting loops in post-order, so inner loops must have been
967     // populated already.
968     DCHECK(block->GetLoopInformation()->IsPopulated());
969     if (block->GetLoopInformation()->IsIrreducible()) {
970       contains_irreducible_loop_ = true;
971     }
972   }
973   for (HBasicBlock* predecessor : block->GetPredecessors()) {
974     PopulateRecursive(predecessor);
975   }
976 }
977 
978 void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) {
979   size_t block_id = block->GetBlockId();
980 
981   // If `block` is in `finalized`, we know its membership in the loop has been
982   // decided and it does not need to be revisited.
983   if (finalized->IsBitSet(block_id)) {
984     return;
985   }
986 
987   bool is_finalized = false;
988   if (block->IsLoopHeader()) {
989     // If we hit a loop header in an irreducible loop, we first check if the
990     // pre header of that loop belongs to the currently analyzed loop. If it does,
991     // then we visit the back edges.
992     // Note that we cannot use GetPreHeader, as the loop may have not been populated
993     // yet.
994     HBasicBlock* pre_header = block->GetPredecessors()[0];
995     PopulateIrreducibleRecursive(pre_header, finalized);
996     if (blocks_.IsBitSet(pre_header->GetBlockId())) {
997       block->SetInLoop(this);
998       blocks_.SetBit(block_id);
999       finalized->SetBit(block_id);
1000       is_finalized = true;
1001 
1002       HLoopInformation* info = block->GetLoopInformation();
1003       for (HBasicBlock* back_edge : info->GetBackEdges()) {
1004         PopulateIrreducibleRecursive(back_edge, finalized);
1005       }
1006     }
1007   } else {
1008     // Visit all predecessors. If one predecessor is part of the loop, this
1009     // block is also part of this loop.
1010     for (HBasicBlock* predecessor : block->GetPredecessors()) {
1011       PopulateIrreducibleRecursive(predecessor, finalized);
1012       if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) {
1013         block->SetInLoop(this);
1014         blocks_.SetBit(block_id);
1015         finalized->SetBit(block_id);
1016         is_finalized = true;
1017       }
1018     }
1019   }
1020 
1021   // All predecessors have been recursively visited. Mark finalized if not marked yet.
1022   if (!is_finalized) {
1023     finalized->SetBit(block_id);
1024   }
1025 }
1026 
1027 void HLoopInformation::Populate() {
1028   DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated";
1029   // Populate this loop: starting with the back edge, recursively add predecessors
1030   // that are not already part of that loop. Set the header as part of the loop
1031   // to end the recursion.
1032   // This is a recursive implementation of the algorithm described in
1033   // "Advanced Compiler Design & Implementation" (Muchnick) p192.
1034   HGraph* graph = header_->GetGraph();
1035   blocks_.SetBit(header_->GetBlockId());
1036   header_->SetInLoop(this);
1037 
1038   bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader();
1039 
1040   if (is_irreducible_loop) {
1041     // Allocate memory from local ScopedArenaAllocator.
1042     ScopedArenaAllocator allocator(graph->GetArenaStack());
1043     ArenaBitVector visited(&allocator,
1044                            graph->GetBlocks().size(),
1045                            /* expandable= */ false,
1046                            kArenaAllocGraphBuilder);
1047     visited.ClearAllBits();
1048     // Stop marking blocks at the loop header.
1049     visited.SetBit(header_->GetBlockId());
1050 
1051     for (HBasicBlock* back_edge : GetBackEdges()) {
1052       PopulateIrreducibleRecursive(back_edge, &visited);
1053     }
1054   } else {
1055     for (HBasicBlock* back_edge : GetBackEdges()) {
1056       PopulateRecursive(back_edge);
1057     }
1058   }
1059 
1060   if (!is_irreducible_loop && graph->IsCompilingOsr()) {
1061     // When compiling in OSR mode, all loops in the compiled method may be entered
1062     // from the interpreter. We treat this OSR entry point just like an extra entry
1063     // to an irreducible loop, so we need to mark the method's loops as irreducible.
1064     // This does not apply to inlined loops which do not act as OSR entry points.
1065     if (suspend_check_ == nullptr) {
1066       // Just building the graph in OSR mode, this loop is not inlined. We never build an
1067       // inner graph in OSR mode as we can do OSR transition only from the outer method.
1068       is_irreducible_loop = true;
1069     } else {
1070       // Look at the suspend check's environment to determine if the loop was inlined.
1071       DCHECK(suspend_check_->HasEnvironment());
1072       if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) {
1073         is_irreducible_loop = true;
1074       }
1075     }
1076   }
1077   if (is_irreducible_loop) {
1078     irreducible_ = true;
1079     contains_irreducible_loop_ = true;
1080     graph->SetHasIrreducibleLoops(true);
1081   }
1082   graph->SetHasLoops(true);
1083 }
1084 
1085 void HLoopInformation::PopulateInnerLoopUpwards(HLoopInformation* inner_loop) {
1086   DCHECK(inner_loop->GetPreHeader()->GetLoopInformation() == this);
1087   blocks_.Union(&inner_loop->blocks_);
1088   HLoopInformation* outer_loop = GetPreHeader()->GetLoopInformation();
1089   if (outer_loop != nullptr) {
1090     outer_loop->PopulateInnerLoopUpwards(this);
1091   }
1092 }
1093 
1094 HBasicBlock* HLoopInformation::GetPreHeader() const {
1095   HBasicBlock* block = header_->GetPredecessors()[0];
1096   DCHECK(irreducible_ || (block == header_->GetDominator()));
1097   return block;
1098 }
1099 
1100 bool HLoopInformation::Contains(const HBasicBlock& block) const {
1101   return blocks_.IsBitSet(block.GetBlockId());
1102 }
1103 
1104 bool HLoopInformation::IsIn(const HLoopInformation& other) const {
1105   return other.blocks_.IsBitSet(header_->GetBlockId());
1106 }
1107 
1108 bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const {
1109   return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId());
1110 }
1111 
1112 size_t HLoopInformation::GetLifetimeEnd() const {
1113   size_t last_position = 0;
1114   for (HBasicBlock* back_edge : GetBackEdges()) {
1115     last_position = std::max(back_edge->GetLifetimeEnd(), last_position);
1116   }
1117   return last_position;
1118 }
1119 
1120 bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const {
1121   for (HBasicBlock* back_edge : GetBackEdges()) {
1122     DCHECK(back_edge->GetDominator() != nullptr);
1123     if (!header_->Dominates(back_edge)) {
1124       return true;
1125     }
1126   }
1127   return false;
1128 }
1129 
1130 bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) {
1131   for (HBasicBlock* back_edge : GetBackEdges()) {
1132     if (!block->Dominates(back_edge)) {
1133       return false;
1134     }
1135   }
1136   return true;
1137 }
1138 
1139 
1140 bool HLoopInformation::HasExitEdge() const {
1141   // Determine if this loop has at least one exit edge.
1142   HBlocksInLoopReversePostOrderIterator it_loop(*this);
1143   for (; !it_loop.Done(); it_loop.Advance()) {
1144     for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) {
1145       if (!Contains(*successor)) {
1146         return true;
1147       }
1148     }
1149   }
1150   return false;
1151 }
1152 
1153 bool HBasicBlock::Dominates(const HBasicBlock* other) const {
1154   // Walk up the dominator tree from `other`, to find out if `this`
1155   // is an ancestor.
1156   const HBasicBlock* current = other;
1157   while (current != nullptr) {
1158     if (current == this) {
1159       return true;
1160     }
1161     current = current->GetDominator();
1162   }
1163   return false;
1164 }
1165 
1166 static void UpdateInputsUsers(HInstruction* instruction) {
1167   HInputsRef inputs = instruction->GetInputs();
1168   for (size_t i = 0; i < inputs.size(); ++i) {
1169     inputs[i]->AddUseAt(instruction, i);
1170   }
1171   // Environment should be created later.
1172   DCHECK(!instruction->HasEnvironment());
1173 }
1174 
1175 void HBasicBlock::ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement) {
1176   DCHECK(initial->GetBlock() == this);
1177   InsertPhiAfter(replacement, initial);
1178   initial->ReplaceWith(replacement);
1179   RemovePhi(initial);
1180 }
1181 
1182 void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial,
1183                                                   HInstruction* replacement) {
1184   DCHECK(initial->GetBlock() == this);
1185   if (initial->IsControlFlow()) {
1186     // We can only replace a control flow instruction with another control flow instruction.
1187     DCHECK(replacement->IsControlFlow());
1188     DCHECK_EQ(replacement->GetId(), -1);
1189     DCHECK_EQ(replacement->GetType(), DataType::Type::kVoid);
1190     DCHECK_EQ(initial->GetBlock(), this);
1191     DCHECK_EQ(initial->GetType(), DataType::Type::kVoid);
1192     DCHECK(initial->GetUses().empty());
1193     DCHECK(initial->GetEnvUses().empty());
1194     replacement->SetBlock(this);
1195     replacement->SetId(GetGraph()->GetNextInstructionId());
1196     instructions_.InsertInstructionBefore(replacement, initial);
1197     UpdateInputsUsers(replacement);
1198   } else {
1199     InsertInstructionBefore(replacement, initial);
1200     initial->ReplaceWith(replacement);
1201   }
1202   RemoveInstruction(initial);
1203 }
1204 
1205 static void Add(HInstructionList* instruction_list,
1206                 HBasicBlock* block,
1207                 HInstruction* instruction) {
1208   DCHECK(instruction->GetBlock() == nullptr);
1209   DCHECK_EQ(instruction->GetId(), -1);
1210   instruction->SetBlock(block);
1211   instruction->SetId(block->GetGraph()->GetNextInstructionId());
1212   UpdateInputsUsers(instruction);
1213   instruction_list->AddInstruction(instruction);
1214 }
1215 
1216 void HBasicBlock::AddInstruction(HInstruction* instruction) {
1217   Add(&instructions_, this, instruction);
1218 }
1219 
AddPhi(HPhi * phi)1220 void HBasicBlock::AddPhi(HPhi* phi) {
1221   Add(&phis_, this, phi);
1222 }
1223 
InsertInstructionBefore(HInstruction * instruction,HInstruction * cursor)1224 void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
1225   DCHECK(!cursor->IsPhi());
1226   DCHECK(!instruction->IsPhi());
1227   DCHECK_EQ(instruction->GetId(), -1);
1228   DCHECK_NE(cursor->GetId(), -1);
1229   DCHECK_EQ(cursor->GetBlock(), this);
1230   DCHECK(!instruction->IsControlFlow());
1231   instruction->SetBlock(this);
1232   instruction->SetId(GetGraph()->GetNextInstructionId());
1233   UpdateInputsUsers(instruction);
1234   instructions_.InsertInstructionBefore(instruction, cursor);
1235 }
1236 
InsertInstructionAfter(HInstruction * instruction,HInstruction * cursor)1237 void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
1238   DCHECK(!cursor->IsPhi());
1239   DCHECK(!instruction->IsPhi());
1240   DCHECK_EQ(instruction->GetId(), -1);
1241   DCHECK_NE(cursor->GetId(), -1);
1242   DCHECK_EQ(cursor->GetBlock(), this);
1243   DCHECK(!instruction->IsControlFlow());
1244   DCHECK(!cursor->IsControlFlow());
1245   instruction->SetBlock(this);
1246   instruction->SetId(GetGraph()->GetNextInstructionId());
1247   UpdateInputsUsers(instruction);
1248   instructions_.InsertInstructionAfter(instruction, cursor);
1249 }
1250 
InsertPhiAfter(HPhi * phi,HPhi * cursor)1251 void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) {
1252   DCHECK_EQ(phi->GetId(), -1);
1253   DCHECK_NE(cursor->GetId(), -1);
1254   DCHECK_EQ(cursor->GetBlock(), this);
1255   phi->SetBlock(this);
1256   phi->SetId(GetGraph()->GetNextInstructionId());
1257   UpdateInputsUsers(phi);
1258   phis_.InsertInstructionAfter(phi, cursor);
1259 }
1260 
Remove(HInstructionList * instruction_list,HBasicBlock * block,HInstruction * instruction,bool ensure_safety)1261 static void Remove(HInstructionList* instruction_list,
1262                    HBasicBlock* block,
1263                    HInstruction* instruction,
1264                    bool ensure_safety) {
1265   DCHECK_EQ(block, instruction->GetBlock());
1266   instruction->SetBlock(nullptr);
1267   instruction_list->RemoveInstruction(instruction);
1268   if (ensure_safety) {
1269     DCHECK(instruction->GetUses().empty());
1270     DCHECK(instruction->GetEnvUses().empty());
1271     RemoveAsUser(instruction);
1272   }
1273 }
1274 
RemoveInstruction(HInstruction * instruction,bool ensure_safety)1275 void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) {
1276   DCHECK(!instruction->IsPhi());
1277   Remove(&instructions_, this, instruction, ensure_safety);
1278 }
1279 
RemovePhi(HPhi * phi,bool ensure_safety)1280 void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) {
1281   Remove(&phis_, this, phi, ensure_safety);
1282 }
1283 
RemoveInstructionOrPhi(HInstruction * instruction,bool ensure_safety)1284 void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) {
1285   if (instruction->IsPhi()) {
1286     RemovePhi(instruction->AsPhi(), ensure_safety);
1287   } else {
1288     RemoveInstruction(instruction, ensure_safety);
1289   }
1290 }
1291 
CopyFrom(ArrayRef<HInstruction * const> locals)1292 void HEnvironment::CopyFrom(ArrayRef<HInstruction* const> locals) {
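  // Copy `locals` into this environment's vreg slots and register this
  // environment as a user of each non-null instruction copied in.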
1293   for (size_t i = 0; i < locals.size(); i++) {
1294     HInstruction* instruction = locals[i];
1295     SetRawEnvAt(i, instruction);
1296     if (instruction != nullptr) {
1297       instruction->AddEnvUseAt(this, i);
1298     }
1299   }
1300 }
1301 
CopyFrom(HEnvironment * env)1302 void HEnvironment::CopyFrom(HEnvironment* env) {
1303   for (size_t i = 0; i < env->Size(); i++) {
1304     HInstruction* instruction = env->GetInstructionAt(i);
1305     SetRawEnvAt(i, instruction);
1306     if (instruction != nullptr) {
1307       instruction->AddEnvUseAt(this, i);
1308     }
1309   }
1310 }
1311 
CopyFromWithLoopPhiAdjustment(HEnvironment * env,HBasicBlock * loop_header)1312 void HEnvironment::CopyFromWithLoopPhiAdjustment(HEnvironment* env,
1313                                                  HBasicBlock* loop_header) {
1314   DCHECK(loop_header->IsLoopHeader());
1315   for (size_t i = 0; i < env->Size(); i++) {
1316     HInstruction* instruction = env->GetInstructionAt(i);
1317     SetRawEnvAt(i, instruction);
1318     if (instruction == nullptr) {
1319       continue;
1320     }
1321     if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
1322       // At the end of the loop pre-header, the corresponding value for instruction
1323       // is the first input of the phi.
1324       HInstruction* initial = instruction->AsPhi()->InputAt(0);
1325       SetRawEnvAt(i, initial);
1326       initial->AddEnvUseAt(this, i);
1327     } else {
1328       instruction->AddEnvUseAt(this, i);
1329     }
1330   }
1331 }
1332 
RemoveAsUserOfInput(size_t index) const1333 void HEnvironment::RemoveAsUserOfInput(size_t index) const {
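  // Unlink the use node recorded for vreg `index` from the used instruction's
  // environment-use list, then let that instruction fix up its remaining use
  // records around the removal point.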
1334   const HUserRecord<HEnvironment*>& env_use = vregs_[index];
1335   HInstruction* user = env_use.GetInstruction();
1336   auto before_env_use_node = env_use.GetBeforeUseNode();
1337   user->env_uses_.erase_after(before_env_use_node);
1338   user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
1339 }
1340 
ReplaceInput(HInstruction * replacement,size_t index)1341 void HEnvironment::ReplaceInput(HInstruction* replacement, size_t index) {
1342   const HUserRecord<HEnvironment*>& env_use_record = vregs_[index];
1343   HInstruction* orig_instr = env_use_record.GetInstruction();
1344 
1345   DCHECK(orig_instr != replacement);
1346 
1347   HUseList<HEnvironment*>::iterator before_use_node = env_use_record.GetBeforeUseNode();
1348   // Note: fixup_end remains valid across splice_after().
1349   auto fixup_end = replacement->env_uses_.empty() ? replacement->env_uses_.begin()
1350                                                   : ++replacement->env_uses_.begin();
1351   replacement->env_uses_.splice_after(replacement->env_uses_.before_begin(),
1352                                       env_use_record.GetInstruction()->env_uses_,
1353                                       before_use_node);
1354   replacement->FixUpUserRecordsAfterEnvUseInsertion(fixup_end);
1355   orig_instr->FixUpUserRecordsAfterEnvUseRemoval(before_use_node);
1356 }
1357 
Dump(std::ostream & os,bool dump_args)1358 std::ostream& HInstruction::Dump(std::ostream& os, bool dump_args) {
1359   // Note: Handle the case where the instruction has been removed from
1360   // the graph to support debugging output for failed gtests.
1361   HGraph* graph = (GetBlock() != nullptr) ? GetBlock()->GetGraph() : nullptr;
1362   HGraphVisualizer::DumpInstruction(&os, graph, this);
1363   if (dump_args) {
1364     // Allocate memory from local ScopedArenaAllocator.
1365     std::optional<MallocArenaPool> local_arena_pool;
1366     std::optional<ArenaStack> local_arena_stack;
1367     if (UNLIKELY(graph == nullptr)) {
1368       local_arena_pool.emplace();
1369       local_arena_stack.emplace(&local_arena_pool.value());
1370     }
1371     ScopedArenaAllocator allocator(
1372         graph != nullptr ? graph->GetArenaStack() : &local_arena_stack.value());
1373     // Instructions that we already visited. We print each instruction only once.
1374     ArenaBitVector visited(&allocator,
1375                            (graph != nullptr) ? graph->GetCurrentInstructionId() : 0u,
1376                            /* expandable= */ (graph == nullptr),
1377                            kArenaAllocMisc);
1378     visited.ClearAllBits();
1379     visited.SetBit(GetId());
1380     // Keep a queue of instructions with their indentations.
1381     ScopedArenaDeque<std::pair<HInstruction*, size_t>> queue(allocator.Adapter(kArenaAllocMisc));
1382     auto add_args = [&queue](HInstruction* instruction, size_t indentation) {
1383       for (HInstruction* arg : ReverseRange(instruction->GetInputs())) {
1384         queue.emplace_front(arg, indentation);
1385       }
1386     };
1387     add_args(this, /*indentation=*/ 1u);
1388     while (!queue.empty()) {
1389       HInstruction* instruction;
1390       size_t indentation;
1391       std::tie(instruction, indentation) = queue.front();
1392       queue.pop_front();
1393       if (!visited.IsBitSet(instruction->GetId())) {
1394         visited.SetBit(instruction->GetId());
1395         os << '\n';
1396         for (size_t i = 0; i != indentation; ++i) {
1397           os << "  ";
1398         }
1399         HGraphVisualizer::DumpInstruction(&os, graph, instruction);
1400         add_args(instruction, indentation + 1u);
1401       }
1402     }
1403   }
1404   return os;
1405 }
1406 
GetNextDisregardingMoves() const1407 HInstruction* HInstruction::GetNextDisregardingMoves() const {
1408   HInstruction* next = GetNext();
1409   while (next != nullptr && next->IsParallelMove()) {
1410     next = next->GetNext();
1411   }
1412   return next;
1413 }
1414 
GetPreviousDisregardingMoves() const1415 HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
1416   HInstruction* previous = GetPrevious();
1417   while (previous != nullptr && previous->IsParallelMove()) {
1418     previous = previous->GetPrevious();
1419   }
1420   return previous;
1421 }
1422 
AddInstruction(HInstruction * instruction)1423 void HInstructionList::AddInstruction(HInstruction* instruction) {
1424   if (first_instruction_ == nullptr) {
1425     DCHECK(last_instruction_ == nullptr);
1426     first_instruction_ = last_instruction_ = instruction;
1427   } else {
1428     DCHECK(last_instruction_ != nullptr);
1429     last_instruction_->next_ = instruction;
1430     instruction->previous_ = last_instruction_;
1431     last_instruction_ = instruction;
1432   }
1433 }
1434 
InsertInstructionBefore(HInstruction * instruction,HInstruction * cursor)1435 void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
1436   DCHECK(Contains(cursor));
1437   if (cursor == first_instruction_) {
1438     cursor->previous_ = instruction;
1439     instruction->next_ = cursor;
1440     first_instruction_ = instruction;
1441   } else {
1442     instruction->previous_ = cursor->previous_;
1443     instruction->next_ = cursor;
1444     cursor->previous_ = instruction;
1445     instruction->previous_->next_ = instruction;
1446   }
1447 }
1448 
InsertInstructionAfter(HInstruction * instruction,HInstruction * cursor)1449 void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
1450   DCHECK(Contains(cursor));
1451   if (cursor == last_instruction_) {
1452     cursor->next_ = instruction;
1453     instruction->previous_ = cursor;
1454     last_instruction_ = instruction;
1455   } else {
1456     instruction->next_ = cursor->next_;
1457     instruction->previous_ = cursor;
1458     cursor->next_ = instruction;
1459     instruction->next_->previous_ = instruction;
1460   }
1461 }
1462 
RemoveInstruction(HInstruction * instruction)1463 void HInstructionList::RemoveInstruction(HInstruction* instruction) {
1464   if (instruction->previous_ != nullptr) {
1465     instruction->previous_->next_ = instruction->next_;
1466   }
1467   if (instruction->next_ != nullptr) {
1468     instruction->next_->previous_ = instruction->previous_;
1469   }
1470   if (instruction == first_instruction_) {
1471     first_instruction_ = instruction->next_;
1472   }
1473   if (instruction == last_instruction_) {
1474     last_instruction_ = instruction->previous_;
1475   }
1476 }
1477 
Contains(HInstruction * instruction) const1478 bool HInstructionList::Contains(HInstruction* instruction) const {
1479   for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1480     if (it.Current() == instruction) {
1481       return true;
1482     }
1483   }
1484   return false;
1485 }
1486 
FoundBefore(const HInstruction * instruction1,const HInstruction * instruction2) const1487 bool HInstructionList::FoundBefore(const HInstruction* instruction1,
1488                                    const HInstruction* instruction2) const {
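  // Linear scan from the head of the list: returns true if `instruction1` is
  // encountered before `instruction2`. Both must belong to this block.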
1489   DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock());
1490   for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1491     if (it.Current() == instruction1) {
1492       return true;
1493     }
1494     if (it.Current() == instruction2) {
1495       return false;
1496     }
1497   }
1498   LOG(FATAL) << "Did not find an order between two instructions of the same block.";
1499   UNREACHABLE();
1500 }
1501 
Dominates(HInstruction * other_instruction) const1502 bool HInstruction::Dominates(HInstruction* other_instruction) const {
1503   return other_instruction == this || StrictlyDominates(other_instruction);
1504 }
1505 
StrictlyDominates(HInstruction * other_instruction) const1506 bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const {
1507   if (other_instruction == this) {
1508     // An instruction does not strictly dominate itself.
1509     return false;
1510   }
1511   HBasicBlock* block = GetBlock();
1512   HBasicBlock* other_block = other_instruction->GetBlock();
1513   if (block != other_block) {
1514     return GetBlock()->Dominates(other_instruction->GetBlock());
1515   } else {
1516     // If both instructions are in the same block, ensure this
1517     // instruction comes before `other_instruction`.
1518     if (IsPhi()) {
1519       if (!other_instruction->IsPhi()) {
1520         // Phis appear before non-phi instructions, so this instruction
1521         // dominates `other_instruction`.
1522         return true;
1523       } else {
1524         // There is no order among phis.
1525         LOG(FATAL) << "There is no dominance between phis of the same block.";
1526         UNREACHABLE();
1527       }
1528     } else {
1529       // `this` is not a phi.
1530       if (other_instruction->IsPhi()) {
1531         // Phis appear before non-phi instructions, so this instruction
1532         // does not dominate `other_instruction`.
1533         return false;
1534       } else {
1535         // Check whether this instruction comes before
1536         // `other_instruction` in the instruction list.
1537         return block->GetInstructions().FoundBefore(this, other_instruction);
1538       }
1539     }
1540   }
1541 }
1542 
RemoveEnvironment()1543 void HInstruction::RemoveEnvironment() {
1544   RemoveEnvironmentUses(this);
1545   environment_ = nullptr;
1546 }
1547 
ReplaceWith(HInstruction * other)1548 void HInstruction::ReplaceWith(HInstruction* other) {
1549   DCHECK(other != nullptr);
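  // Splice all regular and environment uses of `this` onto the front of
  // `other`'s use lists and fix up the user records of the spliced-in nodes
  // (everything before `fixup_end`). For example (sketch with hypothetical
  // instructions): after `add->ReplaceWith(sub)`, every former user of `add`
  // reads `sub` instead, and `add` is left with no uses.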
1550   // Note: fixup_end remains valid across splice_after().
1551   auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin();
1552   other->uses_.splice_after(other->uses_.before_begin(), uses_);
1553   other->FixUpUserRecordsAfterUseInsertion(fixup_end);
1554 
1555   // Note: env_fixup_end remains valid across splice_after().
1556   auto env_fixup_end =
1557       other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin();
1558   other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_);
1559   other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
1560 
1561   DCHECK(uses_.empty());
1562   DCHECK(env_uses_.empty());
1563 }
1564 
ReplaceUsesDominatedBy(HInstruction * dominator,HInstruction * replacement,bool strictly_dominated)1565 void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator,
1566                                           HInstruction* replacement,
1567                                           bool strictly_dominated) {
1568   const HUseList<HInstruction*>& uses = GetUses();
1569   for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
1570     HInstruction* user = it->GetUser();
1571     size_t index = it->GetIndex();
1572     // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
1573     ++it;
1574     const bool dominated =
1575         strictly_dominated ? dominator->StrictlyDominates(user) : dominator->Dominates(user);
1576 
1577     if (dominated) {
1578       user->ReplaceInput(replacement, index);
1579     } else if (user->IsPhi() && !user->AsPhi()->IsCatchPhi()) {
1580       // If the input flows from a block dominated by `dominator`, we can replace it.
1581       // We do not perform this for catch phis as we don't have control flow support
1582       // for their inputs.
1583       const ArenaVector<HBasicBlock*>& predecessors = user->GetBlock()->GetPredecessors();
1584       HBasicBlock* predecessor = predecessors[index];
1585       if (dominator->GetBlock()->Dominates(predecessor)) {
1586         user->ReplaceInput(replacement, index);
1587       }
1588     }
1589   }
1590 }
1591 
ReplaceEnvUsesDominatedBy(HInstruction * dominator,HInstruction * replacement)1592 void HInstruction::ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
1593   const HUseList<HEnvironment*>& uses = GetEnvUses();
1594   for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
1595     HEnvironment* user = it->GetUser();
1596     size_t index = it->GetIndex();
1597     // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
1598     ++it;
1599     if (dominator->StrictlyDominates(user->GetHolder())) {
1600       user->ReplaceInput(replacement, index);
1601     }
1602   }
1603 }
1604 
ReplaceInput(HInstruction * replacement,size_t index)1605 void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) {
1606   HUserRecord<HInstruction*> input_use = InputRecordAt(index);
1607   if (input_use.GetInstruction() == replacement) {
1608     // Nothing to do.
1609     return;
1610   }
1611   HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
1612   // Note: fixup_end remains valid across splice_after().
1613   auto fixup_end =
1614       replacement->uses_.empty() ? replacement->uses_.begin() : ++replacement->uses_.begin();
1615   replacement->uses_.splice_after(replacement->uses_.before_begin(),
1616                                   input_use.GetInstruction()->uses_,
1617                                   before_use_node);
1618   replacement->FixUpUserRecordsAfterUseInsertion(fixup_end);
1619   input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
1620 }
1621 
EnvironmentSize() const1622 size_t HInstruction::EnvironmentSize() const {
1623   return HasEnvironment() ? environment_->Size() : 0;
1624 }
1625 
AddInput(HInstruction * input)1626 void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
1627   DCHECK(input->GetBlock() != nullptr);
1628   inputs_.push_back(HUserRecord<HInstruction*>(input));
1629   input->AddUseAt(this, inputs_.size() - 1);
1630 }
1631 
InsertInputAt(size_t index,HInstruction * input)1632 void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
1633   inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
1634   input->AddUseAt(this, index);
1635   // Update indexes in use nodes of inputs that have been pushed further back by the insert().
1636   for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
1637     DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
1638     inputs_[i].GetUseNode()->SetIndex(i);
1639   }
1640 }
1641 
RemoveInputAt(size_t index)1642 void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
1643   RemoveAsUserOfInput(index);
1644   inputs_.erase(inputs_.begin() + index);
1645   // Update indexes in use nodes of inputs that have been pulled forward by the erase().
1646   for (size_t i = index, e = inputs_.size(); i < e; ++i) {
1647     DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u);
1648     inputs_[i].GetUseNode()->SetIndex(i);
1649   }
1650 }
1651 
RemoveAllInputs()1652 void HVariableInputSizeInstruction::RemoveAllInputs() {
1653   RemoveAsUserOfAllInputs();
1654   DCHECK(!HasNonEnvironmentUses());
1655 
1656   inputs_.clear();
1657   DCHECK_EQ(0u, InputCount());
1658 }
1659 
RemoveConstructorFences(HInstruction * instruction)1660 size_t HConstructorFence::RemoveConstructorFences(HInstruction* instruction) {
1661   DCHECK(instruction->GetBlock() != nullptr);
1662   // Removing constructor fences only makes sense for instructions with an object return type.
1663   DCHECK_EQ(DataType::Type::kReference, instruction->GetType());
1664 
1665   // Return how many instructions were removed, for statistics purposes.
1666   size_t remove_count = 0;
1667 
1668   // Efficient implementation that simultaneously (in one pass):
1669   // * Scans the uses list for all constructor fences.
1670   // * Deletes that constructor fence from the uses list of `instruction`.
1671   // * Deletes `instruction` from the constructor fence's inputs.
1672   // * Deletes the constructor fence if it now has 0 inputs.
1673 
1674   const HUseList<HInstruction*>& uses = instruction->GetUses();
1675   // Warning: Although this is "const", we might mutate the list when calling RemoveInputAt.
1676   for (auto it = uses.begin(), end = uses.end(); it != end; ) {
1677     const HUseListNode<HInstruction*>& use_node = *it;
1678     HInstruction* const use_instruction = use_node.GetUser();
1679 
1680     // Advance the iterator immediately once we fetch the use_node.
1681     // Warning: If the input is removed, the current iterator becomes invalid.
1682     ++it;
1683 
1684     if (use_instruction->IsConstructorFence()) {
1685       HConstructorFence* ctor_fence = use_instruction->AsConstructorFence();
1686       size_t input_index = use_node.GetIndex();
1687 
1688       // Process the candidate instruction for removal
1689       // from the graph.
1690 
1691       // Constructor fence instructions are never
1692       // used by other instructions.
1693       //
1694       // If we wanted to make this more generic, it
1695       // could be a runtime if statement.
1696       DCHECK(!ctor_fence->HasUses());
1697 
1698       // A constructor fence's return type is "kPrimVoid"
1699       // and therefore it can't have any environment uses.
1700       DCHECK(!ctor_fence->HasEnvironmentUses());
1701 
1702       // Remove the inputs first, otherwise removing the instruction
1703       // will try to remove its uses while we are already removing uses
1704       // and this operation will fail.
1705       DCHECK_EQ(instruction, ctor_fence->InputAt(input_index));
1706 
1707       // Removing the input will also remove the `use_node`.
1708       // (Do not look at `use_node` after this; it will be a dangling reference.)
1709       ctor_fence->RemoveInputAt(input_index);
1710 
1711       // Once all inputs are removed, the fence is considered dead and
1712       // is removed.
1713       if (ctor_fence->InputCount() == 0u) {
1714         ctor_fence->GetBlock()->RemoveInstruction(ctor_fence);
1715         ++remove_count;
1716       }
1717     }
1718   }
1719 
1720   if (kIsDebugBuild) {
1721     // Post-condition checks:
1722     // * None of the uses of `instruction` are a constructor fence.
1723     // * The `instruction` itself did not get removed from a block.
1724     for (const HUseListNode<HInstruction*>& use_node : instruction->GetUses()) {
1725       CHECK(!use_node.GetUser()->IsConstructorFence());
1726     }
1727     CHECK(instruction->GetBlock() != nullptr);
1728   }
1729 
1730   return remove_count;
1731 }
1732 
Merge(HConstructorFence * other)1733 void HConstructorFence::Merge(HConstructorFence* other) {
1734   // Do not delete yourself from the graph.
1735   DCHECK(this != other);
1736   // Don't try to merge with an instruction not associated with a block.
1737   DCHECK(other->GetBlock() != nullptr);
1738   // A constructor fence's return type is "kPrimVoid"
1739   // and therefore it cannot have any environment uses.
1740   DCHECK(!other->HasEnvironmentUses());
1741 
1742   auto has_input = [](HInstruction* haystack, HInstruction* needle) {
1743     // Check if `haystack` has `needle` as any of its inputs.
1744     for (size_t input_count = 0; input_count < haystack->InputCount(); ++input_count) {
1745       if (haystack->InputAt(input_count) == needle) {
1746         return true;
1747       }
1748     }
1749     return false;
1750   };
1751 
1752   // Add each input of `other` to `this`, unless it is already an input.
1753   for (size_t input_count = 0; input_count < other->InputCount(); ++input_count) {
1754     HInstruction* other_input = other->InputAt(input_count);
1755     if (!has_input(this, other_input)) {
1756       AddInput(other_input);
1757     }
1758   }
1759 
1760   other->GetBlock()->RemoveInstruction(other);
1761 }
1762 
GetAssociatedAllocation(bool ignore_inputs)1763 HInstruction* HConstructorFence::GetAssociatedAllocation(bool ignore_inputs) {
1764   HInstruction* new_instance_inst = GetPrevious();
1765   // Check if the immediately preceding instruction is a new-instance/new-array.
1766   // Otherwise this fence is for protecting final fields.
1767   if (new_instance_inst != nullptr &&
1768       (new_instance_inst->IsNewInstance() || new_instance_inst->IsNewArray())) {
1769     if (ignore_inputs) {
1770       // If inputs are ignored, simply check if the predecessor is
1771       // *any* HNewInstance/HNewArray.
1772       //
1773       // Inputs are normally only ignored for prepare_for_register_allocation,
1774       // at which point *any* prior HNewInstance/Array can be considered
1775       // associated.
1776       return new_instance_inst;
1777     } else {
1778       // Normal case: There must be exactly 1 input and the previous instruction
1779       // must be that input.
1780       if (InputCount() == 1u && InputAt(0) == new_instance_inst) {
1781         return new_instance_inst;
1782       }
1783     }
1784   }
1785   return nullptr;
1786 }
1787 
1788 #define DEFINE_ACCEPT(name, super)                                             \
1789 void H##name::Accept(HGraphVisitor* visitor) {                                 \
1790   visitor->Visit##name(this);                                                  \
1791 }
1792 
FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)1793 FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)
1794 
1795 #undef DEFINE_ACCEPT
1796 
1797 void HGraphVisitor::VisitInsertionOrder() {
1798   for (HBasicBlock* block : graph_->GetActiveBlocks()) {
1799     VisitBasicBlock(block);
1800   }
1801 }
1802 
VisitReversePostOrder()1803 void HGraphVisitor::VisitReversePostOrder() {
1804   for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1805     VisitBasicBlock(block);
1806   }
1807 }
1808 
VisitBasicBlock(HBasicBlock * block)1809 void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) {
1810   for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
1811     it.Current()->Accept(this);
1812   }
1813   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
1814     it.Current()->Accept(this);
1815   }
1816 }
1817 
TryStaticEvaluation() const1818 HConstant* HTypeConversion::TryStaticEvaluation() const {
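  // Constant-fold the conversion when the input is a constant, mirroring the
  // runtime semantics: narrowing integral casts truncate, and floating-point
  // to integral casts map NaN to 0 and saturate at the target type's range.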
1819   HGraph* graph = GetBlock()->GetGraph();
1820   if (GetInput()->IsIntConstant()) {
1821     int32_t value = GetInput()->AsIntConstant()->GetValue();
1822     switch (GetResultType()) {
1823       case DataType::Type::kInt8:
1824         return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1825       case DataType::Type::kUint8:
1826         return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1827       case DataType::Type::kInt16:
1828         return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1829       case DataType::Type::kUint16:
1830         return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1831       case DataType::Type::kInt64:
1832         return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1833       case DataType::Type::kFloat32:
1834         return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1835       case DataType::Type::kFloat64:
1836         return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1837       default:
1838         return nullptr;
1839     }
1840   } else if (GetInput()->IsLongConstant()) {
1841     int64_t value = GetInput()->AsLongConstant()->GetValue();
1842     switch (GetResultType()) {
1843       case DataType::Type::kInt8:
1844         return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1845       case DataType::Type::kUint8:
1846         return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1847       case DataType::Type::kInt16:
1848         return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1849       case DataType::Type::kUint16:
1850         return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1851       case DataType::Type::kInt32:
1852         return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1853       case DataType::Type::kFloat32:
1854         return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1855       case DataType::Type::kFloat64:
1856         return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1857       default:
1858         return nullptr;
1859     }
1860   } else if (GetInput()->IsFloatConstant()) {
1861     float value = GetInput()->AsFloatConstant()->GetValue();
1862     switch (GetResultType()) {
1863       case DataType::Type::kInt32:
1864         if (std::isnan(value))
1865           return graph->GetIntConstant(0, GetDexPc());
1866         if (value >= static_cast<float>(kPrimIntMax))
1867           return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1868         if (value <= kPrimIntMin)
1869           return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1870         return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1871       case DataType::Type::kInt64:
1872         if (std::isnan(value))
1873           return graph->GetLongConstant(0, GetDexPc());
1874         if (value >= static_cast<float>(kPrimLongMax))
1875           return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1876         if (value <= kPrimLongMin)
1877           return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1878         return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1879       case DataType::Type::kFloat64:
1880         return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1881       default:
1882         return nullptr;
1883     }
1884   } else if (GetInput()->IsDoubleConstant()) {
1885     double value = GetInput()->AsDoubleConstant()->GetValue();
1886     switch (GetResultType()) {
1887       case DataType::Type::kInt32:
1888         if (std::isnan(value))
1889           return graph->GetIntConstant(0, GetDexPc());
1890         if (value >= kPrimIntMax)
1891           return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1892       if (value <= kPrimIntMin)
1893           return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1894         return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1895       case DataType::Type::kInt64:
1896         if (std::isnan(value))
1897           return graph->GetLongConstant(0, GetDexPc());
1898         if (value >= static_cast<double>(kPrimLongMax))
1899           return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1900         if (value <= kPrimLongMin)
1901           return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1902         return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1903       case DataType::Type::kFloat32:
1904         return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1905       default:
1906         return nullptr;
1907     }
1908   }
1909   return nullptr;
1910 }
1911 
TryStaticEvaluation() const1912 HConstant* HUnaryOperation::TryStaticEvaluation() const {
1913   if (GetInput()->IsIntConstant()) {
1914     return Evaluate(GetInput()->AsIntConstant());
1915   } else if (GetInput()->IsLongConstant()) {
1916     return Evaluate(GetInput()->AsLongConstant());
1917   } else if (kEnableFloatingPointStaticEvaluation) {
1918     if (GetInput()->IsFloatConstant()) {
1919       return Evaluate(GetInput()->AsFloatConstant());
1920     } else if (GetInput()->IsDoubleConstant()) {
1921       return Evaluate(GetInput()->AsDoubleConstant());
1922     }
1923   }
1924   return nullptr;
1925 }
1926 
TryStaticEvaluation() const1927 HConstant* HBinaryOperation::TryStaticEvaluation() const {
1928   if (GetLeft()->IsIntConstant() && GetRight()->IsIntConstant()) {
1929     return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsIntConstant());
1930   } else if (GetLeft()->IsLongConstant()) {
1931     if (GetRight()->IsIntConstant()) {
1932       // The binop(long, int) case is only valid for shifts and rotations.
1933       DCHECK(IsShl() || IsShr() || IsUShr() || IsRor()) << DebugName();
1934       return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsIntConstant());
1935     } else if (GetRight()->IsLongConstant()) {
1936       return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsLongConstant());
1937     }
1938   } else if (GetLeft()->IsNullConstant() && GetRight()->IsNullConstant()) {
1939     // The binop(null, null) case is only valid for equal and not-equal conditions.
1940     DCHECK(IsEqual() || IsNotEqual()) << DebugName();
1941     return Evaluate(GetLeft()->AsNullConstant(), GetRight()->AsNullConstant());
1942   } else if (kEnableFloatingPointStaticEvaluation) {
1943     if (GetLeft()->IsFloatConstant() && GetRight()->IsFloatConstant()) {
1944       return Evaluate(GetLeft()->AsFloatConstant(), GetRight()->AsFloatConstant());
1945     } else if (GetLeft()->IsDoubleConstant() && GetRight()->IsDoubleConstant()) {
1946       return Evaluate(GetLeft()->AsDoubleConstant(), GetRight()->AsDoubleConstant());
1947     }
1948   }
1949   return nullptr;
1950 }
1951 
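// Returns the right-hand input if it is a constant. For commutative
// operations a constant left-hand input qualifies as well; otherwise
// returns null.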
GetConstantRight() const1952 HConstant* HBinaryOperation::GetConstantRight() const {
1953   if (GetRight()->IsConstant()) {
1954     return GetRight()->AsConstant();
1955   } else if (IsCommutative() && GetLeft()->IsConstant()) {
1956     return GetLeft()->AsConstant();
1957   } else {
1958     return nullptr;
1959   }
1960 }
1961 
1962 // If `GetConstantRight()` returns one of the inputs, this returns the other
1963 // one. Otherwise it returns null.
GetLeastConstantLeft() const1964 HInstruction* HBinaryOperation::GetLeastConstantLeft() const {
1965   HInstruction* most_constant_right = GetConstantRight();
1966   if (most_constant_right == nullptr) {
1967     return nullptr;
1968   } else if (most_constant_right == GetLeft()) {
1969     return GetRight();
1970   } else {
1971     return GetLeft();
1972   }
1973 }
1974 
operator <<(std::ostream & os,ComparisonBias rhs)1975 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs) {
1976   // TODO: Replace with auto-generated operator<<.
1977   switch (rhs) {
1978     case ComparisonBias::kNoBias:
1979       return os << "none";
1980     case ComparisonBias::kGtBias:
1981       return os << "gt";
1982     case ComparisonBias::kLtBias:
1983       return os << "lt";
1984     default:
1985       LOG(FATAL) << "Unknown ComparisonBias: " << static_cast<int>(rhs);
1986       UNREACHABLE();
1987   }
1988 }
1989 
IsBeforeWhenDisregardMoves(HInstruction * instruction) const1990 bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
1991   return this == instruction->GetPreviousDisregardingMoves();
1992 }
1993 
Equals(const HInstruction * other) const1994 bool HInstruction::Equals(const HInstruction* other) const {
1995   if (GetKind() != other->GetKind()) return false;
1996   if (GetType() != other->GetType()) return false;
1997   if (!InstructionDataEquals(other)) return false;
1998   HConstInputsRef inputs = GetInputs();
1999   HConstInputsRef other_inputs = other->GetInputs();
2000   if (inputs.size() != other_inputs.size()) return false;
2001   for (size_t i = 0; i != inputs.size(); ++i) {
2002     if (inputs[i] != other_inputs[i]) return false;
2003   }
2004 
2005   DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode());
2006   return true;
2007 }
2008 
operator <<(std::ostream & os,HInstruction::InstructionKind rhs)2009 std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs) {
2010 #define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
2011   switch (rhs) {
2012     FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_CASE)
2013     default:
2014       os << "Unknown instruction kind " << static_cast<int>(rhs);
2015       break;
2016   }
2017 #undef DECLARE_CASE
2018   return os;
2019 }
2020 
operator <<(std::ostream & os,const HInstruction::NoArgsDump rhs)2021 std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs) {
2022   // TODO Really this should be const but that would require const-ifying
2023   // graph-visualizer and HGraphVisitor which are tangled up everywhere.
2024   return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ false);
2025 }
2026 
operator <<(std::ostream & os,const HInstruction::ArgsDump rhs)2027 std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs) {
2028   // TODO Really this should be const but that would require const-ifying
2029   // graph-visualizer and HGraphVisitor which are tangled up everywhere.
2030   return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ true);
2031 }
2032 
operator <<(std::ostream & os,const HInstruction & rhs)2033 std::ostream& operator<<(std::ostream& os, const HInstruction& rhs) {
2034   return os << rhs.DumpWithoutArgs();
2035 }
2036 
operator <<(std::ostream & os,const HUseList<HInstruction * > & lst)2037 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst) {
2038   os << "Instructions[";
2039   bool first = true;
2040   for (const auto& hi : lst) {
2041     if (!first) {
2042       os << ", ";
2043     }
2044     first = false;
2045     os << hi.GetUser()->DebugName() << "[id: " << hi.GetUser()->GetId()
2046        << ", blk: " << hi.GetUser()->GetBlock()->GetBlockId() << "]@" << hi.GetIndex();
2047   }
2048   os << "]";
2049   return os;
2050 }
2051 
operator <<(std::ostream & os,const HUseList<HEnvironment * > & lst)2052 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst) {
2053   os << "Environments[";
2054   bool first = true;
2055   for (const auto& hi : lst) {
2056     if (!first) {
2057       os << ", ";
2058     }
2059     first = false;
2060     os << *hi.GetUser()->GetHolder() << "@" << hi.GetIndex();
2061   }
2062   os << "]";
2063   return os;
2064 }
2065 
Dump(std::ostream & os,CodeGenerator * codegen,std::optional<std::reference_wrapper<const BlockNamer>> namer)2066 std::ostream& HGraph::Dump(std::ostream& os,
2067                            CodeGenerator* codegen,
2068                            std::optional<std::reference_wrapper<const BlockNamer>> namer) {
2069   HGraphVisualizer vis(&os, this, codegen, namer);
2070   vis.DumpGraphDebug();
2071   return os;
2072 }
2073 
MoveBefore(HInstruction * cursor,bool do_checks)2074 void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) {
2075   if (do_checks) {
2076     DCHECK(!IsPhi());
2077     DCHECK(!IsControlFlow());
2078     DCHECK(CanBeMoved() ||
2079            // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization.
2080            IsShouldDeoptimizeFlag());
2081     DCHECK(!cursor->IsPhi());
2082   }
2083 
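  // Unlink this instruction from its current position (it must not be the
  // last instruction of its block), then relink it immediately before
  // `cursor`, possibly in a different block.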
2084   next_->previous_ = previous_;
2085   if (previous_ != nullptr) {
2086     previous_->next_ = next_;
2087   }
2088   if (block_->instructions_.first_instruction_ == this) {
2089     block_->instructions_.first_instruction_ = next_;
2090   }
2091   DCHECK_NE(block_->instructions_.last_instruction_, this);
2092 
2093   previous_ = cursor->previous_;
2094   if (previous_ != nullptr) {
2095     previous_->next_ = this;
2096   }
2097   next_ = cursor;
2098   cursor->previous_ = this;
2099   block_ = cursor->block_;
2100 
2101   if (block_->instructions_.first_instruction_ == cursor) {
2102     block_->instructions_.first_instruction_ = this;
2103   }
2104 }
2105 
MoveBeforeFirstUserAndOutOfLoops()2106 void HInstruction::MoveBeforeFirstUserAndOutOfLoops() {
2107   DCHECK(!CanThrow());
2108   DCHECK(!HasSideEffects());
2109   DCHECK(!HasEnvironmentUses());
2110   DCHECK(HasNonEnvironmentUses());
2111   DCHECK(!IsPhi());  // Makes no sense for Phi.
2112   DCHECK_EQ(InputCount(), 0u);
2113 
2114   // Find the target block.
2115   auto uses_it = GetUses().begin();
2116   auto uses_end = GetUses().end();
2117   HBasicBlock* target_block = uses_it->GetUser()->GetBlock();
2118   ++uses_it;
2119   while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) {
2120     ++uses_it;
2121   }
2122   if (uses_it != uses_end) {
2123     // This instruction has uses in two or more blocks. Find the common dominator.
2124     CommonDominator finder(target_block);
2125     for (; uses_it != uses_end; ++uses_it) {
2126       finder.Update(uses_it->GetUser()->GetBlock());
2127     }
2128     target_block = finder.Get();
2129     DCHECK(target_block != nullptr);
2130   }
2131   // Move to the first dominator not in a loop.
2132   while (target_block->IsInLoop()) {
2133     target_block = target_block->GetDominator();
2134     DCHECK(target_block != nullptr);
2135   }
2136 
2137   // Find insertion position.
2138   HInstruction* insert_pos = nullptr;
2139   for (const HUseListNode<HInstruction*>& use : GetUses()) {
2140     if (use.GetUser()->GetBlock() == target_block &&
2141         (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
2142       insert_pos = use.GetUser();
2143     }
2144   }
2145   if (insert_pos == nullptr) {
2146     // No user in `target_block`, insert before the control flow instruction.
2147     insert_pos = target_block->GetLastInstruction();
2148     DCHECK(insert_pos->IsControlFlow());
2149     // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
2150     if (insert_pos->IsIf()) {
2151       HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
2152       if (if_input == insert_pos->GetPrevious()) {
2153         insert_pos = if_input;
2154       }
2155     }
2156   }
2157   MoveBefore(insert_pos);
2158 }
2159 
SplitBefore(HInstruction * cursor,bool require_graph_not_in_ssa_form)2160 HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor, bool require_graph_not_in_ssa_form) {
2161   DCHECK_IMPLIES(require_graph_not_in_ssa_form, !graph_->IsInSsaForm())
2162       << "Support for SSA form not implemented.";
2163   DCHECK_EQ(cursor->GetBlock(), this);
2164 
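  // Move `cursor` and all following instructions into a new block that
  // inherits this block's successors; this block then ends with a Goto to
  // the new block.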
2165   HBasicBlock* new_block =
2166       new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
2167   new_block->instructions_.first_instruction_ = cursor;
2168   new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2169   instructions_.last_instruction_ = cursor->previous_;
2170   if (cursor->previous_ == nullptr) {
2171     instructions_.first_instruction_ = nullptr;
2172   } else {
2173     cursor->previous_->next_ = nullptr;
2174     cursor->previous_ = nullptr;
2175   }
2176 
2177   new_block->instructions_.SetBlockOfInstructions(new_block);
2178   AddInstruction(new (GetGraph()->GetAllocator()) HGoto(new_block->GetDexPc()));
2179 
2180   for (HBasicBlock* successor : GetSuccessors()) {
2181     successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2182   }
2183   new_block->successors_.swap(successors_);
2184   DCHECK(successors_.empty());
2185   AddSuccessor(new_block);
2186 
2187   GetGraph()->AddBlock(new_block);
2188   return new_block;
2189 }
2190 
CreateImmediateDominator()2191 HBasicBlock* HBasicBlock::CreateImmediateDominator() {
2192   DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
2193   DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented.";
2194 
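  // Create a new empty block that takes over all current predecessors of
  // this block and becomes its single predecessor.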
2195   HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
2196 
2197   for (HBasicBlock* predecessor : GetPredecessors()) {
2198     predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block;
2199   }
2200   new_block->predecessors_.swap(predecessors_);
2201   DCHECK(predecessors_.empty());
2202   AddPredecessor(new_block);
2203 
2204   GetGraph()->AddBlock(new_block);
2205   return new_block;
2206 }
2207 
SplitBeforeForInlining(HInstruction * cursor)2208 HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) {
2209   DCHECK_EQ(cursor->GetBlock(), this);
2210 
2211   HBasicBlock* new_block =
2212       new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
2213   new_block->instructions_.first_instruction_ = cursor;
2214   new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2215   instructions_.last_instruction_ = cursor->previous_;
2216   if (cursor->previous_ == nullptr) {
2217     instructions_.first_instruction_ = nullptr;
2218   } else {
2219     cursor->previous_->next_ = nullptr;
2220     cursor->previous_ = nullptr;
2221   }
2222 
2223   new_block->instructions_.SetBlockOfInstructions(new_block);
2224 
2225   for (HBasicBlock* successor : GetSuccessors()) {
2226     successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2227   }
2228   new_block->successors_.swap(successors_);
2229   DCHECK(successors_.empty());
2230 
2231   for (HBasicBlock* dominated : GetDominatedBlocks()) {
2232     dominated->dominator_ = new_block;
2233   }
2234   new_block->dominated_blocks_.swap(dominated_blocks_);
2235   DCHECK(dominated_blocks_.empty());
2236   return new_block;
2237 }
2238 
SplitAfterForInlining(HInstruction * cursor)2239 HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) {
2240   DCHECK(!cursor->IsControlFlow());
2241   DCHECK_NE(instructions_.last_instruction_, cursor);
2242   DCHECK_EQ(cursor->GetBlock(), this);
2243 
2244   HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
2245   new_block->instructions_.first_instruction_ = cursor->GetNext();
2246   new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2247   cursor->next_->previous_ = nullptr;
2248   cursor->next_ = nullptr;
2249   instructions_.last_instruction_ = cursor;
2250 
2251   new_block->instructions_.SetBlockOfInstructions(new_block);
2252   for (HBasicBlock* successor : GetSuccessors()) {
2253     successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2254   }
2255   new_block->successors_.swap(successors_);
2256   DCHECK(successors_.empty());
2257 
2258   for (HBasicBlock* dominated : GetDominatedBlocks()) {
2259     dominated->dominator_ = new_block;
2260   }
2261   new_block->dominated_blocks_.swap(dominated_blocks_);
2262   DCHECK(dominated_blocks_.empty());
2263   return new_block;
2264 }
2265 
ComputeTryEntryOfSuccessors() const2266 const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
2267   if (EndsWithTryBoundary()) {
2268     HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
2269     if (try_boundary->IsEntry()) {
2270       DCHECK(!IsTryBlock());
2271       return try_boundary;
2272     } else {
2273       DCHECK(IsTryBlock());
2274       DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary));
2275       return nullptr;
2276     }
2277   } else if (IsTryBlock()) {
2278     return &try_catch_information_->GetTryEntry();
2279   } else {
2280     return nullptr;
2281   }
2282 }
2283 
HasThrowingInstructions() const2284 bool HBasicBlock::HasThrowingInstructions() const {
2285   for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2286     if (it.Current()->CanThrow()) {
2287       return true;
2288     }
2289   }
2290   return false;
2291 }
2292 
HasOnlyOneInstruction(const HBasicBlock & block)2293 static bool HasOnlyOneInstruction(const HBasicBlock& block) {
2294   return block.GetPhis().IsEmpty()
2295       && !block.GetInstructions().IsEmpty()
2296       && block.GetFirstInstruction() == block.GetLastInstruction();
2297 }
2298 
IsSingleGoto() const2299 bool HBasicBlock::IsSingleGoto() const {
2300   return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto();
2301 }
2302 
IsSingleReturn() const2303 bool HBasicBlock::IsSingleReturn() const {
2304   return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsReturn();
2305 }
2306 
IsSingleReturnOrReturnVoidAllowingPhis() const2307 bool HBasicBlock::IsSingleReturnOrReturnVoidAllowingPhis() const {
2308   return (GetFirstInstruction() == GetLastInstruction()) &&
2309          (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2310 }
2311 
IsSingleTryBoundary() const2312 bool HBasicBlock::IsSingleTryBoundary() const {
2313   return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary();
2314 }
2315 
EndsWithControlFlowInstruction() const2316 bool HBasicBlock::EndsWithControlFlowInstruction() const {
2317   return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow();
2318 }
2319 
EndsWithReturn() const2320 bool HBasicBlock::EndsWithReturn() const {
2321   return !GetInstructions().IsEmpty() &&
2322       (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2323 }
2324 
EndsWithIf() const2325 bool HBasicBlock::EndsWithIf() const {
2326   return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
2327 }
2328 
EndsWithTryBoundary() const2329 bool HBasicBlock::EndsWithTryBoundary() const {
2330   return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
2331 }
2332 
HasSinglePhi() const2333 bool HBasicBlock::HasSinglePhi() const {
2334   return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
2335 }
2336 
GetNormalSuccessors() const2337 ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const {
2338   if (EndsWithTryBoundary()) {
2339     // The normal-flow successor of HTryBoundary is always stored at index zero.
2340     DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor());
2341     return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u);
2342   } else {
2343     // All successors of blocks not ending with TryBoundary are normal.
2344     return ArrayRef<HBasicBlock* const>(successors_);
2345   }
2346 }
2347 
GetExceptionalSuccessors() const2348 ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const {
2349   if (EndsWithTryBoundary()) {
2350     return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers();
2351   } else {
2352     // Blocks not ending with TryBoundary do not have exceptional successors.
2353     return ArrayRef<HBasicBlock* const>();
2354   }
2355 }
2356 
HasSameExceptionHandlersAs(const HTryBoundary & other) const2357 bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
2358   ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
2359   ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();
2360 
2361   size_t length = handlers1.size();
2362   if (length != handlers2.size()) {
2363     return false;
2364   }
2365 
2366   // Exception handlers need to be stored in the same order.
2367   for (size_t i = 0; i < length; ++i) {
2368     if (handlers1[i] != handlers2[i]) {
2369       return false;
2370     }
2371   }
2372   return true;
2373 }
2374 
CountSize() const2375 size_t HInstructionList::CountSize() const {
2376   size_t size = 0;
2377   HInstruction* current = first_instruction_;
2378   for (; current != nullptr; current = current->GetNext()) {
2379     size++;
2380   }
2381   return size;
2382 }
2383 
SetBlockOfInstructions(HBasicBlock * block) const2384 void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
2385   for (HInstruction* current = first_instruction_;
2386        current != nullptr;
2387        current = current->GetNext()) {
2388     current->SetBlock(block);
2389   }
2390 }
2391 
AddAfter(HInstruction * cursor,const HInstructionList & instruction_list)2392 void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
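  // Splice the entire `instruction_list` into this list immediately after
  // `cursor`, which must already be contained in this list.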
2393   DCHECK(Contains(cursor));
2394   if (!instruction_list.IsEmpty()) {
2395     if (cursor == last_instruction_) {
2396       last_instruction_ = instruction_list.last_instruction_;
2397     } else {
2398       cursor->next_->previous_ = instruction_list.last_instruction_;
2399     }
2400     instruction_list.last_instruction_->next_ = cursor->next_;
2401     cursor->next_ = instruction_list.first_instruction_;
2402     instruction_list.first_instruction_->previous_ = cursor;
2403   }
2404 }
2405 
AddBefore(HInstruction * cursor,const HInstructionList & instruction_list)2406 void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
2407   DCHECK(Contains(cursor));
2408   if (!instruction_list.IsEmpty()) {
2409     if (cursor == first_instruction_) {
2410       first_instruction_ = instruction_list.first_instruction_;
2411     } else {
2412       cursor->previous_->next_ = instruction_list.first_instruction_;
2413     }
2414     instruction_list.last_instruction_->next_ = cursor;
2415     instruction_list.first_instruction_->previous_ = cursor->previous_;
2416     cursor->previous_ = instruction_list.last_instruction_;
2417   }
2418 }
2419 
Add(const HInstructionList & instruction_list)2420 void HInstructionList::Add(const HInstructionList& instruction_list) {
2421   if (IsEmpty()) {
2422     first_instruction_ = instruction_list.first_instruction_;
2423     last_instruction_ = instruction_list.last_instruction_;
2424   } else {
2425     AddAfter(last_instruction_, instruction_list);
2426   }
2427 }
2428 
DisconnectAndDelete()2429 void HBasicBlock::DisconnectAndDelete() {
2430   // Dominators must be removed after all the blocks they dominate. This way
2431   // a loop header is removed last, a requirement for correct loop information
2432   // iteration.
2433   DCHECK(dominated_blocks_.empty());
2434 
2435   // The following steps gradually remove the block from all its dependants in
2436   // post order (b/27683071).
2437 
2438   // (1) Store a basic block that we'll use in step (5) to find loops to be updated.
2439   //     We need to do this before step (4) which destroys the predecessor list.
2440   HBasicBlock* loop_update_start = this;
2441   if (IsLoopHeader()) {
2442     HLoopInformation* loop_info = GetLoopInformation();
2443     // All other blocks in this loop should have been removed because the header
2444     // was their dominator.
2445     // Note that we do not remove `this` from `loop_info` as it is unreachable.
2446     DCHECK(!loop_info->IsIrreducible());
2447     DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u);
2448     DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId());
2449     loop_update_start = loop_info->GetPreHeader();
2450   }
2451 
2452   // (2) Disconnect the block from its successors and update their phis.
2453   DisconnectFromSuccessors();
2454 
2455   // (3) Remove instructions and phis. Instructions should have no remaining uses
2456   //     except in catch phis. If an instruction is used by a catch phi at `index`,
2457   //     remove `index`-th input of all phis in the catch block since they are
2458   //     guaranteed dead. Note that we may miss dead inputs this way but the
2459   //     graph will always remain consistent.
2460   RemoveCatchPhiUsesAndInstruction(/* building_dominator_tree = */ false);
2461 
2462   // (4) Disconnect the block from its predecessors and update their
2463   //     control-flow instructions.
2464   for (HBasicBlock* predecessor : predecessors_) {
2465     // We should not see any back edges as they would have been removed by step (3).
2466     DCHECK_IMPLIES(IsInLoop(), !GetLoopInformation()->IsBackEdge(*predecessor));
2467 
2468     HInstruction* last_instruction = predecessor->GetLastInstruction();
2469     if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
2470       // This block is the only normal-flow successor of the TryBoundary which
2471       // makes `predecessor` dead. Since DCE removes blocks in post order,
2472       // exception handlers of this TryBoundary were already visited and any
2473       // remaining handlers therefore must be live. We remove `predecessor` from
2474       // their list of predecessors.
2475       DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
2476       while (predecessor->GetSuccessors().size() > 1) {
2477         HBasicBlock* handler = predecessor->GetSuccessors()[1];
2478         DCHECK(handler->IsCatchBlock());
2479         predecessor->RemoveSuccessor(handler);
2480         handler->RemovePredecessor(predecessor);
2481       }
2482     }
2483 
2484     predecessor->RemoveSuccessor(this);
2485     uint32_t num_pred_successors = predecessor->GetSuccessors().size();
2486     if (num_pred_successors == 1u) {
2487       // If we have one successor after removing one, then we must have
2488       // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
2489       // successor. Replace it with an HGoto.
2490       DCHECK(last_instruction->IsIf() ||
2491              last_instruction->IsPackedSwitch() ||
2492              (last_instruction->IsTryBoundary() && IsCatchBlock()));
2493       predecessor->RemoveInstruction(last_instruction);
2494       predecessor->AddInstruction(new (graph_->GetAllocator()) HGoto(last_instruction->GetDexPc()));
2495     } else if (num_pred_successors == 0u) {
2496       // The predecessor has no remaining successors and therefore must be dead.
2497       // We deliberately leave it without a control-flow instruction so that the
2498       // GraphChecker fails unless it is also removed during the pass.
2499       predecessor->RemoveInstruction(last_instruction);
2500     } else {
2501       // There are multiple successors left. The removed block might be a successor
2502       // of a PackedSwitch which will be completely removed (perhaps replaced with
2503       // a Goto), or we are deleting a catch block from a TryBoundary. In either
2504       // case, leave `last_instruction` as is for now.
2505       DCHECK(last_instruction->IsPackedSwitch() ||
2506              (last_instruction->IsTryBoundary() && IsCatchBlock()));
2507     }
2508   }
2509   predecessors_.clear();
2510 
2511   // (5) Remove the block from all loops it is included in. Skip the inner-most
2512   //     loop if this is the loop header (see definition of `loop_update_start`)
2513   //     because the loop header's predecessor list has been destroyed in step (4).
2514   for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
2515     HLoopInformation* loop_info = it.Current();
2516     loop_info->Remove(this);
2517     if (loop_info->IsBackEdge(*this)) {
2518       // If this was the last back edge of the loop, we deliberately leave the
2519       // loop in an inconsistent state and will fail GraphChecker unless the
2520       // entire loop is removed during the pass.
2521       loop_info->RemoveBackEdge(this);
2522     }
2523   }
2524 
2525   // (6) Disconnect from the dominator.
2526   dominator_->RemoveDominatedBlock(this);
2527   SetDominator(nullptr);
2528 
2529   // (7) Delete from the graph, update reverse post order.
2530   graph_->DeleteDeadEmptyBlock(this);
2531   SetGraph(nullptr);
2532 }
2533 
2534 void HBasicBlock::DisconnectFromSuccessors(const ArenaBitVector* visited) {
2535   for (HBasicBlock* successor : successors_) {
2536     // Delete this block from the list of predecessors.
2537     size_t this_index = successor->GetPredecessorIndexOf(this);
2538     successor->predecessors_.erase(successor->predecessors_.begin() + this_index);
2539 
2540     if (visited != nullptr && !visited->IsBitSet(successor->GetBlockId())) {
2541       // `successor` itself is dead. Therefore, there is no need to update its phis.
2542       continue;
2543     }
2544 
2545     DCHECK(!successor->predecessors_.empty());
2546 
2547     // Remove this block's entries in the successor's phis. Skips exceptional
2548     // successors because catch phi inputs do not correspond to predecessor
2549     // blocks but throwing instructions. They are removed in `RemoveCatchPhiUses`.
2550     if (!successor->IsCatchBlock()) {
2551       if (successor->predecessors_.size() == 1u) {
2552         // The successor has just one predecessor left. Replace phis with the only
2553         // remaining input.
2554         for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2555           HPhi* phi = phi_it.Current()->AsPhi();
2556           phi->ReplaceWith(phi->InputAt(1 - this_index));
2557           successor->RemovePhi(phi);
2558         }
2559       } else {
2560         for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2561           phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
2562         }
2563       }
2564     }
2565   }
2566   successors_.clear();
2567 }
2568 
2569 void HBasicBlock::RemoveCatchPhiUsesAndInstruction(bool building_dominator_tree) {
2570   for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2571     HInstruction* insn = it.Current();
2572     RemoveCatchPhiUsesOfDeadInstruction(insn);
2573 
2574     // If we are building the dominator tree, we removed all input records previously.
2575     // `RemoveInstruction` would try to remove them again, which is not supported and
2576     // would crash. We check here since the check is skipped in `RemoveInstruction`.
2577     if (building_dominator_tree) {
2578       DCHECK(insn->GetUses().empty());
2579       DCHECK(insn->GetEnvUses().empty());
2580     }
2581     RemoveInstruction(insn, /* ensure_safety= */ !building_dominator_tree);
2582   }
2583   for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
2584     HPhi* insn = it.Current()->AsPhi();
2585     RemoveCatchPhiUsesOfDeadInstruction(insn);
2586 
2587     // If we are building the dominator tree, we removed all input records previously.
2588     // `RemovePhi` would try to remove them again, which is not supported and
2589     // would crash. We check here since the check is skipped in `RemovePhi`.
2590     if (building_dominator_tree) {
2591       DCHECK(insn->GetUses().empty());
2592       DCHECK(insn->GetEnvUses().empty());
2593     }
2594     RemovePhi(insn, /* ensure_safety= */ !building_dominator_tree);
2595   }
2596 }
2597 
2598 void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
2599   DCHECK(EndsWithControlFlowInstruction());
2600   RemoveInstruction(GetLastInstruction());
2601   instructions_.Add(other->GetInstructions());
2602   other->instructions_.SetBlockOfInstructions(this);
2603   other->instructions_.Clear();
2604 }
2605 
2606 void HBasicBlock::MergeWith(HBasicBlock* other) {
2607   DCHECK_EQ(GetGraph(), other->GetGraph());
2608   DCHECK(ContainsElement(dominated_blocks_, other));
2609   DCHECK_EQ(GetSingleSuccessor(), other);
2610   DCHECK_EQ(other->GetSinglePredecessor(), this);
2611   DCHECK(other->GetPhis().IsEmpty());
2612 
2613   // Move instructions from `other` to `this`.
2614   MergeInstructionsWith(other);
2615 
2616   // Remove `other` from the loops it is included in.
2617   for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
2618     HLoopInformation* loop_info = it.Current();
2619     loop_info->Remove(other);
2620     if (loop_info->IsBackEdge(*other)) {
2621       loop_info->ReplaceBackEdge(other, this);
2622     }
2623   }
2624 
2625   // Update links to the successors of `other`.
2626   successors_.clear();
2627   for (HBasicBlock* successor : other->GetSuccessors()) {
2628     successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
2629   }
2630   successors_.swap(other->successors_);
2631   DCHECK(other->successors_.empty());
2632 
2633   // Update the dominator tree.
2634   RemoveDominatedBlock(other);
2635   for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
2636     dominated->SetDominator(this);
2637   }
2638   dominated_blocks_.insert(
2639       dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
2640   other->dominated_blocks_.clear();
2641   other->dominator_ = nullptr;
2642 
2643   // Clear the list of predecessors of `other` in preparation of deleting it.
2644   other->predecessors_.clear();
2645 
2646   // Delete `other` from the graph. The function updates reverse post order.
2647   graph_->DeleteDeadEmptyBlock(other);
2648   other->SetGraph(nullptr);
2649 }
2650 
2651 void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
2652   DCHECK_NE(GetGraph(), other->GetGraph());
2653   DCHECK(GetDominatedBlocks().empty());
2654   DCHECK(GetSuccessors().empty());
2655   DCHECK(!EndsWithControlFlowInstruction());
2656   DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
2657   DCHECK(other->GetPhis().IsEmpty());
2658   DCHECK(!other->IsInLoop());
2659 
2660   // Move instructions from `other` to `this`.
2661   instructions_.Add(other->GetInstructions());
2662   other->instructions_.SetBlockOfInstructions(this);
2663 
2664   // Update links to the successors of `other`.
2665   successors_.clear();
2666   for (HBasicBlock* successor : other->GetSuccessors()) {
2667     successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
2668   }
2669   successors_.swap(other->successors_);
2670   DCHECK(other->successors_.empty());
2671 
2672   // Update the dominator tree.
2673   for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
2674     dominated->SetDominator(this);
2675   }
2676   dominated_blocks_.insert(
2677       dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
2678   other->dominated_blocks_.clear();
2679   other->dominator_ = nullptr;
2680   other->graph_ = nullptr;
2681 }
2682 
2683 void HBasicBlock::ReplaceWith(HBasicBlock* other) {
2684   while (!GetPredecessors().empty()) {
2685     HBasicBlock* predecessor = GetPredecessors()[0];
2686     predecessor->ReplaceSuccessor(this, other);
2687   }
2688   while (!GetSuccessors().empty()) {
2689     HBasicBlock* successor = GetSuccessors()[0];
2690     successor->ReplacePredecessor(this, other);
2691   }
2692   for (HBasicBlock* dominated : GetDominatedBlocks()) {
2693     other->AddDominatedBlock(dominated);
2694   }
2695   GetDominator()->ReplaceDominatedBlock(this, other);
2696   other->SetDominator(GetDominator());
2697   dominator_ = nullptr;
2698   graph_ = nullptr;
2699 }
2700 
2701 void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
2702   DCHECK_EQ(block->GetGraph(), this);
2703   DCHECK(block->GetSuccessors().empty());
2704   DCHECK(block->GetPredecessors().empty());
2705   DCHECK(block->GetDominatedBlocks().empty());
2706   DCHECK(block->GetDominator() == nullptr);
2707   DCHECK(block->GetInstructions().IsEmpty());
2708   DCHECK(block->GetPhis().IsEmpty());
2709 
2710   if (block->IsExitBlock()) {
2711     SetExitBlock(nullptr);
2712   }
2713 
2714   RemoveElement(reverse_post_order_, block);
2715   blocks_[block->GetBlockId()] = nullptr;
2716   block->SetGraph(nullptr);
2717 }
2718 
2719 void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
2720                                                    HBasicBlock* reference,
2721                                                    bool replace_if_back_edge,
2722                                                    bool has_more_specific_try_catch_info) {
2723   if (block->IsLoopHeader()) {
2724     // Clear the information of which blocks are contained in that loop. Since the
2725     // information is stored as a bit vector based on block ids, we have to update
2726     // it, as those block ids were specific to the callee graph and we are now adding
2727     // these blocks to the caller graph.
2728     block->GetLoopInformation()->ClearAllBlocks();
2729   }
2730 
2731   // If not already in a loop, update the loop information.
2732   if (!block->IsInLoop()) {
2733     block->SetLoopInformation(reference->GetLoopInformation());
2734   }
2735 
2736   // If the block is in a loop, update all its outward loops.
2737   HLoopInformation* loop_info = block->GetLoopInformation();
2738   if (loop_info != nullptr) {
2739     for (HLoopInformationOutwardIterator loop_it(*block);
2740          !loop_it.Done();
2741          loop_it.Advance()) {
2742       loop_it.Current()->Add(block);
2743     }
2744     if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
2745       loop_info->ReplaceBackEdge(reference, block);
2746     }
2747   }
2748 
2749   DCHECK_IMPLIES(has_more_specific_try_catch_info, !reference->IsTryBlock())
2750       << "We don't allow to inline try catches inside of other try blocks.";
2751 
2752   // Update the TryCatchInformation, if we are not inlining a try catch.
2753   if (!has_more_specific_try_catch_info) {
2754     // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
2755     TryCatchInformation* try_catch_info =
2756         reference->IsTryBlock() ? reference->GetTryCatchInformation() : nullptr;
2757     block->SetTryCatchInformation(try_catch_info);
2758   }
2759 }
2760 
2761 HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
2762   DCHECK(HasExitBlock()) << "Unimplemented scenario";
2763   // Update the environments in this graph to have the invoke's environment
2764   // as parent.
2765   {
2766     // Skip the entry block, we do not need to update the entry's suspend check.
2767     for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
2768       for (HInstructionIterator instr_it(block->GetInstructions());
2769            !instr_it.Done();
2770            instr_it.Advance()) {
2771         HInstruction* current = instr_it.Current();
2772         if (current->NeedsEnvironment()) {
2773           DCHECK(current->HasEnvironment());
2774           current->GetEnvironment()->SetAndCopyParentChain(
2775               outer_graph->GetAllocator(), invoke->GetEnvironment());
2776         }
2777       }
2778     }
2779   }
2780   outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());
2781 
2782   if (HasBoundsChecks()) {
2783     outer_graph->SetHasBoundsChecks(true);
2784   }
2785   if (HasLoops()) {
2786     outer_graph->SetHasLoops(true);
2787   }
2788   if (HasIrreducibleLoops()) {
2789     outer_graph->SetHasIrreducibleLoops(true);
2790   }
2791   if (HasDirectCriticalNativeCall()) {
2792     outer_graph->SetHasDirectCriticalNativeCall(true);
2793   }
2794   if (HasTryCatch()) {
2795     outer_graph->SetHasTryCatch(true);
2796   }
2797   if (HasMonitorOperations()) {
2798     outer_graph->SetHasMonitorOperations(true);
2799   }
2800   if (HasSIMD()) {
2801     outer_graph->SetHasSIMD(true);
2802   }
2803   if (HasAlwaysThrowingInvokes()) {
2804     outer_graph->SetHasAlwaysThrowingInvokes(true);
2805   }
2806 
2807   HInstruction* return_value = nullptr;
2808   if (GetBlocks().size() == 3) {
2809     // Inliner already made sure we don't inline methods that always throw.
2810     DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
2811     // Simple case of an entry block, a body block, and an exit block.
2812     // Put the body block's instruction into `invoke`'s block.
2813     HBasicBlock* body = GetBlocks()[1];
2814     DCHECK(GetBlocks()[0]->IsEntryBlock());
2815     DCHECK(GetBlocks()[2]->IsExitBlock());
2816     DCHECK(!body->IsExitBlock());
2817     DCHECK(!body->IsInLoop());
2818     HInstruction* last = body->GetLastInstruction();
2819 
2820     // Note that we add instructions before the invoke only to simplify polymorphic inlining.
2821     invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
2822     body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());
2823 
2824     // Replace the invoke with the return value of the inlined graph.
2825     if (last->IsReturn()) {
2826       return_value = last->InputAt(0);
2827     } else {
2828       DCHECK(last->IsReturnVoid());
2829     }
2830 
2831     invoke->GetBlock()->RemoveInstruction(last);
2832   } else {
2833     // Need to inline multiple blocks. We split `invoke`'s block
2834     // into two blocks, merge the first block of the inlined graph into
2835     // the first half, and replace the exit block of the inlined graph
2836     // with the second half.
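    // As a rough sketch (block names refer to the locals below; the callee's
    // intermediate blocks are elided):
    //
    //   before:  predecessors -> at [ ..., invoke, ... ] -> successors
    //   after:   predecessors -> at [ ... + callee's first block ]
    //                -> (remaining callee blocks) -> to [ invoke, ... ] -> successors
    //
    // The invoke now sits at the start of `to`; the caller of InlineInto is
    // expected to replace it with the returned value afterwards.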
2837     ArenaAllocator* allocator = outer_graph->GetAllocator();
2838     HBasicBlock* at = invoke->GetBlock();
2839     // Note that we split before the invoke only to simplify polymorphic inlining.
2840     HBasicBlock* to = at->SplitBeforeForInlining(invoke);
2841 
2842     HBasicBlock* first = entry_block_->GetSuccessors()[0];
2843     DCHECK(!first->IsInLoop());
2844     DCHECK(first->GetTryCatchInformation() == nullptr);
2845     at->MergeWithInlined(first);
2846     exit_block_->ReplaceWith(to);
2847 
2848     // Update the meta information surrounding blocks:
2849     // (1) the graph they are now in,
2850     // (2) the reverse post order of that graph,
2851     // (3) their potential loop information, inner and outer,
2852     // (4) try block membership.
2853     // Note that we do not need to update catch phi inputs because they
2854     // correspond to the register file of the outer method which the inlinee
2855     // cannot modify.
2856 
2857     // We don't add the entry block, the exit block, or the first block, which
2858     // has been merged with `at`.
2859     static constexpr int kNumberOfSkippedBlocksInCallee = 3;
2860 
2861     // We add the `to` block.
2862     static constexpr int kNumberOfNewBlocksInCaller = 1;
2863     size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
2864         + kNumberOfNewBlocksInCaller;
2865 
2866     // Find the location of `at` in the outer graph's reverse post order. The new
2867     // blocks will be added after it.
2868     size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
2869     MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);
2870 
2871     // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
2872     // and (4) to the blocks that apply.
2873     for (HBasicBlock* current : GetReversePostOrder()) {
2874       if (current != exit_block_ && current != entry_block_ && current != first) {
2875         DCHECK(current->GetGraph() == this);
2876         current->SetGraph(outer_graph);
2877         outer_graph->AddBlock(current);
2878         outer_graph->reverse_post_order_[++index_of_at] = current;
2879         UpdateLoopAndTryInformationOfNewBlock(current,
2880                                               at,
2881                                               /* replace_if_back_edge= */ false,
2882                                               current->GetTryCatchInformation() != nullptr);
2883       }
2884     }
2885 
2886     // Do (1), (2), (3) and (4) to `to`.
2887     to->SetGraph(outer_graph);
2888     outer_graph->AddBlock(to);
2889     outer_graph->reverse_post_order_[++index_of_at] = to;
2890     // Only `to` can become a back edge, as the inlined blocks
2891     // are predecessors of `to`.
2892     UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge= */ true);
2893 
2894     // Update all predecessors of the exit block (now the `to` block) to end
2895     // with an `HGoto` instead of an `HReturn`. Special-case throwing blocks so
2896     // that they now get the outer graph's exit block as successor.
2897     HPhi* return_value_phi = nullptr;
2898     bool rerun_dominance = false;
2899     bool rerun_loop_analysis = false;
2900     for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
2901       HBasicBlock* predecessor = to->GetPredecessors()[pred];
2902       HInstruction* last = predecessor->GetLastInstruction();
2903 
2904       // At this point we might either have:
2905       // A) Return/ReturnVoid/Throw as the last instruction, or
2906       // B) `Return/ReturnVoid/Throw->TryBoundary` as the last instruction chain
2907 
2908       const bool saw_try_boundary = last->IsTryBoundary();
2909       if (saw_try_boundary) {
2910         DCHECK(predecessor->IsSingleTryBoundary());
2911         DCHECK(!last->AsTryBoundary()->IsEntry());
2912         predecessor = predecessor->GetSinglePredecessor();
2913         last = predecessor->GetLastInstruction();
2914       }
2915 
2916       if (last->IsThrow()) {
2917         if (at->IsTryBlock()) {
2918           DCHECK(!saw_try_boundary) << "We don't support inlining of try blocks into try blocks.";
2919           // Create a TryBoundary of kind:exit and point it to the Exit block.
2920           HBasicBlock* new_block = outer_graph->SplitEdge(predecessor, to);
2921           new_block->AddInstruction(
2922               new (allocator) HTryBoundary(HTryBoundary::BoundaryKind::kExit, last->GetDexPc()));
2923           new_block->ReplaceSuccessor(to, outer_graph->GetExitBlock());
2924 
2925           // Copy information from the predecessor.
2926           new_block->SetLoopInformation(predecessor->GetLoopInformation());
2927           TryCatchInformation* try_catch_info = predecessor->GetTryCatchInformation();
2928           new_block->SetTryCatchInformation(try_catch_info);
2929           for (HBasicBlock* xhandler :
2930                try_catch_info->GetTryEntry().GetBlock()->GetExceptionalSuccessors()) {
2931             new_block->AddSuccessor(xhandler);
2932           }
2933           DCHECK(try_catch_info->GetTryEntry().HasSameExceptionHandlersAs(
2934               *new_block->GetLastInstruction()->AsTryBoundary()));
2935         } else {
2936           // We either have `Throw->TryBoundary` or `Throw`. We want to point the whole chain to the
2937           // exit, so we recompute `predecessor`.
2938           predecessor = to->GetPredecessors()[pred];
2939           predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
2940         }
2941 
2942         --pred;
2943         // We need to re-run dominance information, as the exit block now has
2944         // a new predecessor and potential new dominator.
2945         // TODO(solanes): See if it's worth it to hand-modify the domination chain instead of
2946         // rerunning the dominance for the whole graph.
2947         rerun_dominance = true;
2948         if (predecessor->GetLoopInformation() != nullptr) {
2949           // The loop information might have changed e.g. `predecessor` might not be in a loop
2950           // anymore. We only do this if `predecessor` has loop information as it is impossible for
2951           // predecessor to end up in a loop if it wasn't in one before.
2952           rerun_loop_analysis = true;
2953         }
2954       } else {
2955         if (last->IsReturnVoid()) {
2956           DCHECK(return_value == nullptr);
2957           DCHECK(return_value_phi == nullptr);
2958         } else {
2959           DCHECK(last->IsReturn());
2960           if (return_value_phi != nullptr) {
2961             return_value_phi->AddInput(last->InputAt(0));
2962           } else if (return_value == nullptr) {
2963             return_value = last->InputAt(0);
2964           } else {
2965             // There will be multiple returns.
2966             return_value_phi = new (allocator) HPhi(
2967                 allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
2968             to->AddPhi(return_value_phi);
2969             return_value_phi->AddInput(return_value);
2970             return_value_phi->AddInput(last->InputAt(0));
2971             return_value = return_value_phi;
2972           }
2973         }
2974         predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
2975         predecessor->RemoveInstruction(last);
2976 
2977         if (saw_try_boundary) {
2978           predecessor = to->GetPredecessors()[pred];
2979           DCHECK(predecessor->EndsWithTryBoundary());
2980           DCHECK_EQ(predecessor->GetNormalSuccessors().size(), 1u);
2981           if (predecessor->GetSuccessors()[0]->GetPredecessors().size() > 1) {
2982             outer_graph->SplitCriticalEdge(predecessor, to);
2983             rerun_dominance = true;
2984             if (predecessor->GetLoopInformation() != nullptr) {
2985               rerun_loop_analysis = true;
2986             }
2987           }
2988         }
2989       }
2990     }
2991     if (rerun_loop_analysis) {
2992       DCHECK(!outer_graph->HasIrreducibleLoops())
2993           << "Recomputing loop information in graphs with irreducible loops "
2994           << "is unsupported, as it could lead to loop header changes";
2995       outer_graph->ClearLoopInformation();
2996       outer_graph->ClearDominanceInformation();
2997       outer_graph->BuildDominatorTree();
2998     } else if (rerun_dominance) {
2999       outer_graph->ClearDominanceInformation();
3000       outer_graph->ComputeDominanceInformation();
3001     }
3002   }
3003 
3004   // Walk over the entry block and:
3005   // - Move constants from the entry block to the outer_graph's entry block,
3006   // - Replace HParameterValue instructions with their real value,
3007   // - Remove suspend checks, which hold an environment.
3008   // We must do this after the other blocks have been inlined, otherwise the ids
3009   // of constants could overlap with those in the inner graph.
3010   size_t parameter_index = 0;
3011   for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
3012     HInstruction* current = it.Current();
3013     HInstruction* replacement = nullptr;
3014     if (current->IsNullConstant()) {
3015       replacement = outer_graph->GetNullConstant(current->GetDexPc());
3016     } else if (current->IsIntConstant()) {
3017       replacement = outer_graph->GetIntConstant(
3018           current->AsIntConstant()->GetValue(), current->GetDexPc());
3019     } else if (current->IsLongConstant()) {
3020       replacement = outer_graph->GetLongConstant(
3021           current->AsLongConstant()->GetValue(), current->GetDexPc());
3022     } else if (current->IsFloatConstant()) {
3023       replacement = outer_graph->GetFloatConstant(
3024           current->AsFloatConstant()->GetValue(), current->GetDexPc());
3025     } else if (current->IsDoubleConstant()) {
3026       replacement = outer_graph->GetDoubleConstant(
3027           current->AsDoubleConstant()->GetValue(), current->GetDexPc());
3028     } else if (current->IsParameterValue()) {
3029       if (kIsDebugBuild
3030           && invoke->IsInvokeStaticOrDirect()
3031           && invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
3032         // Ensure we do not use the last input of `invoke`, as it
3033         // contains a clinit check which is not an actual argument.
3034         size_t last_input_index = invoke->InputCount() - 1;
3035         DCHECK(parameter_index != last_input_index);
3036       }
3037       replacement = invoke->InputAt(parameter_index++);
3038     } else if (current->IsCurrentMethod()) {
3039       replacement = outer_graph->GetCurrentMethod();
3040     } else {
3041       // It is OK to ignore MethodEntryHook for inlined functions.
3042       // In debug mode we don't inline, and in release mode method
3043       // tracing is best effort, so it is OK to ignore them.
3044       DCHECK(current->IsGoto() || current->IsSuspendCheck() || current->IsMethodEntryHook());
3045       entry_block_->RemoveInstruction(current);
3046     }
3047     if (replacement != nullptr) {
3048       current->ReplaceWith(replacement);
3049       // If `current` is the return value then we need to update the latter.
3050       if (current == return_value) {
3051         DCHECK_EQ(entry_block_, return_value->GetBlock());
3052         return_value = replacement;
3053       }
3054     }
3055   }
3056 
3057   return return_value;
3058 }
3059 
3060 /*
3061  * Loop will be transformed to:
3062  *       old_pre_header
3063  *             |
3064  *          if_block
3065  *           /    \
3066  *  true_block   false_block
3067  *           \    /
3068  *       new_pre_header
3069  *             |
3070  *           header
3071  */
3072 void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
3073   DCHECK(header->IsLoopHeader());
3074   HBasicBlock* old_pre_header = header->GetDominator();
3075 
3076   // Need extra block to avoid critical edge.
3077   HBasicBlock* if_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
3078   HBasicBlock* true_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
3079   HBasicBlock* false_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
3080   HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3081   AddBlock(if_block);
3082   AddBlock(true_block);
3083   AddBlock(false_block);
3084   AddBlock(new_pre_header);
3085 
3086   header->ReplacePredecessor(old_pre_header, new_pre_header);
3087   old_pre_header->successors_.clear();
3088   old_pre_header->dominated_blocks_.clear();
3089 
3090   old_pre_header->AddSuccessor(if_block);
3091   if_block->AddSuccessor(true_block);  // True successor
3092   if_block->AddSuccessor(false_block);  // False successor
3093   true_block->AddSuccessor(new_pre_header);
3094   false_block->AddSuccessor(new_pre_header);
3095 
3096   old_pre_header->dominated_blocks_.push_back(if_block);
3097   if_block->SetDominator(old_pre_header);
3098   if_block->dominated_blocks_.push_back(true_block);
3099   true_block->SetDominator(if_block);
3100   if_block->dominated_blocks_.push_back(false_block);
3101   false_block->SetDominator(if_block);
3102   if_block->dominated_blocks_.push_back(new_pre_header);
3103   new_pre_header->SetDominator(if_block);
3104   new_pre_header->dominated_blocks_.push_back(header);
3105   header->SetDominator(new_pre_header);
3106 
3107   // Fix reverse post order.
3108   size_t index_of_header = IndexOfElement(reverse_post_order_, header);
3109   MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
3110   reverse_post_order_[index_of_header++] = if_block;
3111   reverse_post_order_[index_of_header++] = true_block;
3112   reverse_post_order_[index_of_header++] = false_block;
3113   reverse_post_order_[index_of_header++] = new_pre_header;
3114 
3115   // The pre_header can never be a back edge of a loop.
3116   DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
3117          !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
3118   UpdateLoopAndTryInformationOfNewBlock(
3119       if_block, old_pre_header, /* replace_if_back_edge= */ false);
3120   UpdateLoopAndTryInformationOfNewBlock(
3121       true_block, old_pre_header, /* replace_if_back_edge= */ false);
3122   UpdateLoopAndTryInformationOfNewBlock(
3123       false_block, old_pre_header, /* replace_if_back_edge= */ false);
3124   UpdateLoopAndTryInformationOfNewBlock(
3125       new_pre_header, old_pre_header, /* replace_if_back_edge= */ false);
3126 }
3127 
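/*
 * A sketch of the transformation performed below (names match the locals; the
 * caller still has to add the loop condition to new_header):
 *
 *     header (original loop)
 *        |
 *  new_pre_header
 *        |
 *    new_header  <---+
 *      /    \        |
 *   exit   new_body -+
 */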
3128 HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
3129                                                    HBasicBlock* body,
3130                                                    HBasicBlock* exit) {
3131   DCHECK(header->IsLoopHeader());
3132   HLoopInformation* loop = header->GetLoopInformation();
3133 
3134   // Add new loop blocks.
3135   HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3136   HBasicBlock* new_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3137   HBasicBlock* new_body = new (allocator_) HBasicBlock(this, header->GetDexPc());
3138   AddBlock(new_pre_header);
3139   AddBlock(new_header);
3140   AddBlock(new_body);
3141 
3142   // Set up control flow.
3143   header->ReplaceSuccessor(exit, new_pre_header);
3144   new_pre_header->AddSuccessor(new_header);
3145   new_header->AddSuccessor(exit);
3146   new_header->AddSuccessor(new_body);
3147   new_body->AddSuccessor(new_header);
3148 
3149   // Set up dominators.
3150   header->ReplaceDominatedBlock(exit, new_pre_header);
3151   new_pre_header->SetDominator(header);
3152   new_pre_header->dominated_blocks_.push_back(new_header);
3153   new_header->SetDominator(new_pre_header);
3154   new_header->dominated_blocks_.push_back(new_body);
3155   new_body->SetDominator(new_header);
3156   new_header->dominated_blocks_.push_back(exit);
3157   exit->SetDominator(new_header);
3158 
3159   // Fix reverse post order.
3160   size_t index_of_header = IndexOfElement(reverse_post_order_, header);
3161   MakeRoomFor(&reverse_post_order_, 2, index_of_header);
3162   reverse_post_order_[++index_of_header] = new_pre_header;
3163   reverse_post_order_[++index_of_header] = new_header;
3164   size_t index_of_body = IndexOfElement(reverse_post_order_, body);
3165   MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
3166   reverse_post_order_[index_of_body] = new_body;
3167 
3168   // Add gotos and suspend check (client must add conditional in header).
3169   new_pre_header->AddInstruction(new (allocator_) HGoto());
3170   HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(header->GetDexPc());
3171   new_header->AddInstruction(suspend_check);
3172   new_body->AddInstruction(new (allocator_) HGoto());
3173   DCHECK(loop->GetSuspendCheck() != nullptr);
3174   suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
3175       loop->GetSuspendCheck()->GetEnvironment(), header);
3176 
3177   // Update loop information.
3178   new_header->AddBackEdge(new_body);
3179   new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
3180   new_header->GetLoopInformation()->Populate();
3181   new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation());  // outward
3182   HLoopInformationOutwardIterator it(*new_header);
3183   for (it.Advance(); !it.Done(); it.Advance()) {
3184     it.Current()->Add(new_pre_header);
3185     it.Current()->Add(new_header);
3186     it.Current()->Add(new_body);
3187   }
3188   return new_pre_header;
3189 }
3190 
3191 static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
3192     REQUIRES_SHARED(Locks::mutator_lock_) {
3193   if (rti.IsValid()) {
3194     DCHECK(upper_bound_rti.IsSupertypeOf(rti))
3195         << " upper_bound_rti: " << upper_bound_rti
3196         << " rti: " << rti;
3197     DCHECK_IMPLIES(upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes(), rti.IsExact())
3198         << " upper_bound_rti: " << upper_bound_rti
3199         << " rti: " << rti;
3200   }
3201 }
3202 
3203 void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
3204   if (kIsDebugBuild) {
3205     DCHECK_EQ(GetType(), DataType::Type::kReference);
3206     ScopedObjectAccess soa(Thread::Current());
3207     DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
3208     if (IsBoundType()) {
3209       // Having the test here spares us from making the method virtual just for
3210       // the sake of a DCHECK.
3211       CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
3212     }
3213   }
3214   reference_type_handle_ = rti.GetTypeHandle();
3215   SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
3216 }
3217 
3218 void HInstruction::SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti) {
3219   if (rti.IsValid()) {
3220     SetReferenceTypeInfo(rti);
3221   }
3222 }
3223 
3224 bool HBoundType::InstructionDataEquals(const HInstruction* other) const {
3225   const HBoundType* other_bt = other->AsBoundType();
3226   ScopedObjectAccess soa(Thread::Current());
3227   return GetUpperBound().IsEqual(other_bt->GetUpperBound()) &&
3228          GetUpperCanBeNull() == other_bt->GetUpperCanBeNull() &&
3229          CanBeNull() == other_bt->CanBeNull();
3230 }
3231 
3232 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
3233   if (kIsDebugBuild) {
3234     ScopedObjectAccess soa(Thread::Current());
3235     DCHECK(upper_bound.IsValid());
3236     DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
3237     CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
3238   }
3239   upper_bound_ = upper_bound;
3240   SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
3241 }
3242 
3243 ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) {
3244   if (kIsDebugBuild) {
3245     ScopedObjectAccess soa(Thread::Current());
3246     DCHECK(IsValidHandle(type_handle));
3247     if (!is_exact) {
3248       DCHECK(!type_handle->CannotBeAssignedFromOtherTypes())
3249           << "Callers of ReferenceTypeInfo::Create should ensure is_exact is properly computed";
3250     }
3251   }
3252   return ReferenceTypeInfo(type_handle, is_exact);
3253 }
3254 
3255 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
3256   ScopedObjectAccess soa(Thread::Current());
3257   os << "["
3258      << " is_valid=" << rhs.IsValid()
3259      << " type=" << (!rhs.IsValid() ? "?" : mirror::Class::PrettyClass(rhs.GetTypeHandle().Get()))
3260      << " is_exact=" << rhs.IsExact()
3261      << " ]";
3262   return os;
3263 }
3264 
3265 bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
3266   // For now, assume that instructions in different blocks may use the
3267   // environment.
3268   // TODO: Use the control flow to decide if this is true.
3269   if (GetBlock() != other->GetBlock()) {
3270     return true;
3271   }
3272 
3273   // We know that we are in the same block. Walk from 'this' to 'other',
3274   // checking to see if there is any instruction with an environment.
3275   HInstruction* current = this;
3276   for (; current != other && current != nullptr; current = current->GetNext()) {
3277     // This is a conservative check, as the instruction result may not be in
3278     // the referenced environment.
3279     if (current->HasEnvironment()) {
3280       return true;
3281     }
3282   }
3283 
3284   // We should have been called with 'this' before 'other' in the block.
3285   // Just confirm this.
3286   DCHECK(current != nullptr);
3287   return false;
3288 }
3289 
3290 void HInvoke::SetIntrinsic(Intrinsics intrinsic,
3291                            IntrinsicNeedsEnvironment needs_env,
3292                            IntrinsicSideEffects side_effects,
3293                            IntrinsicExceptions exceptions) {
3294   intrinsic_ = intrinsic;
3295   IntrinsicOptimizations opt(this);
3296 
3297   // Adjust method's side effects from intrinsic table.
3298   switch (side_effects) {
3299     case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
3300     case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
3301     case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
3302     case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
3303   }
3304 
3305   if (needs_env == kNoEnvironment) {
3306     opt.SetDoesNotNeedEnvironment();
3307   } else {
3308     // If we need an environment, that means there will be a call, which can trigger GC.
3309     SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
3310   }
3311   // Adjust method's exception status from intrinsic table.
3312   SetCanThrow(exceptions == kCanThrow);
3313 }
3314 
3315 bool HNewInstance::IsStringAlloc() const {
3316   return GetEntrypoint() == kQuickAllocStringObject;
3317 }
3318 
3319 bool HInvoke::NeedsEnvironment() const {
3320   if (!IsIntrinsic()) {
3321     return true;
3322   }
3323   IntrinsicOptimizations opt(*this);
3324   return !opt.GetDoesNotNeedEnvironment();
3325 }
3326 
3327 const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
3328   ArtMethod* caller = GetEnvironment()->GetMethod();
3329   ScopedObjectAccess soa(Thread::Current());
3330   // `caller` is null for a top-level graph representing a method whose declaring
3331   // class was not resolved.
3332   return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
3333 }
3334 
3335 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) {
3336   switch (rhs) {
3337     case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit:
3338       return os << "explicit";
3339     case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit:
3340       return os << "implicit";
3341     case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone:
3342       return os << "none";
3343     default:
3344       LOG(FATAL) << "Unknown ClinitCheckRequirement: " << static_cast<int>(rhs);
3345       UNREACHABLE();
3346   }
3347 }
3348 
3349 bool HInvokeVirtual::CanDoImplicitNullCheckOn(HInstruction* obj) const {
3350   if (obj != InputAt(0)) {
3351     return false;
3352   }
3353   switch (GetIntrinsic()) {
3354     case Intrinsics::kNone:
3355       return true;
3356     case Intrinsics::kReferenceRefersTo:
3357       return true;
3358     default:
3359       // TODO: Add implicit null checks in more intrinsics.
3360       return false;
3361   }
3362 }
3363 
3364 bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
3365   const HLoadClass* other_load_class = other->AsLoadClass();
3366   // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
3367   // names rather than type indexes. However, we shall also have to re-think the hash code.
3368   if (type_index_ != other_load_class->type_index_ ||
3369       GetPackedFields() != other_load_class->GetPackedFields()) {
3370     return false;
3371   }
3372   switch (GetLoadKind()) {
3373     case LoadKind::kBootImageRelRo:
3374     case LoadKind::kJitBootImageAddress:
3375     case LoadKind::kJitTableAddress: {
3376       ScopedObjectAccess soa(Thread::Current());
3377       return GetClass().Get() == other_load_class->GetClass().Get();
3378     }
3379     default:
3380       DCHECK(HasTypeReference(GetLoadKind()));
3381       return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
3382   }
3383 }
3384 
3385 bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
3386   const HLoadString* other_load_string = other->AsLoadString();
3387   // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
3388   // rather than their indexes. However, we shall also have to re-think the hash code.
3389   if (string_index_ != other_load_string->string_index_ ||
3390       GetPackedFields() != other_load_string->GetPackedFields()) {
3391     return false;
3392   }
3393   switch (GetLoadKind()) {
3394     case LoadKind::kBootImageRelRo:
3395     case LoadKind::kJitBootImageAddress:
3396     case LoadKind::kJitTableAddress: {
3397       ScopedObjectAccess soa(Thread::Current());
3398       return GetString().Get() == other_load_string->GetString().Get();
3399     }
3400     default:
3401       return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
3402   }
3403 }
3404 
3405 void HInstruction::RemoveEnvironmentUsers() {
3406   for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) {
3407     HEnvironment* user = use.GetUser();
3408     user->SetRawEnvAt(use.GetIndex(), nullptr);
3409   }
3410   env_uses_.clear();
3411 }
3412 
3413 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr) {
3414   HInstruction* clone = instr->Clone(instr->GetBlock()->GetGraph()->GetAllocator());
3415   HBasicBlock* block = instr->GetBlock();
3416 
3417   if (instr->IsPhi()) {
3418     HPhi* phi = instr->AsPhi();
3419     DCHECK(!phi->HasEnvironment());
3420     HPhi* phi_clone = clone->AsPhi();
3421     block->ReplaceAndRemovePhiWith(phi, phi_clone);
3422   } else {
3423     block->ReplaceAndRemoveInstructionWith(instr, clone);
3424     if (instr->HasEnvironment()) {
3425       clone->CopyEnvironmentFrom(instr->GetEnvironment());
3426       HLoopInformation* loop_info = block->GetLoopInformation();
3427       if (instr->IsSuspendCheck() && loop_info != nullptr) {
3428         loop_info->SetSuspendCheck(clone->AsSuspendCheck());
3429       }
3430     }
3431   }
3432   return clone;
3433 }
3434 
3435 // Returns an instruction with the opposite Boolean value from 'cond'.
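// For example (illustrative only): for an integer HLessThan(a, b) this inserts
// HGreaterThanOrEqual(a, b) before `cursor`; for a boolean value that is not a
// condition it falls back to inserting HBooleanNot(cond).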
3436 HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction* cursor) {
3437   ArenaAllocator* allocator = GetAllocator();
3438 
3439   if (cond->IsCondition() &&
3440       !DataType::IsFloatingPointType(cond->InputAt(0)->GetType())) {
3441     // Can't reverse floating point conditions.  We have to use HBooleanNot in that case.
3442     HInstruction* lhs = cond->InputAt(0);
3443     HInstruction* rhs = cond->InputAt(1);
3444     HInstruction* replacement = nullptr;
3445     switch (cond->AsCondition()->GetOppositeCondition()) {  // get *opposite*
3446       case kCondEQ: replacement = new (allocator) HEqual(lhs, rhs); break;
3447       case kCondNE: replacement = new (allocator) HNotEqual(lhs, rhs); break;
3448       case kCondLT: replacement = new (allocator) HLessThan(lhs, rhs); break;
3449       case kCondLE: replacement = new (allocator) HLessThanOrEqual(lhs, rhs); break;
3450       case kCondGT: replacement = new (allocator) HGreaterThan(lhs, rhs); break;
3451       case kCondGE: replacement = new (allocator) HGreaterThanOrEqual(lhs, rhs); break;
3452       case kCondB:  replacement = new (allocator) HBelow(lhs, rhs); break;
3453       case kCondBE: replacement = new (allocator) HBelowOrEqual(lhs, rhs); break;
3454       case kCondA:  replacement = new (allocator) HAbove(lhs, rhs); break;
3455       case kCondAE: replacement = new (allocator) HAboveOrEqual(lhs, rhs); break;
3456       default:
3457         LOG(FATAL) << "Unexpected condition";
3458         UNREACHABLE();
3459     }
3460     cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
3461     return replacement;
3462   } else if (cond->IsIntConstant()) {
3463     HIntConstant* int_const = cond->AsIntConstant();
3464     if (int_const->IsFalse()) {
3465       return GetIntConstant(1);
3466     } else {
3467       DCHECK(int_const->IsTrue()) << int_const->GetValue();
3468       return GetIntConstant(0);
3469     }
3470   } else {
3471     HInstruction* replacement = new (allocator) HBooleanNot(cond);
3472     cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
3473     return replacement;
3474   }
3475 }
3476 
3477 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) {
3478   os << "["
3479      << " source=" << rhs.GetSource()
3480      << " destination=" << rhs.GetDestination()
3481      << " type=" << rhs.GetType()
3482      << " instruction=";
3483   if (rhs.GetInstruction() != nullptr) {
3484     os << rhs.GetInstruction()->DebugName() << ' ' << rhs.GetInstruction()->GetId();
3485   } else {
3486     os << "null";
3487   }
3488   os << " ]";
3489   return os;
3490 }
3491 
3492 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) {
3493   switch (rhs) {
3494     case TypeCheckKind::kUnresolvedCheck:
3495       return os << "unresolved_check";
3496     case TypeCheckKind::kExactCheck:
3497       return os << "exact_check";
3498     case TypeCheckKind::kClassHierarchyCheck:
3499       return os << "class_hierarchy_check";
3500     case TypeCheckKind::kAbstractClassCheck:
3501       return os << "abstract_class_check";
3502     case TypeCheckKind::kInterfaceCheck:
3503       return os << "interface_check";
3504     case TypeCheckKind::kArrayObjectCheck:
3505       return os << "array_object_check";
3506     case TypeCheckKind::kArrayCheck:
3507       return os << "array_check";
3508     case TypeCheckKind::kBitstringCheck:
3509       return os << "bitstring_check";
3510     default:
3511       LOG(FATAL) << "Unknown TypeCheckKind: " << static_cast<int>(rhs);
3512       UNREACHABLE();
3513   }
3514 }
3515 
3516 // Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
3517 #define CHECK_INTRINSICS_ENUM_VALUES(Name, InvokeType, _, SideEffects, Exceptions, ...) \
3518   static_assert( \
3519     static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
3520     "Intrinsics enumeration space overflow.");
3521 #include "intrinsics_list.h"
3522   INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
3523 #undef INTRINSICS_LIST
3524 #undef CHECK_INTRINSICS_ENUM_VALUES
3525 
3526 // Function that returns whether an intrinsic needs an environment or not.
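// The INTRINSICS_LIST X-macro below expands to one `case Intrinsics::kFoo:
// return NeedsEnv;` entry per intrinsic (kFoo standing for each intrinsic
// name), so the answer comes straight from the table in intrinsics_list.h.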
3527 static inline IntrinsicNeedsEnvironment NeedsEnvironmentIntrinsic(Intrinsics i) {
3528   switch (i) {
3529     case Intrinsics::kNone:
3530       return kNeedsEnvironment;  // Nonsensical for an intrinsic.
3531 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3532     case Intrinsics::k ## Name: \
3533       return NeedsEnv;
3534 #include "intrinsics_list.h"
3535       INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3536 #undef INTRINSICS_LIST
3537 #undef OPTIMIZING_INTRINSICS
3538   }
3539   return kNeedsEnvironment;
3540 }
3541 
3542 // Function that returns whether an intrinsic has side effects.
3543 static inline IntrinsicSideEffects GetSideEffectsIntrinsic(Intrinsics i) {
3544   switch (i) {
3545     case Intrinsics::kNone:
3546       return kAllSideEffects;
3547 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3548     case Intrinsics::k ## Name: \
3549       return SideEffects;
3550 #include "intrinsics_list.h"
3551       INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3552 #undef INTRINSICS_LIST
3553 #undef OPTIMIZING_INTRINSICS
3554   }
3555   return kAllSideEffects;
3556 }
3557 
3558 // Function that returns whether an intrinsic can throw exceptions.
3559 static inline IntrinsicExceptions GetExceptionsIntrinsic(Intrinsics i) {
3560   switch (i) {
3561     case Intrinsics::kNone:
3562       return kCanThrow;
3563 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3564     case Intrinsics::k ## Name: \
3565       return Exceptions;
3566 #include "intrinsics_list.h"
3567       INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3568 #undef INTRINSICS_LIST
3569 #undef OPTIMIZING_INTRINSICS
3570   }
3571   return kCanThrow;
3572 }
3573 
3574 void HInvoke::SetResolvedMethod(ArtMethod* method, bool enable_intrinsic_opt) {
3575   if (method != nullptr && method->IsIntrinsic() && enable_intrinsic_opt) {
3576     Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
3577     SetIntrinsic(intrinsic,
3578                  NeedsEnvironmentIntrinsic(intrinsic),
3579                  GetSideEffectsIntrinsic(intrinsic),
3580                  GetExceptionsIntrinsic(intrinsic));
3581   }
3582   resolved_method_ = method;
3583 }
3584 
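// Returns whether `instruction` is statically known to be >= 0, e.g.
// ArrayLength(a), Min(ArrayLength(a), IntConstant(7)) or Abs(ArrayLength(a)).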
3585 bool IsGEZero(HInstruction* instruction) {
3586   DCHECK(instruction != nullptr);
3587   if (instruction->IsArrayLength()) {
3588     return true;
3589   } else if (instruction->IsMin()) {
3590     // Instruction MIN(>=0, >=0) is >= 0.
3591     return IsGEZero(instruction->InputAt(0)) &&
3592            IsGEZero(instruction->InputAt(1));
3593   } else if (instruction->IsAbs()) {
3594     // Instruction ABS(>=0) is >= 0.
3595     // NOTE: ABS(minint) = minint prevents assuming
3596     //       >= 0 without looking at the argument.
3597     return IsGEZero(instruction->InputAt(0));
3598   }
3599   int64_t value = -1;
3600   return IsInt64AndGet(instruction, &value) && value >= 0;
3601 }
3602 
3603 }  // namespace art
3604