1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 #include "nodes.h"
17 
18 #include <algorithm>
19 #include <cfloat>
20 #include <functional>
21 
22 #include "art_method-inl.h"
23 #include "base/arena_allocator.h"
24 #include "base/arena_bit_vector.h"
25 #include "base/bit_utils.h"
26 #include "base/bit_vector-inl.h"
27 #include "base/bit_vector.h"
28 #include "base/iteration_range.h"
29 #include "base/logging.h"
30 #include "base/malloc_arena_pool.h"
31 #include "base/scoped_arena_allocator.h"
32 #include "base/scoped_arena_containers.h"
33 #include "base/stl_util.h"
34 #include "class_linker-inl.h"
35 #include "class_root-inl.h"
36 #include "code_generator.h"
37 #include "common_dominator.h"
38 #include "intrinsics.h"
39 #include "mirror/class-inl.h"
40 #include "scoped_thread_state_change-inl.h"
41 #include "ssa_builder.h"
42 
43 namespace art {
44 
45 // Enable floating-point static evaluation during constant folding
46 // only if all floating-point operations and constants evaluate in the
47 // range and precision of the type used (i.e., 32-bit float, 64-bit
48 // double).
49 static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0);
50 
51 ReferenceTypeInfo::TypeHandle HandleCache::CreateRootHandle(VariableSizedHandleScope* handles,
52                                                             ClassRoot class_root) {
53   // Mutator lock is required for NewHandle and GetClassRoot().
54   ScopedObjectAccess soa(Thread::Current());
55   return handles->NewHandle(GetClassRoot(class_root));
56 }
57 
58 void HGraph::AddBlock(HBasicBlock* block) {
59   block->SetBlockId(blocks_.size());
60   blocks_.push_back(block);
61 }
62 
63 void HGraph::FindBackEdges(ArenaBitVector* visited) {
64   // "visited" must be empty on entry; it's an output argument for all visited (i.e. live) blocks.
65   DCHECK_EQ(visited->GetHighestBitSet(), -1);
66 
67   // Allocate memory from local ScopedArenaAllocator.
68   ScopedArenaAllocator allocator(GetArenaStack());
69   // Nodes that we're currently visiting, indexed by block id.
70   ArenaBitVector visiting(
71       &allocator, blocks_.size(), /* expandable= */ false, kArenaAllocGraphBuilder);
72   visiting.ClearAllBits();
73   // Number of successors visited from a given node, indexed by block id.
74   ScopedArenaVector<size_t> successors_visited(blocks_.size(),
75                                                0u,
76                                                allocator.Adapter(kArenaAllocGraphBuilder));
77   // Stack of nodes that we're currently visiting (same as marked in "visiting" above).
78   ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
79   constexpr size_t kDefaultWorklistSize = 8;
80   worklist.reserve(kDefaultWorklistSize);
81   visited->SetBit(entry_block_->GetBlockId());
82   visiting.SetBit(entry_block_->GetBlockId());
83   worklist.push_back(entry_block_);
84 
85   while (!worklist.empty()) {
86     HBasicBlock* current = worklist.back();
87     uint32_t current_id = current->GetBlockId();
88     if (successors_visited[current_id] == current->GetSuccessors().size()) {
89       visiting.ClearBit(current_id);
90       worklist.pop_back();
91     } else {
92       HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
93       uint32_t successor_id = successor->GetBlockId();
94       if (visiting.IsBitSet(successor_id)) {
95         DCHECK(ContainsElement(worklist, successor));
96         successor->AddBackEdge(current);
97       } else if (!visited->IsBitSet(successor_id)) {
98         visited->SetBit(successor_id);
99         visiting.SetBit(successor_id);
100         worklist.push_back(successor);
101       }
102     }
103   }
104 }
105 
106 // Remove the instruction's environments from the use lists of the instructions they reference.
107 void RemoveEnvironmentUses(HInstruction* instruction) {
108   for (HEnvironment* environment = instruction->GetEnvironment();
109        environment != nullptr;
110        environment = environment->GetParent()) {
111     for (size_t i = 0, e = environment->Size(); i < e; ++i) {
112       if (environment->GetInstructionAt(i) != nullptr) {
113         environment->RemoveAsUserOfInput(i);
114       }
115     }
116   }
117 }
118 
119 // Return whether the instruction has an environment and it's used by others.
120 bool HasEnvironmentUsedByOthers(HInstruction* instruction) {
121   for (HEnvironment* environment = instruction->GetEnvironment();
122        environment != nullptr;
123        environment = environment->GetParent()) {
124     for (size_t i = 0, e = environment->Size(); i < e; ++i) {
125       HInstruction* user = environment->GetInstructionAt(i);
126       if (user != nullptr) {
127         return true;
128       }
129     }
130   }
131   return false;
132 }
133 
134 // Reset environment records of the instruction itself.
135 void ResetEnvironmentInputRecords(HInstruction* instruction) {
136   for (HEnvironment* environment = instruction->GetEnvironment();
137        environment != nullptr;
138        environment = environment->GetParent()) {
139     for (size_t i = 0, e = environment->Size(); i < e; ++i) {
140       DCHECK(environment->GetHolder() == instruction);
141       if (environment->GetInstructionAt(i) != nullptr) {
142         environment->SetRawEnvAt(i, nullptr);
143       }
144     }
145   }
146 }
147 
148 static void RemoveAsUser(HInstruction* instruction) {
149   instruction->RemoveAsUserOfAllInputs();
150   RemoveEnvironmentUses(instruction);
151 }
152 
153 void HGraph::RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const {
154   for (size_t i = 0; i < blocks_.size(); ++i) {
155     if (!visited.IsBitSet(i)) {
156       HBasicBlock* block = blocks_[i];
157       if (block == nullptr) continue;
158       for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
159         RemoveAsUser(it.Current());
160       }
161       for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
162         RemoveAsUser(it.Current());
163       }
164     }
165   }
166 }
167 
168 void HGraph::RemoveDeadBlocks(const ArenaBitVector& visited) {
169   for (size_t i = 0; i < blocks_.size(); ++i) {
170     if (!visited.IsBitSet(i)) {
171       HBasicBlock* block = blocks_[i];
172       if (block == nullptr) continue;
173       // We only need to update the successor, which might be live.
174       for (HBasicBlock* successor : block->GetSuccessors()) {
175         successor->RemovePredecessor(block);
176       }
177       // Remove the block from the list of blocks, so that further analyses
178       // never see it.
179       blocks_[i] = nullptr;
180       if (block->IsExitBlock()) {
181         SetExitBlock(nullptr);
182       }
183       // Mark the block as removed. This is used by the HGraphBuilder to discard
184       // the block as a branch target.
185       block->SetGraph(nullptr);
186     }
187   }
188 }
189 
190 GraphAnalysisResult HGraph::BuildDominatorTree() {
191   // Allocate memory from local ScopedArenaAllocator.
192   ScopedArenaAllocator allocator(GetArenaStack());
193 
194   ArenaBitVector visited(&allocator, blocks_.size(), false, kArenaAllocGraphBuilder);
195   visited.ClearAllBits();
196 
197   // (1) Find the back edges in the graph doing a DFS traversal.
198   FindBackEdges(&visited);
199 
200   // (2) Remove instructions and phis from blocks not visited during
201   //     the initial DFS as users from other instructions, so that
202   //     users can be safely removed before uses later.
203   RemoveInstructionsAsUsersFromDeadBlocks(visited);
204 
205   // (3) Remove blocks not visited during the initial DFS.
206   //     Step (5) requires dead blocks to be removed from the
207   //     predecessors list of live blocks.
208   RemoveDeadBlocks(visited);
209 
210   // (4) Simplify the CFG now, so that we don't need to recompute
211   //     dominators and the reverse post order.
212   SimplifyCFG();
213 
214   // (5) Compute the dominance information and the reverse post order.
215   ComputeDominanceInformation();
216 
217   // (6) Analyze loops discovered through back edge analysis, and
218   //     set the loop information on each block.
219   GraphAnalysisResult result = AnalyzeLoops();
220   if (result != kAnalysisSuccess) {
221     return result;
222   }
223 
224   // (7) Precompute per-block try membership before entering the SSA builder,
225   //     which needs the information to build catch block phis from values of
226   //     locals at throwing instructions inside try blocks.
227   ComputeTryBlockInformation();
228 
229   return kAnalysisSuccess;
230 }
231 
232 void HGraph::ClearDominanceInformation() {
233   for (HBasicBlock* block : GetActiveBlocks()) {
234     block->ClearDominanceInformation();
235   }
236   reverse_post_order_.clear();
237 }
238 
239 void HGraph::ClearLoopInformation() {
240   SetHasIrreducibleLoops(false);
241   for (HBasicBlock* block : GetActiveBlocks()) {
242     block->SetLoopInformation(nullptr);
243   }
244 }
245 
246 void HBasicBlock::ClearDominanceInformation() {
247   dominated_blocks_.clear();
248   dominator_ = nullptr;
249 }
250 
251 HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const {
252   HInstruction* instruction = GetFirstInstruction();
253   while (instruction->IsParallelMove()) {
254     instruction = instruction->GetNext();
255   }
256   return instruction;
257 }
258 
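// Updates the dominator of `successor` to the common dominator of its current dominator
// and `block`. Returns whether the dominator actually changed.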
259 static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) {
260   DCHECK(ContainsElement(block->GetSuccessors(), successor));
261 
262   HBasicBlock* old_dominator = successor->GetDominator();
263   HBasicBlock* new_dominator =
264       (old_dominator == nullptr) ? block
265                                  : CommonDominator::ForPair(old_dominator, block);
266 
267   if (old_dominator == new_dominator) {
268     return false;
269   } else {
270     successor->SetDominator(new_dominator);
271     return true;
272   }
273 }
274 
275 // TODO Consider moving this entirely into LoadStoreAnalysis/Elimination.
276 bool HGraph::PathBetween(uint32_t source_idx, uint32_t dest_idx) const {
277   DCHECK_LT(source_idx, blocks_.size()) << "source not present in graph!";
278   DCHECK_LT(dest_idx, blocks_.size()) << "dest not present in graph!";
279   DCHECK(blocks_[source_idx] != nullptr);
280   DCHECK(blocks_[dest_idx] != nullptr);
281   return reachability_graph_.IsBitSet(source_idx, dest_idx);
282 }
283 
284 bool HGraph::PathBetween(const HBasicBlock* source, const HBasicBlock* dest) const {
285   if (source == nullptr || dest == nullptr) {
286     return false;
287   }
288   size_t source_idx = source->GetBlockId();
289   size_t dest_idx = dest->GetBlockId();
290   return PathBetween(source_idx, dest_idx);
291 }
292 
293 // This function/struct calculates the reachability of every node from every
294 // other node by iteratively using DFS to find reachability of each individual
295 // block.
296 //
297 // This is in practice faster than the simpler Floyd-Warshall since, while that
298 // is O(N**3), this is O(N*(E + N)) where N is the number of blocks and E is the
299 // number of edges. Since in practice each block only has a few outgoing edges
300 // we can confidently say that E ~ B*N where B is a small number (~3). We also
301 // memoize the results as we go allowing us to (potentially) avoid walking the
302 // entire graph for every node. To make best use of this memoization we
303 // calculate the reachability of blocks in PostOrder. This means that
304 // (generally) blocks that are dominated by many other blocks and dominate few
305 // blocks themselves will be examined first. This makes it more likely we can
306 // use our memoized results.
307 class ReachabilityAnalysisHelper {
308  public:
309   ReachabilityAnalysisHelper(const HGraph* graph,
310                              ArenaBitVectorArray* reachability_graph,
311                              ArenaStack* arena_stack)
312       : graph_(graph),
313         reachability_graph_(reachability_graph),
314         arena_stack_(arena_stack),
315         temporaries_(arena_stack_),
316         block_size_(RoundUp(graph_->GetBlocks().size(), BitVector::kWordBits)),
317         all_visited_nodes_(
318             &temporaries_, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph),
319         not_post_order_visited_(
320             &temporaries_, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph) {
321     // We can't adjust the size of the reachability graph any more without breaking
322     // our allocator invariants, so it had better be large enough.
323     CHECK_GE(reachability_graph_->NumRows(), graph_->GetBlocks().size());
324     CHECK_GE(reachability_graph_->NumColumns(), graph_->GetBlocks().size());
325     not_post_order_visited_.SetInitialBits(graph_->GetBlocks().size());
326   }
327 
328   void CalculateReachability() {
329     // Calculate which blocks are connected to which using repeated DFS.
330     //
331     // Going in PostOrder should generally give memoization a good chance of
332     // hitting.
333     for (const HBasicBlock* blk : graph_->GetPostOrder()) {
334       if (blk == nullptr) {
335         continue;
336       }
337       not_post_order_visited_.ClearBit(blk->GetBlockId());
338       CalculateConnectednessOn(blk);
339       all_visited_nodes_.SetBit(blk->GetBlockId());
340     }
341     // Get all other bits
342     for (auto idx : not_post_order_visited_.Indexes()) {
343       const HBasicBlock* blk = graph_->GetBlocks()[idx];
344       if (blk == nullptr) {
345         continue;
346       }
347       CalculateConnectednessOn(blk);
348       all_visited_nodes_.SetBit(blk->GetBlockId());
349     }
350   }
351 
352  private:
353   void AddEdge(uint32_t source, const HBasicBlock* dest) {
354     reachability_graph_->SetBit(source, dest->GetBlockId());
355   }
356 
357   // Union the reachability of 'idx' into 'update_block_idx'. This is done to
358   // implement memoization. In order to improve performance we do this in 4-byte
359   // blocks. Clang should be able to optimize this to larger blocks if possible.
360   void UnionBlock(size_t update_block_idx, size_t idx) {
361     reachability_graph_->UnionRows(update_block_idx, idx);
362   }
363 
364   // Single DFS to get connectedness of a single block
365   void CalculateConnectednessOn(const HBasicBlock* const target_block) {
366     const uint32_t target_idx = target_block->GetBlockId();
367     ScopedArenaAllocator connectedness_temps(arena_stack_);
368     // What nodes we have already discovered and either have processed or are
369     // already on the work stack.
370     ArenaBitVector discovered(
371         &connectedness_temps, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph);
372     // The work stack. What blocks we still need to process.
373     ScopedArenaVector<const HBasicBlock*> work_stack(
374         connectedness_temps.Adapter(kArenaAllocReachabilityGraph));
375     // Known max size since otherwise we'd have blocks multiple times. Avoids
376     // re-allocation
377     work_stack.reserve(graph_->GetBlocks().size());
378     discovered.SetBit(target_idx);
379     work_stack.push_back(target_block);
380     // Main DFS Loop.
381     while (!work_stack.empty()) {
382       const HBasicBlock* cur = work_stack.back();
383       work_stack.pop_back();
384       // Memoization of previous runs.
385       if (all_visited_nodes_.IsBitSet(cur->GetBlockId())) {
386         DCHECK_NE(target_block, cur);
387         // Already explored from here. Just use that data.
388         UnionBlock(target_idx, cur->GetBlockId());
389         continue;
390       }
391       for (const HBasicBlock* succ : cur->GetSuccessors()) {
392         AddEdge(target_idx, succ);
393         if (!discovered.IsBitSet(succ->GetBlockId())) {
394           work_stack.push_back(succ);
395           discovered.SetBit(succ->GetBlockId());
396         }
397       }
398     }
399   }
400 
401   const HGraph* graph_;
402   // The graph's reachability_graph_ on the main allocator.
403   ArenaBitVectorArray* reachability_graph_;
404   ArenaStack* arena_stack_;
405   // An allocator for temporary bit-vectors used by this algorithm. The
406   // 'SetBit,ClearBit' on reachability_graph_ prior to the construction of this
407   // object should be the only allocation on the main allocator so it's safe to
408   // make a sub-allocator here.
409   ScopedArenaAllocator temporaries_;
410   // Number of columns.
411   const size_t block_size_;
412   // Where we've already completely calculated connectedness.
413   ArenaBitVector all_visited_nodes_;
414   // What we never visited and need to do later
415   ArenaBitVector not_post_order_visited_;
416 
417   DISALLOW_COPY_AND_ASSIGN(ReachabilityAnalysisHelper);
418 };
419 
420 void HGraph::ComputeReachabilityInformation() {
421   DCHECK_EQ(reachability_graph_.GetRawData().NumSetBits(), 0u);
422   DCHECK(reachability_graph_.IsExpandable());
423   // Reserve all the bits we'll need. This is the only allocation on the
424   // standard allocator we do here, enabling us to create a new ScopedArena for
425   // use with temporaries.
426   //
427   // reachability_graph_ acts as |N| x |N| graph for PathBetween. Array is
428   // padded so each row starts on a BitVector::kWordBits-bit alignment for
429   // simplicity and performance, allowing us to union blocks together without
430   // going bit-by-bit.
431   reachability_graph_.Resize(blocks_.size(), blocks_.size(), /*clear=*/false);
432   ReachabilityAnalysisHelper helper(this, &reachability_graph_, GetArenaStack());
433   helper.CalculateReachability();
434 }
435 
436 void HGraph::ClearReachabilityInformation() {
437   reachability_graph_.Clear();
438 }
439 
440 void HGraph::ComputeDominanceInformation() {
441   DCHECK(reverse_post_order_.empty());
442   reverse_post_order_.reserve(blocks_.size());
443   reverse_post_order_.push_back(entry_block_);
444 
445   // Allocate memory from local ScopedArenaAllocator.
446   ScopedArenaAllocator allocator(GetArenaStack());
447   // Number of visits of a given node, indexed by block id.
448   ScopedArenaVector<size_t> visits(blocks_.size(), 0u, allocator.Adapter(kArenaAllocGraphBuilder));
449   // Number of successors visited from a given node, indexed by block id.
450   ScopedArenaVector<size_t> successors_visited(blocks_.size(),
451                                                0u,
452                                                allocator.Adapter(kArenaAllocGraphBuilder));
453   // Nodes for which we need to visit successors.
454   ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
455   constexpr size_t kDefaultWorklistSize = 8;
456   worklist.reserve(kDefaultWorklistSize);
457   worklist.push_back(entry_block_);
458 
459   while (!worklist.empty()) {
460     HBasicBlock* current = worklist.back();
461     uint32_t current_id = current->GetBlockId();
462     if (successors_visited[current_id] == current->GetSuccessors().size()) {
463       worklist.pop_back();
464     } else {
465       HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
466       UpdateDominatorOfSuccessor(current, successor);
467 
468       // Once all the forward edges have been visited, we know the immediate
469       // dominator of the block. We can then start visiting its successors.
470       if (++visits[successor->GetBlockId()] ==
471           successor->GetPredecessors().size() - successor->NumberOfBackEdges()) {
472         reverse_post_order_.push_back(successor);
473         worklist.push_back(successor);
474       }
475     }
476   }
477 
478   // Check if the graph has back edges not dominated by their respective headers.
479   // If so, we need to update the dominators of those headers and recursively of
480   // their successors. We do that with a fix-point iteration over all blocks.
481   // The algorithm is guaranteed to terminate because it loops only if the sum
482   // of all dominator chains has decreased in the current iteration.
483   bool must_run_fix_point = false;
484   for (HBasicBlock* block : blocks_) {
485     if (block != nullptr &&
486         block->IsLoopHeader() &&
487         block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) {
488       must_run_fix_point = true;
489       break;
490     }
491   }
492   if (must_run_fix_point) {
493     bool update_occurred = true;
494     while (update_occurred) {
495       update_occurred = false;
496       for (HBasicBlock* block : GetReversePostOrder()) {
497         for (HBasicBlock* successor : block->GetSuccessors()) {
498           update_occurred |= UpdateDominatorOfSuccessor(block, successor);
499         }
500       }
501     }
502   }
503 
504   // Make sure that there are no remaining blocks whose dominator information
505   // needs to be updated.
506   if (kIsDebugBuild) {
507     for (HBasicBlock* block : GetReversePostOrder()) {
508       for (HBasicBlock* successor : block->GetSuccessors()) {
509         DCHECK(!UpdateDominatorOfSuccessor(block, successor));
510       }
511     }
512   }
513 
514   // Populate `dominated_blocks_` information after computing all dominators.
515   // The potential presence of irreducible loops requires doing it afterwards.
516   for (HBasicBlock* block : GetReversePostOrder()) {
517     if (!block->IsEntryBlock()) {
518       block->GetDominator()->AddDominatedBlock(block);
519     }
520   }
521 }
522 
523 HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
524   HBasicBlock* new_block = new (allocator_) HBasicBlock(this, successor->GetDexPc());
525   AddBlock(new_block);
526   // Use `InsertBetween` to ensure the predecessor index and successor index of
527   // `block` and `successor` are preserved.
528   new_block->InsertBetween(block, successor);
529   return new_block;
530 }
531 
532 void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) {
533   // Insert a new node between `block` and `successor` to split the
534   // critical edge.
535   HBasicBlock* new_block = SplitEdge(block, successor);
536   new_block->AddInstruction(new (allocator_) HGoto(successor->GetDexPc()));
537   if (successor->IsLoopHeader()) {
538     // If we split at a back edge boundary, make the new block the back edge.
539     HLoopInformation* info = successor->GetLoopInformation();
540     if (info->IsBackEdge(*block)) {
541       info->RemoveBackEdge(block);
542       info->AddBackEdge(new_block);
543     }
544   }
545 }
546 
547 // Reorder phi inputs to match reordering of the block's predecessors.
548 static void FixPhisAfterPredecessorsReodering(HBasicBlock* block, size_t first, size_t second) {
549   for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
550     HPhi* phi = it.Current()->AsPhi();
551     HInstruction* first_instr = phi->InputAt(first);
552     HInstruction* second_instr = phi->InputAt(second);
553     phi->ReplaceInput(first_instr, second);
554     phi->ReplaceInput(second_instr, first);
555   }
556 }
557 
558 // Make sure that the first predecessor of a loop header is the incoming block.
559 void HGraph::OrderLoopHeaderPredecessors(HBasicBlock* header) {
560   DCHECK(header->IsLoopHeader());
561   HLoopInformation* info = header->GetLoopInformation();
562   if (info->IsBackEdge(*header->GetPredecessors()[0])) {
563     HBasicBlock* to_swap = header->GetPredecessors()[0];
564     for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) {
565       HBasicBlock* predecessor = header->GetPredecessors()[pred];
566       if (!info->IsBackEdge(*predecessor)) {
567         header->predecessors_[pred] = to_swap;
568         header->predecessors_[0] = predecessor;
569         FixPhisAfterPredecessorsReodering(header, 0, pred);
570         break;
571       }
572     }
573   }
574 }
575 
576 // Transform control flow of the loop to a single preheader format (don't touch the data flow).
577 // The new_preheader may already be among the header's predecessors; this situation is handled
578 // correctly.
579 static void FixControlForNewSinglePreheader(HBasicBlock* header, HBasicBlock* new_preheader) {
580   HLoopInformation* loop_info = header->GetLoopInformation();
581   for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
582     HBasicBlock* predecessor = header->GetPredecessors()[pred];
583     if (!loop_info->IsBackEdge(*predecessor) && predecessor != new_preheader) {
584       predecessor->ReplaceSuccessor(header, new_preheader);
585       pred--;
586     }
587   }
588 }
589 
590 //             == Before ==                                               == After ==
591 //      _________         _________                               _________         _________
592 //     | B0      |       | B1      |      (old preheaders)       | B0      |       | B1      |
593 //     |=========|       |=========|                             |=========|       |=========|
594 //     | i0 = .. |       | i1 = .. |                             | i0 = .. |       | i1 = .. |
595 //     |_________|       |_________|                             |_________|       |_________|
596 //           \               /                                         \              /
597 //            \             /                                        ___v____________v___
598 //             \           /               (new preheader)          | B20 <- B0, B1      |
599 //              |         |                                         |====================|
600 //              |         |                                         | i20 = phi(i0, i1)  |
601 //              |         |                                         |____________________|
602 //              |         |                                                   |
603 //    /\        |         |        /\                           /\            |              /\
604 //   /  v_______v_________v_______v  \                         /  v___________v_____________v  \
605 //  |  | B10 <- B0, B1, B2, B3     |  |                       |  | B10 <- B20, B2, B3        |  |
606 //  |  |===========================|  |       (header)        |  |===========================|  |
607 //  |  | i10 = phi(i0, i1, i2, i3) |  |                       |  | i10 = phi(i20, i2, i3)    |  |
608 //  |  |___________________________|  |                       |  |___________________________|  |
609 //  |        /               \        |                       |        /               \        |
610 //  |      ...              ...       |                       |      ...              ...       |
611 //  |   _________         _________   |                       |   _________         _________   |
612 //  |  | B2      |       | B3      |  |                       |  | B2      |       | B3      |  |
613 //  |  |=========|       |=========|  |     (back edges)      |  |=========|       |=========|  |
614 //  |  | i2 = .. |       | i3 = .. |  |                       |  | i2 = .. |       | i3 = .. |  |
615 //  |  |_________|       |_________|  |                       |  |_________|       |_________|  |
616 //   \     /                   \     /                         \     /                   \     /
617 //    \___/                     \___/                           \___/                     \___/
618 //
619 void HGraph::TransformLoopToSinglePreheaderFormat(HBasicBlock* header) {
620   HLoopInformation* loop_info = header->GetLoopInformation();
621 
622   HBasicBlock* preheader = new (allocator_) HBasicBlock(this, header->GetDexPc());
623   AddBlock(preheader);
624   preheader->AddInstruction(new (allocator_) HGoto(header->GetDexPc()));
625 
626   // If the old header has no Phis then we only need to fix the control flow.
627   if (header->GetPhis().IsEmpty()) {
628     FixControlForNewSinglePreheader(header, preheader);
629     preheader->AddSuccessor(header);
630     return;
631   }
632 
633   // Find the first non-back edge block in the header's predecessors list.
634   size_t first_nonbackedge_pred_pos = 0;
635   bool found = false;
636   for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
637     HBasicBlock* predecessor = header->GetPredecessors()[pred];
638     if (!loop_info->IsBackEdge(*predecessor)) {
639       first_nonbackedge_pred_pos = pred;
640       found = true;
641       break;
642     }
643   }
644 
645   DCHECK(found);
646 
647   // Fix the data-flow.
648   for (HInstructionIterator it(header->GetPhis()); !it.Done(); it.Advance()) {
649     HPhi* header_phi = it.Current()->AsPhi();
650 
651     HPhi* preheader_phi = new (GetAllocator()) HPhi(GetAllocator(),
652                                                     header_phi->GetRegNumber(),
653                                                     0,
654                                                     header_phi->GetType());
655     if (header_phi->GetType() == DataType::Type::kReference) {
656       preheader_phi->SetReferenceTypeInfo(header_phi->GetReferenceTypeInfo());
657     }
658     preheader->AddPhi(preheader_phi);
659 
660     HInstruction* orig_input = header_phi->InputAt(first_nonbackedge_pred_pos);
661     header_phi->ReplaceInput(preheader_phi, first_nonbackedge_pred_pos);
662     preheader_phi->AddInput(orig_input);
663 
664     for (size_t input_pos = first_nonbackedge_pred_pos + 1;
665          input_pos < header_phi->InputCount();
666          input_pos++) {
667       HInstruction* input = header_phi->InputAt(input_pos);
668       HBasicBlock* pred_block = header->GetPredecessors()[input_pos];
669 
670       if (loop_info->Contains(*pred_block)) {
671         DCHECK(loop_info->IsBackEdge(*pred_block));
672       } else {
673         preheader_phi->AddInput(input);
674         header_phi->RemoveInputAt(input_pos);
675         input_pos--;
676       }
677     }
678   }
679 
680   // Fix the control-flow.
681   HBasicBlock* first_pred = header->GetPredecessors()[first_nonbackedge_pred_pos];
682   preheader->InsertBetween(first_pred, header);
683 
684   FixControlForNewSinglePreheader(header, preheader);
685 }
686 
687 void HGraph::SimplifyLoop(HBasicBlock* header) {
688   HLoopInformation* info = header->GetLoopInformation();
689 
690   // Make sure the loop has only one pre header. This simplifies SSA building: we only need
691   // to look at the pre header to know which locals are initialized at the entry of the
692   // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining
693   // this graph.
694   size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges();
695   if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) {
696     TransformLoopToSinglePreheaderFormat(header);
697   }
698 
699   OrderLoopHeaderPredecessors(header);
700 
701   HInstruction* first_instruction = header->GetFirstInstruction();
702   if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) {
703     // Called from DeadBlockElimination. Update SuspendCheck pointer.
704     info->SetSuspendCheck(first_instruction->AsSuspendCheck());
705   }
706 }
707 
708 void HGraph::ComputeTryBlockInformation() {
709   // Iterate in reverse post order to propagate try membership information from
710   // predecessors to their successors.
711   for (HBasicBlock* block : GetReversePostOrder()) {
712     if (block->IsEntryBlock() || block->IsCatchBlock()) {
713       // Catch blocks after simplification have only exceptional predecessors
714       // and hence are never in tries.
715       continue;
716     }
717 
718     // Infer try membership from the first predecessor. Having simplified loops,
719     // the first predecessor can never be a back edge and therefore it must have
720     // been visited already and had its try membership set.
721     HBasicBlock* first_predecessor = block->GetPredecessors()[0];
722     DCHECK_IMPLIES(block->IsLoopHeader(),
723                    !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
724     const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors();
725     if (try_entry != nullptr &&
726         (block->GetTryCatchInformation() == nullptr ||
727          try_entry != &block->GetTryCatchInformation()->GetTryEntry())) {
728       // We are either setting try block membership for the first time or it
729       // has changed.
730       block->SetTryCatchInformation(new (allocator_) TryCatchInformation(*try_entry));
731     }
732   }
733 }
734 
735 void HGraph::SimplifyCFG() {
736   // Simplify the CFG for future analysis and code generation:
737   // (1): Split critical edges.
738   // (2): Simplify loops by having only one preheader.
739   // NOTE: We're appending new blocks inside the loop, so we need to use an index because iterators
740   // can be invalidated. We remember the initial size to avoid iterating over the new blocks.
741   for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) {
742     HBasicBlock* block = blocks_[block_id];
743     if (block == nullptr) continue;
744     if (block->GetSuccessors().size() > 1) {
745       // Only split normal-flow edges. We cannot split exceptional edges as they
746       // are synthesized (they only approximate real control flow), and we do not need to
747       // anyway. Moves that would be inserted there are performed by the runtime.
748       ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors();
749       for (size_t j = 0, e = normal_successors.size(); j < e; ++j) {
750         HBasicBlock* successor = normal_successors[j];
751         DCHECK(!successor->IsCatchBlock());
752         if (successor == exit_block_) {
753           // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we
754           // do not want to split because Goto->Exit is not allowed.
755           DCHECK(block->IsSingleTryBoundary());
756         } else if (successor->GetPredecessors().size() > 1) {
757           SplitCriticalEdge(block, successor);
758           // SplitCriticalEdge could have invalidated the `normal_successors`
759           // ArrayRef. We must re-acquire it.
760           normal_successors = block->GetNormalSuccessors();
761           DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor);
762           DCHECK_EQ(e, normal_successors.size());
763         }
764       }
765     }
766     if (block->IsLoopHeader()) {
767       SimplifyLoop(block);
768     } else if (!block->IsEntryBlock() &&
769                block->GetFirstInstruction() != nullptr &&
770                block->GetFirstInstruction()->IsSuspendCheck()) {
771       // We are being called by the dead code elimination pass, and what used to be
772       // a loop got dismantled. Just remove the suspend check.
773       block->RemoveInstruction(block->GetFirstInstruction());
774     }
775   }
776 }
777 
778 GraphAnalysisResult HGraph::AnalyzeLoops() const {
779   // We iterate in post order to ensure we visit inner loops before outer loops.
780   // `PopulateRecursive` needs this guarantee to know whether a natural loop
781   // contains an irreducible loop.
782   for (HBasicBlock* block : GetPostOrder()) {
783     if (block->IsLoopHeader()) {
784       if (block->IsCatchBlock()) {
785         // TODO: Dealing with exceptional back edges could be tricky because
786         //       they only approximate the real control flow. Bail out for now.
787         VLOG(compiler) << "Not compiled: Exceptional back edges";
788         return kAnalysisFailThrowCatchLoop;
789       }
790       block->GetLoopInformation()->Populate();
791     }
792   }
793   return kAnalysisSuccess;
794 }
795 
796 void HLoopInformation::Dump(std::ostream& os) {
797   os << "header: " << header_->GetBlockId() << std::endl;
798   os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl;
799   for (HBasicBlock* block : back_edges_) {
800     os << "back edge: " << block->GetBlockId() << std::endl;
801   }
802   for (HBasicBlock* block : header_->GetPredecessors()) {
803     os << "predecessor: " << block->GetBlockId() << std::endl;
804   }
805   for (uint32_t idx : blocks_.Indexes()) {
806     os << "  in loop: " << idx << std::endl;
807   }
808 }
809 
810 void HGraph::InsertConstant(HConstant* constant) {
811   // New constants are inserted before the SuspendCheck at the bottom of the
812   // entry block. Note that this method can be called from the graph builder and
813   // the entry block therefore may not end with SuspendCheck->Goto yet.
814   HInstruction* insert_before = nullptr;
815 
816   HInstruction* gota = entry_block_->GetLastInstruction();
817   if (gota != nullptr && gota->IsGoto()) {
818     HInstruction* suspend_check = gota->GetPrevious();
819     if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) {
820       insert_before = suspend_check;
821     } else {
822       insert_before = gota;
823     }
824   }
825 
826   if (insert_before == nullptr) {
827     entry_block_->AddInstruction(constant);
828   } else {
829     entry_block_->InsertInstructionBefore(constant, insert_before);
830   }
831 }
832 
833 HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) {
834   // For simplicity, don't bother reviving the cached null constant if it is
835   // not null and not in a block. Otherwise, we need to clear the instruction
836   // id and/or any invariants the graph is assuming when adding new instructions.
837   if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
838     cached_null_constant_ = new (allocator_) HNullConstant(dex_pc);
839     cached_null_constant_->SetReferenceTypeInfo(GetInexactObjectRti());
840     InsertConstant(cached_null_constant_);
841   }
842   if (kIsDebugBuild) {
843     ScopedObjectAccess soa(Thread::Current());
844     DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
845   }
846   return cached_null_constant_;
847 }
848 
849 HCurrentMethod* HGraph::GetCurrentMethod() {
850   // For simplicity, don't bother reviving the cached current method if it is
851   // not null and not in a block. Otherwise, we need to clear the instruction
852   // id and/or any invariants the graph is assuming when adding new instructions.
853   if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) {
854     cached_current_method_ = new (allocator_) HCurrentMethod(
855         Is64BitInstructionSet(instruction_set_) ? DataType::Type::kInt64 : DataType::Type::kInt32,
856         entry_block_->GetDexPc());
857     if (entry_block_->GetFirstInstruction() == nullptr) {
858       entry_block_->AddInstruction(cached_current_method_);
859     } else {
860       entry_block_->InsertInstructionBefore(
861           cached_current_method_, entry_block_->GetFirstInstruction());
862     }
863   }
864   return cached_current_method_;
865 }
866 
867 const char* HGraph::GetMethodName() const {
868   const dex::MethodId& method_id = dex_file_.GetMethodId(method_idx_);
869   return dex_file_.GetMethodName(method_id);
870 }
871 
872 std::string HGraph::PrettyMethod(bool with_signature) const {
873   return dex_file_.PrettyMethod(method_idx_, with_signature);
874 }
875 
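// Returns the cached constant of the given integral type and value. Bool through Int32
// values are canonicalized to an HIntConstant and Int64 values to an HLongConstant;
// other types are not supported here.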
876 HConstant* HGraph::GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc) {
877   switch (type) {
878     case DataType::Type::kBool:
879       DCHECK(IsUint<1>(value));
880       FALLTHROUGH_INTENDED;
881     case DataType::Type::kUint8:
882     case DataType::Type::kInt8:
883     case DataType::Type::kUint16:
884     case DataType::Type::kInt16:
885     case DataType::Type::kInt32:
886       DCHECK(IsInt(DataType::Size(type) * kBitsPerByte, value));
887       return GetIntConstant(static_cast<int32_t>(value), dex_pc);
888 
889     case DataType::Type::kInt64:
890       return GetLongConstant(value, dex_pc);
891 
892     default:
893       LOG(FATAL) << "Unsupported constant type";
894       UNREACHABLE();
895   }
896 }
897 
898 void HGraph::CacheFloatConstant(HFloatConstant* constant) {
899   int32_t value = bit_cast<int32_t, float>(constant->GetValue());
900   DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end());
901   cached_float_constants_.Overwrite(value, constant);
902 }
903 
904 void HGraph::CacheDoubleConstant(HDoubleConstant* constant) {
905   int64_t value = bit_cast<int64_t, double>(constant->GetValue());
906   DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end());
907   cached_double_constants_.Overwrite(value, constant);
908 }
909 
910 void HLoopInformation::Add(HBasicBlock* block) {
911   blocks_.SetBit(block->GetBlockId());
912 }
913 
914 void HLoopInformation::Remove(HBasicBlock* block) {
915   blocks_.ClearBit(block->GetBlockId());
916 }
917 
918 void HLoopInformation::PopulateRecursive(HBasicBlock* block) {
919   if (blocks_.IsBitSet(block->GetBlockId())) {
920     return;
921   }
922 
923   blocks_.SetBit(block->GetBlockId());
924   block->SetInLoop(this);
925   if (block->IsLoopHeader()) {
926     // We're visiting loops in post-order, so inner loops must have been
927     // populated already.
928     DCHECK(block->GetLoopInformation()->IsPopulated());
929     if (block->GetLoopInformation()->IsIrreducible()) {
930       contains_irreducible_loop_ = true;
931     }
932   }
933   for (HBasicBlock* predecessor : block->GetPredecessors()) {
934     PopulateRecursive(predecessor);
935   }
936 }
937 
938 void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) {
939   size_t block_id = block->GetBlockId();
940 
941   // If `block` is in `finalized`, we know its membership in the loop has been
942   // decided and it does not need to be revisited.
943   if (finalized->IsBitSet(block_id)) {
944     return;
945   }
946 
947   bool is_finalized = false;
948   if (block->IsLoopHeader()) {
949     // If we hit a loop header in an irreducible loop, we first check if the
950     // pre header of that loop belongs to the currently analyzed loop. If it does,
951     // then we visit the back edges.
952     // Note that we cannot use GetPreHeader, as the loop may not have been populated
953     // yet.
954     HBasicBlock* pre_header = block->GetPredecessors()[0];
955     PopulateIrreducibleRecursive(pre_header, finalized);
956     if (blocks_.IsBitSet(pre_header->GetBlockId())) {
957       block->SetInLoop(this);
958       blocks_.SetBit(block_id);
959       finalized->SetBit(block_id);
960       is_finalized = true;
961 
962       HLoopInformation* info = block->GetLoopInformation();
963       for (HBasicBlock* back_edge : info->GetBackEdges()) {
964         PopulateIrreducibleRecursive(back_edge, finalized);
965       }
966     }
967   } else {
968     // Visit all predecessors. If one predecessor is part of the loop, this
969     // block is also part of this loop.
970     for (HBasicBlock* predecessor : block->GetPredecessors()) {
971       PopulateIrreducibleRecursive(predecessor, finalized);
972       if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) {
973         block->SetInLoop(this);
974         blocks_.SetBit(block_id);
975         finalized->SetBit(block_id);
976         is_finalized = true;
977       }
978     }
979   }
980 
981   // All predecessors have been recursively visited. Mark finalized if not marked yet.
982   if (!is_finalized) {
983     finalized->SetBit(block_id);
984   }
985 }
986 
987 void HLoopInformation::Populate() {
988   DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated";
989   // Populate this loop: starting with the back edge, recursively add predecessors
990   // that are not already part of that loop. Set the header as part of the loop
991   // to end the recursion.
992   // This is a recursive implementation of the algorithm described in
993   // "Advanced Compiler Design & Implementation" (Muchnick) p192.
994   HGraph* graph = header_->GetGraph();
995   blocks_.SetBit(header_->GetBlockId());
996   header_->SetInLoop(this);
997 
998   bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader();
999 
1000   if (is_irreducible_loop) {
1001     // Allocate memory from local ScopedArenaAllocator.
1002     ScopedArenaAllocator allocator(graph->GetArenaStack());
1003     ArenaBitVector visited(&allocator,
1004                            graph->GetBlocks().size(),
1005                            /* expandable= */ false,
1006                            kArenaAllocGraphBuilder);
1007     visited.ClearAllBits();
1008     // Stop marking blocks at the loop header.
1009     visited.SetBit(header_->GetBlockId());
1010 
1011     for (HBasicBlock* back_edge : GetBackEdges()) {
1012       PopulateIrreducibleRecursive(back_edge, &visited);
1013     }
1014   } else {
1015     for (HBasicBlock* back_edge : GetBackEdges()) {
1016       PopulateRecursive(back_edge);
1017     }
1018   }
1019 
1020   if (!is_irreducible_loop && graph->IsCompilingOsr()) {
1021     // When compiling in OSR mode, all loops in the compiled method may be entered
1022     // from the interpreter. We treat this OSR entry point just like an extra entry
1023     // to an irreducible loop, so we need to mark the method's loops as irreducible.
1024     // This does not apply to inlined loops which do not act as OSR entry points.
1025     if (suspend_check_ == nullptr) {
1026       // Just building the graph in OSR mode, this loop is not inlined. We never build an
1027       // inner graph in OSR mode as we can do OSR transition only from the outer method.
1028       is_irreducible_loop = true;
1029     } else {
1030       // Look at the suspend check's environment to determine if the loop was inlined.
1031       DCHECK(suspend_check_->HasEnvironment());
1032       if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) {
1033         is_irreducible_loop = true;
1034       }
1035     }
1036   }
1037   if (is_irreducible_loop) {
1038     irreducible_ = true;
1039     contains_irreducible_loop_ = true;
1040     graph->SetHasIrreducibleLoops(true);
1041   }
1042   graph->SetHasLoops(true);
1043 }
1044 
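// Adds all blocks of `inner_loop` to this loop's block set and recursively propagates
// them to the enclosing outer loops, if any.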
1045 void HLoopInformation::PopulateInnerLoopUpwards(HLoopInformation* inner_loop) {
1046   DCHECK(inner_loop->GetPreHeader()->GetLoopInformation() == this);
1047   blocks_.Union(&inner_loop->blocks_);
1048   HLoopInformation* outer_loop = GetPreHeader()->GetLoopInformation();
1049   if (outer_loop != nullptr) {
1050     outer_loop->PopulateInnerLoopUpwards(this);
1051   }
1052 }
1053 
1054 HBasicBlock* HLoopInformation::GetPreHeader() const {
1055   HBasicBlock* block = header_->GetPredecessors()[0];
1056   DCHECK(irreducible_ || (block == header_->GetDominator()));
1057   return block;
1058 }
1059 
1060 bool HLoopInformation::Contains(const HBasicBlock& block) const {
1061   return blocks_.IsBitSet(block.GetBlockId());
1062 }
1063 
1064 bool HLoopInformation::IsIn(const HLoopInformation& other) const {
1065   return other.blocks_.IsBitSet(header_->GetBlockId());
1066 }
1067 
1068 bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const {
1069   return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId());
1070 }
1071 
1072 size_t HLoopInformation::GetLifetimeEnd() const {
1073   size_t last_position = 0;
1074   for (HBasicBlock* back_edge : GetBackEdges()) {
1075     last_position = std::max(back_edge->GetLifetimeEnd(), last_position);
1076   }
1077   return last_position;
1078 }
1079 
1080 bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const {
1081   for (HBasicBlock* back_edge : GetBackEdges()) {
1082     DCHECK(back_edge->GetDominator() != nullptr);
1083     if (!header_->Dominates(back_edge)) {
1084       return true;
1085     }
1086   }
1087   return false;
1088 }
1089 
1090 bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) {
1091   for (HBasicBlock* back_edge : GetBackEdges()) {
1092     if (!block->Dominates(back_edge)) {
1093       return false;
1094     }
1095   }
1096   return true;
1097 }
1098 
1099 
1100 bool HLoopInformation::HasExitEdge() const {
1101   // Determine if this loop has at least one exit edge.
1102   HBlocksInLoopReversePostOrderIterator it_loop(*this);
1103   for (; !it_loop.Done(); it_loop.Advance()) {
1104     for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) {
1105       if (!Contains(*successor)) {
1106         return true;
1107       }
1108     }
1109   }
1110   return false;
1111 }
1112 
1113 bool HBasicBlock::Dominates(const HBasicBlock* other) const {
1114   // Walk up the dominator tree from `other`, to find out if `this`
1115   // is an ancestor.
1116   const HBasicBlock* current = other;
1117   while (current != nullptr) {
1118     if (current == this) {
1119       return true;
1120     }
1121     current = current->GetDominator();
1122   }
1123   return false;
1124 }
1125 
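// Registers `instruction` as a user of each of its inputs. Its environment, if any, is
// expected to be set up later.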
1126 static void UpdateInputsUsers(HInstruction* instruction) {
1127   HInputsRef inputs = instruction->GetInputs();
1128   for (size_t i = 0; i < inputs.size(); ++i) {
1129     inputs[i]->AddUseAt(instruction, i);
1130   }
1131   // Environment should be created later.
1132   DCHECK(!instruction->HasEnvironment());
1133 }
1134 
1135 void HBasicBlock::ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement) {
1136   DCHECK(initial->GetBlock() == this);
1137   InsertPhiAfter(replacement, initial);
1138   initial->ReplaceWith(replacement);
1139   RemovePhi(initial);
1140 }
1141 
1142 void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial,
1143                                                   HInstruction* replacement) {
1144   DCHECK(initial->GetBlock() == this);
1145   if (initial->IsControlFlow()) {
1146     // We can only replace a control flow instruction with another control flow instruction.
1147     DCHECK(replacement->IsControlFlow());
1148     DCHECK_EQ(replacement->GetId(), -1);
1149     DCHECK_EQ(replacement->GetType(), DataType::Type::kVoid);
1150     DCHECK_EQ(initial->GetBlock(), this);
1151     DCHECK_EQ(initial->GetType(), DataType::Type::kVoid);
1152     DCHECK(initial->GetUses().empty());
1153     DCHECK(initial->GetEnvUses().empty());
1154     replacement->SetBlock(this);
1155     replacement->SetId(GetGraph()->GetNextInstructionId());
1156     instructions_.InsertInstructionBefore(replacement, initial);
1157     UpdateInputsUsers(replacement);
1158   } else {
1159     InsertInstructionBefore(replacement, initial);
1160     initial->ReplaceWith(replacement);
1161   }
1162   RemoveInstruction(initial);
1163 }
1164 
1165 static void Add(HInstructionList* instruction_list,
1166                 HBasicBlock* block,
1167                 HInstruction* instruction) {
1168   DCHECK(instruction->GetBlock() == nullptr);
1169   DCHECK_EQ(instruction->GetId(), -1);
1170   instruction->SetBlock(block);
1171   instruction->SetId(block->GetGraph()->GetNextInstructionId());
1172   UpdateInputsUsers(instruction);
1173   instruction_list->AddInstruction(instruction);
1174 }
1175 
1176 void HBasicBlock::AddInstruction(HInstruction* instruction) {
1177   Add(&instructions_, this, instruction);
1178 }
1179 
1180 void HBasicBlock::AddPhi(HPhi* phi) {
1181   Add(&phis_, this, phi);
1182 }
1183 
1184 void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
1185   DCHECK(!cursor->IsPhi());
1186   DCHECK(!instruction->IsPhi());
1187   DCHECK_EQ(instruction->GetId(), -1);
1188   DCHECK_NE(cursor->GetId(), -1);
1189   DCHECK_EQ(cursor->GetBlock(), this);
1190   DCHECK(!instruction->IsControlFlow());
1191   instruction->SetBlock(this);
1192   instruction->SetId(GetGraph()->GetNextInstructionId());
1193   UpdateInputsUsers(instruction);
1194   instructions_.InsertInstructionBefore(instruction, cursor);
1195 }
1196 
1197 void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
1198   DCHECK(!cursor->IsPhi());
1199   DCHECK(!instruction->IsPhi());
1200   DCHECK_EQ(instruction->GetId(), -1);
1201   DCHECK_NE(cursor->GetId(), -1);
1202   DCHECK_EQ(cursor->GetBlock(), this);
1203   DCHECK(!instruction->IsControlFlow());
1204   DCHECK(!cursor->IsControlFlow());
1205   instruction->SetBlock(this);
1206   instruction->SetId(GetGraph()->GetNextInstructionId());
1207   UpdateInputsUsers(instruction);
1208   instructions_.InsertInstructionAfter(instruction, cursor);
1209 }
1210 
1211 void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) {
1212   DCHECK_EQ(phi->GetId(), -1);
1213   DCHECK_NE(cursor->GetId(), -1);
1214   DCHECK_EQ(cursor->GetBlock(), this);
1215   phi->SetBlock(this);
1216   phi->SetId(GetGraph()->GetNextInstructionId());
1217   UpdateInputsUsers(phi);
1218   phis_.InsertInstructionAfter(phi, cursor);
1219 }
1220 
1221 static void Remove(HInstructionList* instruction_list,
1222                    HBasicBlock* block,
1223                    HInstruction* instruction,
1224                    bool ensure_safety) {
1225   DCHECK_EQ(block, instruction->GetBlock());
1226   instruction->SetBlock(nullptr);
1227   instruction_list->RemoveInstruction(instruction);
1228   if (ensure_safety) {
1229     DCHECK(instruction->GetUses().empty());
1230     DCHECK(instruction->GetEnvUses().empty());
1231     RemoveAsUser(instruction);
1232   }
1233 }
1234 
1235 void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) {
1236   DCHECK(!instruction->IsPhi());
1237   Remove(&instructions_, this, instruction, ensure_safety);
1238 }
1239 
1240 void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) {
1241   Remove(&phis_, this, phi, ensure_safety);
1242 }
1243 
1244 void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) {
1245   if (instruction->IsPhi()) {
1246     RemovePhi(instruction->AsPhi(), ensure_safety);
1247   } else {
1248     RemoveInstruction(instruction, ensure_safety);
1249   }
1250 }
1251 
1252 void HEnvironment::CopyFrom(ArrayRef<HInstruction* const> locals) {
1253   for (size_t i = 0; i < locals.size(); i++) {
1254     HInstruction* instruction = locals[i];
1255     SetRawEnvAt(i, instruction);
1256     if (instruction != nullptr) {
1257       instruction->AddEnvUseAt(this, i);
1258     }
1259   }
1260 }
1261 
1262 void HEnvironment::CopyFrom(HEnvironment* env) {
1263   for (size_t i = 0; i < env->Size(); i++) {
1264     HInstruction* instruction = env->GetInstructionAt(i);
1265     SetRawEnvAt(i, instruction);
1266     if (instruction != nullptr) {
1267       instruction->AddEnvUseAt(this, i);
1268     }
1269   }
1270 }
1271 
1272 void HEnvironment::CopyFromWithLoopPhiAdjustment(HEnvironment* env,
1273                                                  HBasicBlock* loop_header) {
1274   DCHECK(loop_header->IsLoopHeader());
1275   for (size_t i = 0; i < env->Size(); i++) {
1276     HInstruction* instruction = env->GetInstructionAt(i);
1277     SetRawEnvAt(i, instruction);
1278     if (instruction == nullptr) {
1279       continue;
1280     }
1281     if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
1282       // At the end of the loop pre-header, the corresponding value for `instruction`
1283       // is the first input of the phi.
1284       HInstruction* initial = instruction->AsPhi()->InputAt(0);
1285       SetRawEnvAt(i, initial);
1286       initial->AddEnvUseAt(this, i);
1287     } else {
1288       instruction->AddEnvUseAt(this, i);
1289     }
1290   }
1291 }
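// Illustrative sketch of the adjustment above (hypothetical names): for a
// loop-header phi
//   i = Phi(i0 /* from pre-header */, i_next /* from back edge */)
// an environment copied to the end of the pre-header must record `i0` rather
// than the phi itself, because the phi's value does not exist there yet.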
1292 
1293 void HEnvironment::RemoveAsUserOfInput(size_t index) const {
1294   const HUserRecord<HEnvironment*>& env_use = vregs_[index];
1295   HInstruction* user = env_use.GetInstruction();
1296   auto before_env_use_node = env_use.GetBeforeUseNode();
1297   user->env_uses_.erase_after(before_env_use_node);
1298   user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
1299 }
1300 
1301 void HEnvironment::ReplaceInput(HInstruction* replacement, size_t index) {
1302   const HUserRecord<HEnvironment*>& env_use_record = vregs_[index];
1303   HInstruction* orig_instr = env_use_record.GetInstruction();
1304 
1305   DCHECK(orig_instr != replacement);
1306 
1307   HUseList<HEnvironment*>::iterator before_use_node = env_use_record.GetBeforeUseNode();
1308   // Note: fixup_end remains valid across splice_after().
1309   auto fixup_end = replacement->env_uses_.empty() ? replacement->env_uses_.begin()
1310                                                   : ++replacement->env_uses_.begin();
1311   replacement->env_uses_.splice_after(replacement->env_uses_.before_begin(),
1312                                       env_use_record.GetInstruction()->env_uses_,
1313                                       before_use_node);
1314   replacement->FixUpUserRecordsAfterEnvUseInsertion(fixup_end);
1315   orig_instr->FixUpUserRecordsAfterEnvUseRemoval(before_use_node);
1316 }
1317 
1318 std::ostream& HInstruction::Dump(std::ostream& os, bool dump_args) {
1319   // Note: Handle the case where the instruction has been removed from
1320   // the graph to support debugging output for failed gtests.
1321   HGraph* graph = (GetBlock() != nullptr) ? GetBlock()->GetGraph() : nullptr;
1322   HGraphVisualizer::DumpInstruction(&os, graph, this);
1323   if (dump_args) {
1324     // Allocate memory from local ScopedArenaAllocator.
1325     std::optional<MallocArenaPool> local_arena_pool;
1326     std::optional<ArenaStack> local_arena_stack;
1327     if (UNLIKELY(graph == nullptr)) {
1328       local_arena_pool.emplace();
1329       local_arena_stack.emplace(&local_arena_pool.value());
1330     }
1331     ScopedArenaAllocator allocator(
1332         graph != nullptr ? graph->GetArenaStack() : &local_arena_stack.value());
1333     // Instructions that we already visited. We print each instruction only once.
1334     ArenaBitVector visited(&allocator,
1335                            (graph != nullptr) ? graph->GetCurrentInstructionId() : 0u,
1336                            /* expandable= */ (graph == nullptr),
1337                            kArenaAllocMisc);
1338     visited.ClearAllBits();
1339     visited.SetBit(GetId());
1340     // Keep a queue of instructions with their indentations.
1341     ScopedArenaDeque<std::pair<HInstruction*, size_t>> queue(allocator.Adapter(kArenaAllocMisc));
1342     auto add_args = [&queue](HInstruction* instruction, size_t indentation) {
1343       for (HInstruction* arg : ReverseRange(instruction->GetInputs())) {
1344         queue.emplace_front(arg, indentation);
1345       }
1346     };
1347     add_args(this, /*indentation=*/ 1u);
1348     while (!queue.empty()) {
1349       HInstruction* instruction;
1350       size_t indentation;
1351       std::tie(instruction, indentation) = queue.front();
1352       queue.pop_front();
1353       if (!visited.IsBitSet(instruction->GetId())) {
1354         visited.SetBit(instruction->GetId());
1355         os << '\n';
1356         for (size_t i = 0; i != indentation; ++i) {
1357           os << "  ";
1358         }
1359         HGraphVisualizer::DumpInstruction(&os, graph, instruction);
1360         add_args(instruction, indentation + 1u);
1361       }
1362     }
1363   }
1364   return os;
1365 }
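// Shape of the output produced above when `dump_args` is true (a sketch; the
// text of each line comes from HGraphVisualizer::DumpInstruction): arguments
// are printed depth-first, indented two spaces per level, each at most once:
//   <this instruction>
//     <input 0>
//     <input 1>
//       <input of input 1>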
1366 
1367 HInstruction* HInstruction::GetNextDisregardingMoves() const {
1368   HInstruction* next = GetNext();
1369   while (next != nullptr && next->IsParallelMove()) {
1370     next = next->GetNext();
1371   }
1372   return next;
1373 }
1374 
1375 HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
1376   HInstruction* previous = GetPrevious();
1377   while (previous != nullptr && previous->IsParallelMove()) {
1378     previous = previous->GetPrevious();
1379   }
1380   return previous;
1381 }
1382 
1383 void HInstructionList::AddInstruction(HInstruction* instruction) {
1384   if (first_instruction_ == nullptr) {
1385     DCHECK(last_instruction_ == nullptr);
1386     first_instruction_ = last_instruction_ = instruction;
1387   } else {
1388     DCHECK(last_instruction_ != nullptr);
1389     last_instruction_->next_ = instruction;
1390     instruction->previous_ = last_instruction_;
1391     last_instruction_ = instruction;
1392   }
1393 }
1394 
1395 void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
1396   DCHECK(Contains(cursor));
1397   if (cursor == first_instruction_) {
1398     cursor->previous_ = instruction;
1399     instruction->next_ = cursor;
1400     first_instruction_ = instruction;
1401   } else {
1402     instruction->previous_ = cursor->previous_;
1403     instruction->next_ = cursor;
1404     cursor->previous_ = instruction;
1405     instruction->previous_->next_ = instruction;
1406   }
1407 }
1408 
1409 void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
1410   DCHECK(Contains(cursor));
1411   if (cursor == last_instruction_) {
1412     cursor->next_ = instruction;
1413     instruction->previous_ = cursor;
1414     last_instruction_ = instruction;
1415   } else {
1416     instruction->next_ = cursor->next_;
1417     instruction->previous_ = cursor;
1418     cursor->next_ = instruction;
1419     instruction->next_->previous_ = instruction;
1420   }
1421 }
1422 
1423 void HInstructionList::RemoveInstruction(HInstruction* instruction) {
1424   if (instruction->previous_ != nullptr) {
1425     instruction->previous_->next_ = instruction->next_;
1426   }
1427   if (instruction->next_ != nullptr) {
1428     instruction->next_->previous_ = instruction->previous_;
1429   }
1430   if (instruction == first_instruction_) {
1431     first_instruction_ = instruction->next_;
1432   }
1433   if (instruction == last_instruction_) {
1434     last_instruction_ = instruction->previous_;
1435   }
1436 }
1437 
1438 bool HInstructionList::Contains(HInstruction* instruction) const {
1439   for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1440     if (it.Current() == instruction) {
1441       return true;
1442     }
1443   }
1444   return false;
1445 }
1446 
1447 bool HInstructionList::FoundBefore(const HInstruction* instruction1,
1448                                    const HInstruction* instruction2) const {
1449   DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock());
1450   for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1451     if (it.Current() == instruction1) {
1452       return true;
1453     }
1454     if (it.Current() == instruction2) {
1455       return false;
1456     }
1457   }
1458   LOG(FATAL) << "Did not find an order between two instructions of the same block.";
1459   UNREACHABLE();
1460 }
1461 
1462 bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const {
1463   if (other_instruction == this) {
1464     // An instruction does not strictly dominate itself.
1465     return false;
1466   }
1467   HBasicBlock* block = GetBlock();
1468   HBasicBlock* other_block = other_instruction->GetBlock();
1469   if (block != other_block) {
1470     return GetBlock()->Dominates(other_instruction->GetBlock());
1471   } else {
1472     // If both instructions are in the same block, ensure this
1473     // instruction comes before `other_instruction`.
1474     if (IsPhi()) {
1475       if (!other_instruction->IsPhi()) {
1476         // Phis appear before non-phi instructions, so this instruction
1477         // dominates `other_instruction`.
1478         return true;
1479       } else {
1480         // There is no order among phis.
1481         LOG(FATAL) << "There is no dominance between phis of the same block.";
1482         UNREACHABLE();
1483       }
1484     } else {
1485       // `this` is not a phi.
1486       if (other_instruction->IsPhi()) {
1487         // Phis appear before non-phi instructions, so this instruction
1488         // does not dominate `other_instruction`.
1489         return false;
1490       } else {
1491         // Check whether this instruction comes before
1492         // `other_instruction` in the instruction list.
1493         return block->GetInstructions().FoundBefore(this, other_instruction);
1494       }
1495     }
1496   }
1497 }
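// Same-block rules above, summarized with a sketch: a phi strictly dominates
// every non-phi of its block, a non-phi never strictly dominates a phi of its
// block, and for two non-phis the list order decides, e.g. in
//   a = ...; b = Add(a, a)
// `a` strictly dominates `b`, but no instruction strictly dominates itself.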
1498 
1499 void HInstruction::RemoveEnvironment() {
1500   RemoveEnvironmentUses(this);
1501   environment_ = nullptr;
1502 }
1503 
1504 void HInstruction::ReplaceWith(HInstruction* other) {
1505   DCHECK(other != nullptr);
1506   // Note: fixup_end remains valid across splice_after().
1507   auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin();
1508   other->uses_.splice_after(other->uses_.before_begin(), uses_);
1509   other->FixUpUserRecordsAfterUseInsertion(fixup_end);
1510 
1511   // Note: env_fixup_end remains valid across splice_after().
1512   auto env_fixup_end =
1513       other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin();
1514   other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_);
1515   other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
1516 
1517   DCHECK(uses_.empty());
1518   DCHECK(env_uses_.empty());
1519 }
1520 
1521 void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
1522   const HUseList<HInstruction*>& uses = GetUses();
1523   for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
1524     HInstruction* user = it->GetUser();
1525     size_t index = it->GetIndex();
1526     // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
1527     ++it;
1528     if (dominator->StrictlyDominates(user)) {
1529       user->ReplaceInput(replacement, index);
1530     } else if (user->IsPhi() && !user->AsPhi()->IsCatchPhi()) {
1531       // If the input flows from a block dominated by `dominator`, we can replace it.
1532       // We do not perform this for catch phis as we don't have control flow support
1533       // for their inputs.
1534       const ArenaVector<HBasicBlock*>& predecessors = user->GetBlock()->GetPredecessors();
1535       HBasicBlock* predecessor = predecessors[index];
1536       if (dominator->GetBlock()->Dominates(predecessor)) {
1537         user->ReplaceInput(replacement, index);
1538       }
1539     }
1540   }
1541 }
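// Illustrative sketch of the phi case above (hypothetical blocks B1/B2/B3):
//   B1: x = ...            // `this` is x; `dominator` and `replacement` live in B1
//   B3: p = Phi(x, y)      // B3's predecessors are B1 and B2, in that order
// The phi's use of `x` at input 0 is rewritten to `replacement` because B1
// dominates B3's predecessor 0 (B1 itself), even though B1 does not dominate B3.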
1542 
1543 void HInstruction::ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
1544   const HUseList<HEnvironment*>& uses = GetEnvUses();
1545   for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
1546     HEnvironment* user = it->GetUser();
1547     size_t index = it->GetIndex();
1548     // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
1549     ++it;
1550     if (dominator->StrictlyDominates(user->GetHolder())) {
1551       user->ReplaceInput(replacement, index);
1552     }
1553   }
1554 }
1555 
1556 void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) {
1557   HUserRecord<HInstruction*> input_use = InputRecordAt(index);
1558   if (input_use.GetInstruction() == replacement) {
1559     // Nothing to do.
1560     return;
1561   }
1562   HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
1563   // Note: fixup_end remains valid across splice_after().
1564   auto fixup_end =
1565       replacement->uses_.empty() ? replacement->uses_.begin() : ++replacement->uses_.begin();
1566   replacement->uses_.splice_after(replacement->uses_.before_begin(),
1567                                   input_use.GetInstruction()->uses_,
1568                                   before_use_node);
1569   replacement->FixUpUserRecordsAfterUseInsertion(fixup_end);
1570   input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
1571 }
1572 
1573 size_t HInstruction::EnvironmentSize() const {
1574   return HasEnvironment() ? environment_->Size() : 0;
1575 }
1576 
1577 void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
1578   DCHECK(input->GetBlock() != nullptr);
1579   inputs_.push_back(HUserRecord<HInstruction*>(input));
1580   input->AddUseAt(this, inputs_.size() - 1);
1581 }
1582 
1583 void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
1584   inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
1585   input->AddUseAt(this, index);
1586   // Update indexes in use nodes of inputs that have been pushed further back by the insert().
1587   for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
1588     DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
1589     inputs_[i].GetUseNode()->SetIndex(i);
1590   }
1591 }
1592 
1593 void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
1594   RemoveAsUserOfInput(index);
1595   inputs_.erase(inputs_.begin() + index);
1596   // Update indexes in use nodes of inputs that have been pulled forward by the erase().
1597   for (size_t i = index, e = inputs_.size(); i < e; ++i) {
1598     DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u);
1599     inputs_[i].GetUseNode()->SetIndex(i);
1600   }
1601 }
1602 
1603 void HVariableInputSizeInstruction::RemoveAllInputs() {
1604   RemoveAsUserOfAllInputs();
1605   DCHECK(!HasNonEnvironmentUses());
1606 
1607   inputs_.clear();
1608   DCHECK_EQ(0u, InputCount());
1609 }
1610 
1611 size_t HConstructorFence::RemoveConstructorFences(HInstruction* instruction) {
1612   DCHECK(instruction->GetBlock() != nullptr);
1613   // Removing constructor fences only makes sense for instructions with an object return type.
1614   DCHECK_EQ(DataType::Type::kReference, instruction->GetType());
1615 
1616   // Return how many instructions were removed, for statistics purposes.
1617   size_t remove_count = 0;
1618 
1619   // Efficient implementation that simultaneously (in one pass):
1620   // * Scans the uses list for all constructor fences.
1621   // * Deletes that constructor fence from the uses list of `instruction`.
1622   // * Deletes `instruction` from the constructor fence's inputs.
1623   // * Deletes the constructor fence if it now has 0 inputs.
1624 
1625   const HUseList<HInstruction*>& uses = instruction->GetUses();
1626   // Warning: Although this is "const", we might mutate the list when calling RemoveInputAt.
1627   for (auto it = uses.begin(), end = uses.end(); it != end; ) {
1628     const HUseListNode<HInstruction*>& use_node = *it;
1629     HInstruction* const use_instruction = use_node.GetUser();
1630 
1631     // Advance the iterator immediately once we fetch the use_node.
1632     // Warning: If the input is removed, the current iterator becomes invalid.
1633     ++it;
1634 
1635     if (use_instruction->IsConstructorFence()) {
1636       HConstructorFence* ctor_fence = use_instruction->AsConstructorFence();
1637       size_t input_index = use_node.GetIndex();
1638 
1639       // Process the candidate instruction for removal
1640       // from the graph.
1641 
1642       // Constructor fence instructions are never
1643       // used by other instructions.
1644       //
1645       // If we wanted to make this more generic, it
1646       // could be a runtime if statement.
1647       DCHECK(!ctor_fence->HasUses());
1648 
1649       // A constructor fence's return type is "kPrimVoid"
1650       // and therefore it can't have any environment uses.
1651       DCHECK(!ctor_fence->HasEnvironmentUses());
1652 
1653       // Remove the inputs first, otherwise removing the instruction
1654       // will try to remove its uses while we are already removing uses
1655       // and this operation will fail.
1656       DCHECK_EQ(instruction, ctor_fence->InputAt(input_index));
1657 
1658       // Removing the input will also remove the `use_node`.
1659       // (Do not look at `use_node` after this, it will be a dangling reference).
1660       ctor_fence->RemoveInputAt(input_index);
1661 
1662       // Once all inputs are removed, the fence is considered dead and
1663       // is removed.
1664       if (ctor_fence->InputCount() == 0u) {
1665         ctor_fence->GetBlock()->RemoveInstruction(ctor_fence);
1666         ++remove_count;
1667       }
1668     }
1669   }
1670 
1671   if (kIsDebugBuild) {
1672     // Post-condition checks:
1673     // * None of the uses of `instruction` are a constructor fence.
1674     // * The `instruction` itself did not get removed from a block.
1675     for (const HUseListNode<HInstruction*>& use_node : instruction->GetUses()) {
1676       CHECK(!use_node.GetUser()->IsConstructorFence());
1677     }
1678     CHECK(instruction->GetBlock() != nullptr);
1679   }
1680 
1681   return remove_count;
1682 }
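// Illustrative use (hypothetical caller, sketch only):
//   HNewInstance* alloc = ...;  // allocation proven not to need its fences
//   size_t removed = HConstructorFence::RemoveConstructorFences(alloc);
// Every constructor fence that listed `alloc` as an input loses that input, and
// any fence whose input count drops to zero is removed from its block.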
1683 
1684 void HConstructorFence::Merge(HConstructorFence* other) {
1685   // Do not delete yourself from the graph.
1686   DCHECK(this != other);
1687   // Don't try to merge with an instruction not associated with a block.
1688   DCHECK(other->GetBlock() != nullptr);
1689   // A constructor fence's return type is "kPrimVoid"
1690   // and therefore it cannot have any environment uses.
1691   DCHECK(!other->HasEnvironmentUses());
1692 
1693   auto has_input = [](HInstruction* haystack, HInstruction* needle) {
1694     // Check if `haystack` has `needle` as any of its inputs.
1695     for (size_t input_count = 0; input_count < haystack->InputCount(); ++input_count) {
1696       if (haystack->InputAt(input_count) == needle) {
1697         return true;
1698       }
1699     }
1700     return false;
1701   };
1702 
1703   // Add each input of `other` to `this`, unless it is already an input.
1704   for (size_t input_count = 0; input_count < other->InputCount(); ++input_count) {
1705     HInstruction* other_input = other->InputAt(input_count);
1706     if (!has_input(this, other_input)) {
1707       AddInput(other_input);
1708     }
1709   }
1710 
1711   other->GetBlock()->RemoveInstruction(other);
1712 }
1713 
1714 HInstruction* HConstructorFence::GetAssociatedAllocation(bool ignore_inputs) {
1715   HInstruction* new_instance_inst = GetPrevious();
1716   // Check if the immediately preceding instruction is a new-instance/new-array.
1717   // Otherwise this fence is for protecting final fields.
1718   if (new_instance_inst != nullptr &&
1719       (new_instance_inst->IsNewInstance() || new_instance_inst->IsNewArray())) {
1720     if (ignore_inputs) {
1721       // If inputs are ignored, simply check if the predecessor is
1722       // *any* HNewInstance/HNewArray.
1723       //
1724       // Inputs are normally only ignored for prepare_for_register_allocation,
1725       // at which point *any* prior HNewInstance/Array can be considered
1726       // associated.
1727       return new_instance_inst;
1728     } else {
1729       // Normal case: There must be exactly 1 input and the previous instruction
1730       // must be that input.
1731       if (InputCount() == 1u && InputAt(0) == new_instance_inst) {
1732         return new_instance_inst;
1733       }
1734     }
1735   }
1736   return nullptr;
1737 }
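// Example of the association above (sketch): for the sequence
//   NewInstance
//   ConstructorFence [NewInstance]
// GetAssociatedAllocation() returns the NewInstance. It returns null if any
// other instruction sits between the two, or if (with `ignore_inputs` false)
// the fence does not have that allocation as its single input.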
1738 
1739 #define DEFINE_ACCEPT(name, super)                                             \
1740 void H##name::Accept(HGraphVisitor* visitor) {                                 \
1741   visitor->Visit##name(this);                                                  \
1742 }
1743 
1744 FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)
1745 
1746 #undef DEFINE_ACCEPT
1747 
1748 void HGraphVisitor::VisitInsertionOrder() {
1749   for (HBasicBlock* block : graph_->GetActiveBlocks()) {
1750     VisitBasicBlock(block);
1751   }
1752 }
1753 
1754 void HGraphVisitor::VisitReversePostOrder() {
1755   for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1756     VisitBasicBlock(block);
1757   }
1758 }
1759 
1760 void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) {
1761   for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
1762     it.Current()->Accept(this);
1763   }
1764   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
1765     it.Current()->Accept(this);
1766   }
1767 }
1768 
1769 HConstant* HTypeConversion::TryStaticEvaluation() const {
1770   HGraph* graph = GetBlock()->GetGraph();
1771   if (GetInput()->IsIntConstant()) {
1772     int32_t value = GetInput()->AsIntConstant()->GetValue();
1773     switch (GetResultType()) {
1774       case DataType::Type::kInt8:
1775         return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1776       case DataType::Type::kUint8:
1777         return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1778       case DataType::Type::kInt16:
1779         return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1780       case DataType::Type::kUint16:
1781         return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1782       case DataType::Type::kInt64:
1783         return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1784       case DataType::Type::kFloat32:
1785         return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1786       case DataType::Type::kFloat64:
1787         return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1788       default:
1789         return nullptr;
1790     }
1791   } else if (GetInput()->IsLongConstant()) {
1792     int64_t value = GetInput()->AsLongConstant()->GetValue();
1793     switch (GetResultType()) {
1794       case DataType::Type::kInt8:
1795         return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1796       case DataType::Type::kUint8:
1797         return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1798       case DataType::Type::kInt16:
1799         return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1800       case DataType::Type::kUint16:
1801         return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1802       case DataType::Type::kInt32:
1803         return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1804       case DataType::Type::kFloat32:
1805         return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1806       case DataType::Type::kFloat64:
1807         return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1808       default:
1809         return nullptr;
1810     }
1811   } else if (GetInput()->IsFloatConstant()) {
1812     float value = GetInput()->AsFloatConstant()->GetValue();
1813     switch (GetResultType()) {
1814       case DataType::Type::kInt32:
1815         if (std::isnan(value))
1816           return graph->GetIntConstant(0, GetDexPc());
1817         if (value >= static_cast<float>(kPrimIntMax))
1818           return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1819         if (value <= kPrimIntMin)
1820           return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1821         return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1822       case DataType::Type::kInt64:
1823         if (std::isnan(value))
1824           return graph->GetLongConstant(0, GetDexPc());
1825         if (value >= static_cast<float>(kPrimLongMax))
1826           return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1827         if (value <= kPrimLongMin)
1828           return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1829         return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1830       case DataType::Type::kFloat64:
1831         return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1832       default:
1833         return nullptr;
1834     }
1835   } else if (GetInput()->IsDoubleConstant()) {
1836     double value = GetInput()->AsDoubleConstant()->GetValue();
1837     switch (GetResultType()) {
1838       case DataType::Type::kInt32:
1839         if (std::isnan(value))
1840           return graph->GetIntConstant(0, GetDexPc());
1841         if (value >= kPrimIntMax)
1842           return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1843         if (value <= kPrimIntMin)
1844           return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1845         return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1846       case DataType::Type::kInt64:
1847         if (std::isnan(value))
1848           return graph->GetLongConstant(0, GetDexPc());
1849         if (value >= static_cast<double>(kPrimLongMax))
1850           return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1851         if (value <= kPrimLongMin)
1852           return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1853         return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1854       case DataType::Type::kFloat32:
1855         return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1856       default:
1857         return nullptr;
1858     }
1859   }
1860   return nullptr;
1861 }
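// Worked examples of the narrowing rules above (sketch): a float-to-int
// conversion of NaN folds to 0, of 1e20f to kPrimIntMax, and of -1e20f to
// kPrimIntMin; an in-range value such as 41.9f simply truncates to 41.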
1862 
1863 HConstant* HUnaryOperation::TryStaticEvaluation() const {
1864   if (GetInput()->IsIntConstant()) {
1865     return Evaluate(GetInput()->AsIntConstant());
1866   } else if (GetInput()->IsLongConstant()) {
1867     return Evaluate(GetInput()->AsLongConstant());
1868   } else if (kEnableFloatingPointStaticEvaluation) {
1869     if (GetInput()->IsFloatConstant()) {
1870       return Evaluate(GetInput()->AsFloatConstant());
1871     } else if (GetInput()->IsDoubleConstant()) {
1872       return Evaluate(GetInput()->AsDoubleConstant());
1873     }
1874   }
1875   return nullptr;
1876 }
1877 
1878 HConstant* HBinaryOperation::TryStaticEvaluation() const {
1879   if (GetLeft()->IsIntConstant() && GetRight()->IsIntConstant()) {
1880     return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsIntConstant());
1881   } else if (GetLeft()->IsLongConstant()) {
1882     if (GetRight()->IsIntConstant()) {
1883       // The binop(long, int) case is only valid for shifts and rotations.
1884       DCHECK(IsShl() || IsShr() || IsUShr() || IsRor()) << DebugName();
1885       return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsIntConstant());
1886     } else if (GetRight()->IsLongConstant()) {
1887       return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsLongConstant());
1888     }
1889   } else if (GetLeft()->IsNullConstant() && GetRight()->IsNullConstant()) {
1890     // The binop(null, null) case is only valid for equal and not-equal conditions.
1891     DCHECK(IsEqual() || IsNotEqual()) << DebugName();
1892     return Evaluate(GetLeft()->AsNullConstant(), GetRight()->AsNullConstant());
1893   } else if (kEnableFloatingPointStaticEvaluation) {
1894     if (GetLeft()->IsFloatConstant() && GetRight()->IsFloatConstant()) {
1895       return Evaluate(GetLeft()->AsFloatConstant(), GetRight()->AsFloatConstant());
1896     } else if (GetLeft()->IsDoubleConstant() && GetRight()->IsDoubleConstant()) {
1897       return Evaluate(GetLeft()->AsDoubleConstant(), GetRight()->AsDoubleConstant());
1898     }
1899   }
1900   return nullptr;
1901 }
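// Example of the (long, int) case above (sketch): an HShl whose inputs are
// LongConstant(1) and IntConstant(40) folds to LongConstant(1LL << 40). For
// non-shift operations both operands must be constants of the same kind.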
1902 
1903 HConstant* HBinaryOperation::GetConstantRight() const {
1904   if (GetRight()->IsConstant()) {
1905     return GetRight()->AsConstant();
1906   } else if (IsCommutative() && GetLeft()->IsConstant()) {
1907     return GetLeft()->AsConstant();
1908   } else {
1909     return nullptr;
1910   }
1911 }
1912 
1913 // If `GetConstantRight()` returns one of the inputs, this returns the other
1914 // one. Otherwise it returns null.
1915 HInstruction* HBinaryOperation::GetLeastConstantLeft() const {
1916   HInstruction* most_constant_right = GetConstantRight();
1917   if (most_constant_right == nullptr) {
1918     return nullptr;
1919   } else if (most_constant_right == GetLeft()) {
1920     return GetRight();
1921   } else {
1922     return GetLeft();
1923   }
1924 }
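// Examples (sketch): for Add(x, 3), GetConstantRight() is the IntConstant 3 and
// GetLeastConstantLeft() is `x`; the commutative Add(3, x) yields the same
// results; for Sub(3, x) both return null, since Sub is not commutative and the
// right-hand side is not a constant.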
1925 
1926 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs) {
1927   // TODO: Replace with auto-generated operator<<.
1928   switch (rhs) {
1929     case ComparisonBias::kNoBias:
1930       return os << "none";
1931     case ComparisonBias::kGtBias:
1932       return os << "gt";
1933     case ComparisonBias::kLtBias:
1934       return os << "lt";
1935     default:
1936       LOG(FATAL) << "Unknown ComparisonBias: " << static_cast<int>(rhs);
1937       UNREACHABLE();
1938   }
1939 }
1940 
1941 bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
1942   return this == instruction->GetPreviousDisregardingMoves();
1943 }
1944 
1945 bool HInstruction::Equals(const HInstruction* other) const {
1946   if (GetKind() != other->GetKind()) return false;
1947   if (GetType() != other->GetType()) return false;
1948   if (!InstructionDataEquals(other)) return false;
1949   HConstInputsRef inputs = GetInputs();
1950   HConstInputsRef other_inputs = other->GetInputs();
1951   if (inputs.size() != other_inputs.size()) return false;
1952   for (size_t i = 0; i != inputs.size(); ++i) {
1953     if (inputs[i] != other_inputs[i]) return false;
1954   }
1955 
1956   DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode());
1957   return true;
1958 }
1959 
1960 std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs) {
1961 #define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
1962   switch (rhs) {
1963     FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_CASE)
1964     default:
1965       os << "Unknown instruction kind " << static_cast<int>(rhs);
1966       break;
1967   }
1968 #undef DECLARE_CASE
1969   return os;
1970 }
1971 
1972 std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs) {
1973   // TODO Really this should be const but that would require const-ifying
1974   // graph-visualizer and HGraphVisitor which are tangled up everywhere.
1975   return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ false);
1976 }
1977 
1978 std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs) {
1979   // TODO Really this should be const but that would require const-ifying
1980   // graph-visualizer and HGraphVisitor which are tangled up everywhere.
1981   return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ true);
1982 }
1983 
1984 std::ostream& operator<<(std::ostream& os, const HInstruction& rhs) {
1985   return os << rhs.DumpWithoutArgs();
1986 }
1987 
1988 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst) {
1989   os << "Instructions[";
1990   bool first = true;
1991   for (const auto& hi : lst) {
1992     if (!first) {
1993       os << ", ";
1994     }
1995     first = false;
1996     os << hi.GetUser()->DebugName() << "[id: " << hi.GetUser()->GetId()
1997        << ", blk: " << hi.GetUser()->GetBlock()->GetBlockId() << "]@" << hi.GetIndex();
1998   }
1999   os << "]";
2000   return os;
2001 }
2002 
2003 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst) {
2004   os << "Environments[";
2005   bool first = true;
2006   for (const auto& hi : lst) {
2007     if (!first) {
2008       os << ", ";
2009     }
2010     first = false;
2011     os << *hi.GetUser()->GetHolder() << "@" << hi.GetIndex();
2012   }
2013   os << "]";
2014   return os;
2015 }
2016 
2017 std::ostream& HGraph::Dump(std::ostream& os,
2018                            CodeGenerator* codegen,
2019                            std::optional<std::reference_wrapper<const BlockNamer>> namer) {
2020   HGraphVisualizer vis(&os, this, codegen, namer);
2021   vis.DumpGraphDebug();
2022   return os;
2023 }
2024 
2025 void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) {
2026   if (do_checks) {
2027     DCHECK(!IsPhi());
2028     DCHECK(!IsControlFlow());
2029     DCHECK(CanBeMoved() ||
2030            // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization.
2031            IsShouldDeoptimizeFlag());
2032     DCHECK(!cursor->IsPhi());
2033   }
2034 
2035   next_->previous_ = previous_;
2036   if (previous_ != nullptr) {
2037     previous_->next_ = next_;
2038   }
2039   if (block_->instructions_.first_instruction_ == this) {
2040     block_->instructions_.first_instruction_ = next_;
2041   }
2042   DCHECK_NE(block_->instructions_.last_instruction_, this);
2043 
2044   previous_ = cursor->previous_;
2045   if (previous_ != nullptr) {
2046     previous_->next_ = this;
2047   }
2048   next_ = cursor;
2049   cursor->previous_ = this;
2050   block_ = cursor->block_;
2051 
2052   if (block_->instructions_.first_instruction_ == cursor) {
2053     block_->instructions_.first_instruction_ = this;
2054   }
2055 }
2056 
2057 void HInstruction::MoveBeforeFirstUserAndOutOfLoops() {
2058   DCHECK(!CanThrow());
2059   DCHECK(!HasSideEffects());
2060   DCHECK(!HasEnvironmentUses());
2061   DCHECK(HasNonEnvironmentUses());
2062   DCHECK(!IsPhi());  // Makes no sense for Phi.
2063   DCHECK_EQ(InputCount(), 0u);
2064 
2065   // Find the target block.
2066   auto uses_it = GetUses().begin();
2067   auto uses_end = GetUses().end();
2068   HBasicBlock* target_block = uses_it->GetUser()->GetBlock();
2069   ++uses_it;
2070   while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) {
2071     ++uses_it;
2072   }
2073   if (uses_it != uses_end) {
2074     // This instruction has uses in two or more blocks. Find the common dominator.
2075     CommonDominator finder(target_block);
2076     for (; uses_it != uses_end; ++uses_it) {
2077       finder.Update(uses_it->GetUser()->GetBlock());
2078     }
2079     target_block = finder.Get();
2080     DCHECK(target_block != nullptr);
2081   }
2082   // Move to the first dominator not in a loop.
2083   while (target_block->IsInLoop()) {
2084     target_block = target_block->GetDominator();
2085     DCHECK(target_block != nullptr);
2086   }
2087 
2088   // Find insertion position.
2089   HInstruction* insert_pos = nullptr;
2090   for (const HUseListNode<HInstruction*>& use : GetUses()) {
2091     if (use.GetUser()->GetBlock() == target_block &&
2092         (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
2093       insert_pos = use.GetUser();
2094     }
2095   }
2096   if (insert_pos == nullptr) {
2097     // No user in `target_block`, insert before the control flow instruction.
2098     insert_pos = target_block->GetLastInstruction();
2099     DCHECK(insert_pos->IsControlFlow());
2100     // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
2101     if (insert_pos->IsIf()) {
2102       HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
2103       if (if_input == insert_pos->GetPrevious()) {
2104         insert_pos = if_input;
2105       }
2106     }
2107   }
2108   MoveBefore(insert_pos);
2109 }
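// Net effect of the steps above (sketch): the instruction is re-inserted in the
// closest common dominator of all its users, hoisted further up the dominator
// tree until that block is outside any loop, and placed just before its first
// user there (or before the block's terminator, keeping an HCondition adjacent
// to its HIf so it does not get materialized needlessly).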
2110 
2111 HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor) {
2112   DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
2113   DCHECK_EQ(cursor->GetBlock(), this);
2114 
2115   HBasicBlock* new_block =
2116       new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
2117   new_block->instructions_.first_instruction_ = cursor;
2118   new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2119   instructions_.last_instruction_ = cursor->previous_;
2120   if (cursor->previous_ == nullptr) {
2121     instructions_.first_instruction_ = nullptr;
2122   } else {
2123     cursor->previous_->next_ = nullptr;
2124     cursor->previous_ = nullptr;
2125   }
2126 
2127   new_block->instructions_.SetBlockOfInstructions(new_block);
2128   AddInstruction(new (GetGraph()->GetAllocator()) HGoto(new_block->GetDexPc()));
2129 
2130   for (HBasicBlock* successor : GetSuccessors()) {
2131     successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2132   }
2133   new_block->successors_.swap(successors_);
2134   DCHECK(successors_.empty());
2135   AddSuccessor(new_block);
2136 
2137   GetGraph()->AddBlock(new_block);
2138   return new_block;
2139 }
2140 
2141 HBasicBlock* HBasicBlock::CreateImmediateDominator() {
2142   DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
2143   DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented.";
2144 
2145   HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
2146 
2147   for (HBasicBlock* predecessor : GetPredecessors()) {
2148     predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block;
2149   }
2150   new_block->predecessors_.swap(predecessors_);
2151   DCHECK(predecessors_.empty());
2152   AddPredecessor(new_block);
2153 
2154   GetGraph()->AddBlock(new_block);
2155   return new_block;
2156 }
2157 
2158 HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) {
2159   DCHECK_EQ(cursor->GetBlock(), this);
2160 
2161   HBasicBlock* new_block =
2162       new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
2163   new_block->instructions_.first_instruction_ = cursor;
2164   new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2165   instructions_.last_instruction_ = cursor->previous_;
2166   if (cursor->previous_ == nullptr) {
2167     instructions_.first_instruction_ = nullptr;
2168   } else {
2169     cursor->previous_->next_ = nullptr;
2170     cursor->previous_ = nullptr;
2171   }
2172 
2173   new_block->instructions_.SetBlockOfInstructions(new_block);
2174 
2175   for (HBasicBlock* successor : GetSuccessors()) {
2176     successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2177   }
2178   new_block->successors_.swap(successors_);
2179   DCHECK(successors_.empty());
2180 
2181   for (HBasicBlock* dominated : GetDominatedBlocks()) {
2182     dominated->dominator_ = new_block;
2183   }
2184   new_block->dominated_blocks_.swap(dominated_blocks_);
2185   DCHECK(dominated_blocks_.empty());
2186   return new_block;
2187 }
2188 
2189 HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) {
2190   DCHECK(!cursor->IsControlFlow());
2191   DCHECK_NE(instructions_.last_instruction_, cursor);
2192   DCHECK_EQ(cursor->GetBlock(), this);
2193 
2194   HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
2195   new_block->instructions_.first_instruction_ = cursor->GetNext();
2196   new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2197   cursor->next_->previous_ = nullptr;
2198   cursor->next_ = nullptr;
2199   instructions_.last_instruction_ = cursor;
2200 
2201   new_block->instructions_.SetBlockOfInstructions(new_block);
2202   for (HBasicBlock* successor : GetSuccessors()) {
2203     successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2204   }
2205   new_block->successors_.swap(successors_);
2206   DCHECK(successors_.empty());
2207 
2208   for (HBasicBlock* dominated : GetDominatedBlocks()) {
2209     dominated->dominator_ = new_block;
2210   }
2211   new_block->dominated_blocks_.swap(dominated_blocks_);
2212   DCHECK(dominated_blocks_.empty());
2213   return new_block;
2214 }
2215 
2216 const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
2217   if (EndsWithTryBoundary()) {
2218     HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
2219     if (try_boundary->IsEntry()) {
2220       DCHECK(!IsTryBlock());
2221       return try_boundary;
2222     } else {
2223       DCHECK(IsTryBlock());
2224       DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary));
2225       return nullptr;
2226     }
2227   } else if (IsTryBlock()) {
2228     return &try_catch_information_->GetTryEntry();
2229   } else {
2230     return nullptr;
2231   }
2232 }
2233 
2234 bool HBasicBlock::HasThrowingInstructions() const {
2235   for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2236     if (it.Current()->CanThrow()) {
2237       return true;
2238     }
2239   }
2240   return false;
2241 }
2242 
2243 static bool HasOnlyOneInstruction(const HBasicBlock& block) {
2244   return block.GetPhis().IsEmpty()
2245       && !block.GetInstructions().IsEmpty()
2246       && block.GetFirstInstruction() == block.GetLastInstruction();
2247 }
2248 
2249 bool HBasicBlock::IsSingleGoto() const {
2250   return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto();
2251 }
2252 
2253 bool HBasicBlock::IsSingleReturn() const {
2254   return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsReturn();
2255 }
2256 
2257 bool HBasicBlock::IsSingleReturnOrReturnVoidAllowingPhis() const {
2258   return (GetFirstInstruction() == GetLastInstruction()) &&
2259          (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2260 }
2261 
2262 bool HBasicBlock::IsSingleTryBoundary() const {
2263   return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary();
2264 }
2265 
2266 bool HBasicBlock::EndsWithControlFlowInstruction() const {
2267   return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow();
2268 }
2269 
2270 bool HBasicBlock::EndsWithReturn() const {
2271   return !GetInstructions().IsEmpty() &&
2272       (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2273 }
2274 
2275 bool HBasicBlock::EndsWithIf() const {
2276   return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
2277 }
2278 
2279 bool HBasicBlock::EndsWithTryBoundary() const {
2280   return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
2281 }
2282 
2283 bool HBasicBlock::HasSinglePhi() const {
2284   return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
2285 }
2286 
2287 ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const {
2288   if (EndsWithTryBoundary()) {
2289     // The normal-flow successor of HTryBoundary is always stored at index zero.
2290     DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor());
2291     return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u);
2292   } else {
2293     // All successors of blocks not ending with TryBoundary are normal.
2294     return ArrayRef<HBasicBlock* const>(successors_);
2295   }
2296 }
2297 
2298 ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const {
2299   if (EndsWithTryBoundary()) {
2300     return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers();
2301   } else {
2302     // Blocks not ending with TryBoundary do not have exceptional successors.
2303     return ArrayRef<HBasicBlock* const>();
2304   }
2305 }
2306 
2307 bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
2308   ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
2309   ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();
2310 
2311   size_t length = handlers1.size();
2312   if (length != handlers2.size()) {
2313     return false;
2314   }
2315 
2316   // Exception handlers need to be stored in the same order.
2317   for (size_t i = 0; i < length; ++i) {
2318     if (handlers1[i] != handlers2[i]) {
2319       return false;
2320     }
2321   }
2322   return true;
2323 }
2324 
2325 size_t HInstructionList::CountSize() const {
2326   size_t size = 0;
2327   HInstruction* current = first_instruction_;
2328   for (; current != nullptr; current = current->GetNext()) {
2329     size++;
2330   }
2331   return size;
2332 }
2333 
2334 void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
2335   for (HInstruction* current = first_instruction_;
2336        current != nullptr;
2337        current = current->GetNext()) {
2338     current->SetBlock(block);
2339   }
2340 }
2341 
2342 void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
2343   DCHECK(Contains(cursor));
2344   if (!instruction_list.IsEmpty()) {
2345     if (cursor == last_instruction_) {
2346       last_instruction_ = instruction_list.last_instruction_;
2347     } else {
2348       cursor->next_->previous_ = instruction_list.last_instruction_;
2349     }
2350     instruction_list.last_instruction_->next_ = cursor->next_;
2351     cursor->next_ = instruction_list.first_instruction_;
2352     instruction_list.first_instruction_->previous_ = cursor;
2353   }
2354 }
2355 
2356 void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
2357   DCHECK(Contains(cursor));
2358   if (!instruction_list.IsEmpty()) {
2359     if (cursor == first_instruction_) {
2360       first_instruction_ = instruction_list.first_instruction_;
2361     } else {
2362       cursor->previous_->next_ = instruction_list.first_instruction_;
2363     }
2364     instruction_list.last_instruction_->next_ = cursor;
2365     instruction_list.first_instruction_->previous_ = cursor->previous_;
2366     cursor->previous_ = instruction_list.last_instruction_;
2367   }
2368 }
2369 
2370 void HInstructionList::Add(const HInstructionList& instruction_list) {
2371   if (IsEmpty()) {
2372     first_instruction_ = instruction_list.first_instruction_;
2373     last_instruction_ = instruction_list.last_instruction_;
2374   } else {
2375     AddAfter(last_instruction_, instruction_list);
2376   }
2377 }
2378 
2379 // Should be called on instructions in a dead block in post order. This method
2380 // assumes `insn` has been removed from all users with the exception of catch
2381 // phis because of missing exceptional edges in the graph. It removes the
2382 // instruction from catch phi uses, together with inputs of other catch phis in
2383 // the catch block at the same index, as these must be dead too.
2384 static void RemoveUsesOfDeadInstruction(HInstruction* insn) {
2385   DCHECK(!insn->HasEnvironmentUses());
2386   while (insn->HasNonEnvironmentUses()) {
2387     const HUseListNode<HInstruction*>& use = insn->GetUses().front();
2388     size_t use_index = use.GetIndex();
2389     HBasicBlock* user_block =  use.GetUser()->GetBlock();
2390     DCHECK(use.GetUser()->IsPhi() && user_block->IsCatchBlock());
2391     for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2392       phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
2393     }
2394   }
2395 }
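// Illustrative sketch: if `insn` still feeds input k of a catch phi, e.g.
//   catch block: p0 = Phi(..., insn, ...)   p1 = Phi(..., v, ...)
// then input k is removed from every phi of that catch block (both `insn` from
// p0 and `v` from p1), since the throwing path that produced those values is dead.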
2396 
2397 void HBasicBlock::DisconnectAndDelete() {
2398   // Dominators must be removed after all the blocks they dominate. This way
2399   // a loop header is removed last, a requirement for correct loop information
2400   // iteration.
2401   DCHECK(dominated_blocks_.empty());
2402 
2403   // The following steps gradually remove the block from all its dependants in
2404   // post order (b/27683071).
2405 
2406   // (1) Store a basic block that we'll use in step (5) to find loops to be updated.
2407   //     We need to do this before step (4) which destroys the predecessor list.
2408   HBasicBlock* loop_update_start = this;
2409   if (IsLoopHeader()) {
2410     HLoopInformation* loop_info = GetLoopInformation();
2411     // All other blocks in this loop should have been removed because the header
2412     // was their dominator.
2413     // Note that we do not remove `this` from `loop_info` as it is unreachable.
2414     DCHECK(!loop_info->IsIrreducible());
2415     DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u);
2416     DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId());
2417     loop_update_start = loop_info->GetPreHeader();
2418   }
2419 
2420   // (2) Disconnect the block from its successors and update their phis.
2421   for (HBasicBlock* successor : successors_) {
2422     // Delete this block from the list of predecessors.
2423     size_t this_index = successor->GetPredecessorIndexOf(this);
2424     successor->predecessors_.erase(successor->predecessors_.begin() + this_index);
2425 
2426     // Check that `successor` has other predecessors, otherwise `this` is the
2427     // dominator of `successor` which violates the order DCHECKed at the top.
2428     DCHECK(!successor->predecessors_.empty());
2429 
2430     // Remove this block's entries in the successor's phis. Skip exceptional
2431     // successors because catch phi inputs do not correspond to predecessor
2432     // blocks but throwing instructions. The inputs of the catch phis will be
2433     // updated in step (3).
2434     if (!successor->IsCatchBlock()) {
2435       if (successor->predecessors_.size() == 1u) {
2436         // The successor has just one predecessor left. Replace phis with the only
2437         // remaining input.
2438         for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2439           HPhi* phi = phi_it.Current()->AsPhi();
2440           phi->ReplaceWith(phi->InputAt(1 - this_index));
2441           successor->RemovePhi(phi);
2442         }
2443       } else {
2444         for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2445           phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
2446         }
2447       }
2448     }
2449   }
2450   successors_.clear();
2451 
2452   // (3) Remove instructions and phis. Instructions should have no remaining uses
2453   //     except in catch phis. If an instruction is used by a catch phi at `index`,
2454   //     remove `index`-th input of all phis in the catch block since they are
2455   //     guaranteed dead. Note that we may miss dead inputs this way but the
2456   //     graph will always remain consistent.
2457   for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2458     HInstruction* insn = it.Current();
2459     RemoveUsesOfDeadInstruction(insn);
2460     RemoveInstruction(insn);
2461   }
2462   for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
2463     HPhi* insn = it.Current()->AsPhi();
2464     RemoveUsesOfDeadInstruction(insn);
2465     RemovePhi(insn);
2466   }
2467 
2468   // (4) Disconnect the block from its predecessors and update their
2469   //     control-flow instructions.
2470   for (HBasicBlock* predecessor : predecessors_) {
2471     // We should not see any back edges as they would have been removed by step (3).
2472     DCHECK_IMPLIES(IsInLoop(), !GetLoopInformation()->IsBackEdge(*predecessor));
2473 
2474     HInstruction* last_instruction = predecessor->GetLastInstruction();
2475     if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
2476       // This block is the only normal-flow successor of the TryBoundary which
2477       // makes `predecessor` dead. Since DCE removes blocks in post order,
2478       // exception handlers of this TryBoundary were already visited and any
2479       // remaining handlers therefore must be live. We remove `predecessor` from
2480       // their list of predecessors.
2481       DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
2482       while (predecessor->GetSuccessors().size() > 1) {
2483         HBasicBlock* handler = predecessor->GetSuccessors()[1];
2484         DCHECK(handler->IsCatchBlock());
2485         predecessor->RemoveSuccessor(handler);
2486         handler->RemovePredecessor(predecessor);
2487       }
2488     }
2489 
2490     predecessor->RemoveSuccessor(this);
2491     uint32_t num_pred_successors = predecessor->GetSuccessors().size();
2492     if (num_pred_successors == 1u) {
2493       // If we have one successor after removing one, then we must have
2494       // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
2495     // successor. Replace those with an HGoto.
2496       DCHECK(last_instruction->IsIf() ||
2497              last_instruction->IsPackedSwitch() ||
2498              (last_instruction->IsTryBoundary() && IsCatchBlock()));
2499       predecessor->RemoveInstruction(last_instruction);
2500       predecessor->AddInstruction(new (graph_->GetAllocator()) HGoto(last_instruction->GetDexPc()));
2501     } else if (num_pred_successors == 0u) {
2502       // The predecessor has no remaining successors and therefore must be dead.
2503       // We deliberately leave it without a control-flow instruction so that the
2504       // GraphChecker fails unless it is also removed during the pass.
2505       predecessor->RemoveInstruction(last_instruction);
2506     } else {
2507       // There are multiple successors left. The removed block might be a successor
2508       // of a PackedSwitch which will be completely removed (perhaps replaced with
2509       // a Goto), or we are deleting a catch block from a TryBoundary. In either
2510       // case, leave `last_instruction` as is for now.
2511       DCHECK(last_instruction->IsPackedSwitch() ||
2512              (last_instruction->IsTryBoundary() && IsCatchBlock()));
2513     }
2514   }
2515   predecessors_.clear();
2516 
2517   // (5) Remove the block from all loops it is included in. Skip the inner-most
2518   //     loop if this is the loop header (see definition of `loop_update_start`)
2519   //     because the loop header's predecessor list has been destroyed in step (4).
2520   for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
2521     HLoopInformation* loop_info = it.Current();
2522     loop_info->Remove(this);
2523     if (loop_info->IsBackEdge(*this)) {
2524       // If this was the last back edge of the loop, we deliberately leave the
2525       // loop in an inconsistent state and will fail GraphChecker unless the
2526       // entire loop is removed during the pass.
2527       loop_info->RemoveBackEdge(this);
2528     }
2529   }
2530 
2531   // (6) Disconnect from the dominator.
2532   dominator_->RemoveDominatedBlock(this);
2533   SetDominator(nullptr);
2534 
2535   // (7) Delete from the graph, update reverse post order.
2536   graph_->DeleteDeadEmptyBlock(this);
2537   SetGraph(nullptr);
2538 }
2539 
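// Appends the instructions of `other` to this block, replacing this block's
// control-flow instruction, and leaves `other` without instructions.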
2540 void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
2541   DCHECK(EndsWithControlFlowInstruction());
2542   RemoveInstruction(GetLastInstruction());
2543   instructions_.Add(other->GetInstructions());
2544   other->instructions_.SetBlockOfInstructions(this);
2545   other->instructions_.Clear();
2546 }
2547 
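// Merges the single successor `other` into this block: moves its instructions,
// loop membership, successors and dominated blocks over, then deletes `other`
// from the graph.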
2548 void HBasicBlock::MergeWith(HBasicBlock* other) {
2549   DCHECK_EQ(GetGraph(), other->GetGraph());
2550   DCHECK(ContainsElement(dominated_blocks_, other));
2551   DCHECK_EQ(GetSingleSuccessor(), other);
2552   DCHECK_EQ(other->GetSinglePredecessor(), this);
2553   DCHECK(other->GetPhis().IsEmpty());
2554 
2555   // Move instructions from `other` to `this`.
2556   MergeInstructionsWith(other);
2557 
2558   // Remove `other` from the loops it is included in.
2559   for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
2560     HLoopInformation* loop_info = it.Current();
2561     loop_info->Remove(other);
2562     if (loop_info->IsBackEdge(*other)) {
2563       loop_info->ReplaceBackEdge(other, this);
2564     }
2565   }
2566 
2567   // Update links to the successors of `other`.
2568   successors_.clear();
2569   for (HBasicBlock* successor : other->GetSuccessors()) {
2570     successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
2571   }
2572   successors_.swap(other->successors_);
2573   DCHECK(other->successors_.empty());
2574 
2575   // Update the dominator tree.
2576   RemoveDominatedBlock(other);
2577   for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
2578     dominated->SetDominator(this);
2579   }
2580   dominated_blocks_.insert(
2581       dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
2582   other->dominated_blocks_.clear();
2583   other->dominator_ = nullptr;
2584 
2585   // Clear the list of predecessors of `other` in preparation of deleting it.
2586   other->predecessors_.clear();
2587 
2588   // Delete `other` from the graph. The function updates reverse post order.
2589   graph_->DeleteDeadEmptyBlock(other);
2590   other->SetGraph(nullptr);
2591 }
2592 
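// Variant of MergeWith() used during inlining: `other` is the first body block
// of the callee graph, so the two blocks belong to different graphs and `other`
// is only detached here; InlineInto() takes care of the remaining bookkeeping.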
2593 void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
2594   DCHECK_NE(GetGraph(), other->GetGraph());
2595   DCHECK(GetDominatedBlocks().empty());
2596   DCHECK(GetSuccessors().empty());
2597   DCHECK(!EndsWithControlFlowInstruction());
2598   DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
2599   DCHECK(other->GetPhis().IsEmpty());
2600   DCHECK(!other->IsInLoop());
2601 
2602   // Move instructions from `other` to `this`.
2603   instructions_.Add(other->GetInstructions());
2604   other->instructions_.SetBlockOfInstructions(this);
2605 
2606   // Update links to the successors of `other`.
2607   successors_.clear();
2608   for (HBasicBlock* successor : other->GetSuccessors()) {
2609     successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
2610   }
2611   successors_.swap(other->successors_);
2612   DCHECK(other->successors_.empty());
2613 
2614   // Update the dominator tree.
2615   for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
2616     dominated->SetDominator(this);
2617   }
2618   dominated_blocks_.insert(
2619       dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
2620   other->dominated_blocks_.clear();
2621   other->dominator_ = nullptr;
2622   other->graph_ = nullptr;
2623 }
2624 
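// Redirects all predecessor, successor and dominator links from this block to
// `other` and detaches this block from the graph structure.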
2625 void HBasicBlock::ReplaceWith(HBasicBlock* other) {
2626   while (!GetPredecessors().empty()) {
2627     HBasicBlock* predecessor = GetPredecessors()[0];
2628     predecessor->ReplaceSuccessor(this, other);
2629   }
2630   while (!GetSuccessors().empty()) {
2631     HBasicBlock* successor = GetSuccessors()[0];
2632     successor->ReplacePredecessor(this, other);
2633   }
2634   for (HBasicBlock* dominated : GetDominatedBlocks()) {
2635     other->AddDominatedBlock(dominated);
2636   }
2637   GetDominator()->ReplaceDominatedBlock(this, other);
2638   other->SetDominator(GetDominator());
2639   dominator_ = nullptr;
2640   graph_ = nullptr;
2641 }
2642 
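// Deletes a block that has already been fully disconnected and emptied. The
// block's slot in `blocks_` is set to null rather than erased so that the
// remaining block ids stay valid.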
2643 void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
2644   DCHECK_EQ(block->GetGraph(), this);
2645   DCHECK(block->GetSuccessors().empty());
2646   DCHECK(block->GetPredecessors().empty());
2647   DCHECK(block->GetDominatedBlocks().empty());
2648   DCHECK(block->GetDominator() == nullptr);
2649   DCHECK(block->GetInstructions().IsEmpty());
2650   DCHECK(block->GetPhis().IsEmpty());
2651 
2652   if (block->IsExitBlock()) {
2653     SetExitBlock(nullptr);
2654   }
2655 
2656   RemoveElement(reverse_post_order_, block);
2657   blocks_[block->GetBlockId()] = nullptr;
2658   block->SetGraph(nullptr);
2659 }
2660 
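// Makes `block` part of the loop and try/catch structures that `reference`
// belongs to. Used for blocks that are newly created or transplanted from
// another graph, e.g. during inlining.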
2661 void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
2662                                                    HBasicBlock* reference,
2663                                                    bool replace_if_back_edge) {
2664   if (block->IsLoopHeader()) {
2665     // Clear the information of which blocks are contained in that loop. Since the
2666     // information is stored as a bit vector based on block ids, we have to update
2667     // it, as those block ids were specific to the callee graph and we are now adding
2668     // these blocks to the caller graph.
2669     block->GetLoopInformation()->ClearAllBlocks();
2670   }
2671 
2672   // If not already in a loop, update the loop information.
2673   if (!block->IsInLoop()) {
2674     block->SetLoopInformation(reference->GetLoopInformation());
2675   }
2676 
2677   // If the block is in a loop, update all its outward loops.
2678   HLoopInformation* loop_info = block->GetLoopInformation();
2679   if (loop_info != nullptr) {
2680     for (HLoopInformationOutwardIterator loop_it(*block);
2681          !loop_it.Done();
2682          loop_it.Advance()) {
2683       loop_it.Current()->Add(block);
2684     }
2685     if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
2686       loop_info->ReplaceBackEdge(reference, block);
2687     }
2688   }
2689 
2690   // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
2691   TryCatchInformation* try_catch_info = reference->IsTryBlock()
2692       ? reference->GetTryCatchInformation()
2693       : nullptr;
2694   block->SetTryCatchInformation(try_catch_info);
2695 }
2696 
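// Inlines this (callee) graph into `outer_graph` at the position of `invoke`.
// Returns the instruction producing the inlined return value, or null if the
// callee returns void.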
2697 HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
2698   DCHECK(HasExitBlock()) << "Unimplemented scenario";
2699   // Update the environments in this graph to have the invoke's environment
2700   // as parent.
2701   {
2702     // Skip the entry block; we do not need to update the entry's suspend check.
2703     for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
2704       for (HInstructionIterator instr_it(block->GetInstructions());
2705            !instr_it.Done();
2706            instr_it.Advance()) {
2707         HInstruction* current = instr_it.Current();
2708         if (current->NeedsEnvironment()) {
2709           DCHECK(current->HasEnvironment());
2710           current->GetEnvironment()->SetAndCopyParentChain(
2711               outer_graph->GetAllocator(), invoke->GetEnvironment());
2712         }
2713       }
2714     }
2715   }
2716   outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());
2717 
2718   if (HasBoundsChecks()) {
2719     outer_graph->SetHasBoundsChecks(true);
2720   }
2721   if (HasLoops()) {
2722     outer_graph->SetHasLoops(true);
2723   }
2724   if (HasIrreducibleLoops()) {
2725     outer_graph->SetHasIrreducibleLoops(true);
2726   }
2727   if (HasDirectCriticalNativeCall()) {
2728     outer_graph->SetHasDirectCriticalNativeCall(true);
2729   }
2730   if (HasTryCatch()) {
2731     outer_graph->SetHasTryCatch(true);
2732   }
2733   if (HasSIMD()) {
2734     outer_graph->SetHasSIMD(true);
2735   }
2736 
2737   HInstruction* return_value = nullptr;
2738   if (GetBlocks().size() == 3) {
2739     // Inliner already made sure we don't inline methods that always throw.
2740     DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
2741     // Simple case of an entry block, a body block, and an exit block.
2742     // Put the body block's instruction into `invoke`'s block.
2743     HBasicBlock* body = GetBlocks()[1];
2744     DCHECK(GetBlocks()[0]->IsEntryBlock());
2745     DCHECK(GetBlocks()[2]->IsExitBlock());
2746     DCHECK(!body->IsExitBlock());
2747     DCHECK(!body->IsInLoop());
2748     HInstruction* last = body->GetLastInstruction();
2749 
2750     // Note that we add instructions before the invoke only to simplify polymorphic inlining.
2751     invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
2752     body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());
2753 
2754     // Replace the invoke with the return value of the inlined graph.
2755     if (last->IsReturn()) {
2756       return_value = last->InputAt(0);
2757     } else {
2758       DCHECK(last->IsReturnVoid());
2759     }
2760 
2761     invoke->GetBlock()->RemoveInstruction(last);
2762   } else {
2763     // Need to inline multiple blocks. We split `invoke`'s block
2764     // into two blocks, merge the first block of the inlined graph into
2765     // the first half, and replace the exit block of the inlined graph
2766     // with the second half.
2767     ArenaAllocator* allocator = outer_graph->GetAllocator();
2768     HBasicBlock* at = invoke->GetBlock();
2769     // Note that we split before the invoke only to simplify polymorphic inlining.
2770     HBasicBlock* to = at->SplitBeforeForInlining(invoke);
2771 
2772     HBasicBlock* first = entry_block_->GetSuccessors()[0];
2773     DCHECK(!first->IsInLoop());
2774     at->MergeWithInlined(first);
2775     exit_block_->ReplaceWith(to);
2776 
2777     // Update the meta information surrounding blocks:
2778     // (1) the graph they are now in,
2779     // (2) the reverse post order of that graph,
2780     // (3) their potential loop information, inner and outer,
2781     // (4) try block membership.
2782     // Note that we do not need to update catch phi inputs because they
2783     // correspond to the register file of the outer method which the inlinee
2784     // cannot modify.
2785 
2786     // We don't add the entry block, the exit block, and the first block, which
2787     // has been merged with `at`.
2788     static constexpr int kNumberOfSkippedBlocksInCallee = 3;
2789 
2790     // We add the `to` block.
2791     static constexpr int kNumberOfNewBlocksInCaller = 1;
2792     size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
2793         + kNumberOfNewBlocksInCaller;
2794 
2795     // Find the location of `at` in the outer graph's reverse post order. The new
2796     // blocks will be added after it.
2797     size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
2798     MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);
2799 
2800     // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
2801     // and (4) to the blocks that apply.
2802     for (HBasicBlock* current : GetReversePostOrder()) {
2803       if (current != exit_block_ && current != entry_block_ && current != first) {
2804         DCHECK(current->GetTryCatchInformation() == nullptr);
2805         DCHECK(current->GetGraph() == this);
2806         current->SetGraph(outer_graph);
2807         outer_graph->AddBlock(current);
2808         outer_graph->reverse_post_order_[++index_of_at] = current;
2809         UpdateLoopAndTryInformationOfNewBlock(current, at,  /* replace_if_back_edge= */ false);
2810       }
2811     }
2812 
2813     // Do (1), (2), (3) and (4) to `to`.
2814     to->SetGraph(outer_graph);
2815     outer_graph->AddBlock(to);
2816     outer_graph->reverse_post_order_[++index_of_at] = to;
2817     // Only `to` can become a back edge, as the inlined blocks
2818     // are predecessors of `to`.
2819     UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge= */ true);
2820 
2821     // Update all predecessors of the exit block (now the `to` block) to end
2822     // with an `HGoto` instead of an `HReturn`. Special-case throwing blocks
2823     // so that they now get the outer graph's exit block as successor. Note that
2824     // the inliner currently doesn't support inlining methods with try/catch.
2825     HPhi* return_value_phi = nullptr;
2826     bool rerun_dominance = false;
2827     bool rerun_loop_analysis = false;
2828     for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
2829       HBasicBlock* predecessor = to->GetPredecessors()[pred];
2830       HInstruction* last = predecessor->GetLastInstruction();
2831       if (last->IsThrow()) {
2832         DCHECK(!at->IsTryBlock());
2833         predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
2834         --pred;
2835         // We need to re-run dominance information, as the exit block now has
2836         // a new dominator.
2837         rerun_dominance = true;
2838         if (predecessor->GetLoopInformation() != nullptr) {
2839           // The exit block and blocks post dominated by the exit block do not belong
2840           // to any loop. Because we do not compute the post dominators, we need to re-run
2841           // loop analysis to get the loop information correct.
2842           rerun_loop_analysis = true;
2843         }
2844       } else {
2845         if (last->IsReturnVoid()) {
2846           DCHECK(return_value == nullptr);
2847           DCHECK(return_value_phi == nullptr);
2848         } else {
2849           DCHECK(last->IsReturn());
2850           if (return_value_phi != nullptr) {
2851             return_value_phi->AddInput(last->InputAt(0));
2852           } else if (return_value == nullptr) {
2853             return_value = last->InputAt(0);
2854           } else {
2855             // There will be multiple returns.
2856             return_value_phi = new (allocator) HPhi(
2857                 allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
2858             to->AddPhi(return_value_phi);
2859             return_value_phi->AddInput(return_value);
2860             return_value_phi->AddInput(last->InputAt(0));
2861             return_value = return_value_phi;
2862           }
2863         }
2864         predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
2865         predecessor->RemoveInstruction(last);
2866       }
2867     }
2868     if (rerun_loop_analysis) {
2869       DCHECK(!outer_graph->HasIrreducibleLoops())
2870           << "Recomputing loop information in graphs with irreducible loops "
2871           << "is unsupported, as it could lead to loop header changes";
2872       outer_graph->ClearLoopInformation();
2873       outer_graph->ClearDominanceInformation();
2874       outer_graph->BuildDominatorTree();
2875     } else if (rerun_dominance) {
2876       outer_graph->ClearDominanceInformation();
2877       outer_graph->ComputeDominanceInformation();
2878     }
2879   }
2880 
2881   // Walk over the entry block and:
2882   // - Move constants from the entry block to the outer_graph's entry block,
2883   // - Replace HParameterValue instructions with their real value,
2884   // - Remove suspend checks, which hold an environment.
2885   // We must do this after the other blocks have been inlined, otherwise ids of
2886   // constants could overlap with the inner graph.
2887   size_t parameter_index = 0;
2888   for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
2889     HInstruction* current = it.Current();
2890     HInstruction* replacement = nullptr;
2891     if (current->IsNullConstant()) {
2892       replacement = outer_graph->GetNullConstant(current->GetDexPc());
2893     } else if (current->IsIntConstant()) {
2894       replacement = outer_graph->GetIntConstant(
2895           current->AsIntConstant()->GetValue(), current->GetDexPc());
2896     } else if (current->IsLongConstant()) {
2897       replacement = outer_graph->GetLongConstant(
2898           current->AsLongConstant()->GetValue(), current->GetDexPc());
2899     } else if (current->IsFloatConstant()) {
2900       replacement = outer_graph->GetFloatConstant(
2901           current->AsFloatConstant()->GetValue(), current->GetDexPc());
2902     } else if (current->IsDoubleConstant()) {
2903       replacement = outer_graph->GetDoubleConstant(
2904           current->AsDoubleConstant()->GetValue(), current->GetDexPc());
2905     } else if (current->IsParameterValue()) {
2906       if (kIsDebugBuild
2907           && invoke->IsInvokeStaticOrDirect()
2908           && invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
2909         // Ensure we do not use the last input of `invoke`, as it
2910         // contains a clinit check which is not an actual argument.
2911         size_t last_input_index = invoke->InputCount() - 1;
2912         DCHECK(parameter_index != last_input_index);
2913       }
2914       replacement = invoke->InputAt(parameter_index++);
2915     } else if (current->IsCurrentMethod()) {
2916       replacement = outer_graph->GetCurrentMethod();
2917     } else {
2918       // It is OK to ignore MethodEntryHook for inlined functions:
2919       // in debug mode we don't inline, and in release mode method
2920       // tracing is best effort, so the hooks can safely be dropped.
2921       DCHECK(current->IsGoto() || current->IsSuspendCheck() || current->IsMethodEntryHook());
2922       entry_block_->RemoveInstruction(current);
2923     }
2924     if (replacement != nullptr) {
2925       current->ReplaceWith(replacement);
2926       // If `current` is the return value, update `return_value` as well.
2927       if (current == return_value) {
2928         DCHECK_EQ(entry_block_, return_value->GetBlock());
2929         return_value = replacement;
2930       }
2931     }
2932   }
2933 
2934   return return_value;
2935 }
2936 
2937 /*
2938  * Loop will be transformed to:
2939  *       old_pre_header
2940  *             |
2941  *          if_block
2942  *           /    \
2943  *  true_block   false_block
2944  *           \    /
2945  *       new_pre_header
2946  *             |
2947  *           header
2948  */
2949 void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
2950   DCHECK(header->IsLoopHeader());
2951   HBasicBlock* old_pre_header = header->GetDominator();
2952 
2953   // Need extra block to avoid critical edge.
2954   HBasicBlock* if_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
2955   HBasicBlock* true_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
2956   HBasicBlock* false_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
2957   HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
2958   AddBlock(if_block);
2959   AddBlock(true_block);
2960   AddBlock(false_block);
2961   AddBlock(new_pre_header);
2962 
2963   header->ReplacePredecessor(old_pre_header, new_pre_header);
2964   old_pre_header->successors_.clear();
2965   old_pre_header->dominated_blocks_.clear();
2966 
2967   old_pre_header->AddSuccessor(if_block);
2968   if_block->AddSuccessor(true_block);  // True successor
2969   if_block->AddSuccessor(false_block);  // False successor
2970   true_block->AddSuccessor(new_pre_header);
2971   false_block->AddSuccessor(new_pre_header);
2972 
2973   old_pre_header->dominated_blocks_.push_back(if_block);
2974   if_block->SetDominator(old_pre_header);
2975   if_block->dominated_blocks_.push_back(true_block);
2976   true_block->SetDominator(if_block);
2977   if_block->dominated_blocks_.push_back(false_block);
2978   false_block->SetDominator(if_block);
2979   if_block->dominated_blocks_.push_back(new_pre_header);
2980   new_pre_header->SetDominator(if_block);
2981   new_pre_header->dominated_blocks_.push_back(header);
2982   header->SetDominator(new_pre_header);
2983 
2984   // Fix reverse post order.
2985   size_t index_of_header = IndexOfElement(reverse_post_order_, header);
2986   MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
2987   reverse_post_order_[index_of_header++] = if_block;
2988   reverse_post_order_[index_of_header++] = true_block;
2989   reverse_post_order_[index_of_header++] = false_block;
2990   reverse_post_order_[index_of_header++] = new_pre_header;
2991 
2992   // The pre_header can never be a back edge of a loop.
2993   DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
2994          !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
2995   UpdateLoopAndTryInformationOfNewBlock(
2996       if_block, old_pre_header, /* replace_if_back_edge= */ false);
2997   UpdateLoopAndTryInformationOfNewBlock(
2998       true_block, old_pre_header, /* replace_if_back_edge= */ false);
2999   UpdateLoopAndTryInformationOfNewBlock(
3000       false_block, old_pre_header, /* replace_if_back_edge= */ false);
3001   UpdateLoopAndTryInformationOfNewBlock(
3002       new_pre_header, old_pre_header, /* replace_if_back_edge= */ false);
3003 }
3004 
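// Inserts a new single-block loop between `header`'s loop and `exit`: the
// original loop now exits into new_pre_header -> new_header, which iterates
// over new_body and eventually branches to `exit`. The caller must add the
// loop condition to the new header. Returns the new pre-header.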
3005 HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
3006                                                    HBasicBlock* body,
3007                                                    HBasicBlock* exit) {
3008   DCHECK(header->IsLoopHeader());
3009   HLoopInformation* loop = header->GetLoopInformation();
3010 
3011   // Add new loop blocks.
3012   HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3013   HBasicBlock* new_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3014   HBasicBlock* new_body = new (allocator_) HBasicBlock(this, header->GetDexPc());
3015   AddBlock(new_pre_header);
3016   AddBlock(new_header);
3017   AddBlock(new_body);
3018 
3019   // Set up control flow.
3020   header->ReplaceSuccessor(exit, new_pre_header);
3021   new_pre_header->AddSuccessor(new_header);
3022   new_header->AddSuccessor(exit);
3023   new_header->AddSuccessor(new_body);
3024   new_body->AddSuccessor(new_header);
3025 
3026   // Set up dominators.
3027   header->ReplaceDominatedBlock(exit, new_pre_header);
3028   new_pre_header->SetDominator(header);
3029   new_pre_header->dominated_blocks_.push_back(new_header);
3030   new_header->SetDominator(new_pre_header);
3031   new_header->dominated_blocks_.push_back(new_body);
3032   new_body->SetDominator(new_header);
3033   new_header->dominated_blocks_.push_back(exit);
3034   exit->SetDominator(new_header);
3035 
3036   // Fix reverse post order.
3037   size_t index_of_header = IndexOfElement(reverse_post_order_, header);
3038   MakeRoomFor(&reverse_post_order_, 2, index_of_header);
3039   reverse_post_order_[++index_of_header] = new_pre_header;
3040   reverse_post_order_[++index_of_header] = new_header;
3041   size_t index_of_body = IndexOfElement(reverse_post_order_, body);
3042   MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
3043   reverse_post_order_[index_of_body] = new_body;
3044 
3045   // Add gotos and suspend check (client must add conditional in header).
3046   new_pre_header->AddInstruction(new (allocator_) HGoto());
3047   HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(header->GetDexPc());
3048   new_header->AddInstruction(suspend_check);
3049   new_body->AddInstruction(new (allocator_) HGoto());
3050   suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
3051       loop->GetSuspendCheck()->GetEnvironment(), header);
3052 
3053   // Update loop information.
3054   new_header->AddBackEdge(new_body);
3055   new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
3056   new_header->GetLoopInformation()->Populate();
3057   new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation());  // outward
3058   HLoopInformationOutwardIterator it(*new_header);
3059   for (it.Advance(); !it.Done(); it.Advance()) {
3060     it.Current()->Add(new_pre_header);
3061     it.Current()->Add(new_header);
3062     it.Current()->Add(new_body);
3063   }
3064   return new_pre_header;
3065 }
3066 
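// Debug-only helper: checks that `rti` is consistent with `upper_bound_rti`,
// i.e. the upper bound is a supertype of `rti`, and an upper bound that cannot
// be assigned from other types implies an exact `rti`.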
3067 static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
3068     REQUIRES_SHARED(Locks::mutator_lock_) {
3069   if (rti.IsValid()) {
3070     DCHECK(upper_bound_rti.IsSupertypeOf(rti))
3071         << " upper_bound_rti: " << upper_bound_rti
3072         << " rti: " << rti;
3073     DCHECK_IMPLIES(upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes(), rti.IsExact())
3074         << " upper_bound_rti: " << upper_bound_rti
3075         << " rti: " << rti;
3076   }
3077 }
3078 
3079 void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
3080   if (kIsDebugBuild) {
3081     DCHECK_EQ(GetType(), DataType::Type::kReference);
3082     ScopedObjectAccess soa(Thread::Current());
3083     DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
3084     if (IsBoundType()) {
3085       // Having the test here spares us from making the method virtual just for
3086       // the sake of a DCHECK.
3087       CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
3088     }
3089   }
3090   reference_type_handle_ = rti.GetTypeHandle();
3091   SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
3092 }
3093 
3094 bool HBoundType::InstructionDataEquals(const HInstruction* other) const {
3095   const HBoundType* other_bt = other->AsBoundType();
3096   ScopedObjectAccess soa(Thread::Current());
3097   return GetUpperBound().IsEqual(other_bt->GetUpperBound()) &&
3098          GetUpperCanBeNull() == other_bt->GetUpperCanBeNull() &&
3099          CanBeNull() == other_bt->CanBeNull();
3100 }
3101 
3102 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
3103   if (kIsDebugBuild) {
3104     ScopedObjectAccess soa(Thread::Current());
3105     DCHECK(upper_bound.IsValid());
3106     DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
3107     CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
3108   }
3109   upper_bound_ = upper_bound;
3110   SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
3111 }
3112 
3113 ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) {
3114   if (kIsDebugBuild) {
3115     ScopedObjectAccess soa(Thread::Current());
3116     DCHECK(IsValidHandle(type_handle));
3117     if (!is_exact) {
3118       DCHECK(!type_handle->CannotBeAssignedFromOtherTypes())
3119           << "Callers of ReferenceTypeInfo::Create should ensure is_exact is properly computed";
3120     }
3121   }
3122   return ReferenceTypeInfo(type_handle, is_exact);
3123 }
3124 
3125 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
3126   ScopedObjectAccess soa(Thread::Current());
3127   os << "["
3128      << " is_valid=" << rhs.IsValid()
3129      << " type=" << (!rhs.IsValid() ? "?" : mirror::Class::PrettyClass(rhs.GetTypeHandle().Get()))
3130      << " is_exact=" << rhs.IsExact()
3131      << " ]";
3132   return os;
3133 }
3134 
3135 bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
3136   // For now, assume that instructions in different blocks may use the
3137   // environment.
3138   // TODO: Use the control flow to decide if this is true.
3139   if (GetBlock() != other->GetBlock()) {
3140     return true;
3141   }
3142 
3143   // We know that we are in the same block. Walk from 'this' to 'other',
3144   // checking to see if there is any instruction with an environment.
3145   HInstruction* current = this;
3146   for (; current != other && current != nullptr; current = current->GetNext()) {
3147     // This is a conservative check, as the instruction result may not be in
3148     // the referenced environment.
3149     if (current->HasEnvironment()) {
3150       return true;
3151     }
3152   }
3153 
3154   // We should have been called with 'this' before 'other' in the block.
3155   // Just confirm this.
3156   DCHECK(current != nullptr);
3157   return false;
3158 }
3159 
3160 void HInvoke::SetIntrinsic(Intrinsics intrinsic,
3161                            IntrinsicNeedsEnvironment needs_env,
3162                            IntrinsicSideEffects side_effects,
3163                            IntrinsicExceptions exceptions) {
3164   intrinsic_ = intrinsic;
3165   IntrinsicOptimizations opt(this);
3166 
3167   // Adjust method's side effects from intrinsic table.
3168   switch (side_effects) {
3169     case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
3170     case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
3171     case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
3172     case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
3173   }
3174 
3175   if (needs_env == kNoEnvironment) {
3176     opt.SetDoesNotNeedEnvironment();
3177   } else {
3178     // If we need an environment, that means there will be a call, which can trigger GC.
3179     SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
3180   }
3181   // Adjust method's exception status from intrinsic table.
3182   SetCanThrow(exceptions == kCanThrow);
3183 }
3184 
3185 bool HNewInstance::IsStringAlloc() const {
3186   return GetEntrypoint() == kQuickAllocStringObject;
3187 }
3188 
3189 bool HInvoke::NeedsEnvironment() const {
3190   if (!IsIntrinsic()) {
3191     return true;
3192   }
3193   IntrinsicOptimizations opt(*this);
3194   return !opt.GetDoesNotNeedEnvironment();
3195 }
3196 
3197 const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
3198   ArtMethod* caller = GetEnvironment()->GetMethod();
3199   ScopedObjectAccess soa(Thread::Current());
3200   // `caller` is null for a top-level graph representing a method whose declaring
3201   // class was not resolved.
3202   return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
3203 }
3204 
3205 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) {
3206   switch (rhs) {
3207     case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit:
3208       return os << "explicit";
3209     case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit:
3210       return os << "implicit";
3211     case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone:
3212       return os << "none";
3213     default:
3214       LOG(FATAL) << "Unknown ClinitCheckRequirement: " << static_cast<int>(rhs);
3215       UNREACHABLE();
3216   }
3217 }
3218 
3219 bool HInvokeVirtual::CanDoImplicitNullCheckOn(HInstruction* obj) const {
3220   if (obj != InputAt(0)) {
3221     return false;
3222   }
3223   switch (GetIntrinsic()) {
3224     case Intrinsics::kNone:
3225       return true;
3226     case Intrinsics::kReferenceRefersTo:
3227       return true;
3228     default:
3229       // TODO: Add implicit null checks in more intrinsics.
3230       return false;
3231   }
3232 }
3233 
3234 bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
3235   const HLoadClass* other_load_class = other->AsLoadClass();
3236   // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
3237   // names rather than type indexes. However, we shall also have to re-think the hash code.
3238   if (type_index_ != other_load_class->type_index_ ||
3239       GetPackedFields() != other_load_class->GetPackedFields()) {
3240     return false;
3241   }
3242   switch (GetLoadKind()) {
3243     case LoadKind::kBootImageRelRo:
3244     case LoadKind::kJitBootImageAddress:
3245     case LoadKind::kJitTableAddress: {
3246       ScopedObjectAccess soa(Thread::Current());
3247       return GetClass().Get() == other_load_class->GetClass().Get();
3248     }
3249     default:
3250       DCHECK(HasTypeReference(GetLoadKind()));
3251       return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
3252   }
3253 }
3254 
3255 bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
3256   const HLoadString* other_load_string = other->AsLoadString();
3257   // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
3258   // rather than their indexes. However, we shall also have to re-think the hash code.
3259   if (string_index_ != other_load_string->string_index_ ||
3260       GetPackedFields() != other_load_string->GetPackedFields()) {
3261     return false;
3262   }
3263   switch (GetLoadKind()) {
3264     case LoadKind::kBootImageRelRo:
3265     case LoadKind::kJitBootImageAddress:
3266     case LoadKind::kJitTableAddress: {
3267       ScopedObjectAccess soa(Thread::Current());
3268       return GetString().Get() == other_load_string->GetString().Get();
3269     }
3270     default:
3271       return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
3272   }
3273 }
3274 
3275 void HInstruction::RemoveEnvironmentUsers() {
3276   for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) {
3277     HEnvironment* user = use.GetUser();
3278     user->SetRawEnvAt(use.GetIndex(), nullptr);
3279   }
3280   env_uses_.clear();
3281 }
3282 
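// Replaces `instr` with a clone at the same position in its block, copying the
// environment and, if `instr` is a loop's suspend check, re-registering the
// clone with the loop information. Returns the clone.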
3283 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr) {
3284   HInstruction* clone = instr->Clone(instr->GetBlock()->GetGraph()->GetAllocator());
3285   HBasicBlock* block = instr->GetBlock();
3286 
3287   if (instr->IsPhi()) {
3288     HPhi* phi = instr->AsPhi();
3289     DCHECK(!phi->HasEnvironment());
3290     HPhi* phi_clone = clone->AsPhi();
3291     block->ReplaceAndRemovePhiWith(phi, phi_clone);
3292   } else {
3293     block->ReplaceAndRemoveInstructionWith(instr, clone);
3294     if (instr->HasEnvironment()) {
3295       clone->CopyEnvironmentFrom(instr->GetEnvironment());
3296       HLoopInformation* loop_info = block->GetLoopInformation();
3297       if (instr->IsSuspendCheck() && loop_info != nullptr) {
3298         loop_info->SetSuspendCheck(clone->AsSuspendCheck());
3299       }
3300     }
3301   }
3302   return clone;
3303 }
3304 
3305 // Returns an instruction with the opposite Boolean value from 'cond'.
3306 HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction* cursor) {
3307   ArenaAllocator* allocator = GetAllocator();
3308 
3309   if (cond->IsCondition() &&
3310       !DataType::IsFloatingPointType(cond->InputAt(0)->GetType())) {
3311     // Can't reverse floating point conditions.  We have to use HBooleanNot in that case.
3312     HInstruction* lhs = cond->InputAt(0);
3313     HInstruction* rhs = cond->InputAt(1);
3314     HInstruction* replacement = nullptr;
3315     switch (cond->AsCondition()->GetOppositeCondition()) {  // get *opposite*
3316       case kCondEQ: replacement = new (allocator) HEqual(lhs, rhs); break;
3317       case kCondNE: replacement = new (allocator) HNotEqual(lhs, rhs); break;
3318       case kCondLT: replacement = new (allocator) HLessThan(lhs, rhs); break;
3319       case kCondLE: replacement = new (allocator) HLessThanOrEqual(lhs, rhs); break;
3320       case kCondGT: replacement = new (allocator) HGreaterThan(lhs, rhs); break;
3321       case kCondGE: replacement = new (allocator) HGreaterThanOrEqual(lhs, rhs); break;
3322       case kCondB:  replacement = new (allocator) HBelow(lhs, rhs); break;
3323       case kCondBE: replacement = new (allocator) HBelowOrEqual(lhs, rhs); break;
3324       case kCondA:  replacement = new (allocator) HAbove(lhs, rhs); break;
3325       case kCondAE: replacement = new (allocator) HAboveOrEqual(lhs, rhs); break;
3326       default:
3327         LOG(FATAL) << "Unexpected condition";
3328         UNREACHABLE();
3329     }
3330     cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
3331     return replacement;
3332   } else if (cond->IsIntConstant()) {
3333     HIntConstant* int_const = cond->AsIntConstant();
3334     if (int_const->IsFalse()) {
3335       return GetIntConstant(1);
3336     } else {
3337       DCHECK(int_const->IsTrue()) << int_const->GetValue();
3338       return GetIntConstant(0);
3339     }
3340   } else {
3341     HInstruction* replacement = new (allocator) HBooleanNot(cond);
3342     cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
3343     return replacement;
3344   }
3345 }
3346 
3347 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) {
3348   os << "["
3349      << " source=" << rhs.GetSource()
3350      << " destination=" << rhs.GetDestination()
3351      << " type=" << rhs.GetType()
3352      << " instruction=";
3353   if (rhs.GetInstruction() != nullptr) {
3354     os << rhs.GetInstruction()->DebugName() << ' ' << rhs.GetInstruction()->GetId();
3355   } else {
3356     os << "null";
3357   }
3358   os << " ]";
3359   return os;
3360 }
3361 
3362 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) {
3363   switch (rhs) {
3364     case TypeCheckKind::kUnresolvedCheck:
3365       return os << "unresolved_check";
3366     case TypeCheckKind::kExactCheck:
3367       return os << "exact_check";
3368     case TypeCheckKind::kClassHierarchyCheck:
3369       return os << "class_hierarchy_check";
3370     case TypeCheckKind::kAbstractClassCheck:
3371       return os << "abstract_class_check";
3372     case TypeCheckKind::kInterfaceCheck:
3373       return os << "interface_check";
3374     case TypeCheckKind::kArrayObjectCheck:
3375       return os << "array_object_check";
3376     case TypeCheckKind::kArrayCheck:
3377       return os << "array_check";
3378     case TypeCheckKind::kBitstringCheck:
3379       return os << "bitstring_check";
3380     default:
3381       LOG(FATAL) << "Unknown TypeCheckKind: " << static_cast<int>(rhs);
3382       UNREACHABLE();
3383   }
3384 }
3385 
3386 // Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
3387 #define CHECK_INTRINSICS_ENUM_VALUES(Name, InvokeType, _, SideEffects, Exceptions, ...) \
3388   static_assert( \
3389     static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
3390     "Instrinsics enumeration space overflow.");
3391 #include "intrinsics_list.h"
3392   INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
3393 #undef INTRINSICS_LIST
3394 #undef CHECK_INTRINSICS_ENUM_VALUES
3395 
3396 // Function that returns whether an intrinsic needs an environment or not.
3397 static inline IntrinsicNeedsEnvironment NeedsEnvironmentIntrinsic(Intrinsics i) {
3398   switch (i) {
3399     case Intrinsics::kNone:
3400       return kNeedsEnvironment;  // Non-sensical for intrinsic.
3401 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3402     case Intrinsics::k ## Name: \
3403       return NeedsEnv;
3404 #include "intrinsics_list.h"
3405       INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3406 #undef INTRINSICS_LIST
3407 #undef OPTIMIZING_INTRINSICS
3408   }
3409   return kNeedsEnvironment;
3410 }
3411 
3412 // Function that returns whether an intrinsic has side effects.
3413 static inline IntrinsicSideEffects GetSideEffectsIntrinsic(Intrinsics i) {
3414   switch (i) {
3415     case Intrinsics::kNone:
3416       return kAllSideEffects;
3417 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3418     case Intrinsics::k ## Name: \
3419       return SideEffects;
3420 #include "intrinsics_list.h"
3421       INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3422 #undef INTRINSICS_LIST
3423 #undef OPTIMIZING_INTRINSICS
3424   }
3425   return kAllSideEffects;
3426 }
3427 
3428 // Function that returns whether an intrinsic can throw exceptions.
3429 static inline IntrinsicExceptions GetExceptionsIntrinsic(Intrinsics i) {
3430   switch (i) {
3431     case Intrinsics::kNone:
3432       return kCanThrow;
3433 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3434     case Intrinsics::k ## Name: \
3435       return Exceptions;
3436 #include "intrinsics_list.h"
3437       INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3438 #undef INTRINSICS_LIST
3439 #undef OPTIMIZING_INTRINSICS
3440   }
3441   return kCanThrow;
3442 }
3443 
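// Records the resolved target method and, if it is an intrinsic, configures the
// environment, side-effect and exception information from the intrinsics table.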
3444 void HInvoke::SetResolvedMethod(ArtMethod* method) {
3445   if (method != nullptr && method->IsIntrinsic()) {
3446     Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
3447     SetIntrinsic(intrinsic,
3448                  NeedsEnvironmentIntrinsic(intrinsic),
3449                  GetSideEffectsIntrinsic(intrinsic),
3450                  GetExceptionsIntrinsic(intrinsic));
3451   }
3452   resolved_method_ = method;
3453 }
3454 
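// Returns whether `instruction` is statically known to be >= 0: array lengths,
// Min/Abs of non-negative operands, and non-negative integral constants.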
3455 bool IsGEZero(HInstruction* instruction) {
3456   DCHECK(instruction != nullptr);
3457   if (instruction->IsArrayLength()) {
3458     return true;
3459   } else if (instruction->IsMin()) {
3460     // Instruction MIN(>=0, >=0) is >= 0.
3461     return IsGEZero(instruction->InputAt(0)) &&
3462            IsGEZero(instruction->InputAt(1));
3463   } else if (instruction->IsAbs()) {
3464     // Instruction ABS(>=0) is >= 0.
3465     // NOTE: ABS(minint) = minint prevents assuming
3466     //       >= 0 without looking at the argument.
3467     return IsGEZero(instruction->InputAt(0));
3468   }
3469   int64_t value = -1;
3470   return IsInt64AndGet(instruction, &value) && value >= 0;
3471 }
3472 
3473 }  // namespace art
3474