1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #include "nodes.h"
17
18 #include <algorithm>
19 #include <cfloat>
20 #include <functional>
21
22 #include "art_method-inl.h"
23 #include "base/arena_allocator.h"
24 #include "base/arena_bit_vector.h"
25 #include "base/bit_utils.h"
26 #include "base/bit_vector-inl.h"
27 #include "base/bit_vector.h"
28 #include "base/iteration_range.h"
29 #include "base/logging.h"
30 #include "base/malloc_arena_pool.h"
31 #include "base/scoped_arena_allocator.h"
32 #include "base/scoped_arena_containers.h"
33 #include "base/stl_util.h"
34 #include "class_linker-inl.h"
35 #include "class_root-inl.h"
36 #include "code_generator.h"
37 #include "common_dominator.h"
38 #include "intrinsics.h"
39 #include "mirror/class-inl.h"
40 #include "scoped_thread_state_change-inl.h"
41 #include "ssa_builder.h"
42
43 namespace art {
44
45 // Enable floating-point static evaluation during constant folding
46 // only if all floating-point operations and constants evaluate in the
47 // range and precision of the type used (i.e., 32-bit float, 64-bit
48 // double).
49 static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0);
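// (For reference: FLT_EVAL_METHOD == 0 means float and double expressions are
// evaluated in the range and precision of their own type; on x86 builds using
// x87 excess precision it is typically 2, which disables this flag.)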
50
51 ReferenceTypeInfo::TypeHandle HandleCache::CreateRootHandle(VariableSizedHandleScope* handles,
52 ClassRoot class_root) {
53 // Mutator lock is required for NewHandle and GetClassRoot().
54 ScopedObjectAccess soa(Thread::Current());
55 return handles->NewHandle(GetClassRoot(class_root));
56 }
57
58 void HGraph::AddBlock(HBasicBlock* block) {
59 block->SetBlockId(blocks_.size());
60 blocks_.push_back(block);
61 }
62
63 void HGraph::FindBackEdges(ArenaBitVector* visited) {
64 // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks.
65 DCHECK_EQ(visited->GetHighestBitSet(), -1);
66
67 // Allocate memory from local ScopedArenaAllocator.
68 ScopedArenaAllocator allocator(GetArenaStack());
69 // Nodes that we're currently visiting, indexed by block id.
70 ArenaBitVector visiting(
71 &allocator, blocks_.size(), /* expandable= */ false, kArenaAllocGraphBuilder);
72 visiting.ClearAllBits();
73 // Number of successors visited from a given node, indexed by block id.
74 ScopedArenaVector<size_t> successors_visited(blocks_.size(),
75 0u,
76 allocator.Adapter(kArenaAllocGraphBuilder));
77 // Stack of nodes that we're currently visiting (same as marked in "visiting" above).
78 ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
79 constexpr size_t kDefaultWorklistSize = 8;
80 worklist.reserve(kDefaultWorklistSize);
81 visited->SetBit(entry_block_->GetBlockId());
82 visiting.SetBit(entry_block_->GetBlockId());
83 worklist.push_back(entry_block_);
84
85 while (!worklist.empty()) {
86 HBasicBlock* current = worklist.back();
87 uint32_t current_id = current->GetBlockId();
88 if (successors_visited[current_id] == current->GetSuccessors().size()) {
89 visiting.ClearBit(current_id);
90 worklist.pop_back();
91 } else {
92 HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
93 uint32_t successor_id = successor->GetBlockId();
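// A successor that is still marked as "visiting" is on the current DFS stack,
// so the edge current->successor closes a cycle and is recorded as a back edge
// on that successor (the loop header).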
94 if (visiting.IsBitSet(successor_id)) {
95 DCHECK(ContainsElement(worklist, successor));
96 successor->AddBackEdge(current);
97 } else if (!visited->IsBitSet(successor_id)) {
98 visited->SetBit(successor_id);
99 visiting.SetBit(successor_id);
100 worklist.push_back(successor);
101 }
102 }
103 }
104 }
105
106 // Remove the environment use records of the instruction, i.e. unregister it as a user of its environment inputs.
107 void RemoveEnvironmentUses(HInstruction* instruction) {
108 for (HEnvironment* environment = instruction->GetEnvironment();
109 environment != nullptr;
110 environment = environment->GetParent()) {
111 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
112 if (environment->GetInstructionAt(i) != nullptr) {
113 environment->RemoveAsUserOfInput(i);
114 }
115 }
116 }
117 }
118
119 // Return whether the instruction has an environment and it's used by others.
120 bool HasEnvironmentUsedByOthers(HInstruction* instruction) {
121 for (HEnvironment* environment = instruction->GetEnvironment();
122 environment != nullptr;
123 environment = environment->GetParent()) {
124 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
125 HInstruction* user = environment->GetInstructionAt(i);
126 if (user != nullptr) {
127 return true;
128 }
129 }
130 }
131 return false;
132 }
133
134 // Reset environment records of the instruction itself.
135 void ResetEnvironmentInputRecords(HInstruction* instruction) {
136 for (HEnvironment* environment = instruction->GetEnvironment();
137 environment != nullptr;
138 environment = environment->GetParent()) {
139 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
140 DCHECK(environment->GetHolder() == instruction);
141 if (environment->GetInstructionAt(i) != nullptr) {
142 environment->SetRawEnvAt(i, nullptr);
143 }
144 }
145 }
146 }
147
148 static void RemoveAsUser(HInstruction* instruction) {
149 instruction->RemoveAsUserOfAllInputs();
150 RemoveEnvironmentUses(instruction);
151 }
152
153 void HGraph::RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const {
154 for (size_t i = 0; i < blocks_.size(); ++i) {
155 if (!visited.IsBitSet(i)) {
156 HBasicBlock* block = blocks_[i];
157 if (block == nullptr) continue;
158 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
159 RemoveAsUser(it.Current());
160 }
161 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
162 RemoveAsUser(it.Current());
163 }
164 }
165 }
166 }
167
168 void HGraph::RemoveDeadBlocks(const ArenaBitVector& visited) {
169 for (size_t i = 0; i < blocks_.size(); ++i) {
170 if (!visited.IsBitSet(i)) {
171 HBasicBlock* block = blocks_[i];
172 if (block == nullptr) continue;
173 // We only need to update the successor, which might be live.
174 for (HBasicBlock* successor : block->GetSuccessors()) {
175 successor->RemovePredecessor(block);
176 }
177 // Remove the block from the list of blocks, so that further analyses
178 // never see it.
179 blocks_[i] = nullptr;
180 if (block->IsExitBlock()) {
181 SetExitBlock(nullptr);
182 }
183 // Mark the block as removed. This is used by the HGraphBuilder to discard
184 // the block as a branch target.
185 block->SetGraph(nullptr);
186 }
187 }
188 }
189
190 GraphAnalysisResult HGraph::BuildDominatorTree() {
191 // Allocate memory from local ScopedArenaAllocator.
192 ScopedArenaAllocator allocator(GetArenaStack());
193
194 ArenaBitVector visited(&allocator, blocks_.size(), false, kArenaAllocGraphBuilder);
195 visited.ClearAllBits();
196
197 // (1) Find the back edges in the graph doing a DFS traversal.
198 FindBackEdges(&visited);
199
200 // (2) Remove instructions and phis from blocks not visited during
201 // the initial DFS as users from other instructions, so that
202 // users can be safely removed before uses later.
203 RemoveInstructionsAsUsersFromDeadBlocks(visited);
204
205 // (3) Remove blocks not visited during the initial DFS.
206 // Step (5) requires dead blocks to be removed from the
207 // predecessors list of live blocks.
208 RemoveDeadBlocks(visited);
209
210 // (4) Simplify the CFG now, so that we don't need to recompute
211 // dominators and the reverse post order.
212 SimplifyCFG();
213
214 // (5) Compute the dominance information and the reverse post order.
215 ComputeDominanceInformation();
216
217 // (6) Analyze loops discovered through back edge analysis, and
218 // set the loop information on each block.
219 GraphAnalysisResult result = AnalyzeLoops();
220 if (result != kAnalysisSuccess) {
221 return result;
222 }
223
224 // (7) Precompute per-block try membership before entering the SSA builder,
225 // which needs the information to build catch block phis from values of
226 // locals at throwing instructions inside try blocks.
227 ComputeTryBlockInformation();
228
229 return kAnalysisSuccess;
230 }
231
232 void HGraph::ClearDominanceInformation() {
233 for (HBasicBlock* block : GetActiveBlocks()) {
234 block->ClearDominanceInformation();
235 }
236 reverse_post_order_.clear();
237 }
238
239 void HGraph::ClearLoopInformation() {
240 SetHasIrreducibleLoops(false);
241 for (HBasicBlock* block : GetActiveBlocks()) {
242 block->SetLoopInformation(nullptr);
243 }
244 }
245
246 void HBasicBlock::ClearDominanceInformation() {
247 dominated_blocks_.clear();
248 dominator_ = nullptr;
249 }
250
251 HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const {
252 HInstruction* instruction = GetFirstInstruction();
253 while (instruction->IsParallelMove()) {
254 instruction = instruction->GetNext();
255 }
256 return instruction;
257 }
258
259 static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) {
260 DCHECK(ContainsElement(block->GetSuccessors(), successor));
261
262 HBasicBlock* old_dominator = successor->GetDominator();
263 HBasicBlock* new_dominator =
264 (old_dominator == nullptr) ? block
265 : CommonDominator::ForPair(old_dominator, block);
266
267 if (old_dominator == new_dominator) {
268 return false;
269 } else {
270 successor->SetDominator(new_dominator);
271 return true;
272 }
273 }
274
275 // TODO Consider moving this entirely into LoadStoreAnalysis/Elimination.
276 bool HGraph::PathBetween(uint32_t source_idx, uint32_t dest_idx) const {
277 DCHECK_LT(source_idx, blocks_.size()) << "source not present in graph!";
278 DCHECK_LT(dest_idx, blocks_.size()) << "dest not present in graph!";
279 DCHECK(blocks_[source_idx] != nullptr);
280 DCHECK(blocks_[dest_idx] != nullptr);
281 return reachability_graph_.IsBitSet(source_idx, dest_idx);
282 }
283
284 bool HGraph::PathBetween(const HBasicBlock* source, const HBasicBlock* dest) const {
285 if (source == nullptr || dest == nullptr) {
286 return false;
287 }
288 size_t source_idx = source->GetBlockId();
289 size_t dest_idx = dest->GetBlockId();
290 return PathBetween(source_idx, dest_idx);
291 }
292
293 // This function/struct calculates the reachability of every node from every
294 // other node by iteratively using DFS to find reachability of each individual
295 // block.
296 //
297 // This is in practice faster than the simpler Floyd-Warshall since while that
298 // is O(N**3) this is O(N*(E + N)) where N is the number of blocks and E is the
299 // number of edges. Since in practice each block only has a few outgoing edges
300 // we can confidently say that E ~ B*N where B is a small number (~3). We also
301 // memoize the results as we go allowing us to (potentially) avoid walking the
302 // entire graph for every node. To make best use of this memoization we
303 // calculate the reachability of blocks in PostOrder. This means that
304 // (generally) blocks that are dominated by many other blocks and dominate few
305 // blocks themselves will be examined first. This makes it more likely we can
306 // use our memoized results.
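// As an illustration (numbers are only an example, not taken from the original
// comment): with N = 1000 blocks and B ~ 3 successors per block, Floyd-Warshall
// is on the order of 10^9 operations, while this approach is roughly
// N*(E + N) ~ 1000 * (3000 + 1000) = 4*10^6 bit-vector row operations.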
307 class ReachabilityAnalysisHelper {
308 public:
309 ReachabilityAnalysisHelper(const HGraph* graph,
310 ArenaBitVectorArray* reachability_graph,
311 ArenaStack* arena_stack)
312 : graph_(graph),
313 reachability_graph_(reachability_graph),
314 arena_stack_(arena_stack),
315 temporaries_(arena_stack_),
316 block_size_(RoundUp(graph_->GetBlocks().size(), BitVector::kWordBits)),
317 all_visited_nodes_(
318 &temporaries_, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph),
319 not_post_order_visited_(
320 &temporaries_, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph) {
321 // We can't adjust the size of reachability graph any more without breaking
322 // our allocator invariants so it had better be large enough.
323 CHECK_GE(reachability_graph_->NumRows(), graph_->GetBlocks().size());
324 CHECK_GE(reachability_graph_->NumColumns(), graph_->GetBlocks().size());
325 not_post_order_visited_.SetInitialBits(graph_->GetBlocks().size());
326 }
327
328 void CalculateReachability() {
329 // Calculate what blocks connect using repeated DFS
330 //
331 // Going in PostOrder should generally give memoization a good shot of
332 // hitting.
333 for (const HBasicBlock* blk : graph_->GetPostOrder()) {
334 if (blk == nullptr) {
335 continue;
336 }
337 not_post_order_visited_.ClearBit(blk->GetBlockId());
338 CalculateConnectednessOn(blk);
339 all_visited_nodes_.SetBit(blk->GetBlockId());
340 }
341 // Process the remaining blocks that were not reached by the post-order traversal.
342 for (auto idx : not_post_order_visited_.Indexes()) {
343 const HBasicBlock* blk = graph_->GetBlocks()[idx];
344 if (blk == nullptr) {
345 continue;
346 }
347 CalculateConnectednessOn(blk);
348 all_visited_nodes_.SetBit(blk->GetBlockId());
349 }
350 }
351
352 private:
353 void AddEdge(uint32_t source, const HBasicBlock* dest) {
354 reachability_graph_->SetBit(source, dest->GetBlockId());
355 }
356
357 // Union the reachability of 'idx' into 'update_block_idx'. This is done to
358 // implement memoization. In order to improve performance we do this in 4-byte
359 // blocks. Clang should be able to optimize this to larger blocks if possible.
360 void UnionBlock(size_t update_block_idx, size_t idx) {
361 reachability_graph_->UnionRows(update_block_idx, idx);
362 }
363
364 // Single DFS to get connectedness of a single block
365 void CalculateConnectednessOn(const HBasicBlock* const target_block) {
366 const uint32_t target_idx = target_block->GetBlockId();
367 ScopedArenaAllocator connectedness_temps(arena_stack_);
368 // What nodes we have already discovered and either have processed or are
369 // already on the queue.
370 ArenaBitVector discovered(
371 &connectedness_temps, graph_->GetBlocks().size(), false, kArenaAllocReachabilityGraph);
372 // The work stack. What blocks we still need to process.
373 ScopedArenaVector<const HBasicBlock*> work_stack(
374 connectedness_temps.Adapter(kArenaAllocReachabilityGraph));
375 // Known max size since otherwise we'd have blocks multiple times. Avoids
376 // re-allocation
377 work_stack.reserve(graph_->GetBlocks().size());
378 discovered.SetBit(target_idx);
379 work_stack.push_back(target_block);
380 // Main DFS Loop.
381 while (!work_stack.empty()) {
382 const HBasicBlock* cur = work_stack.back();
383 work_stack.pop_back();
384 // Memoization of previous runs.
385 if (all_visited_nodes_.IsBitSet(cur->GetBlockId())) {
386 DCHECK_NE(target_block, cur);
387 // Already explored from here. Just use that data.
388 UnionBlock(target_idx, cur->GetBlockId());
389 continue;
390 }
391 for (const HBasicBlock* succ : cur->GetSuccessors()) {
392 AddEdge(target_idx, succ);
393 if (!discovered.IsBitSet(succ->GetBlockId())) {
394 work_stack.push_back(succ);
395 discovered.SetBit(succ->GetBlockId());
396 }
397 }
398 }
399 }
400
401 const HGraph* graph_;
402 // The graph's reachability_graph_ on the main allocator.
403 ArenaBitVectorArray* reachability_graph_;
404 ArenaStack* arena_stack_;
405 // An allocator for temporary bit-vectors used by this algorithm. The
406 // 'SetBit,ClearBit' on reachability_graph_ prior to the construction of this
407 // object should be the only allocation on the main allocator so it's safe to
408 // make a sub-allocator here.
409 ScopedArenaAllocator temporaries_;
410 // Number of columns, rounded up to a multiple of BitVector::kWordBits.
411 const size_t block_size_;
412 // Where we've already completely calculated connectedness.
413 ArenaBitVector all_visited_nodes_;
414 // What we never visited and need to do later
415 ArenaBitVector not_post_order_visited_;
416
417 DISALLOW_COPY_AND_ASSIGN(ReachabilityAnalysisHelper);
418 };
419
420 void HGraph::ComputeReachabilityInformation() {
421 DCHECK_EQ(reachability_graph_.GetRawData().NumSetBits(), 0u);
422 DCHECK(reachability_graph_.IsExpandable());
423 // Reserve all the bits we'll need. This is the only allocation on the
424 // standard allocator we do here, enabling us to create a new ScopedArena for
425 // use with temporaries.
426 //
427 // reachability_graph_ acts as |N| x |N| graph for PathBetween. Array is
428 // padded so each row starts on an BitVector::kWordBits-bit alignment for
429 // simplicity and performance, allowing us to union blocks together without
430 // going bit-by-bit.
431 reachability_graph_.Resize(blocks_.size(), blocks_.size(), /*clear=*/false);
432 ReachabilityAnalysisHelper helper(this, &reachability_graph_, GetArenaStack());
433 helper.CalculateReachability();
434 }
435
436 void HGraph::ClearReachabilityInformation() {
437 reachability_graph_.Clear();
438 }
439
440 void HGraph::ComputeDominanceInformation() {
441 DCHECK(reverse_post_order_.empty());
442 reverse_post_order_.reserve(blocks_.size());
443 reverse_post_order_.push_back(entry_block_);
444
445 // Allocate memory from local ScopedArenaAllocator.
446 ScopedArenaAllocator allocator(GetArenaStack());
447 // Number of visits of a given node, indexed by block id.
448 ScopedArenaVector<size_t> visits(blocks_.size(), 0u, allocator.Adapter(kArenaAllocGraphBuilder));
449 // Number of successors visited from a given node, indexed by block id.
450 ScopedArenaVector<size_t> successors_visited(blocks_.size(),
451 0u,
452 allocator.Adapter(kArenaAllocGraphBuilder));
453 // Nodes for which we need to visit successors.
454 ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
455 constexpr size_t kDefaultWorklistSize = 8;
456 worklist.reserve(kDefaultWorklistSize);
457 worklist.push_back(entry_block_);
458
459 while (!worklist.empty()) {
460 HBasicBlock* current = worklist.back();
461 uint32_t current_id = current->GetBlockId();
462 if (successors_visited[current_id] == current->GetSuccessors().size()) {
463 worklist.pop_back();
464 } else {
465 HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
466 UpdateDominatorOfSuccessor(current, successor);
467
468 // Once all the forward edges have been visited, we know the immediate
469 // dominator of the block. We can then start visiting its successors.
470 if (++visits[successor->GetBlockId()] ==
471 successor->GetPredecessors().size() - successor->NumberOfBackEdges()) {
472 reverse_post_order_.push_back(successor);
473 worklist.push_back(successor);
474 }
475 }
476 }
477
478 // Check if the graph has back edges not dominated by their respective headers.
479 // If so, we need to update the dominators of those headers and recursively of
480 // their successors. We do that with a fix-point iteration over all blocks.
481 // The algorithm is guaranteed to terminate because it loops only if the sum
482 // of all dominator chains has decreased in the current iteration.
483 bool must_run_fix_point = false;
484 for (HBasicBlock* block : blocks_) {
485 if (block != nullptr &&
486 block->IsLoopHeader() &&
487 block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) {
488 must_run_fix_point = true;
489 break;
490 }
491 }
492 if (must_run_fix_point) {
493 bool update_occurred = true;
494 while (update_occurred) {
495 update_occurred = false;
496 for (HBasicBlock* block : GetReversePostOrder()) {
497 for (HBasicBlock* successor : block->GetSuccessors()) {
498 update_occurred |= UpdateDominatorOfSuccessor(block, successor);
499 }
500 }
501 }
502 }
503
504 // Make sure that there are no remaining blocks whose dominator information
505 // needs to be updated.
506 if (kIsDebugBuild) {
507 for (HBasicBlock* block : GetReversePostOrder()) {
508 for (HBasicBlock* successor : block->GetSuccessors()) {
509 DCHECK(!UpdateDominatorOfSuccessor(block, successor));
510 }
511 }
512 }
513
514 // Populate `dominated_blocks_` information after computing all dominators.
515 // The potential presence of irreducible loops requires us to do it afterwards.
516 for (HBasicBlock* block : GetReversePostOrder()) {
517 if (!block->IsEntryBlock()) {
518 block->GetDominator()->AddDominatedBlock(block);
519 }
520 }
521 }
522
523 HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
524 HBasicBlock* new_block = new (allocator_) HBasicBlock(this, successor->GetDexPc());
525 AddBlock(new_block);
526 // Use `InsertBetween` to ensure the predecessor index and successor index of
527 // `block` and `successor` are preserved.
528 new_block->InsertBetween(block, successor);
529 return new_block;
530 }
531
532 void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) {
533 // Insert a new node between `block` and `successor` to split the
534 // critical edge.
535 HBasicBlock* new_block = SplitEdge(block, successor);
536 new_block->AddInstruction(new (allocator_) HGoto(successor->GetDexPc()));
537 if (successor->IsLoopHeader()) {
538 // If we split at a back edge boundary, make the new block the back edge.
539 HLoopInformation* info = successor->GetLoopInformation();
540 if (info->IsBackEdge(*block)) {
541 info->RemoveBackEdge(block);
542 info->AddBackEdge(new_block);
543 }
544 }
545 }
546
547 // Reorder phi inputs to match reordering of the block's predecessors.
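// For example, if predecessors 0 and 2 are swapped, a phi(v0, v1, v2) must
// become phi(v2, v1, v0) so that input i still corresponds to predecessor i.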
548 static void FixPhisAfterPredecessorsReodering(HBasicBlock* block, size_t first, size_t second) {
549 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
550 HPhi* phi = it.Current()->AsPhi();
551 HInstruction* first_instr = phi->InputAt(first);
552 HInstruction* second_instr = phi->InputAt(second);
553 phi->ReplaceInput(first_instr, second);
554 phi->ReplaceInput(second_instr, first);
555 }
556 }
557
558 // Make sure that the first predecessor of a loop header is the incoming block.
559 void HGraph::OrderLoopHeaderPredecessors(HBasicBlock* header) {
560 DCHECK(header->IsLoopHeader());
561 HLoopInformation* info = header->GetLoopInformation();
562 if (info->IsBackEdge(*header->GetPredecessors()[0])) {
563 HBasicBlock* to_swap = header->GetPredecessors()[0];
564 for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) {
565 HBasicBlock* predecessor = header->GetPredecessors()[pred];
566 if (!info->IsBackEdge(*predecessor)) {
567 header->predecessors_[pred] = to_swap;
568 header->predecessors_[0] = predecessor;
569 FixPhisAfterPredecessorsReodering(header, 0, pred);
570 break;
571 }
572 }
573 }
574 }
575
576 // Transform control flow of the loop to a single preheader format (don't touch the data flow).
577 // The new_preheader may already be among the header's predecessors - this situation is
578 // handled correctly.
579 static void FixControlForNewSinglePreheader(HBasicBlock* header, HBasicBlock* new_preheader) {
580 HLoopInformation* loop_info = header->GetLoopInformation();
581 for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
582 HBasicBlock* predecessor = header->GetPredecessors()[pred];
583 if (!loop_info->IsBackEdge(*predecessor) && predecessor != new_preheader) {
584 predecessor->ReplaceSuccessor(header, new_preheader);
585 pred--;
586 }
587 }
588 }
589
590 // == Before == == After ==
591 // _________ _________ _________ _________
592 // | B0 | | B1 | (old preheaders) | B0 | | B1 |
593 // |=========| |=========| |=========| |=========|
594 // | i0 = .. | | i1 = .. | | i0 = .. | | i1 = .. |
595 // |_________| |_________| |_________| |_________|
596 // \ / \ /
597 // \ / ___v____________v___
598 // \ / (new preheader) | B20 <- B0, B1 |
599 // | | |====================|
600 // | | | i20 = phi(i0, i1) |
601 // | | |____________________|
602 // | | |
603 // /\ | | /\ /\ | /\
604 // / v_______v_________v_______v \ / v___________v_____________v \
605 // | | B10 <- B0, B1, B2, B3 | | | | B10 <- B20, B2, B3 | |
606 // | |===========================| | (header) | |===========================| |
607 // | | i10 = phi(i0, i1, i2, i3) | | | | i10 = phi(i20, i2, i3) | |
608 // | |___________________________| | | |___________________________| |
609 // | / \ | | / \ |
610 // | ... ... | | ... ... |
611 // | _________ _________ | | _________ _________ |
612 // | | B2 | | B3 | | | | B2 | | B3 | |
613 // | |=========| |=========| | (back edges) | |=========| |=========| |
614 // | | i2 = .. | | i3 = .. | | | | i2 = .. | | i3 = .. | |
615 // | |_________| |_________| | | |_________| |_________| |
616 // \ / \ / \ / \ /
617 // \___/ \___/ \___/ \___/
618 //
619 void HGraph::TransformLoopToSinglePreheaderFormat(HBasicBlock* header) {
620 HLoopInformation* loop_info = header->GetLoopInformation();
621
622 HBasicBlock* preheader = new (allocator_) HBasicBlock(this, header->GetDexPc());
623 AddBlock(preheader);
624 preheader->AddInstruction(new (allocator_) HGoto(header->GetDexPc()));
625
626 // If the old header has no Phis then we only need to fix the control flow.
627 if (header->GetPhis().IsEmpty()) {
628 FixControlForNewSinglePreheader(header, preheader);
629 preheader->AddSuccessor(header);
630 return;
631 }
632
633 // Find the first non-back edge block in the header's predecessors list.
634 size_t first_nonbackedge_pred_pos = 0;
635 bool found = false;
636 for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
637 HBasicBlock* predecessor = header->GetPredecessors()[pred];
638 if (!loop_info->IsBackEdge(*predecessor)) {
639 first_nonbackedge_pred_pos = pred;
640 found = true;
641 break;
642 }
643 }
644
645 DCHECK(found);
646
647 // Fix the data-flow.
648 for (HInstructionIterator it(header->GetPhis()); !it.Done(); it.Advance()) {
649 HPhi* header_phi = it.Current()->AsPhi();
650
651 HPhi* preheader_phi = new (GetAllocator()) HPhi(GetAllocator(),
652 header_phi->GetRegNumber(),
653 0,
654 header_phi->GetType());
655 if (header_phi->GetType() == DataType::Type::kReference) {
656 preheader_phi->SetReferenceTypeInfo(header_phi->GetReferenceTypeInfo());
657 }
658 preheader->AddPhi(preheader_phi);
659
660 HInstruction* orig_input = header_phi->InputAt(first_nonbackedge_pred_pos);
661 header_phi->ReplaceInput(preheader_phi, first_nonbackedge_pred_pos);
662 preheader_phi->AddInput(orig_input);
663
664 for (size_t input_pos = first_nonbackedge_pred_pos + 1;
665 input_pos < header_phi->InputCount();
666 input_pos++) {
667 HInstruction* input = header_phi->InputAt(input_pos);
668 HBasicBlock* pred_block = header->GetPredecessors()[input_pos];
669
670 if (loop_info->Contains(*pred_block)) {
671 DCHECK(loop_info->IsBackEdge(*pred_block));
672 } else {
673 preheader_phi->AddInput(input);
674 header_phi->RemoveInputAt(input_pos);
675 input_pos--;
676 }
677 }
678 }
679
680 // Fix the control-flow.
681 HBasicBlock* first_pred = header->GetPredecessors()[first_nonbackedge_pred_pos];
682 preheader->InsertBetween(first_pred, header);
683
684 FixControlForNewSinglePreheader(header, preheader);
685 }
686
687 void HGraph::SimplifyLoop(HBasicBlock* header) {
688 HLoopInformation* info = header->GetLoopInformation();
689
690 // Make sure the loop has only one pre header. This simplifies SSA building by having
691 // to just look at the pre header to know which locals are initialized at entry of the
692 // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining
693 // this graph.
694 size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges();
695 if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) {
696 TransformLoopToSinglePreheaderFormat(header);
697 }
698
699 OrderLoopHeaderPredecessors(header);
700
701 HInstruction* first_instruction = header->GetFirstInstruction();
702 if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) {
703 // Called from DeadBlockElimination. Update SuspendCheck pointer.
704 info->SetSuspendCheck(first_instruction->AsSuspendCheck());
705 }
706 }
707
708 void HGraph::ComputeTryBlockInformation() {
709 // Iterate in reverse post order to propagate try membership information from
710 // predecessors to their successors.
711 for (HBasicBlock* block : GetReversePostOrder()) {
712 if (block->IsEntryBlock() || block->IsCatchBlock()) {
713 // Catch blocks after simplification have only exceptional predecessors
714 // and hence are never in tries.
715 continue;
716 }
717
718 // Infer try membership from the first predecessor. Having simplified loops,
719 // the first predecessor can never be a back edge and therefore it must have
720 // been visited already and had its try membership set.
721 HBasicBlock* first_predecessor = block->GetPredecessors()[0];
722 DCHECK(!block->IsLoopHeader() || !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
723 const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors();
724 if (try_entry != nullptr &&
725 (block->GetTryCatchInformation() == nullptr ||
726 try_entry != &block->GetTryCatchInformation()->GetTryEntry())) {
727 // We are either setting try block membership for the first time or it
728 // has changed.
729 block->SetTryCatchInformation(new (allocator_) TryCatchInformation(*try_entry));
730 }
731 }
732 }
733
734 void HGraph::SimplifyCFG() {
735 // Simplify the CFG for future analysis, and code generation:
736 // (1): Split critical edges.
737 // (2): Simplify loops by having only one preheader.
738 // NOTE: We're appending new blocks inside the loop, so we need to use indices because iterators
739 // can be invalidated. We remember the initial size to avoid iterating over the new blocks.
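// (A critical edge is an edge whose source has several successors and whose
// destination has several predecessors; splitting it creates a dedicated block
// in which moves for that edge alone can be inserted.)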
740 for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) {
741 HBasicBlock* block = blocks_[block_id];
742 if (block == nullptr) continue;
743 if (block->GetSuccessors().size() > 1) {
744 // Only split normal-flow edges. We cannot split exceptional edges as they
745 // are synthesized (approximate real control flow), and we do not need to
746 // anyway. Moves that would be inserted there are performed by the runtime.
747 ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors();
748 for (size_t j = 0, e = normal_successors.size(); j < e; ++j) {
749 HBasicBlock* successor = normal_successors[j];
750 DCHECK(!successor->IsCatchBlock());
751 if (successor == exit_block_) {
752 // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we
753 // do not want to split because Goto->Exit is not allowed.
754 DCHECK(block->IsSingleTryBoundary());
755 } else if (successor->GetPredecessors().size() > 1) {
756 SplitCriticalEdge(block, successor);
757 // SplitCriticalEdge could have invalidated the `normal_successors`
758 // ArrayRef. We must re-acquire it.
759 normal_successors = block->GetNormalSuccessors();
760 DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor);
761 DCHECK_EQ(e, normal_successors.size());
762 }
763 }
764 }
765 if (block->IsLoopHeader()) {
766 SimplifyLoop(block);
767 } else if (!block->IsEntryBlock() &&
768 block->GetFirstInstruction() != nullptr &&
769 block->GetFirstInstruction()->IsSuspendCheck()) {
770 // We are being called by the dead code elimination pass, and what used to be
771 // a loop got dismantled. Just remove the suspend check.
772 block->RemoveInstruction(block->GetFirstInstruction());
773 }
774 }
775 }
776
777 GraphAnalysisResult HGraph::AnalyzeLoops() const {
778 // We iterate post order to ensure we visit inner loops before outer loops.
779 // `PopulateRecursive` needs this guarantee to know whether a natural loop
780 // contains an irreducible loop.
781 for (HBasicBlock* block : GetPostOrder()) {
782 if (block->IsLoopHeader()) {
783 if (block->IsCatchBlock()) {
784 // TODO: Dealing with exceptional back edges could be tricky because
785 // they only approximate the real control flow. Bail out for now.
786 VLOG(compiler) << "Not compiled: Exceptional back edges";
787 return kAnalysisFailThrowCatchLoop;
788 }
789 block->GetLoopInformation()->Populate();
790 }
791 }
792 return kAnalysisSuccess;
793 }
794
795 void HLoopInformation::Dump(std::ostream& os) {
796 os << "header: " << header_->GetBlockId() << std::endl;
797 os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl;
798 for (HBasicBlock* block : back_edges_) {
799 os << "back edge: " << block->GetBlockId() << std::endl;
800 }
801 for (HBasicBlock* block : header_->GetPredecessors()) {
802 os << "predecessor: " << block->GetBlockId() << std::endl;
803 }
804 for (uint32_t idx : blocks_.Indexes()) {
805 os << " in loop: " << idx << std::endl;
806 }
807 }
808
809 void HGraph::InsertConstant(HConstant* constant) {
810 // New constants are inserted before the SuspendCheck at the bottom of the
811 // entry block. Note that this method can be called from the graph builder and
812 // the entry block therefore may not end with SuspendCheck->Goto yet.
813 HInstruction* insert_before = nullptr;
814
815 HInstruction* gota = entry_block_->GetLastInstruction();
816 if (gota != nullptr && gota->IsGoto()) {
817 HInstruction* suspend_check = gota->GetPrevious();
818 if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) {
819 insert_before = suspend_check;
820 } else {
821 insert_before = gota;
822 }
823 }
824
825 if (insert_before == nullptr) {
826 entry_block_->AddInstruction(constant);
827 } else {
828 entry_block_->InsertInstructionBefore(constant, insert_before);
829 }
830 }
831
832 HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) {
833 // For simplicity, don't bother reviving the cached null constant if it is
834 // not null and not in a block. Otherwise, we need to clear the instruction
835 // id and/or any invariants the graph is assuming when adding new instructions.
836 if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
837 cached_null_constant_ = new (allocator_) HNullConstant(dex_pc);
838 cached_null_constant_->SetReferenceTypeInfo(GetInexactObjectRti());
839 InsertConstant(cached_null_constant_);
840 }
841 if (kIsDebugBuild) {
842 ScopedObjectAccess soa(Thread::Current());
843 DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
844 }
845 return cached_null_constant_;
846 }
847
848 HCurrentMethod* HGraph::GetCurrentMethod() {
849 // For simplicity, don't bother reviving the cached current method if it is
850 // not null and not in a block. Otherwise, we need to clear the instruction
851 // id and/or any invariants the graph is assuming when adding new instructions.
852 if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) {
853 cached_current_method_ = new (allocator_) HCurrentMethod(
854 Is64BitInstructionSet(instruction_set_) ? DataType::Type::kInt64 : DataType::Type::kInt32,
855 entry_block_->GetDexPc());
856 if (entry_block_->GetFirstInstruction() == nullptr) {
857 entry_block_->AddInstruction(cached_current_method_);
858 } else {
859 entry_block_->InsertInstructionBefore(
860 cached_current_method_, entry_block_->GetFirstInstruction());
861 }
862 }
863 return cached_current_method_;
864 }
865
866 const char* HGraph::GetMethodName() const {
867 const dex::MethodId& method_id = dex_file_.GetMethodId(method_idx_);
868 return dex_file_.GetMethodName(method_id);
869 }
870
871 std::string HGraph::PrettyMethod(bool with_signature) const {
872 return dex_file_.PrettyMethod(method_idx_, with_signature);
873 }
874
875 HConstant* HGraph::GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc) {
876 switch (type) {
877 case DataType::Type::kBool:
878 DCHECK(IsUint<1>(value));
879 FALLTHROUGH_INTENDED;
880 case DataType::Type::kUint8:
881 case DataType::Type::kInt8:
882 case DataType::Type::kUint16:
883 case DataType::Type::kInt16:
884 case DataType::Type::kInt32:
885 DCHECK(IsInt(DataType::Size(type) * kBitsPerByte, value));
886 return GetIntConstant(static_cast<int32_t>(value), dex_pc);
887
888 case DataType::Type::kInt64:
889 return GetLongConstant(value, dex_pc);
890
891 default:
892 LOG(FATAL) << "Unsupported constant type";
893 UNREACHABLE();
894 }
895 }
896
897 void HGraph::CacheFloatConstant(HFloatConstant* constant) {
898 int32_t value = bit_cast<int32_t, float>(constant->GetValue());
899 DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end());
900 cached_float_constants_.Overwrite(value, constant);
901 }
902
903 void HGraph::CacheDoubleConstant(HDoubleConstant* constant) {
904 int64_t value = bit_cast<int64_t, double>(constant->GetValue());
905 DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end());
906 cached_double_constants_.Overwrite(value, constant);
907 }
908
909 void HLoopInformation::Add(HBasicBlock* block) {
910 blocks_.SetBit(block->GetBlockId());
911 }
912
913 void HLoopInformation::Remove(HBasicBlock* block) {
914 blocks_.ClearBit(block->GetBlockId());
915 }
916
917 void HLoopInformation::PopulateRecursive(HBasicBlock* block) {
918 if (blocks_.IsBitSet(block->GetBlockId())) {
919 return;
920 }
921
922 blocks_.SetBit(block->GetBlockId());
923 block->SetInLoop(this);
924 if (block->IsLoopHeader()) {
925 // We're visiting loops in post-order, so inner loops must have been
926 // populated already.
927 DCHECK(block->GetLoopInformation()->IsPopulated());
928 if (block->GetLoopInformation()->IsIrreducible()) {
929 contains_irreducible_loop_ = true;
930 }
931 }
932 for (HBasicBlock* predecessor : block->GetPredecessors()) {
933 PopulateRecursive(predecessor);
934 }
935 }
936
937 void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) {
938 size_t block_id = block->GetBlockId();
939
940 // If `block` is in `finalized`, we know its membership in the loop has been
941 // decided and it does not need to be revisited.
942 if (finalized->IsBitSet(block_id)) {
943 return;
944 }
945
946 bool is_finalized = false;
947 if (block->IsLoopHeader()) {
948 // If we hit a loop header in an irreducible loop, we first check if the
949 // pre header of that loop belongs to the currently analyzed loop. If it does,
950 // then we visit the back edges.
951 // Note that we cannot use GetPreHeader, as the loop may have not been populated
952 // yet.
953 HBasicBlock* pre_header = block->GetPredecessors()[0];
954 PopulateIrreducibleRecursive(pre_header, finalized);
955 if (blocks_.IsBitSet(pre_header->GetBlockId())) {
956 block->SetInLoop(this);
957 blocks_.SetBit(block_id);
958 finalized->SetBit(block_id);
959 is_finalized = true;
960
961 HLoopInformation* info = block->GetLoopInformation();
962 for (HBasicBlock* back_edge : info->GetBackEdges()) {
963 PopulateIrreducibleRecursive(back_edge, finalized);
964 }
965 }
966 } else {
967 // Visit all predecessors. If one predecessor is part of the loop, this
968 // block is also part of this loop.
969 for (HBasicBlock* predecessor : block->GetPredecessors()) {
970 PopulateIrreducibleRecursive(predecessor, finalized);
971 if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) {
972 block->SetInLoop(this);
973 blocks_.SetBit(block_id);
974 finalized->SetBit(block_id);
975 is_finalized = true;
976 }
977 }
978 }
979
980 // All predecessors have been recursively visited. Mark finalized if not marked yet.
981 if (!is_finalized) {
982 finalized->SetBit(block_id);
983 }
984 }
985
986 void HLoopInformation::Populate() {
987 DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated";
988 // Populate this loop: starting with the back edge, recursively add predecessors
989 // that are not already part of that loop. Set the header as part of the loop
990 // to end the recursion.
991 // This is a recursive implementation of the algorithm described in
992 // "Advanced Compiler Design & Implementation" (Muchnick) p192.
993 HGraph* graph = header_->GetGraph();
994 blocks_.SetBit(header_->GetBlockId());
995 header_->SetInLoop(this);
996
997 bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader();
998
999 if (is_irreducible_loop) {
1000 // Allocate memory from local ScopedArenaAllocator.
1001 ScopedArenaAllocator allocator(graph->GetArenaStack());
1002 ArenaBitVector visited(&allocator,
1003 graph->GetBlocks().size(),
1004 /* expandable= */ false,
1005 kArenaAllocGraphBuilder);
1006 visited.ClearAllBits();
1007 // Stop marking blocks at the loop header.
1008 visited.SetBit(header_->GetBlockId());
1009
1010 for (HBasicBlock* back_edge : GetBackEdges()) {
1011 PopulateIrreducibleRecursive(back_edge, &visited);
1012 }
1013 } else {
1014 for (HBasicBlock* back_edge : GetBackEdges()) {
1015 PopulateRecursive(back_edge);
1016 }
1017 }
1018
1019 if (!is_irreducible_loop && graph->IsCompilingOsr()) {
1020 // When compiling in OSR mode, all loops in the compiled method may be entered
1021 // from the interpreter. We treat this OSR entry point just like an extra entry
1022 // to an irreducible loop, so we need to mark the method's loops as irreducible.
1023 // This does not apply to inlined loops which do not act as OSR entry points.
1024 if (suspend_check_ == nullptr) {
1025 // Just building the graph in OSR mode, this loop is not inlined. We never build an
1026 // inner graph in OSR mode as we can do OSR transition only from the outer method.
1027 is_irreducible_loop = true;
1028 } else {
1029 // Look at the suspend check's environment to determine if the loop was inlined.
1030 DCHECK(suspend_check_->HasEnvironment());
1031 if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) {
1032 is_irreducible_loop = true;
1033 }
1034 }
1035 }
1036 if (is_irreducible_loop) {
1037 irreducible_ = true;
1038 contains_irreducible_loop_ = true;
1039 graph->SetHasIrreducibleLoops(true);
1040 }
1041 graph->SetHasLoops(true);
1042 }
1043
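// Add all blocks of `inner_loop` to this loop's block set and propagate the
// union outwards through each enclosing loop, found via the pre-header's loop
// information.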
1044 void HLoopInformation::PopulateInnerLoopUpwards(HLoopInformation* inner_loop) {
1045 DCHECK(inner_loop->GetPreHeader()->GetLoopInformation() == this);
1046 blocks_.Union(&inner_loop->blocks_);
1047 HLoopInformation* outer_loop = GetPreHeader()->GetLoopInformation();
1048 if (outer_loop != nullptr) {
1049 outer_loop->PopulateInnerLoopUpwards(this);
1050 }
1051 }
1052
1053 HBasicBlock* HLoopInformation::GetPreHeader() const {
1054 HBasicBlock* block = header_->GetPredecessors()[0];
1055 DCHECK(irreducible_ || (block == header_->GetDominator()));
1056 return block;
1057 }
1058
1059 bool HLoopInformation::Contains(const HBasicBlock& block) const {
1060 return blocks_.IsBitSet(block.GetBlockId());
1061 }
1062
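// A loop is considered to be "in" another loop if its header block belongs to
// the other loop's block set.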
1063 bool HLoopInformation::IsIn(const HLoopInformation& other) const {
1064 return other.blocks_.IsBitSet(header_->GetBlockId());
1065 }
1066
1067 bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const {
1068 return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId());
1069 }
1070
1071 size_t HLoopInformation::GetLifetimeEnd() const {
1072 size_t last_position = 0;
1073 for (HBasicBlock* back_edge : GetBackEdges()) {
1074 last_position = std::max(back_edge->GetLifetimeEnd(), last_position);
1075 }
1076 return last_position;
1077 }
1078
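// A back edge whose source is not dominated by the loop header means the loop
// body can be entered without passing through the header, i.e. the loop is
// irreducible.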
1079 bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const {
1080 for (HBasicBlock* back_edge : GetBackEdges()) {
1081 DCHECK(back_edge->GetDominator() != nullptr);
1082 if (!header_->Dominates(back_edge)) {
1083 return true;
1084 }
1085 }
1086 return false;
1087 }
1088
1089 bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) {
1090 for (HBasicBlock* back_edge : GetBackEdges()) {
1091 if (!block->Dominates(back_edge)) {
1092 return false;
1093 }
1094 }
1095 return true;
1096 }
1097
1098
1099 bool HLoopInformation::HasExitEdge() const {
1100 // Determine if this loop has at least one exit edge.
1101 HBlocksInLoopReversePostOrderIterator it_loop(*this);
1102 for (; !it_loop.Done(); it_loop.Advance()) {
1103 for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) {
1104 if (!Contains(*successor)) {
1105 return true;
1106 }
1107 }
1108 }
1109 return false;
1110 }
1111
1112 bool HBasicBlock::Dominates(HBasicBlock* other) const {
1113 // Walk up the dominator tree from `other`, to find out if `this`
1114 // is an ancestor.
1115 HBasicBlock* current = other;
1116 while (current != nullptr) {
1117 if (current == this) {
1118 return true;
1119 }
1120 current = current->GetDominator();
1121 }
1122 return false;
1123 }
1124
1125 static void UpdateInputsUsers(HInstruction* instruction) {
1126 HInputsRef inputs = instruction->GetInputs();
1127 for (size_t i = 0; i < inputs.size(); ++i) {
1128 inputs[i]->AddUseAt(instruction, i);
1129 }
1130 // Environment should be created later.
1131 DCHECK(!instruction->HasEnvironment());
1132 }
1133
1134 void HBasicBlock::ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement) {
1135 DCHECK(initial->GetBlock() == this);
1136 InsertPhiAfter(replacement, initial);
1137 initial->ReplaceWith(replacement);
1138 RemovePhi(initial);
1139 }
1140
1141 void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial,
1142 HInstruction* replacement) {
1143 DCHECK(initial->GetBlock() == this);
1144 if (initial->IsControlFlow()) {
1145 // We can only replace a control flow instruction with another control flow instruction.
1146 DCHECK(replacement->IsControlFlow());
1147 DCHECK_EQ(replacement->GetId(), -1);
1148 DCHECK_EQ(replacement->GetType(), DataType::Type::kVoid);
1149 DCHECK_EQ(initial->GetBlock(), this);
1150 DCHECK_EQ(initial->GetType(), DataType::Type::kVoid);
1151 DCHECK(initial->GetUses().empty());
1152 DCHECK(initial->GetEnvUses().empty());
1153 replacement->SetBlock(this);
1154 replacement->SetId(GetGraph()->GetNextInstructionId());
1155 instructions_.InsertInstructionBefore(replacement, initial);
1156 UpdateInputsUsers(replacement);
1157 } else {
1158 InsertInstructionBefore(replacement, initial);
1159 initial->ReplaceWith(replacement);
1160 }
1161 RemoveInstruction(initial);
1162 }
1163
1164 static void Add(HInstructionList* instruction_list,
1165 HBasicBlock* block,
1166 HInstruction* instruction) {
1167 DCHECK(instruction->GetBlock() == nullptr);
1168 DCHECK_EQ(instruction->GetId(), -1);
1169 instruction->SetBlock(block);
1170 instruction->SetId(block->GetGraph()->GetNextInstructionId());
1171 UpdateInputsUsers(instruction);
1172 instruction_list->AddInstruction(instruction);
1173 }
1174
1175 void HBasicBlock::AddInstruction(HInstruction* instruction) {
1176 Add(&instructions_, this, instruction);
1177 }
1178
1179 void HBasicBlock::AddPhi(HPhi* phi) {
1180 Add(&phis_, this, phi);
1181 }
1182
1183 void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
1184 DCHECK(!cursor->IsPhi());
1185 DCHECK(!instruction->IsPhi());
1186 DCHECK_EQ(instruction->GetId(), -1);
1187 DCHECK_NE(cursor->GetId(), -1);
1188 DCHECK_EQ(cursor->GetBlock(), this);
1189 DCHECK(!instruction->IsControlFlow());
1190 instruction->SetBlock(this);
1191 instruction->SetId(GetGraph()->GetNextInstructionId());
1192 UpdateInputsUsers(instruction);
1193 instructions_.InsertInstructionBefore(instruction, cursor);
1194 }
1195
1196 void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
1197 DCHECK(!cursor->IsPhi());
1198 DCHECK(!instruction->IsPhi());
1199 DCHECK_EQ(instruction->GetId(), -1);
1200 DCHECK_NE(cursor->GetId(), -1);
1201 DCHECK_EQ(cursor->GetBlock(), this);
1202 DCHECK(!instruction->IsControlFlow());
1203 DCHECK(!cursor->IsControlFlow());
1204 instruction->SetBlock(this);
1205 instruction->SetId(GetGraph()->GetNextInstructionId());
1206 UpdateInputsUsers(instruction);
1207 instructions_.InsertInstructionAfter(instruction, cursor);
1208 }
1209
1210 void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) {
1211 DCHECK_EQ(phi->GetId(), -1);
1212 DCHECK_NE(cursor->GetId(), -1);
1213 DCHECK_EQ(cursor->GetBlock(), this);
1214 phi->SetBlock(this);
1215 phi->SetId(GetGraph()->GetNextInstructionId());
1216 UpdateInputsUsers(phi);
1217 phis_.InsertInstructionAfter(phi, cursor);
1218 }
1219
1220 static void Remove(HInstructionList* instruction_list,
1221 HBasicBlock* block,
1222 HInstruction* instruction,
1223 bool ensure_safety) {
1224 DCHECK_EQ(block, instruction->GetBlock());
1225 instruction->SetBlock(nullptr);
1226 instruction_list->RemoveInstruction(instruction);
1227 if (ensure_safety) {
1228 DCHECK(instruction->GetUses().empty());
1229 DCHECK(instruction->GetEnvUses().empty());
1230 RemoveAsUser(instruction);
1231 }
1232 }
1233
1234 void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) {
1235 DCHECK(!instruction->IsPhi());
1236 Remove(&instructions_, this, instruction, ensure_safety);
1237 }
1238
1239 void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) {
1240 Remove(&phis_, this, phi, ensure_safety);
1241 }
1242
1243 void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) {
1244 if (instruction->IsPhi()) {
1245 RemovePhi(instruction->AsPhi(), ensure_safety);
1246 } else {
1247 RemoveInstruction(instruction, ensure_safety);
1248 }
1249 }
1250
1251 void HEnvironment::CopyFrom(ArrayRef<HInstruction* const> locals) {
1252 for (size_t i = 0; i < locals.size(); i++) {
1253 HInstruction* instruction = locals[i];
1254 SetRawEnvAt(i, instruction);
1255 if (instruction != nullptr) {
1256 instruction->AddEnvUseAt(this, i);
1257 }
1258 }
1259 }
1260
1261 void HEnvironment::CopyFrom(HEnvironment* env) {
1262 for (size_t i = 0; i < env->Size(); i++) {
1263 HInstruction* instruction = env->GetInstructionAt(i);
1264 SetRawEnvAt(i, instruction);
1265 if (instruction != nullptr) {
1266 instruction->AddEnvUseAt(this, i);
1267 }
1268 }
1269 }
1270
1271 void HEnvironment::CopyFromWithLoopPhiAdjustment(HEnvironment* env,
1272 HBasicBlock* loop_header) {
1273 DCHECK(loop_header->IsLoopHeader());
1274 for (size_t i = 0; i < env->Size(); i++) {
1275 HInstruction* instruction = env->GetInstructionAt(i);
1276 SetRawEnvAt(i, instruction);
1277 if (instruction == nullptr) {
1278 continue;
1279 }
1280 if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
1281 // At the end of the loop pre-header, the corresponding value for instruction
1282 // is the first input of the phi.
1283 HInstruction* initial = instruction->AsPhi()->InputAt(0);
1284 SetRawEnvAt(i, initial);
1285 initial->AddEnvUseAt(this, i);
1286 } else {
1287 instruction->AddEnvUseAt(this, i);
1288 }
1289 }
1290 }
1291
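// Unlink this environment's use record from the instruction currently held at
// `index`, without clearing the vreg slot itself.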
1292 void HEnvironment::RemoveAsUserOfInput(size_t index) const {
1293 const HUserRecord<HEnvironment*>& env_use = vregs_[index];
1294 HInstruction* user = env_use.GetInstruction();
1295 auto before_env_use_node = env_use.GetBeforeUseNode();
1296 user->env_uses_.erase_after(before_env_use_node);
1297 user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
1298 }
1299
1300 void HEnvironment::ReplaceInput(HInstruction* replacement, size_t index) {
1301 const HUserRecord<HEnvironment*>& env_use_record = vregs_[index];
1302 HInstruction* orig_instr = env_use_record.GetInstruction();
1303
1304 DCHECK(orig_instr != replacement);
1305
1306 HUseList<HEnvironment*>::iterator before_use_node = env_use_record.GetBeforeUseNode();
1307 // Note: fixup_end remains valid across splice_after().
1308 auto fixup_end = replacement->env_uses_.empty() ? replacement->env_uses_.begin()
1309 : ++replacement->env_uses_.begin();
1310 replacement->env_uses_.splice_after(replacement->env_uses_.before_begin(),
1311 env_use_record.GetInstruction()->env_uses_,
1312 before_use_node);
1313 replacement->FixUpUserRecordsAfterEnvUseInsertion(fixup_end);
1314 orig_instr->FixUpUserRecordsAfterEnvUseRemoval(before_use_node);
1315 }
1316
1317 std::ostream& HInstruction::Dump(std::ostream& os, bool dump_args) {
1318 // Note: Handle the case where the instruction has been removed from
1319 // the graph to support debugging output for failed gtests.
1320 HGraph* graph = (GetBlock() != nullptr) ? GetBlock()->GetGraph() : nullptr;
1321 HGraphVisualizer::DumpInstruction(&os, graph, this);
1322 if (dump_args) {
1323 // Allocate memory from local ScopedArenaAllocator.
1324 std::optional<MallocArenaPool> local_arena_pool;
1325 std::optional<ArenaStack> local_arena_stack;
1326 if (UNLIKELY(graph == nullptr)) {
1327 local_arena_pool.emplace();
1328 local_arena_stack.emplace(&local_arena_pool.value());
1329 }
1330 ScopedArenaAllocator allocator(
1331 graph != nullptr ? graph->GetArenaStack() : &local_arena_stack.value());
1332 // Instructions that we already visited. We print each instruction only once.
1333 ArenaBitVector visited(&allocator,
1334 (graph != nullptr) ? graph->GetCurrentInstructionId() : 0u,
1335 /* expandable= */ (graph == nullptr),
1336 kArenaAllocMisc);
1337 visited.ClearAllBits();
1338 visited.SetBit(GetId());
1339 // Keep a queue of instructions with their indentations.
1340 ScopedArenaDeque<std::pair<HInstruction*, size_t>> queue(allocator.Adapter(kArenaAllocMisc));
1341 auto add_args = [&queue](HInstruction* instruction, size_t indentation) {
1342 for (HInstruction* arg : ReverseRange(instruction->GetInputs())) {
1343 queue.emplace_front(arg, indentation);
1344 }
1345 };
1346 add_args(this, /*indentation=*/ 1u);
1347 while (!queue.empty()) {
1348 HInstruction* instruction;
1349 size_t indentation;
1350 std::tie(instruction, indentation) = queue.front();
1351 queue.pop_front();
1352 if (!visited.IsBitSet(instruction->GetId())) {
1353 visited.SetBit(instruction->GetId());
1354 os << '\n';
1355 for (size_t i = 0; i != indentation; ++i) {
1356 os << " ";
1357 }
1358 HGraphVisualizer::DumpInstruction(&os, graph, instruction);
1359 add_args(instruction, indentation + 1u);
1360 }
1361 }
1362 }
1363 return os;
1364 }
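// A minimal usage sketch (the locals `instruction` and `oss` are hypothetical):
// dumping an instruction together with its transitive inputs into a string.
//
//   std::ostringstream oss;
//   instruction->Dump(oss, /* dump_args= */ true);
//   LOG(INFO) << oss.str();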
1365
1366 HInstruction* HInstruction::GetNextDisregardingMoves() const {
1367 HInstruction* next = GetNext();
1368 while (next != nullptr && next->IsParallelMove()) {
1369 next = next->GetNext();
1370 }
1371 return next;
1372 }
1373
1374 HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
1375 HInstruction* previous = GetPrevious();
1376 while (previous != nullptr && previous->IsParallelMove()) {
1377 previous = previous->GetPrevious();
1378 }
1379 return previous;
1380 }
1381
1382 void HInstructionList::AddInstruction(HInstruction* instruction) {
1383 if (first_instruction_ == nullptr) {
1384 DCHECK(last_instruction_ == nullptr);
1385 first_instruction_ = last_instruction_ = instruction;
1386 } else {
1387 DCHECK(last_instruction_ != nullptr);
1388 last_instruction_->next_ = instruction;
1389 instruction->previous_ = last_instruction_;
1390 last_instruction_ = instruction;
1391 }
1392 }
1393
1394 void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
1395 DCHECK(Contains(cursor));
1396 if (cursor == first_instruction_) {
1397 cursor->previous_ = instruction;
1398 instruction->next_ = cursor;
1399 first_instruction_ = instruction;
1400 } else {
1401 instruction->previous_ = cursor->previous_;
1402 instruction->next_ = cursor;
1403 cursor->previous_ = instruction;
1404 instruction->previous_->next_ = instruction;
1405 }
1406 }
1407
1408 void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
1409 DCHECK(Contains(cursor));
1410 if (cursor == last_instruction_) {
1411 cursor->next_ = instruction;
1412 instruction->previous_ = cursor;
1413 last_instruction_ = instruction;
1414 } else {
1415 instruction->next_ = cursor->next_;
1416 instruction->previous_ = cursor;
1417 cursor->next_ = instruction;
1418 instruction->next_->previous_ = instruction;
1419 }
1420 }
1421
1422 void HInstructionList::RemoveInstruction(HInstruction* instruction) {
1423 if (instruction->previous_ != nullptr) {
1424 instruction->previous_->next_ = instruction->next_;
1425 }
1426 if (instruction->next_ != nullptr) {
1427 instruction->next_->previous_ = instruction->previous_;
1428 }
1429 if (instruction == first_instruction_) {
1430 first_instruction_ = instruction->next_;
1431 }
1432 if (instruction == last_instruction_) {
1433 last_instruction_ = instruction->previous_;
1434 }
1435 }
1436
1437 bool HInstructionList::Contains(HInstruction* instruction) const {
1438 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1439 if (it.Current() == instruction) {
1440 return true;
1441 }
1442 }
1443 return false;
1444 }
1445
1446 bool HInstructionList::FoundBefore(const HInstruction* instruction1,
1447 const HInstruction* instruction2) const {
1448 DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock());
1449 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1450 if (it.Current() == instruction1) {
1451 return true;
1452 }
1453 if (it.Current() == instruction2) {
1454 return false;
1455 }
1456 }
1457 LOG(FATAL) << "Did not find an order between two instructions of the same block.";
1458 UNREACHABLE();
1459 }
1460
1461 bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const {
1462 if (other_instruction == this) {
1463 // An instruction does not strictly dominate itself.
1464 return false;
1465 }
1466 HBasicBlock* block = GetBlock();
1467 HBasicBlock* other_block = other_instruction->GetBlock();
1468 if (block != other_block) {
1469 return GetBlock()->Dominates(other_instruction->GetBlock());
1470 } else {
1471 // If both instructions are in the same block, ensure this
1472 // instruction comes before `other_instruction`.
1473 if (IsPhi()) {
1474 if (!other_instruction->IsPhi()) {
1475 // Phis appear before non-phi instructions, so this instruction
1476 // dominates `other_instruction`.
1477 return true;
1478 } else {
1479 // There is no order among phis.
1480 LOG(FATAL) << "There is no dominance between phis of a same block.";
1481 UNREACHABLE();
1482 }
1483 } else {
1484 // `this` is not a phi.
1485 if (other_instruction->IsPhi()) {
1486 // Phis appear before non-phi instructions, so this instruction
1487 // does not dominate `other_instruction`.
1488 return false;
1489 } else {
1490 // Check whether this instruction comes before
1491 // `other_instruction` in the instruction list.
1492 return block->GetInstructions().FoundBefore(this, other_instruction);
1493 }
1494 }
1495 }
1496 }
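// A small sketch of the same-block contract (hypothetical instructions `a` and
// `b` placed in that order in one block, neither of them a phi):
//
//   DCHECK(a->StrictlyDominates(b));
//   DCHECK(!b->StrictlyDominates(a));
//   DCHECK(!a->StrictlyDominates(a));  // Never strict over itself.
//
// Asking for strict dominance between two phis of the same block is a fatal
// error by design, since the phis of a block are conceptually parallel.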
1497
1498 void HInstruction::RemoveEnvironment() {
1499 RemoveEnvironmentUses(this);
1500 environment_ = nullptr;
1501 }
1502
1503 void HInstruction::ReplaceWith(HInstruction* other) {
1504 DCHECK(other != nullptr);
1505 // Note: fixup_end remains valid across splice_after().
1506 auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin();
1507 other->uses_.splice_after(other->uses_.before_begin(), uses_);
1508 other->FixUpUserRecordsAfterUseInsertion(fixup_end);
1509
1510 // Note: env_fixup_end remains valid across splice_after().
1511 auto env_fixup_end =
1512 other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin();
1513 other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_);
1514 other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
1515
1516 DCHECK(uses_.empty());
1517 DCHECK(env_uses_.empty());
1518 }
1519
1520 void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
1521 const HUseList<HInstruction*>& uses = GetUses();
1522 for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
1523 HInstruction* user = it->GetUser();
1524 size_t index = it->GetIndex();
1525 // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
1526 ++it;
1527 if (dominator->StrictlyDominates(user)) {
1528 user->ReplaceInput(replacement, index);
1529 } else if (user->IsPhi() && !user->AsPhi()->IsCatchPhi()) {
1530 // If the input flows from a block dominated by `dominator`, we can replace it.
1531 // We do not perform this for catch phis as we don't have control flow support
1532 // for their inputs.
1533 const ArenaVector<HBasicBlock*>& predecessors = user->GetBlock()->GetPredecessors();
1534 HBasicBlock* predecessor = predecessors[index];
1535 if (dominator->GetBlock()->Dominates(predecessor)) {
1536 user->ReplaceInput(replacement, index);
1537 }
1538 }
1539 }
1540 }
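// Illustration of the phi special case above (hypothetical CFG):
//
//   D:  defines `dominator` / `replacement`
//   P1: ... -> J   (P1 dominated by D)
//   P2: ... -> J   (P2 not dominated by D)
//   J:  p = Phi(x [from P1], x [from P2])
//
// The phi itself is not strictly dominated, yet its P1 input flows through a
// dominated predecessor, so only that input is rewritten to `replacement`.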
1541
1542 void HInstruction::ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
1543 const HUseList<HEnvironment*>& uses = GetEnvUses();
1544 for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
1545 HEnvironment* user = it->GetUser();
1546 size_t index = it->GetIndex();
1547 // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
1548 ++it;
1549 if (dominator->StrictlyDominates(user->GetHolder())) {
1550 user->ReplaceInput(replacement, index);
1551 }
1552 }
1553 }
1554
1555 void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) {
1556 HUserRecord<HInstruction*> input_use = InputRecordAt(index);
1557 if (input_use.GetInstruction() == replacement) {
1558 // Nothing to do.
1559 return;
1560 }
1561 HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
1562 // Note: fixup_end remains valid across splice_after().
1563 auto fixup_end =
1564 replacement->uses_.empty() ? replacement->uses_.begin() : ++replacement->uses_.begin();
1565 replacement->uses_.splice_after(replacement->uses_.before_begin(),
1566 input_use.GetInstruction()->uses_,
1567 before_use_node);
1568 replacement->FixUpUserRecordsAfterUseInsertion(fixup_end);
1569 input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
1570 }
1571
1572 size_t HInstruction::EnvironmentSize() const {
1573 return HasEnvironment() ? environment_->Size() : 0;
1574 }
1575
1576 void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
1577 DCHECK(input->GetBlock() != nullptr);
1578 inputs_.push_back(HUserRecord<HInstruction*>(input));
1579 input->AddUseAt(this, inputs_.size() - 1);
1580 }
1581
1582 void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
1583 inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
1584 input->AddUseAt(this, index);
1585 // Update indexes in use nodes of inputs that have been pushed further back by the insert().
1586 for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
1587 DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
1588 inputs_[i].GetUseNode()->SetIndex(i);
1589 }
1590 }
1591
1592 void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
1593 RemoveAsUserOfInput(index);
1594 inputs_.erase(inputs_.begin() + index);
1595 // Update indexes in use nodes of inputs that have been pulled forward by the erase().
1596 for (size_t i = index, e = inputs_.size(); i < e; ++i) {
1597 DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u);
1598 inputs_[i].GetUseNode()->SetIndex(i);
1599 }
1600 }
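// Minimal sketch of the index fix-up above (hypothetical three-input user):
//
//   // inputs_:            [a, b, c]   use-node indexes: 0, 1, 2
//   // RemoveInputAt(1) -> [a, c]      use-node indexes: 0, 1 (c is fixed up)
//
// keeping HUseListNode::GetIndex() equal to the entry's position in inputs_.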
1601
1602 void HVariableInputSizeInstruction::RemoveAllInputs() {
1603 RemoveAsUserOfAllInputs();
1604 DCHECK(!HasNonEnvironmentUses());
1605
1606 inputs_.clear();
1607 DCHECK_EQ(0u, InputCount());
1608 }
1609
1610 size_t HConstructorFence::RemoveConstructorFences(HInstruction* instruction) {
1611 DCHECK(instruction->GetBlock() != nullptr);
1612 // Removing constructor fences only makes sense for instructions with an object return type.
1613 DCHECK_EQ(DataType::Type::kReference, instruction->GetType());
1614
1615 // Return how many instructions were removed for statistic purposes.
1616 size_t remove_count = 0;
1617
1618 // Efficient implementation that simultaneously (in one pass):
1619 // * Scans the uses list for all constructor fences.
1620 // * Deletes that constructor fence from the uses list of `instruction`.
1621 // * Deletes `instruction` from the constructor fence's inputs.
1622 // * Deletes the constructor fence if it now has 0 inputs.
1623
1624 const HUseList<HInstruction*>& uses = instruction->GetUses();
1625 // Warning: Although this is "const", we might mutate the list when calling RemoveInputAt.
1626 for (auto it = uses.begin(), end = uses.end(); it != end; ) {
1627 const HUseListNode<HInstruction*>& use_node = *it;
1628 HInstruction* const use_instruction = use_node.GetUser();
1629
1630 // Advance the iterator immediately once we fetch the use_node.
1631 // Warning: If the input is removed, the current iterator becomes invalid.
1632 ++it;
1633
1634 if (use_instruction->IsConstructorFence()) {
1635 HConstructorFence* ctor_fence = use_instruction->AsConstructorFence();
1636 size_t input_index = use_node.GetIndex();
1637
1638 // Process the candidate instruction for removal
1639 // from the graph.
1640
1641 // Constructor fence instructions are never
1642 // used by other instructions.
1643 //
1644 // If we wanted to make this more generic, it
1645 // could be a runtime if statement.
1646 DCHECK(!ctor_fence->HasUses());
1647
1648 // A constructor fence's return type is "kPrimVoid"
1649 // and therefore it can't have any environment uses.
1650 DCHECK(!ctor_fence->HasEnvironmentUses());
1651
1652 // Remove the inputs first, otherwise removing the instruction
1653 // will try to remove its uses while we are already removing uses
1654 // and this operation will fail.
1655 DCHECK_EQ(instruction, ctor_fence->InputAt(input_index));
1656
1657 // Removing the input will also remove the `use_node`.
1658 // (Do not look at `use_node` after this, it will be a dangling reference).
1659 ctor_fence->RemoveInputAt(input_index);
1660
1661 // Once all inputs are removed, the fence is considered dead and
1662 // is removed.
1663 if (ctor_fence->InputCount() == 0u) {
1664 ctor_fence->GetBlock()->RemoveInstruction(ctor_fence);
1665 ++remove_count;
1666 }
1667 }
1668 }
1669
1670 if (kIsDebugBuild) {
1671 // Post-condition checks:
1672 // * None of the uses of `instruction` are a constructor fence.
1673 // * The `instruction` itself did not get removed from a block.
1674 for (const HUseListNode<HInstruction*>& use_node : instruction->GetUses()) {
1675 CHECK(!use_node.GetUser()->IsConstructorFence());
1676 }
1677 CHECK(instruction->GetBlock() != nullptr);
1678 }
1679
1680 return remove_count;
1681 }
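// A minimal usage sketch, as a pass might call this once an allocation is
// proven non-escaping (the locals below are hypothetical):
//
//   HNewInstance* new_inst = ...;  // proven not to escape
//   size_t removed = HConstructorFence::RemoveConstructorFences(new_inst);
//   // `removed` is only meaningful for compilation statistics.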
1682
1683 void HConstructorFence::Merge(HConstructorFence* other) {
1684 // Do not delete yourself from the graph.
1685 DCHECK(this != other);
1686 // Don't try to merge with an instruction not associated with a block.
1687 DCHECK(other->GetBlock() != nullptr);
1688 // A constructor fence's return type is "kPrimVoid"
1689 // and therefore it cannot have any environment uses.
1690 DCHECK(!other->HasEnvironmentUses());
1691
1692 auto has_input = [](HInstruction* haystack, HInstruction* needle) {
1693 // Check if `haystack` has `needle` as any of its inputs.
1694 for (size_t input_count = 0; input_count < haystack->InputCount(); ++input_count) {
1695 if (haystack->InputAt(input_count) == needle) {
1696 return true;
1697 }
1698 }
1699 return false;
1700 };
1701
1702 // Add any inputs from `other` into `this` if it wasn't already an input.
1703 for (size_t input_count = 0; input_count < other->InputCount(); ++input_count) {
1704 HInstruction* other_input = other->InputAt(input_count);
1705 if (!has_input(this, other_input)) {
1706 AddInput(other_input);
1707 }
1708 }
1709
1710 other->GetBlock()->RemoveInstruction(other);
1711 }
1712
1713 HInstruction* HConstructorFence::GetAssociatedAllocation(bool ignore_inputs) {
1714 HInstruction* new_instance_inst = GetPrevious();
1715 // Check if the immediately preceding instruction is a new-instance/new-array.
1716 // Otherwise this fence is for protecting final fields.
1717 if (new_instance_inst != nullptr &&
1718 (new_instance_inst->IsNewInstance() || new_instance_inst->IsNewArray())) {
1719 if (ignore_inputs) {
1720 // If inputs are ignored, simply check if the predecessor is
1721 // *any* HNewInstance/HNewArray.
1722 //
1723 // Inputs are normally only ignored for prepare_for_register_allocation,
1724 // at which point *any* prior HNewInstance/Array can be considered
1725 // associated.
1726 return new_instance_inst;
1727 } else {
1728 // Normal case: There must be exactly 1 input and the previous instruction
1729 // must be that input.
1730 if (InputCount() == 1u && InputAt(0) == new_instance_inst) {
1731 return new_instance_inst;
1732 }
1733 }
1734 }
1735 return nullptr;
1736 }
1737
1738 #define DEFINE_ACCEPT(name, super) \
1739 void H##name::Accept(HGraphVisitor* visitor) { \
1740 visitor->Visit##name(this); \
1741 }
1742
1743 FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)
1744
1745 #undef DEFINE_ACCEPT
1746
1747 void HGraphVisitor::VisitInsertionOrder() {
1748 for (HBasicBlock* block : graph_->GetActiveBlocks()) {
1749 VisitBasicBlock(block);
1750 }
1751 }
1752
1753 void HGraphVisitor::VisitReversePostOrder() {
1754 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1755 VisitBasicBlock(block);
1756 }
1757 }
1758
1759 void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) {
1760 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
1761 it.Current()->Accept(this);
1762 }
1763 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
1764 it.Current()->Accept(this);
1765 }
1766 }
1767
1768 HConstant* HTypeConversion::TryStaticEvaluation() const {
1769 HGraph* graph = GetBlock()->GetGraph();
1770 if (GetInput()->IsIntConstant()) {
1771 int32_t value = GetInput()->AsIntConstant()->GetValue();
1772 switch (GetResultType()) {
1773 case DataType::Type::kInt8:
1774 return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1775 case DataType::Type::kUint8:
1776 return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1777 case DataType::Type::kInt16:
1778 return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1779 case DataType::Type::kUint16:
1780 return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1781 case DataType::Type::kInt64:
1782 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1783 case DataType::Type::kFloat32:
1784 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1785 case DataType::Type::kFloat64:
1786 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1787 default:
1788 return nullptr;
1789 }
1790 } else if (GetInput()->IsLongConstant()) {
1791 int64_t value = GetInput()->AsLongConstant()->GetValue();
1792 switch (GetResultType()) {
1793 case DataType::Type::kInt8:
1794 return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1795 case DataType::Type::kUint8:
1796 return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1797 case DataType::Type::kInt16:
1798 return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1799 case DataType::Type::kUint16:
1800 return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1801 case DataType::Type::kInt32:
1802 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1803 case DataType::Type::kFloat32:
1804 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1805 case DataType::Type::kFloat64:
1806 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1807 default:
1808 return nullptr;
1809 }
1810 } else if (GetInput()->IsFloatConstant()) {
1811 float value = GetInput()->AsFloatConstant()->GetValue();
1812 switch (GetResultType()) {
1813 case DataType::Type::kInt32:
1814 if (std::isnan(value))
1815 return graph->GetIntConstant(0, GetDexPc());
1816 if (value >= static_cast<float>(kPrimIntMax))
1817 return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1818 if (value <= kPrimIntMin)
1819 return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1820 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1821 case DataType::Type::kInt64:
1822 if (std::isnan(value))
1823 return graph->GetLongConstant(0, GetDexPc());
1824 if (value >= static_cast<float>(kPrimLongMax))
1825 return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1826 if (value <= kPrimLongMin)
1827 return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1828 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1829 case DataType::Type::kFloat64:
1830 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1831 default:
1832 return nullptr;
1833 }
1834 } else if (GetInput()->IsDoubleConstant()) {
1835 double value = GetInput()->AsDoubleConstant()->GetValue();
1836 switch (GetResultType()) {
1837 case DataType::Type::kInt32:
1838 if (std::isnan(value))
1839 return graph->GetIntConstant(0, GetDexPc());
1840 if (value >= kPrimIntMax)
1841 return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1842 if (value <= kPrimIntMin)
1843 return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1844 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1845 case DataType::Type::kInt64:
1846 if (std::isnan(value))
1847 return graph->GetLongConstant(0, GetDexPc());
1848 if (value >= static_cast<double>(kPrimLongMax))
1849 return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1850 if (value <= kPrimLongMin)
1851 return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1852 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1853 case DataType::Type::kFloat32:
1854 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1855 default:
1856 return nullptr;
1857 }
1858 }
1859 return nullptr;
1860 }
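// Worked examples of the saturating conversions above (Java semantics):
//   float  1e20f  -> int  : kPrimIntMax   (overflow saturates)
//   float  NaN    -> int  : 0
//   double -1e30  -> long : kPrimLongMin
//   double -2.9   -> int  : -2            (truncation toward zero)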
1861
1862 HConstant* HUnaryOperation::TryStaticEvaluation() const {
1863 if (GetInput()->IsIntConstant()) {
1864 return Evaluate(GetInput()->AsIntConstant());
1865 } else if (GetInput()->IsLongConstant()) {
1866 return Evaluate(GetInput()->AsLongConstant());
1867 } else if (kEnableFloatingPointStaticEvaluation) {
1868 if (GetInput()->IsFloatConstant()) {
1869 return Evaluate(GetInput()->AsFloatConstant());
1870 } else if (GetInput()->IsDoubleConstant()) {
1871 return Evaluate(GetInput()->AsDoubleConstant());
1872 }
1873 }
1874 return nullptr;
1875 }
1876
1877 HConstant* HBinaryOperation::TryStaticEvaluation() const {
1878 if (GetLeft()->IsIntConstant() && GetRight()->IsIntConstant()) {
1879 return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsIntConstant());
1880 } else if (GetLeft()->IsLongConstant()) {
1881 if (GetRight()->IsIntConstant()) {
1882 // The binop(long, int) case is only valid for shifts and rotations.
1883 DCHECK(IsShl() || IsShr() || IsUShr() || IsRor()) << DebugName();
1884 return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsIntConstant());
1885 } else if (GetRight()->IsLongConstant()) {
1886 return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsLongConstant());
1887 }
1888 } else if (GetLeft()->IsNullConstant() && GetRight()->IsNullConstant()) {
1889 // The binop(null, null) case is only valid for equal and not-equal conditions.
1890 DCHECK(IsEqual() || IsNotEqual()) << DebugName();
1891 return Evaluate(GetLeft()->AsNullConstant(), GetRight()->AsNullConstant());
1892 } else if (kEnableFloatingPointStaticEvaluation) {
1893 if (GetLeft()->IsFloatConstant() && GetRight()->IsFloatConstant()) {
1894 return Evaluate(GetLeft()->AsFloatConstant(), GetRight()->AsFloatConstant());
1895 } else if (GetLeft()->IsDoubleConstant() && GetRight()->IsDoubleConstant()) {
1896 return Evaluate(GetLeft()->AsDoubleConstant(), GetRight()->AsDoubleConstant());
1897 }
1898 }
1899 return nullptr;
1900 }
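// A minimal sketch of how a constant-folding style pass might consume this
// hook (the surrounding pass and the locals below are hypothetical):
//
//   HConstant* folded = binop->TryStaticEvaluation();
//   if (folded != nullptr) {
//     binop->ReplaceWith(folded);
//     binop->GetBlock()->RemoveInstruction(binop);
//   }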
1901
1902 HConstant* HBinaryOperation::GetConstantRight() const {
1903 if (GetRight()->IsConstant()) {
1904 return GetRight()->AsConstant();
1905 } else if (IsCommutative() && GetLeft()->IsConstant()) {
1906 return GetLeft()->AsConstant();
1907 } else {
1908 return nullptr;
1909 }
1910 }
1911
1912 // If `GetConstantRight()` returns one of the inputs, this returns the other
1913 // one. Otherwise it returns null.
1914 HInstruction* HBinaryOperation::GetLeastConstantLeft() const {
1915 HInstruction* most_constant_right = GetConstantRight();
1916 if (most_constant_right == nullptr) {
1917 return nullptr;
1918 } else if (most_constant_right == GetLeft()) {
1919 return GetRight();
1920 } else {
1921 return GetLeft();
1922 }
1923 }
1924
1925 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs) {
1926 // TODO: Replace with auto-generated operator<<.
1927 switch (rhs) {
1928 case ComparisonBias::kNoBias:
1929 return os << "none";
1930 case ComparisonBias::kGtBias:
1931 return os << "gt";
1932 case ComparisonBias::kLtBias:
1933 return os << "lt";
1934 default:
1935 LOG(FATAL) << "Unknown ComparisonBias: " << static_cast<int>(rhs);
1936 UNREACHABLE();
1937 }
1938 }
1939
1940 bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
1941 return this == instruction->GetPreviousDisregardingMoves();
1942 }
1943
1944 bool HInstruction::Equals(const HInstruction* other) const {
1945 if (GetKind() != other->GetKind()) return false;
1946 if (GetType() != other->GetType()) return false;
1947 if (!InstructionDataEquals(other)) return false;
1948 HConstInputsRef inputs = GetInputs();
1949 HConstInputsRef other_inputs = other->GetInputs();
1950 if (inputs.size() != other_inputs.size()) return false;
1951 for (size_t i = 0; i != inputs.size(); ++i) {
1952 if (inputs[i] != other_inputs[i]) return false;
1953 }
1954
1955 DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode());
1956 return true;
1957 }
1958
1959 std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs) {
1960 #define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
1961 switch (rhs) {
1962 FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_CASE)
1963 default:
1964 os << "Unknown instruction kind " << static_cast<int>(rhs);
1965 break;
1966 }
1967 #undef DECLARE_CASE
1968 return os;
1969 }
1970
1971 std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs) {
1972 // TODO Really this should be const but that would require const-ifying
1973 // graph-visualizer and HGraphVisitor which are tangled up everywhere.
1974 return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ false);
1975 }
1976
1977 std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs) {
1978 // TODO Really this should be const but that would require const-ifying
1979 // graph-visualizer and HGraphVisitor which are tangled up everywhere.
1980 return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ true);
1981 }
1982
1983 std::ostream& operator<<(std::ostream& os, const HInstruction& rhs) {
1984 return os << rhs.DumpWithoutArgs();
1985 }
1986
1987 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst) {
1988 os << "Instructions[";
1989 bool first = true;
1990 for (const auto& hi : lst) {
1991 if (!first) {
1992 os << ", ";
1993 }
1994 first = false;
1995 os << hi.GetUser()->DebugName() << "[id: " << hi.GetUser()->GetId()
1996 << ", blk: " << hi.GetUser()->GetBlock()->GetBlockId() << "]@" << hi.GetIndex();
1997 }
1998 os << "]";
1999 return os;
2000 }
2001
2002 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst) {
2003 os << "Environments[";
2004 bool first = true;
2005 for (const auto& hi : lst) {
2006 if (!first) {
2007 os << ", ";
2008 }
2009 first = false;
2010 os << *hi.GetUser()->GetHolder() << "@" << hi.GetIndex();
2011 }
2012 os << "]";
2013 return os;
2014 }
2015
2016 std::ostream& HGraph::Dump(std::ostream& os,
2017 std::optional<std::reference_wrapper<const BlockNamer>> namer) {
2018 HGraphVisualizer vis(&os, this, nullptr, namer);
2019 vis.DumpGraphDebug();
2020 return os;
2021 }
2022
2023 void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) {
2024 if (do_checks) {
2025 DCHECK(!IsPhi());
2026 DCHECK(!IsControlFlow());
2027 DCHECK(CanBeMoved() ||
2028 // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization.
2029 IsShouldDeoptimizeFlag());
2030 DCHECK(!cursor->IsPhi());
2031 }
2032
2033 next_->previous_ = previous_;
2034 if (previous_ != nullptr) {
2035 previous_->next_ = next_;
2036 }
2037 if (block_->instructions_.first_instruction_ == this) {
2038 block_->instructions_.first_instruction_ = next_;
2039 }
2040 DCHECK_NE(block_->instructions_.last_instruction_, this);
2041
2042 previous_ = cursor->previous_;
2043 if (previous_ != nullptr) {
2044 previous_->next_ = this;
2045 }
2046 next_ = cursor;
2047 cursor->previous_ = this;
2048 block_ = cursor->block_;
2049
2050 if (block_->instructions_.first_instruction_ == cursor) {
2051 block_->instructions_.first_instruction_ = this;
2052 }
2053 }
2054
2055 void HInstruction::MoveBeforeFirstUserAndOutOfLoops() {
2056 DCHECK(!CanThrow());
2057 DCHECK(!HasSideEffects());
2058 DCHECK(!HasEnvironmentUses());
2059 DCHECK(HasNonEnvironmentUses());
2060 DCHECK(!IsPhi()); // Makes no sense for Phi.
2061 DCHECK_EQ(InputCount(), 0u);
2062
2063 // Find the target block.
2064 auto uses_it = GetUses().begin();
2065 auto uses_end = GetUses().end();
2066 HBasicBlock* target_block = uses_it->GetUser()->GetBlock();
2067 ++uses_it;
2068 while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) {
2069 ++uses_it;
2070 }
2071 if (uses_it != uses_end) {
2072 // This instruction has uses in two or more blocks. Find the common dominator.
2073 CommonDominator finder(target_block);
2074 for (; uses_it != uses_end; ++uses_it) {
2075 finder.Update(uses_it->GetUser()->GetBlock());
2076 }
2077 target_block = finder.Get();
2078 DCHECK(target_block != nullptr);
2079 }
2080 // Move to the first dominator not in a loop.
2081 while (target_block->IsInLoop()) {
2082 target_block = target_block->GetDominator();
2083 DCHECK(target_block != nullptr);
2084 }
2085
2086 // Find insertion position.
2087 HInstruction* insert_pos = nullptr;
2088 for (const HUseListNode<HInstruction*>& use : GetUses()) {
2089 if (use.GetUser()->GetBlock() == target_block &&
2090 (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
2091 insert_pos = use.GetUser();
2092 }
2093 }
2094 if (insert_pos == nullptr) {
2095 // No user in `target_block`, insert before the control flow instruction.
2096 insert_pos = target_block->GetLastInstruction();
2097 DCHECK(insert_pos->IsControlFlow());
2098 // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
2099 if (insert_pos->IsIf()) {
2100 HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
2101 if (if_input == insert_pos->GetPrevious()) {
2102 insert_pos = if_input;
2103 }
2104 }
2105 }
2106 MoveBefore(insert_pos);
2107 }
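// Illustrative scenario (hypothetical blocks): an input-free instruction
// defined inside a loop but used only in B3 and B5 is re-homed to the common
// dominator of B3 and B5, then hoisted to the closest dominator outside any
// loop, and finally inserted before its first user there (or before that
// block's control-flow instruction when no user lives in it).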
2108
2109 HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor) {
2110 DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
2111 DCHECK_EQ(cursor->GetBlock(), this);
2112
2113 HBasicBlock* new_block =
2114 new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
2115 new_block->instructions_.first_instruction_ = cursor;
2116 new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2117 instructions_.last_instruction_ = cursor->previous_;
2118 if (cursor->previous_ == nullptr) {
2119 instructions_.first_instruction_ = nullptr;
2120 } else {
2121 cursor->previous_->next_ = nullptr;
2122 cursor->previous_ = nullptr;
2123 }
2124
2125 new_block->instructions_.SetBlockOfInstructions(new_block);
2126 AddInstruction(new (GetGraph()->GetAllocator()) HGoto(new_block->GetDexPc()));
2127
2128 for (HBasicBlock* successor : GetSuccessors()) {
2129 successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2130 }
2131 new_block->successors_.swap(successors_);
2132 DCHECK(successors_.empty());
2133 AddSuccessor(new_block);
2134
2135 GetGraph()->AddBlock(new_block);
2136 return new_block;
2137 }
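// Sketch of the effect (hypothetical block contents):
//
//   before: B = [i1, i2, cursor, i3, <last>]   successors: S1, S2
//   after:  B = [i1, i2, Goto] -> N            N = [cursor, i3, <last>]
//           N inherits S1 and S2 as successors.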
2138
2139 HBasicBlock* HBasicBlock::CreateImmediateDominator() {
2140 DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
2141 DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented.";
2142
2143 HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
2144
2145 for (HBasicBlock* predecessor : GetPredecessors()) {
2146 predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block;
2147 }
2148 new_block->predecessors_.swap(predecessors_);
2149 DCHECK(predecessors_.empty());
2150 AddPredecessor(new_block);
2151
2152 GetGraph()->AddBlock(new_block);
2153 return new_block;
2154 }
2155
2156 HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) {
2157 DCHECK_EQ(cursor->GetBlock(), this);
2158
2159 HBasicBlock* new_block =
2160 new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
2161 new_block->instructions_.first_instruction_ = cursor;
2162 new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2163 instructions_.last_instruction_ = cursor->previous_;
2164 if (cursor->previous_ == nullptr) {
2165 instructions_.first_instruction_ = nullptr;
2166 } else {
2167 cursor->previous_->next_ = nullptr;
2168 cursor->previous_ = nullptr;
2169 }
2170
2171 new_block->instructions_.SetBlockOfInstructions(new_block);
2172
2173 for (HBasicBlock* successor : GetSuccessors()) {
2174 successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2175 }
2176 new_block->successors_.swap(successors_);
2177 DCHECK(successors_.empty());
2178
2179 for (HBasicBlock* dominated : GetDominatedBlocks()) {
2180 dominated->dominator_ = new_block;
2181 }
2182 new_block->dominated_blocks_.swap(dominated_blocks_);
2183 DCHECK(dominated_blocks_.empty());
2184 return new_block;
2185 }
2186
2187 HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) {
2188 DCHECK(!cursor->IsControlFlow());
2189 DCHECK_NE(instructions_.last_instruction_, cursor);
2190 DCHECK_EQ(cursor->GetBlock(), this);
2191
2192 HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
2193 new_block->instructions_.first_instruction_ = cursor->GetNext();
2194 new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
2195 cursor->next_->previous_ = nullptr;
2196 cursor->next_ = nullptr;
2197 instructions_.last_instruction_ = cursor;
2198
2199 new_block->instructions_.SetBlockOfInstructions(new_block);
2200 for (HBasicBlock* successor : GetSuccessors()) {
2201 successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
2202 }
2203 new_block->successors_.swap(successors_);
2204 DCHECK(successors_.empty());
2205
2206 for (HBasicBlock* dominated : GetDominatedBlocks()) {
2207 dominated->dominator_ = new_block;
2208 }
2209 new_block->dominated_blocks_.swap(dominated_blocks_);
2210 DCHECK(dominated_blocks_.empty());
2211 return new_block;
2212 }
2213
2214 const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
2215 if (EndsWithTryBoundary()) {
2216 HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
2217 if (try_boundary->IsEntry()) {
2218 DCHECK(!IsTryBlock());
2219 return try_boundary;
2220 } else {
2221 DCHECK(IsTryBlock());
2222 DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary));
2223 return nullptr;
2224 }
2225 } else if (IsTryBlock()) {
2226 return &try_catch_information_->GetTryEntry();
2227 } else {
2228 return nullptr;
2229 }
2230 }
2231
2232 bool HBasicBlock::HasThrowingInstructions() const {
2233 for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2234 if (it.Current()->CanThrow()) {
2235 return true;
2236 }
2237 }
2238 return false;
2239 }
2240
2241 static bool HasOnlyOneInstruction(const HBasicBlock& block) {
2242 return block.GetPhis().IsEmpty()
2243 && !block.GetInstructions().IsEmpty()
2244 && block.GetFirstInstruction() == block.GetLastInstruction();
2245 }
2246
2247 bool HBasicBlock::IsSingleGoto() const {
2248 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto();
2249 }
2250
2251 bool HBasicBlock::IsSingleReturn() const {
2252 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsReturn();
2253 }
2254
2255 bool HBasicBlock::IsSingleReturnOrReturnVoidAllowingPhis() const {
2256 return (GetFirstInstruction() == GetLastInstruction()) &&
2257 (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2258 }
2259
2260 bool HBasicBlock::IsSingleTryBoundary() const {
2261 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary();
2262 }
2263
2264 bool HBasicBlock::EndsWithControlFlowInstruction() const {
2265 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow();
2266 }
2267
2268 bool HBasicBlock::EndsWithReturn() const {
2269 return !GetInstructions().IsEmpty() &&
2270 (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2271 }
2272
2273 bool HBasicBlock::EndsWithIf() const {
2274 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
2275 }
2276
2277 bool HBasicBlock::EndsWithTryBoundary() const {
2278 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
2279 }
2280
2281 bool HBasicBlock::HasSinglePhi() const {
2282 return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
2283 }
2284
2285 ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const {
2286 if (EndsWithTryBoundary()) {
2287 // The normal-flow successor of HTryBoundary is always stored at index zero.
2288 DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor());
2289 return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u);
2290 } else {
2291 // All successors of blocks not ending with TryBoundary are normal.
2292 return ArrayRef<HBasicBlock* const>(successors_);
2293 }
2294 }
2295
2296 ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const {
2297 if (EndsWithTryBoundary()) {
2298 return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers();
2299 } else {
2300 // Blocks not ending with TryBoundary do not have exceptional successors.
2301 return ArrayRef<HBasicBlock* const>();
2302 }
2303 }
2304
2305 bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
2306 ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
2307 ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();
2308
2309 size_t length = handlers1.size();
2310 if (length != handlers2.size()) {
2311 return false;
2312 }
2313
2314 // Exception handlers need to be stored in the same order.
2315 for (size_t i = 0; i < length; ++i) {
2316 if (handlers1[i] != handlers2[i]) {
2317 return false;
2318 }
2319 }
2320 return true;
2321 }
2322
2323 size_t HInstructionList::CountSize() const {
2324 size_t size = 0;
2325 HInstruction* current = first_instruction_;
2326 for (; current != nullptr; current = current->GetNext()) {
2327 size++;
2328 }
2329 return size;
2330 }
2331
2332 void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
2333 for (HInstruction* current = first_instruction_;
2334 current != nullptr;
2335 current = current->GetNext()) {
2336 current->SetBlock(block);
2337 }
2338 }
2339
2340 void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
2341 DCHECK(Contains(cursor));
2342 if (!instruction_list.IsEmpty()) {
2343 if (cursor == last_instruction_) {
2344 last_instruction_ = instruction_list.last_instruction_;
2345 } else {
2346 cursor->next_->previous_ = instruction_list.last_instruction_;
2347 }
2348 instruction_list.last_instruction_->next_ = cursor->next_;
2349 cursor->next_ = instruction_list.first_instruction_;
2350 instruction_list.first_instruction_->previous_ = cursor;
2351 }
2352 }
2353
2354 void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
2355 DCHECK(Contains(cursor));
2356 if (!instruction_list.IsEmpty()) {
2357 if (cursor == first_instruction_) {
2358 first_instruction_ = instruction_list.first_instruction_;
2359 } else {
2360 cursor->previous_->next_ = instruction_list.first_instruction_;
2361 }
2362 instruction_list.last_instruction_->next_ = cursor;
2363 instruction_list.first_instruction_->previous_ = cursor->previous_;
2364 cursor->previous_ = instruction_list.last_instruction_;
2365 }
2366 }
2367
2368 void HInstructionList::Add(const HInstructionList& instruction_list) {
2369 if (IsEmpty()) {
2370 first_instruction_ = instruction_list.first_instruction_;
2371 last_instruction_ = instruction_list.last_instruction_;
2372 } else {
2373 AddAfter(last_instruction_, instruction_list);
2374 }
2375 }
2376
2377 // Should be called on instructions in a dead block in post order. This method
2378 // assumes `insn` has been removed from all users with the exception of catch
2379 // phis because of missing exceptional edges in the graph. It removes the
2380 // instruction from catch phi uses, together with inputs of other catch phis in
2381 // the catch block at the same index, as these must be dead too.
2382 static void RemoveUsesOfDeadInstruction(HInstruction* insn) {
2383 DCHECK(!insn->HasEnvironmentUses());
2384 while (insn->HasNonEnvironmentUses()) {
2385 const HUseListNode<HInstruction*>& use = insn->GetUses().front();
2386 size_t use_index = use.GetIndex();
2387 HBasicBlock* user_block = use.GetUser()->GetBlock();
2388 DCHECK(use.GetUser()->IsPhi() && user_block->IsCatchBlock());
2389 for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2390 phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
2391 }
2392 }
2393 }
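// Example: if dead `insn` still feeds a catch phi at input index 2, then
// input 2 is removed from every phi of that catch block, because all inputs
// at a given index correspond to the same (now unreachable) throwing site.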
2394
2395 void HBasicBlock::DisconnectAndDelete() {
2396 // Dominators must be removed after all the blocks they dominate. This way
2397 // a loop header is removed last, a requirement for correct loop information
2398 // iteration.
2399 DCHECK(dominated_blocks_.empty());
2400
2401 // The following steps gradually remove the block from all its dependants in
2402 // post order (b/27683071).
2403
2404 // (1) Store a basic block that we'll use in step (5) to find loops to be updated.
2405 // We need to do this before step (4) which destroys the predecessor list.
2406 HBasicBlock* loop_update_start = this;
2407 if (IsLoopHeader()) {
2408 HLoopInformation* loop_info = GetLoopInformation();
2409 // All other blocks in this loop should have been removed because the header
2410 // was their dominator.
2411 // Note that we do not remove `this` from `loop_info` as it is unreachable.
2412 DCHECK(!loop_info->IsIrreducible());
2413 DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u);
2414 DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId());
2415 loop_update_start = loop_info->GetPreHeader();
2416 }
2417
2418 // (2) Disconnect the block from its successors and update their phis.
2419 for (HBasicBlock* successor : successors_) {
2420 // Delete this block from the list of predecessors.
2421 size_t this_index = successor->GetPredecessorIndexOf(this);
2422 successor->predecessors_.erase(successor->predecessors_.begin() + this_index);
2423
2424 // Check that `successor` has other predecessors, otherwise `this` is the
2425 // dominator of `successor` which violates the order DCHECKed at the top.
2426 DCHECK(!successor->predecessors_.empty());
2427
2428 // Remove this block's entries in the successor's phis. Skip exceptional
2429 // successors because catch phi inputs do not correspond to predecessor
2430 // blocks but throwing instructions. The inputs of the catch phis will be
2431 // updated in step (3).
2432 if (!successor->IsCatchBlock()) {
2433 if (successor->predecessors_.size() == 1u) {
2434 // The successor has just one predecessor left. Replace phis with the only
2435 // remaining input.
2436 for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2437 HPhi* phi = phi_it.Current()->AsPhi();
2438 phi->ReplaceWith(phi->InputAt(1 - this_index));
2439 successor->RemovePhi(phi);
2440 }
2441 } else {
2442 for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
2443 phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
2444 }
2445 }
2446 }
2447 }
2448 successors_.clear();
2449
2450 // (3) Remove instructions and phis. Instructions should have no remaining uses
2451 // except in catch phis. If an instruction is used by a catch phi at `index`,
2452 // remove `index`-th input of all phis in the catch block since they are
2453 // guaranteed dead. Note that we may miss dead inputs this way but the
2454 // graph will always remain consistent.
2455 for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2456 HInstruction* insn = it.Current();
2457 RemoveUsesOfDeadInstruction(insn);
2458 RemoveInstruction(insn);
2459 }
2460 for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
2461 HPhi* insn = it.Current()->AsPhi();
2462 RemoveUsesOfDeadInstruction(insn);
2463 RemovePhi(insn);
2464 }
2465
2466 // (4) Disconnect the block from its predecessors and update their
2467 // control-flow instructions.
2468 for (HBasicBlock* predecessor : predecessors_) {
2469 // We should not see any back edges as they would have been removed by step (3).
2470 DCHECK(!IsInLoop() || !GetLoopInformation()->IsBackEdge(*predecessor));
2471
2472 HInstruction* last_instruction = predecessor->GetLastInstruction();
2473 if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
2474 // This block is the only normal-flow successor of the TryBoundary which
2475 // makes `predecessor` dead. Since DCE removes blocks in post order,
2476 // exception handlers of this TryBoundary were already visited and any
2477 // remaining handlers therefore must be live. We remove `predecessor` from
2478 // their list of predecessors.
2479 DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
2480 while (predecessor->GetSuccessors().size() > 1) {
2481 HBasicBlock* handler = predecessor->GetSuccessors()[1];
2482 DCHECK(handler->IsCatchBlock());
2483 predecessor->RemoveSuccessor(handler);
2484 handler->RemovePredecessor(predecessor);
2485 }
2486 }
2487
2488 predecessor->RemoveSuccessor(this);
2489 uint32_t num_pred_successors = predecessor->GetSuccessors().size();
2490 if (num_pred_successors == 1u) {
2491 // If we have one successor after removing one, then we must have
2492 // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
2493 // successor. Replace those with a HGoto.
2494 DCHECK(last_instruction->IsIf() ||
2495 last_instruction->IsPackedSwitch() ||
2496 (last_instruction->IsTryBoundary() && IsCatchBlock()));
2497 predecessor->RemoveInstruction(last_instruction);
2498 predecessor->AddInstruction(new (graph_->GetAllocator()) HGoto(last_instruction->GetDexPc()));
2499 } else if (num_pred_successors == 0u) {
2500 // The predecessor has no remaining successors and therefore must be dead.
2501 // We deliberately leave it without a control-flow instruction so that the
2502 // GraphChecker fails unless it is also removed during the pass.
2503 predecessor->RemoveInstruction(last_instruction);
2504 } else {
2505 // There are multiple successors left. The removed block might be a successor
2506 // of a PackedSwitch which will be completely removed (perhaps replaced with
2507 // a Goto), or we are deleting a catch block from a TryBoundary. In either
2508 // case, leave `last_instruction` as is for now.
2509 DCHECK(last_instruction->IsPackedSwitch() ||
2510 (last_instruction->IsTryBoundary() && IsCatchBlock()));
2511 }
2512 }
2513 predecessors_.clear();
2514
2515 // (5) Remove the block from all loops it is included in. Skip the inner-most
2516 // loop if this is the loop header (see definition of `loop_update_start`)
2517 // because the loop header's predecessor list has been destroyed in step (4).
2518 for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
2519 HLoopInformation* loop_info = it.Current();
2520 loop_info->Remove(this);
2521 if (loop_info->IsBackEdge(*this)) {
2522 // If this was the last back edge of the loop, we deliberately leave the
2523 // loop in an inconsistent state and will fail GraphChecker unless the
2524 // entire loop is removed during the pass.
2525 loop_info->RemoveBackEdge(this);
2526 }
2527 }
2528
2529 // (6) Disconnect from the dominator.
2530 dominator_->RemoveDominatedBlock(this);
2531 SetDominator(nullptr);
2532
2533 // (7) Delete from the graph, update reverse post order.
2534 graph_->DeleteDeadEmptyBlock(this);
2535 SetGraph(nullptr);
2536 }
2537
2538 void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
2539 DCHECK(EndsWithControlFlowInstruction());
2540 RemoveInstruction(GetLastInstruction());
2541 instructions_.Add(other->GetInstructions());
2542 other->instructions_.SetBlockOfInstructions(this);
2543 other->instructions_.Clear();
2544 }
2545
2546 void HBasicBlock::MergeWith(HBasicBlock* other) {
2547 DCHECK_EQ(GetGraph(), other->GetGraph());
2548 DCHECK(ContainsElement(dominated_blocks_, other));
2549 DCHECK_EQ(GetSingleSuccessor(), other);
2550 DCHECK_EQ(other->GetSinglePredecessor(), this);
2551 DCHECK(other->GetPhis().IsEmpty());
2552
2553 // Move instructions from `other` to `this`.
2554 MergeInstructionsWith(other);
2555
2556 // Remove `other` from the loops it is included in.
2557 for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
2558 HLoopInformation* loop_info = it.Current();
2559 loop_info->Remove(other);
2560 if (loop_info->IsBackEdge(*other)) {
2561 loop_info->ReplaceBackEdge(other, this);
2562 }
2563 }
2564
2565 // Update links to the successors of `other`.
2566 successors_.clear();
2567 for (HBasicBlock* successor : other->GetSuccessors()) {
2568 successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
2569 }
2570 successors_.swap(other->successors_);
2571 DCHECK(other->successors_.empty());
2572
2573 // Update the dominator tree.
2574 RemoveDominatedBlock(other);
2575 for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
2576 dominated->SetDominator(this);
2577 }
2578 dominated_blocks_.insert(
2579 dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
2580 other->dominated_blocks_.clear();
2581 other->dominator_ = nullptr;
2582
2583 // Clear the list of predecessors of `other` in preparation of deleting it.
2584 other->predecessors_.clear();
2585
2586 // Delete `other` from the graph. The function updates reverse post order.
2587 graph_->DeleteDeadEmptyBlock(other);
2588 other->SetGraph(nullptr);
2589 }
2590
2591 void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
2592 DCHECK_NE(GetGraph(), other->GetGraph());
2593 DCHECK(GetDominatedBlocks().empty());
2594 DCHECK(GetSuccessors().empty());
2595 DCHECK(!EndsWithControlFlowInstruction());
2596 DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
2597 DCHECK(other->GetPhis().IsEmpty());
2598 DCHECK(!other->IsInLoop());
2599
2600 // Move instructions from `other` to `this`.
2601 instructions_.Add(other->GetInstructions());
2602 other->instructions_.SetBlockOfInstructions(this);
2603
2604 // Update links to the successors of `other`.
2605 successors_.clear();
2606 for (HBasicBlock* successor : other->GetSuccessors()) {
2607 successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
2608 }
2609 successors_.swap(other->successors_);
2610 DCHECK(other->successors_.empty());
2611
2612 // Update the dominator tree.
2613 for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
2614 dominated->SetDominator(this);
2615 }
2616 dominated_blocks_.insert(
2617 dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
2618 other->dominated_blocks_.clear();
2619 other->dominator_ = nullptr;
2620 other->graph_ = nullptr;
2621 }
2622
2623 void HBasicBlock::ReplaceWith(HBasicBlock* other) {
2624 while (!GetPredecessors().empty()) {
2625 HBasicBlock* predecessor = GetPredecessors()[0];
2626 predecessor->ReplaceSuccessor(this, other);
2627 }
2628 while (!GetSuccessors().empty()) {
2629 HBasicBlock* successor = GetSuccessors()[0];
2630 successor->ReplacePredecessor(this, other);
2631 }
2632 for (HBasicBlock* dominated : GetDominatedBlocks()) {
2633 other->AddDominatedBlock(dominated);
2634 }
2635 GetDominator()->ReplaceDominatedBlock(this, other);
2636 other->SetDominator(GetDominator());
2637 dominator_ = nullptr;
2638 graph_ = nullptr;
2639 }
2640
2641 void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
2642 DCHECK_EQ(block->GetGraph(), this);
2643 DCHECK(block->GetSuccessors().empty());
2644 DCHECK(block->GetPredecessors().empty());
2645 DCHECK(block->GetDominatedBlocks().empty());
2646 DCHECK(block->GetDominator() == nullptr);
2647 DCHECK(block->GetInstructions().IsEmpty());
2648 DCHECK(block->GetPhis().IsEmpty());
2649
2650 if (block->IsExitBlock()) {
2651 SetExitBlock(nullptr);
2652 }
2653
2654 RemoveElement(reverse_post_order_, block);
2655 blocks_[block->GetBlockId()] = nullptr;
2656 block->SetGraph(nullptr);
2657 }
2658
2659 void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
2660 HBasicBlock* reference,
2661 bool replace_if_back_edge) {
2662 if (block->IsLoopHeader()) {
2663 // Clear the information of which blocks are contained in that loop. Since the
2664 // information is stored as a bit vector based on block ids, we have to update
2665 // it, as those block ids were specific to the callee graph and we are now adding
2666 // these blocks to the caller graph.
2667 block->GetLoopInformation()->ClearAllBlocks();
2668 }
2669
2670 // If not already in a loop, update the loop information.
2671 if (!block->IsInLoop()) {
2672 block->SetLoopInformation(reference->GetLoopInformation());
2673 }
2674
2675 // If the block is in a loop, update all its outward loops.
2676 HLoopInformation* loop_info = block->GetLoopInformation();
2677 if (loop_info != nullptr) {
2678 for (HLoopInformationOutwardIterator loop_it(*block);
2679 !loop_it.Done();
2680 loop_it.Advance()) {
2681 loop_it.Current()->Add(block);
2682 }
2683 if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
2684 loop_info->ReplaceBackEdge(reference, block);
2685 }
2686 }
2687
2688 // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
2689 TryCatchInformation* try_catch_info = reference->IsTryBlock()
2690 ? reference->GetTryCatchInformation()
2691 : nullptr;
2692 block->SetTryCatchInformation(try_catch_info);
2693 }
2694
2695 HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
2696 DCHECK(HasExitBlock()) << "Unimplemented scenario";
2697 // Update the environments in this graph to have the invoke's environment
2698 // as parent.
2699 {
2700 // Skip the entry block, we do not need to update the entry's suspend check.
2701 for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
2702 for (HInstructionIterator instr_it(block->GetInstructions());
2703 !instr_it.Done();
2704 instr_it.Advance()) {
2705 HInstruction* current = instr_it.Current();
2706 if (current->NeedsEnvironment()) {
2707 DCHECK(current->HasEnvironment());
2708 current->GetEnvironment()->SetAndCopyParentChain(
2709 outer_graph->GetAllocator(), invoke->GetEnvironment());
2710 }
2711 }
2712 }
2713 }
2714 outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());
2715
2716 if (HasBoundsChecks()) {
2717 outer_graph->SetHasBoundsChecks(true);
2718 }
2719 if (HasLoops()) {
2720 outer_graph->SetHasLoops(true);
2721 }
2722 if (HasIrreducibleLoops()) {
2723 outer_graph->SetHasIrreducibleLoops(true);
2724 }
2725 if (HasDirectCriticalNativeCall()) {
2726 outer_graph->SetHasDirectCriticalNativeCall(true);
2727 }
2728 if (HasTryCatch()) {
2729 outer_graph->SetHasTryCatch(true);
2730 }
2731 if (HasSIMD()) {
2732 outer_graph->SetHasSIMD(true);
2733 }
2734
2735 HInstruction* return_value = nullptr;
2736 if (GetBlocks().size() == 3) {
2737 // Inliner already made sure we don't inline methods that always throw.
2738 DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
2739 // Simple case of an entry block, a body block, and an exit block.
2740 // Put the body block's instruction into `invoke`'s block.
2741 HBasicBlock* body = GetBlocks()[1];
2742 DCHECK(GetBlocks()[0]->IsEntryBlock());
2743 DCHECK(GetBlocks()[2]->IsExitBlock());
2744 DCHECK(!body->IsExitBlock());
2745 DCHECK(!body->IsInLoop());
2746 HInstruction* last = body->GetLastInstruction();
2747
2748 // Note that we add instructions before the invoke only to simplify polymorphic inlining.
2749 invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
2750 body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());
2751
2752 // Replace the invoke with the return value of the inlined graph.
2753 if (last->IsReturn()) {
2754 return_value = last->InputAt(0);
2755 } else {
2756 DCHECK(last->IsReturnVoid());
2757 }
2758
2759 invoke->GetBlock()->RemoveInstruction(last);
2760 } else {
2761 // Need to inline multiple blocks. We split `invoke`'s block
2762 // into two blocks, merge the first block of the inlined graph into
2763 // the first half, and replace the exit block of the inlined graph
2764 // with the second half.
2765 ArenaAllocator* allocator = outer_graph->GetAllocator();
2766 HBasicBlock* at = invoke->GetBlock();
2767 // Note that we split before the invoke only to simplify polymorphic inlining.
2768 HBasicBlock* to = at->SplitBeforeForInlining(invoke);
2769
2770 HBasicBlock* first = entry_block_->GetSuccessors()[0];
2771 DCHECK(!first->IsInLoop());
2772 at->MergeWithInlined(first);
2773 exit_block_->ReplaceWith(to);
2774
2775 // Update the meta information surrounding blocks:
2776 // (1) the graph they are now in,
2777 // (2) the reverse post order of that graph,
2778 // (3) their potential loop information, inner and outer,
2779 // (4) try block membership.
2780 // Note that we do not need to update catch phi inputs because they
2781 // correspond to the register file of the outer method which the inlinee
2782 // cannot modify.
2783
2784 // We don't add the entry block, the exit block, or the first block, which
2785 // has been merged with `at`.
2786 static constexpr int kNumberOfSkippedBlocksInCallee = 3;
2787
2788 // We add the `to` block.
2789 static constexpr int kNumberOfNewBlocksInCaller = 1;
2790 size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
2791 + kNumberOfNewBlocksInCaller;
2792
2793 // Find the location of `at` in the outer graph's reverse post order. The new
2794 // blocks will be added after it.
2795 size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
2796 MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);
2797
2798 // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
2799 // and (4) to the blocks that apply.
2800 for (HBasicBlock* current : GetReversePostOrder()) {
2801 if (current != exit_block_ && current != entry_block_ && current != first) {
2802 DCHECK(current->GetTryCatchInformation() == nullptr);
2803 DCHECK(current->GetGraph() == this);
2804 current->SetGraph(outer_graph);
2805 outer_graph->AddBlock(current);
2806 outer_graph->reverse_post_order_[++index_of_at] = current;
2807 UpdateLoopAndTryInformationOfNewBlock(current, at, /* replace_if_back_edge= */ false);
2808 }
2809 }
2810
2811 // Do (1), (2), (3) and (4) to `to`.
2812 to->SetGraph(outer_graph);
2813 outer_graph->AddBlock(to);
2814 outer_graph->reverse_post_order_[++index_of_at] = to;
2815 // Only `to` can become a back edge, as the inlined blocks
2816 // are predecessors of `to`.
2817 UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge= */ true);
2818
2819 // Update all predecessors of the exit block (now the `to` block)
2820 // to end with an `HGoto` instead of an `HReturn`. Special case throwing blocks
2821 // to now get the outer graph exit block as successor. Note that the inliner
2822 // currently doesn't support inlining methods with try/catch.
2823 HPhi* return_value_phi = nullptr;
2824 bool rerun_dominance = false;
2825 bool rerun_loop_analysis = false;
2826 for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
2827 HBasicBlock* predecessor = to->GetPredecessors()[pred];
2828 HInstruction* last = predecessor->GetLastInstruction();
2829 if (last->IsThrow()) {
2830 DCHECK(!at->IsTryBlock());
2831 predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
2832 --pred;
2833 // We need to recompute dominance information, as the exit block now has
2834 // a new dominator.
2835 rerun_dominance = true;
2836 if (predecessor->GetLoopInformation() != nullptr) {
2837 // The exit block and blocks post dominated by the exit block do not belong
2838 // to any loop. Because we do not compute the post dominators, we need to re-run
2839 // loop analysis to get the loop information correct.
2840 rerun_loop_analysis = true;
2841 }
2842 } else {
2843 if (last->IsReturnVoid()) {
2844 DCHECK(return_value == nullptr);
2845 DCHECK(return_value_phi == nullptr);
2846 } else {
2847 DCHECK(last->IsReturn());
2848 if (return_value_phi != nullptr) {
2849 return_value_phi->AddInput(last->InputAt(0));
2850 } else if (return_value == nullptr) {
2851 return_value = last->InputAt(0);
2852 } else {
2853 // There will be multiple returns.
2854 return_value_phi = new (allocator) HPhi(
2855 allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
2856 to->AddPhi(return_value_phi);
2857 return_value_phi->AddInput(return_value);
2858 return_value_phi->AddInput(last->InputAt(0));
2859 return_value = return_value_phi;
2860 }
2861 }
2862 predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
2863 predecessor->RemoveInstruction(last);
2864 }
2865 }
2866 if (rerun_loop_analysis) {
2867 DCHECK(!outer_graph->HasIrreducibleLoops())
2868 << "Recomputing loop information in graphs with irreducible loops "
2869 << "is unsupported, as it could lead to loop header changes";
2870 outer_graph->ClearLoopInformation();
2871 outer_graph->ClearDominanceInformation();
2872 outer_graph->BuildDominatorTree();
2873 } else if (rerun_dominance) {
2874 outer_graph->ClearDominanceInformation();
2875 outer_graph->ComputeDominanceInformation();
2876 }
2877 }
2878
2879 // Walk over the entry block and:
2880 // - Move constants from the entry block to the outer_graph's entry block,
2881 // - Replace HParameterValue instructions with their real value.
2882 // - Remove suspend checks, which hold an environment.
2883 // We must do this after the other blocks have been inlined, otherwise ids of
2884 // constants could overlap with the inner graph.
2885 size_t parameter_index = 0;
2886 for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
2887 HInstruction* current = it.Current();
2888 HInstruction* replacement = nullptr;
2889 if (current->IsNullConstant()) {
2890 replacement = outer_graph->GetNullConstant(current->GetDexPc());
2891 } else if (current->IsIntConstant()) {
2892 replacement = outer_graph->GetIntConstant(
2893 current->AsIntConstant()->GetValue(), current->GetDexPc());
2894 } else if (current->IsLongConstant()) {
2895 replacement = outer_graph->GetLongConstant(
2896 current->AsLongConstant()->GetValue(), current->GetDexPc());
2897 } else if (current->IsFloatConstant()) {
2898 replacement = outer_graph->GetFloatConstant(
2899 current->AsFloatConstant()->GetValue(), current->GetDexPc());
2900 } else if (current->IsDoubleConstant()) {
2901 replacement = outer_graph->GetDoubleConstant(
2902 current->AsDoubleConstant()->GetValue(), current->GetDexPc());
2903 } else if (current->IsParameterValue()) {
2904 if (kIsDebugBuild
2905 && invoke->IsInvokeStaticOrDirect()
2906 && invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
2907 // Ensure we do not use the last input of `invoke`, as it
2908 // contains a clinit check which is not an actual argument.
2909 size_t last_input_index = invoke->InputCount() - 1;
2910 DCHECK(parameter_index != last_input_index);
2911 }
2912 replacement = invoke->InputAt(parameter_index++);
2913 } else if (current->IsCurrentMethod()) {
2914 replacement = outer_graph->GetCurrentMethod();
2915 } else {
2916 DCHECK(current->IsGoto() || current->IsSuspendCheck());
2917 entry_block_->RemoveInstruction(current);
2918 }
2919 if (replacement != nullptr) {
2920 current->ReplaceWith(replacement);
2921 // If `current` is the return value, update `return_value` to the replacement.
2922 if (current == return_value) {
2923 DCHECK_EQ(entry_block_, return_value->GetBlock());
2924 return_value = replacement;
2925 }
2926 }
2927 }
2928
2929 return return_value;
2930 }
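
/*
 * An illustrative sketch of the multi-block case handled above (the exact
 * layout depends on the callee's CFG):
 *
 *        caller before                  caller after
 *
 *   at: ...; invoke; ...       at: ...; <callee's first block merged in>
 *                                            |
 *                                  <remaining callee blocks>
 *                                    |                  |
 *                              (HReturn becomes   (HThrow keeps its block,
 *                               HGoto into `to`)   now targeting the outer
 *                                    |             graph's exit block)
 *                                   to: starts at the former `invoke`
 *
 * When several paths return, their values are merged by the HPhi created in
 * `to`, and that phi is what InlineInto returns as `return_value`.
 */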
2931
2932 /*
2933 * Loop will be transformed to:
2934 * old_pre_header
2935 * |
2936 * if_block
2937 * / \
2938 * true_block false_block
2939 * \ /
2940 * new_pre_header
2941 * |
2942 * header
2943 */
2944 void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
2945 DCHECK(header->IsLoopHeader());
2946 HBasicBlock* old_pre_header = header->GetDominator();
2947
2948 // Need extra block to avoid critical edge.
2949 HBasicBlock* if_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
2950 HBasicBlock* true_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
2951 HBasicBlock* false_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
2952 HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
2953 AddBlock(if_block);
2954 AddBlock(true_block);
2955 AddBlock(false_block);
2956 AddBlock(new_pre_header);
2957
2958 header->ReplacePredecessor(old_pre_header, new_pre_header);
2959 old_pre_header->successors_.clear();
2960 old_pre_header->dominated_blocks_.clear();
2961
2962 old_pre_header->AddSuccessor(if_block);
2963 if_block->AddSuccessor(true_block); // True successor
2964 if_block->AddSuccessor(false_block); // False successor
2965 true_block->AddSuccessor(new_pre_header);
2966 false_block->AddSuccessor(new_pre_header);
2967
2968 old_pre_header->dominated_blocks_.push_back(if_block);
2969 if_block->SetDominator(old_pre_header);
2970 if_block->dominated_blocks_.push_back(true_block);
2971 true_block->SetDominator(if_block);
2972 if_block->dominated_blocks_.push_back(false_block);
2973 false_block->SetDominator(if_block);
2974 if_block->dominated_blocks_.push_back(new_pre_header);
2975 new_pre_header->SetDominator(if_block);
2976 new_pre_header->dominated_blocks_.push_back(header);
2977 header->SetDominator(new_pre_header);
2978
2979 // Fix reverse post order.
2980 size_t index_of_header = IndexOfElement(reverse_post_order_, header);
2981 MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
2982 reverse_post_order_[index_of_header++] = if_block;
2983 reverse_post_order_[index_of_header++] = true_block;
2984 reverse_post_order_[index_of_header++] = false_block;
2985 reverse_post_order_[index_of_header++] = new_pre_header;
2986
2987 // The pre_header can never be a back edge of a loop.
2988 DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
2989 !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
2990 UpdateLoopAndTryInformationOfNewBlock(
2991 if_block, old_pre_header, /* replace_if_back_edge= */ false);
2992 UpdateLoopAndTryInformationOfNewBlock(
2993 true_block, old_pre_header, /* replace_if_back_edge= */ false);
2994 UpdateLoopAndTryInformationOfNewBlock(
2995 false_block, old_pre_header, /* replace_if_back_edge= */ false);
2996 UpdateLoopAndTryInformationOfNewBlock(
2997 new_pre_header, old_pre_header, /* replace_if_back_edge= */ false);
2998 }
2999
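/*
 * The transformation below appends a new single-block loop between `header`
 * and `exit` (a sketch; as noted in the code, the caller is expected to add
 * the loop condition in new_header):
 *
 *            header
 *              |
 *        new_pre_header
 *              |
 *          new_header <------+
 *            /     \         |
 *         exit    new_body --+
 */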
3000 HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
3001 HBasicBlock* body,
3002 HBasicBlock* exit) {
3003 DCHECK(header->IsLoopHeader());
3004 HLoopInformation* loop = header->GetLoopInformation();
3005
3006 // Add new loop blocks.
3007 HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3008 HBasicBlock* new_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
3009 HBasicBlock* new_body = new (allocator_) HBasicBlock(this, header->GetDexPc());
3010 AddBlock(new_pre_header);
3011 AddBlock(new_header);
3012 AddBlock(new_body);
3013
3014 // Set up control flow.
3015 header->ReplaceSuccessor(exit, new_pre_header);
3016 new_pre_header->AddSuccessor(new_header);
3017 new_header->AddSuccessor(exit);
3018 new_header->AddSuccessor(new_body);
3019 new_body->AddSuccessor(new_header);
3020
3021 // Set up dominators.
3022 header->ReplaceDominatedBlock(exit, new_pre_header);
3023 new_pre_header->SetDominator(header);
3024 new_pre_header->dominated_blocks_.push_back(new_header);
3025 new_header->SetDominator(new_pre_header);
3026 new_header->dominated_blocks_.push_back(new_body);
3027 new_body->SetDominator(new_header);
3028 new_header->dominated_blocks_.push_back(exit);
3029 exit->SetDominator(new_header);
3030
3031 // Fix reverse post order.
3032 size_t index_of_header = IndexOfElement(reverse_post_order_, header);
3033 MakeRoomFor(&reverse_post_order_, 2, index_of_header);
3034 reverse_post_order_[++index_of_header] = new_pre_header;
3035 reverse_post_order_[++index_of_header] = new_header;
3036 size_t index_of_body = IndexOfElement(reverse_post_order_, body);
3037 MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
3038 reverse_post_order_[index_of_body] = new_body;
3039
3040 // Add gotos and suspend check (client must add conditional in header).
3041 new_pre_header->AddInstruction(new (allocator_) HGoto());
3042 HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(header->GetDexPc());
3043 new_header->AddInstruction(suspend_check);
3044 new_body->AddInstruction(new (allocator_) HGoto());
3045 suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
3046 loop->GetSuspendCheck()->GetEnvironment(), header);
3047
3048 // Update loop information.
3049 new_header->AddBackEdge(new_body);
3050 new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
3051 new_header->GetLoopInformation()->Populate();
3052 new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation()); // outward
3053 HLoopInformationOutwardIterator it(*new_header);
3054 for (it.Advance(); !it.Done(); it.Advance()) {
3055 it.Current()->Add(new_pre_header);
3056 it.Current()->Add(new_header);
3057 it.Current()->Add(new_body);
3058 }
3059 return new_pre_header;
3060 }
3061
3062 static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
3063 REQUIRES_SHARED(Locks::mutator_lock_) {
3064 if (rti.IsValid()) {
3065 DCHECK(upper_bound_rti.IsSupertypeOf(rti))
3066 << " upper_bound_rti: " << upper_bound_rti
3067 << " rti: " << rti;
3068 DCHECK(!upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes() || rti.IsExact())
3069 << " upper_bound_rti: " << upper_bound_rti
3070 << " rti: " << rti;
3071 }
3072 }
3073
3074 void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
3075 if (kIsDebugBuild) {
3076 DCHECK_EQ(GetType(), DataType::Type::kReference);
3077 ScopedObjectAccess soa(Thread::Current());
3078 DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
3079 if (IsBoundType()) {
3080 // Having the test here spares us from making the method virtual just for
3081 // the sake of a DCHECK.
3082 CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
3083 }
3084 }
3085 reference_type_handle_ = rti.GetTypeHandle();
3086 SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
3087 }
3088
3089 bool HBoundType::InstructionDataEquals(const HInstruction* other) const {
3090 const HBoundType* other_bt = other->AsBoundType();
3091 ScopedObjectAccess soa(Thread::Current());
3092 return GetUpperBound().IsEqual(other_bt->GetUpperBound()) &&
3093 GetUpperCanBeNull() == other_bt->GetUpperCanBeNull() &&
3094 CanBeNull() == other_bt->CanBeNull();
3095 }
3096
3097 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
3098 if (kIsDebugBuild) {
3099 ScopedObjectAccess soa(Thread::Current());
3100 DCHECK(upper_bound.IsValid());
3101 DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
3102 CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
3103 }
3104 upper_bound_ = upper_bound;
3105 SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
3106 }
3107
3108 ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) {
3109 if (kIsDebugBuild) {
3110 ScopedObjectAccess soa(Thread::Current());
3111 DCHECK(IsValidHandle(type_handle));
3112 if (!is_exact) {
3113 DCHECK(!type_handle->CannotBeAssignedFromOtherTypes())
3114 << "Callers of ReferenceTypeInfo::Create should ensure is_exact is properly computed";
3115 }
3116 }
3117 return ReferenceTypeInfo(type_handle, is_exact);
3118 }
3119
3120 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
3121 ScopedObjectAccess soa(Thread::Current());
3122 os << "["
3123 << " is_valid=" << rhs.IsValid()
3124 << " type=" << (!rhs.IsValid() ? "?" : mirror::Class::PrettyClass(rhs.GetTypeHandle().Get()))
3125 << " is_exact=" << rhs.IsExact()
3126 << " ]";
3127 return os;
3128 }
3129
3130 bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
3131 // For now, assume that instructions in different blocks may use the
3132 // environment.
3133 // TODO: Use the control flow to decide if this is true.
3134 if (GetBlock() != other->GetBlock()) {
3135 return true;
3136 }
3137
3138 // We know that we are in the same block. Walk from 'this' to 'other',
3139 // checking to see if there is any instruction with an environment.
3140 HInstruction* current = this;
3141 for (; current != other && current != nullptr; current = current->GetNext()) {
3142 // This is a conservative check, as the instruction result may not be in
3143 // the referenced environment.
3144 if (current->HasEnvironment()) {
3145 return true;
3146 }
3147 }
3148
3149 // We should have been called with 'this' before 'other' in the block.
3150 // Just confirm this.
3151 DCHECK(current != nullptr);
3152 return false;
3153 }
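
// An illustrative trace of the walk above: for a block containing, in order,
//   n: HNullCheck (carries an environment),  a: HAdd,  g: HGoto,
// a->HasAnyEnvironmentUseBefore(g) is false, while n->HasAnyEnvironmentUseBefore(g)
// is true, because the walk starts at `this` itself and stops just before `other`.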
3154
3155 void HInvoke::SetIntrinsic(Intrinsics intrinsic,
3156 IntrinsicNeedsEnvironment needs_env,
3157 IntrinsicSideEffects side_effects,
3158 IntrinsicExceptions exceptions) {
3159 intrinsic_ = intrinsic;
3160 IntrinsicOptimizations opt(this);
3161
3162 // Adjust method's side effects from intrinsic table.
3163 switch (side_effects) {
3164 case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
3165 case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
3166 case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
3167 case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
3168 }
3169
3170 if (needs_env == kNoEnvironment) {
3171 opt.SetDoesNotNeedEnvironment();
3172 } else {
3173 // If we need an environment, that means there will be a call, which can trigger GC.
3174 SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
3175 }
3176 // Adjust method's exception status from intrinsic table.
3177 SetCanThrow(exceptions == kCanThrow);
3178 }
3179
3180 bool HNewInstance::IsStringAlloc() const {
3181 return GetEntrypoint() == kQuickAllocStringObject;
3182 }
3183
3184 bool HInvoke::NeedsEnvironment() const {
3185 if (!IsIntrinsic()) {
3186 return true;
3187 }
3188 IntrinsicOptimizations opt(*this);
3189 return !opt.GetDoesNotNeedEnvironment();
3190 }
3191
3192 const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
3193 ArtMethod* caller = GetEnvironment()->GetMethod();
3194 ScopedObjectAccess soa(Thread::Current());
3195 // `caller` is null for a top-level graph representing a method whose declaring
3196 // class was not resolved.
3197 return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
3198 }
3199
3200 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) {
3201 switch (rhs) {
3202 case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit:
3203 return os << "explicit";
3204 case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit:
3205 return os << "implicit";
3206 case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone:
3207 return os << "none";
3208 default:
3209 LOG(FATAL) << "Unknown ClinitCheckRequirement: " << static_cast<int>(rhs);
3210 UNREACHABLE();
3211 }
3212 }
3213
3214 bool HInvokeVirtual::CanDoImplicitNullCheckOn(HInstruction* obj) const {
3215 if (obj != InputAt(0)) {
3216 return false;
3217 }
3218 switch (GetIntrinsic()) {
3219 case Intrinsics::kNone:
3220 return true;
3221 case Intrinsics::kReferenceRefersTo:
3222 return true;
3223 default:
3224 // TODO: Add implicit null checks in more intrinsics.
3225 return false;
3226 }
3227 }
3228
3229 bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
3230 const HLoadClass* other_load_class = other->AsLoadClass();
3231 // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
3232 // names rather than type indexes. However, we shall also have to re-think the hash code.
3233 if (type_index_ != other_load_class->type_index_ ||
3234 GetPackedFields() != other_load_class->GetPackedFields()) {
3235 return false;
3236 }
3237 switch (GetLoadKind()) {
3238 case LoadKind::kBootImageRelRo:
3239 case LoadKind::kJitBootImageAddress:
3240 case LoadKind::kJitTableAddress: {
3241 ScopedObjectAccess soa(Thread::Current());
3242 return GetClass().Get() == other_load_class->GetClass().Get();
3243 }
3244 default:
3245 DCHECK(HasTypeReference(GetLoadKind()));
3246 return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
3247 }
3248 }
3249
3250 bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
3251 const HLoadString* other_load_string = other->AsLoadString();
3252 // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
3253 // rather than their indexes. However, we shall also have to re-think the hash code.
3254 if (string_index_ != other_load_string->string_index_ ||
3255 GetPackedFields() != other_load_string->GetPackedFields()) {
3256 return false;
3257 }
3258 switch (GetLoadKind()) {
3259 case LoadKind::kBootImageRelRo:
3260 case LoadKind::kJitBootImageAddress:
3261 case LoadKind::kJitTableAddress: {
3262 ScopedObjectAccess soa(Thread::Current());
3263 return GetString().Get() == other_load_string->GetString().Get();
3264 }
3265 default:
3266 return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
3267 }
3268 }
3269
3270 void HInstruction::RemoveEnvironmentUsers() {
3271 for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) {
3272 HEnvironment* user = use.GetUser();
3273 user->SetRawEnvAt(use.GetIndex(), nullptr);
3274 }
3275 env_uses_.clear();
3276 }
3277
3278 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr) {
3279 HInstruction* clone = instr->Clone(instr->GetBlock()->GetGraph()->GetAllocator());
3280 HBasicBlock* block = instr->GetBlock();
3281
3282 if (instr->IsPhi()) {
3283 HPhi* phi = instr->AsPhi();
3284 DCHECK(!phi->HasEnvironment());
3285 HPhi* phi_clone = clone->AsPhi();
3286 block->ReplaceAndRemovePhiWith(phi, phi_clone);
3287 } else {
3288 block->ReplaceAndRemoveInstructionWith(instr, clone);
3289 if (instr->HasEnvironment()) {
3290 clone->CopyEnvironmentFrom(instr->GetEnvironment());
3291 HLoopInformation* loop_info = block->GetLoopInformation();
3292 if (instr->IsSuspendCheck() && loop_info != nullptr) {
3293 loop_info->SetSuspendCheck(clone->AsSuspendCheck());
3294 }
3295 }
3296 }
3297 return clone;
3298 }
3299
3300 // Returns an instruction with the opposite Boolean value from 'cond'.
3301 HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction* cursor) {
3302 ArenaAllocator* allocator = GetAllocator();
3303
3304 if (cond->IsCondition() &&
3305 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType())) {
3306 // Can't reverse floating point conditions. We have to use HBooleanNot in that case.
3307 HInstruction* lhs = cond->InputAt(0);
3308 HInstruction* rhs = cond->InputAt(1);
3309 HInstruction* replacement = nullptr;
3310 switch (cond->AsCondition()->GetOppositeCondition()) { // get *opposite*
3311 case kCondEQ: replacement = new (allocator) HEqual(lhs, rhs); break;
3312 case kCondNE: replacement = new (allocator) HNotEqual(lhs, rhs); break;
3313 case kCondLT: replacement = new (allocator) HLessThan(lhs, rhs); break;
3314 case kCondLE: replacement = new (allocator) HLessThanOrEqual(lhs, rhs); break;
3315 case kCondGT: replacement = new (allocator) HGreaterThan(lhs, rhs); break;
3316 case kCondGE: replacement = new (allocator) HGreaterThanOrEqual(lhs, rhs); break;
3317 case kCondB: replacement = new (allocator) HBelow(lhs, rhs); break;
3318 case kCondBE: replacement = new (allocator) HBelowOrEqual(lhs, rhs); break;
3319 case kCondA: replacement = new (allocator) HAbove(lhs, rhs); break;
3320 case kCondAE: replacement = new (allocator) HAboveOrEqual(lhs, rhs); break;
3321 default:
3322 LOG(FATAL) << "Unexpected condition";
3323 UNREACHABLE();
3324 }
3325 cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
3326 return replacement;
3327 } else if (cond->IsIntConstant()) {
3328 HIntConstant* int_const = cond->AsIntConstant();
3329 if (int_const->IsFalse()) {
3330 return GetIntConstant(1);
3331 } else {
3332 DCHECK(int_const->IsTrue()) << int_const->GetValue();
3333 return GetIntConstant(0);
3334 }
3335 } else {
3336 HInstruction* replacement = new (allocator) HBooleanNot(cond);
3337 cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
3338 return replacement;
3339 }
3340 }
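
// Illustrative outcomes of the helper above: for an integral HLessThan(a, b),
// an HGreaterThanOrEqual(a, b) is created and inserted before `cursor`; for the
// constant true (1), the graph's constant 0 is returned; for a floating-point
// comparison an HBooleanNot(cond) is inserted instead, since reversing an FP
// compare is not a true negation once NaN operands are involved.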
3341
3342 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) {
3343 os << "["
3344 << " source=" << rhs.GetSource()
3345 << " destination=" << rhs.GetDestination()
3346 << " type=" << rhs.GetType()
3347 << " instruction=";
3348 if (rhs.GetInstruction() != nullptr) {
3349 os << rhs.GetInstruction()->DebugName() << ' ' << rhs.GetInstruction()->GetId();
3350 } else {
3351 os << "null";
3352 }
3353 os << " ]";
3354 return os;
3355 }
3356
3357 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) {
3358 switch (rhs) {
3359 case TypeCheckKind::kUnresolvedCheck:
3360 return os << "unresolved_check";
3361 case TypeCheckKind::kExactCheck:
3362 return os << "exact_check";
3363 case TypeCheckKind::kClassHierarchyCheck:
3364 return os << "class_hierarchy_check";
3365 case TypeCheckKind::kAbstractClassCheck:
3366 return os << "abstract_class_check";
3367 case TypeCheckKind::kInterfaceCheck:
3368 return os << "interface_check";
3369 case TypeCheckKind::kArrayObjectCheck:
3370 return os << "array_object_check";
3371 case TypeCheckKind::kArrayCheck:
3372 return os << "array_check";
3373 case TypeCheckKind::kBitstringCheck:
3374 return os << "bitstring_check";
3375 default:
3376 LOG(FATAL) << "Unknown TypeCheckKind: " << static_cast<int>(rhs);
3377 UNREACHABLE();
3378 }
3379 }
3380
3381 // Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
3382 #define CHECK_INTRINSICS_ENUM_VALUES(Name, InvokeType, _, SideEffects, Exceptions, ...) \
3383 static_assert( \
3384 static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
3385 "Intrinsics enumeration space overflow.");
3386 #include "intrinsics_list.h"
3387 INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
3388 #undef INTRINSICS_LIST
3389 #undef CHECK_INTRINSICS_ENUM_VALUES
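
// A worked example of the bound checked above, using a hypothetical mask value:
// if kAccIntrinsicBits were 0x00FF0000, then CTZ(kAccIntrinsicBits) == 16 and
// kAccIntrinsicBits >> 16 == 0xFF, i.e. intrinsic ordinals 0..255 would fit in
// the ArtMethod access-flags field reserved for them.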
3390
3391 // Function that returns whether an intrinsic needs an environment or not.
3392 static inline IntrinsicNeedsEnvironment NeedsEnvironmentIntrinsic(Intrinsics i) {
3393 switch (i) {
3394 case Intrinsics::kNone:
3395 return kNeedsEnvironment; // Nonsensical for an intrinsic.
3396 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3397 case Intrinsics::k ## Name: \
3398 return NeedsEnv;
3399 #include "intrinsics_list.h"
3400 INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3401 #undef INTRINSICS_LIST
3402 #undef OPTIMIZING_INTRINSICS
3403 }
3404 return kNeedsEnvironment;
3405 }
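
// Illustrative expansion of the X-macro pattern above: for a hypothetical entry
// (Foo, kStatic, kNoEnvironment, kNoSideEffects, kNoThrow, ...) in
// intrinsics_list.h, the switch gains "case Intrinsics::kFoo: return kNoEnvironment;".
// The two helpers below expand the same list, returning the SideEffects and
// Exceptions columns respectively.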
3406
3407 // Function that returns whether an intrinsic has side effects.
3408 static inline IntrinsicSideEffects GetSideEffectsIntrinsic(Intrinsics i) {
3409 switch (i) {
3410 case Intrinsics::kNone:
3411 return kAllSideEffects;
3412 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3413 case Intrinsics::k ## Name: \
3414 return SideEffects;
3415 #include "intrinsics_list.h"
3416 INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3417 #undef INTRINSICS_LIST
3418 #undef OPTIMIZING_INTRINSICS
3419 }
3420 return kAllSideEffects;
3421 }
3422
3423 // Function that returns whether an intrinsic can throw exceptions.
3424 static inline IntrinsicExceptions GetExceptionsIntrinsic(Intrinsics i) {
3425 switch (i) {
3426 case Intrinsics::kNone:
3427 return kCanThrow;
3428 #define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
3429 case Intrinsics::k ## Name: \
3430 return Exceptions;
3431 #include "intrinsics_list.h"
3432 INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3433 #undef INTRINSICS_LIST
3434 #undef OPTIMIZING_INTRINSICS
3435 }
3436 return kCanThrow;
3437 }
3438
3439 void HInvoke::SetResolvedMethod(ArtMethod* method) {
3440 if (method != nullptr && method->IsIntrinsic()) {
3441 Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
3442 SetIntrinsic(intrinsic,
3443 NeedsEnvironmentIntrinsic(intrinsic),
3444 GetSideEffectsIntrinsic(intrinsic),
3445 GetExceptionsIntrinsic(intrinsic));
3446 }
3447 resolved_method_ = method;
3448 }
3449
3450 bool IsGEZero(HInstruction* instruction) {
3451 DCHECK(instruction != nullptr);
3452 if (instruction->IsArrayLength()) {
3453 return true;
3454 } else if (instruction->IsMin()) {
3455 // Instruction MIN(>=0, >=0) is >= 0.
3456 return IsGEZero(instruction->InputAt(0)) &&
3457 IsGEZero(instruction->InputAt(1));
3458 } else if (instruction->IsAbs()) {
3459 // Instruction ABS(>=0) is >= 0.
3460 // NOTE: ABS(minint) = minint prevents assuming
3461 // >= 0 without looking at the argument.
3462 return IsGEZero(instruction->InputAt(0));
3463 }
3464 int64_t value = -1;
3465 return IsInt64AndGet(instruction, &value) && value >= 0;
3466 }
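
// Examples for the recursion above: Min(ArrayLength(a), 3) and
// Abs(Min(ArrayLength(a), ArrayLength(b))) are both known to be >= 0, whereas a
// plain Abs(x) for an arbitrary x is not, precisely because Abs(INT_MIN) == INT_MIN.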
3467
3468 } // namespace art
3469