1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #include "nodes.h"
17
18 #include <algorithm>
19 #include <cfloat>
20 #include <functional>
21 #include <optional>
22
23 #include "art_method-inl.h"
24 #include "base/arena_allocator.h"
25 #include "base/arena_bit_vector.h"
26 #include "base/bit_utils.h"
27 #include "base/bit_vector-inl.h"
28 #include "base/bit_vector.h"
29 #include "base/iteration_range.h"
30 #include "base/logging.h"
31 #include "base/malloc_arena_pool.h"
32 #include "base/scoped_arena_allocator.h"
33 #include "base/scoped_arena_containers.h"
34 #include "base/stl_util.h"
35 #include "class_linker-inl.h"
36 #include "class_root-inl.h"
37 #include "code_generator.h"
38 #include "common_dominator.h"
39 #include "intrinsic_objects.h"
40 #include "intrinsics.h"
41 #include "intrinsics_list.h"
42 #include "mirror/class-inl.h"
43 #include "scoped_thread_state_change-inl.h"
44 #include "ssa_builder.h"
45
46 namespace art HIDDEN {
47
48 // Enable floating-point static evaluation during constant folding
49 // only if all floating-point operations and constants evaluate in the
50 // range and precision of the type used (i.e., 32-bit float, 64-bit
51 // double).
52 static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0);
53
AddBlock(HBasicBlock * block)54 void HGraph::AddBlock(HBasicBlock* block) {
55 block->SetBlockId(blocks_.size());
56 blocks_.push_back(block);
57 }
58
AllocateInstructionId()59 inline int32_t HGraph::AllocateInstructionId() {
60 CHECK_NE(current_instruction_id_, INT32_MAX);
61 return current_instruction_id_++;
62 }
63
void HGraph::FindBackEdges(/*out*/ BitVectorView<size_t> visited) {
  // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks.
  DCHECK(!visited.IsAnyBitSet());

  // Allocate memory from local ScopedArenaAllocator.
  ScopedArenaAllocator allocator(GetArenaStack());
  // Nodes that we're currently visiting, indexed by block id.
  BitVectorView<size_t> visiting =
      ArenaBitVector::CreateFixedSize(&allocator, blocks_.size(), kArenaAllocGraphBuilder);
  // Number of successors visited from a given node, indexed by block id.
  ScopedArenaVector<size_t> successors_visited(blocks_.size(),
                                               0u,
                                               allocator.Adapter(kArenaAllocGraphBuilder));
  // Stack of nodes that we're currently visiting (same as marked in "visiting" above).
  ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
  constexpr size_t kDefaultWorklistSize = 8;
  worklist.reserve(kDefaultWorklistSize);
  visited.SetBit(entry_block_->GetBlockId());
  visiting.SetBit(entry_block_->GetBlockId());
  worklist.push_back(entry_block_);

  // Iterative DFS. A block stays on the worklist (and in `visiting`) until
  // all of its successors have been processed.
  while (!worklist.empty()) {
    HBasicBlock* current = worklist.back();
    uint32_t current_id = current->GetBlockId();
    if (successors_visited[current_id] == current->GetSuccessors().size()) {
      // All successors handled: this block leaves the current DFS path.
      visiting.ClearBit(current_id);
      worklist.pop_back();
    } else {
      HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
      uint32_t successor_id = successor->GetBlockId();
      if (visiting.IsBitSet(successor_id)) {
        // The successor is on the current DFS path, so this edge closes a
        // cycle: record `current` as a back edge into `successor`.
        DCHECK(ContainsElement(worklist, successor));
        successor->AddBackEdge(current);
      } else if (!visited.IsBitSet(successor_id)) {
        // First time this block is reached: schedule it for visiting.
        visited.SetBit(successor_id);
        visiting.SetBit(successor_id);
        worklist.push_back(successor);
      }
    }
  }
}
105
106 // Remove the environment use records of the instruction for users.
RemoveEnvironmentUses(HInstruction * instruction)107 void RemoveEnvironmentUses(HInstruction* instruction) {
108 for (HEnvironment* environment = instruction->GetEnvironment();
109 environment != nullptr;
110 environment = environment->GetParent()) {
111 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
112 if (environment->GetInstructionAt(i) != nullptr) {
113 environment->RemoveAsUserOfInput(i);
114 }
115 }
116 }
117 }
118
119 // Return whether the instruction has an environment and it's used by others.
HasEnvironmentUsedByOthers(HInstruction * instruction)120 bool HasEnvironmentUsedByOthers(HInstruction* instruction) {
121 for (HEnvironment* environment = instruction->GetEnvironment();
122 environment != nullptr;
123 environment = environment->GetParent()) {
124 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
125 HInstruction* user = environment->GetInstructionAt(i);
126 if (user != nullptr) {
127 return true;
128 }
129 }
130 }
131 return false;
132 }
133
134 // Reset environment records of the instruction itself.
ResetEnvironmentInputRecords(HInstruction * instruction)135 void ResetEnvironmentInputRecords(HInstruction* instruction) {
136 for (HEnvironment* environment = instruction->GetEnvironment();
137 environment != nullptr;
138 environment = environment->GetParent()) {
139 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
140 DCHECK(environment->GetHolder() == instruction);
141 if (environment->GetInstructionAt(i) != nullptr) {
142 environment->SetRawEnvAt(i, nullptr);
143 }
144 }
145 }
146 }
147
// Detach `instruction` from everything it uses: both its regular inputs and
// any instructions referenced from its (chain of) environments.
static void RemoveAsUser(HInstruction* instruction) {
  instruction->RemoveAsUserOfAllInputs();
  RemoveEnvironmentUses(instruction);
}
152
// For each dead (unvisited) block: unregister its phis and instructions as
// users of their inputs, then disconnect the block from its successors so
// that the blocks can be removed safely afterwards.
void HGraph::RemoveDeadBlocksInstructionsAsUsersAndDisconnect(
    BitVectorView<const size_t> visited) const {
  for (size_t i = 0; i < blocks_.size(); ++i) {
    if (!visited.IsBitSet(i)) {
      HBasicBlock* block = blocks_[i];
      // Blocks already removed earlier leave null entries in `blocks_`.
      if (block == nullptr) continue;

      // Remove as user.
      for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
        RemoveAsUser(it.Current());
      }
      for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
        RemoveAsUser(it.Current());
      }

      // Remove non-catch phi uses, and disconnect the block.
      block->DisconnectFromSuccessors(visited);
    }
  }
}
173
174 // This method assumes `insn` has been removed from all users with the exception of catch
175 // phis because of missing exceptional edges in the graph. It removes the
176 // instruction from catch phi uses, together with inputs of other catch phis in
177 // the catch block at the same index, as these must be dead too.
static void RemoveCatchPhiUsesOfDeadInstruction(HInstruction* insn) {
  DCHECK(!insn->HasEnvironmentUses());
  // Each iteration removes at least the front use (via RemoveInputAt below),
  // so the use list shrinks until it is empty and the loop terminates.
  while (insn->HasNonEnvironmentUses()) {
    const HUseListNode<HInstruction*>& use = insn->GetUses().front();
    size_t use_index = use.GetIndex();
    HBasicBlock* user_block = use.GetUser()->GetBlock();
    DCHECK(use.GetUser()->IsPhi());
    DCHECK(user_block->IsCatchBlock());
    // Remove the input at the same index from *every* phi of the catch block,
    // keeping all the block's phis consistent with each other.
    for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
      phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
    }
  }
}
191
// Deletes every block not marked in `visited`. Assumes the dead blocks have
// already been disconnected and their instructions unregistered as users
// (see RemoveDeadBlocksInstructionsAsUsersAndDisconnect).
void HGraph::RemoveDeadBlocks(BitVectorView<const size_t> visited) {
  DCHECK(reverse_post_order_.empty()) << "We shouldn't have dominance information.";
  for (size_t i = 0; i < blocks_.size(); ++i) {
    if (!visited.IsBitSet(i)) {
      HBasicBlock* block = blocks_[i];
      if (block == nullptr) continue;

      // Remove all remaining uses (which should be only catch phi uses), and the instructions.
      block->RemoveCatchPhiUsesAndInstruction(/* building_dominator_tree = */ true);

      // Remove the block from the list of blocks, so that further analyses
      // never see it.
      blocks_[i] = nullptr;
      if (block->IsExitBlock()) {
        SetExitBlock(nullptr);
      }
      // Mark the block as removed. This is used by the HGraphBuilder to discard
      // the block as a branch target.
      block->SetGraph(nullptr);
    }
  }
}
214
BuildDominatorTree()215 GraphAnalysisResult HGraph::BuildDominatorTree() {
216 // Allocate memory from local ScopedArenaAllocator.
217 ScopedArenaAllocator allocator(GetArenaStack());
218
219 BitVectorView<size_t> visited =
220 ArenaBitVector::CreateFixedSize(&allocator, blocks_.size(), kArenaAllocGraphBuilder);
221
222 // (1) Find the back edges in the graph doing a DFS traversal.
223 FindBackEdges(visited);
224
225 // (2) Remove instructions and phis from blocks not visited during
226 // the initial DFS as users from other instructions, so that
227 // users can be safely removed before uses later.
228 // Also disconnect the block from its successors, updating the successor's phis if needed.
229 RemoveDeadBlocksInstructionsAsUsersAndDisconnect(visited);
230
231 // (3) Remove blocks not visited during the initial DFS.
232 // Step (5) requires dead blocks to be removed from the
233 // predecessors list of live blocks.
234 RemoveDeadBlocks(visited);
235
236 // (4) Simplify the CFG now, so that we don't need to recompute
237 // dominators and the reverse post order.
238 SimplifyCFG();
239
240 // (5) Compute the dominance information and the reverse post order.
241 ComputeDominanceInformation();
242
243 // (6) Analyze loops discovered through back edge analysis, and
244 // set the loop information on each block.
245 GraphAnalysisResult result = AnalyzeLoops();
246 if (result != kAnalysisSuccess) {
247 return result;
248 }
249
250 // (7) Precompute per-block try membership before entering the SSA builder,
251 // which needs the information to build catch block phis from values of
252 // locals at throwing instructions inside try blocks.
253 ComputeTryBlockInformation();
254
255 return kAnalysisSuccess;
256 }
257
// Drops and rebuilds loop information, dominators and the reverse post order.
GraphAnalysisResult HGraph::RecomputeDominatorTree() {
  DCHECK(!HasIrreducibleLoops()) << "Recomputing loop information in graphs with irreducible loops "
                                 << "is unsupported, as it could lead to loop header changes";
  ClearLoopInformation();
  ClearDominanceInformation();
  return BuildDominatorTree();
}
265
ClearDominanceInformation()266 void HGraph::ClearDominanceInformation() {
267 for (HBasicBlock* block : GetActiveBlocks()) {
268 block->ClearDominanceInformation();
269 }
270 reverse_post_order_.clear();
271 }
272
ClearLoopInformation()273 void HGraph::ClearLoopInformation() {
274 SetHasLoops(false);
275 SetHasIrreducibleLoops(false);
276 for (HBasicBlock* block : GetActiveBlocks()) {
277 block->SetLoopInformation(nullptr);
278 }
279 }
280
ClearDominanceInformation()281 void HBasicBlock::ClearDominanceInformation() {
282 dominated_blocks_.clear();
283 dominator_ = nullptr;
284 }
285
GetFirstInstructionDisregardMoves() const286 HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const {
287 HInstruction* instruction = GetFirstInstruction();
288 while (instruction->IsParallelMove()) {
289 instruction = instruction->GetNext();
290 }
291 return instruction;
292 }
293
UpdateDominatorOfSuccessor(HBasicBlock * block,HBasicBlock * successor)294 static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) {
295 DCHECK(ContainsElement(block->GetSuccessors(), successor));
296
297 HBasicBlock* old_dominator = successor->GetDominator();
298 HBasicBlock* new_dominator =
299 (old_dominator == nullptr) ? block
300 : CommonDominator::ForPair(old_dominator, block);
301
302 if (old_dominator == new_dominator) {
303 return false;
304 } else {
305 successor->SetDominator(new_dominator);
306 return true;
307 }
308 }
309
// Computes each block's immediate dominator and the reverse post order.
// Precondition: dead blocks have been removed and back edges are known.
void HGraph::ComputeDominanceInformation() {
  DCHECK(reverse_post_order_.empty());
  reverse_post_order_.reserve(blocks_.size());
  reverse_post_order_.push_back(entry_block_);

  // Allocate memory from local ScopedArenaAllocator.
  ScopedArenaAllocator allocator(GetArenaStack());
  // Number of visits of a given node, indexed by block id.
  ScopedArenaVector<size_t> visits(blocks_.size(), 0u, allocator.Adapter(kArenaAllocGraphBuilder));
  // Number of successors visited from a given node, indexed by block id.
  ScopedArenaVector<size_t> successors_visited(blocks_.size(),
                                               0u,
                                               allocator.Adapter(kArenaAllocGraphBuilder));
  // Nodes for which we need to visit successors.
  ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
  constexpr size_t kDefaultWorklistSize = 8;
  worklist.reserve(kDefaultWorklistSize);
  worklist.push_back(entry_block_);

  // Forward pass: propagate dominators along forward edges only. A block is
  // pushed (and appended to the RPO) once all its forward predecessors have
  // been seen, i.e. back edges are deliberately not waited for.
  while (!worklist.empty()) {
    HBasicBlock* current = worklist.back();
    uint32_t current_id = current->GetBlockId();
    if (successors_visited[current_id] == current->GetSuccessors().size()) {
      worklist.pop_back();
    } else {
      HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
      UpdateDominatorOfSuccessor(current, successor);

      // Once all the forward edges have been visited, we know the immediate
      // dominator of the block. We can then start visiting its successors.
      if (++visits[successor->GetBlockId()] ==
          successor->GetPredecessors().size() - successor->NumberOfBackEdges()) {
        reverse_post_order_.push_back(successor);
        worklist.push_back(successor);
      }
    }
  }

  // Check if the graph has back edges not dominated by their respective headers.
  // If so, we need to update the dominators of those headers and recursively of
  // their successors. We do that with a fix-point iteration over all blocks.
  // The algorithm is guaranteed to terminate because it loops only if the sum
  // of all dominator chains has decreased in the current iteration.
  bool must_run_fix_point = false;
  for (HBasicBlock* block : blocks_) {
    if (block != nullptr &&
        block->IsLoopHeader() &&
        block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) {
      must_run_fix_point = true;
      break;
    }
  }
  if (must_run_fix_point) {
    // Re-run the per-edge update over the whole graph until no dominator
    // changes any more.
    bool update_occurred = true;
    while (update_occurred) {
      update_occurred = false;
      for (HBasicBlock* block : GetReversePostOrder()) {
        for (HBasicBlock* successor : block->GetSuccessors()) {
          update_occurred |= UpdateDominatorOfSuccessor(block, successor);
        }
      }
    }
  }

  // Make sure that there are no remaining blocks whose dominator information
  // needs to be updated.
  if (kIsDebugBuild) {
    for (HBasicBlock* block : GetReversePostOrder()) {
      for (HBasicBlock* successor : block->GetSuccessors()) {
        DCHECK(!UpdateDominatorOfSuccessor(block, successor));
      }
    }
  }

  // Populate `dominated_blocks_` information after computing all dominators.
  // The potential presence of irreducible loops requires to do it after.
  for (HBasicBlock* block : GetReversePostOrder()) {
    if (!block->IsEntryBlock()) {
      block->GetDominator()->AddDominatedBlock(block);
    }
  }
}
392
SplitEdge(HBasicBlock * block,HBasicBlock * successor)393 HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
394 HBasicBlock* new_block = new (allocator_) HBasicBlock(this, successor->GetDexPc());
395 AddBlock(new_block);
396 // Use `InsertBetween` to ensure the predecessor index and successor index of
397 // `block` and `successor` are preserved.
398 new_block->InsertBetween(block, successor);
399 return new_block;
400 }
401
SplitCriticalEdge(HBasicBlock * block,HBasicBlock * successor)402 void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) {
403 // Insert a new node between `block` and `successor` to split the
404 // critical edge.
405 HBasicBlock* new_block = SplitEdge(block, successor);
406 new_block->AddInstruction(new (allocator_) HGoto(successor->GetDexPc()));
407 if (successor->IsLoopHeader()) {
408 // If we split at a back edge boundary, make the new block the back edge.
409 HLoopInformation* info = successor->GetLoopInformation();
410 if (info->IsBackEdge(*block)) {
411 info->RemoveBackEdge(block);
412 info->AddBackEdge(new_block);
413 }
414 }
415 }
416
SplitEdgeAndUpdateRPO(HBasicBlock * block,HBasicBlock * successor)417 HBasicBlock* HGraph::SplitEdgeAndUpdateRPO(HBasicBlock* block, HBasicBlock* successor) {
418 HBasicBlock* new_block = SplitEdge(block, successor);
419 // In the RPO we have {... , block, ... , successor}. We want to insert `new_block` right after
420 // `block` to have a consistent RPO without recomputing the whole graph's RPO.
421 reverse_post_order_.insert(
422 reverse_post_order_.begin() + IndexOfElement(reverse_post_order_, block) + 1, new_block);
423 return new_block;
424 }
425
426 // Reorder phi inputs to match reordering of the block's predecessors.
FixPhisAfterPredecessorsReodering(HBasicBlock * block,size_t first,size_t second)427 static void FixPhisAfterPredecessorsReodering(HBasicBlock* block, size_t first, size_t second) {
428 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
429 HPhi* phi = it.Current()->AsPhi();
430 HInstruction* first_instr = phi->InputAt(first);
431 HInstruction* second_instr = phi->InputAt(second);
432 phi->ReplaceInput(first_instr, second);
433 phi->ReplaceInput(second_instr, first);
434 }
435 }
436
437 // Make sure that the first predecessor of a loop header is the incoming block.
void HGraph::OrderLoopHeaderPredecessors(HBasicBlock* header) {
  DCHECK(header->IsLoopHeader());
  HLoopInformation* info = header->GetLoopInformation();
  // Only act if a back edge currently sits at index 0; otherwise the incoming
  // block is already first and nothing needs to change.
  if (info->IsBackEdge(*header->GetPredecessors()[0])) {
    HBasicBlock* to_swap = header->GetPredecessors()[0];
    for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) {
      HBasicBlock* predecessor = header->GetPredecessors()[pred];
      if (!info->IsBackEdge(*predecessor)) {
        // Swap the two predecessors in place and mirror the swap in every
        // phi's inputs so dataflow stays consistent with the new order.
        header->predecessors_[pred] = to_swap;
        header->predecessors_[0] = predecessor;
        FixPhisAfterPredecessorsReodering(header, 0, pred);
        break;
      }
    }
  }
}
454
455 // Transform control flow of the loop to a single preheader format (don't touch the data flow).
456 // New_preheader can be already among the header predecessors - this situation will be correctly
457 // processed.
static void FixControlForNewSinglePreheader(HBasicBlock* header, HBasicBlock* new_preheader) {
  HLoopInformation* loop_info = header->GetLoopInformation();
  // Reroute every non-back-edge predecessor (other than the new preheader
  // itself) to target `new_preheader` instead of `header`.
  for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
    HBasicBlock* predecessor = header->GetPredecessors()[pred];
    if (!loop_info->IsBackEdge(*predecessor) && predecessor != new_preheader) {
      predecessor->ReplaceSuccessor(header, new_preheader);
      // ReplaceSuccessor removed `predecessor` from the header's predecessor
      // list, so compensate the upcoming ++pred to re-examine this index.
      pred--;
    }
  }
}
468
469 // == Before == == After ==
470 // _________ _________ _________ _________
471 // | B0 | | B1 | (old preheaders) | B0 | | B1 |
472 // |=========| |=========| |=========| |=========|
473 // | i0 = .. | | i1 = .. | | i0 = .. | | i1 = .. |
474 // |_________| |_________| |_________| |_________|
475 // \ / \ /
476 // \ / ___v____________v___
477 // \ / (new preheader) | B20 <- B0, B1 |
478 // | | |====================|
479 // | | | i20 = phi(i0, i1) |
480 // | | |____________________|
481 // | | |
482 // /\ | | /\ /\ | /\
483 // / v_______v_________v_______v \ / v___________v_____________v \
484 // | | B10 <- B0, B1, B2, B3 | | | | B10 <- B20, B2, B3 | |
485 // | |===========================| | (header) | |===========================| |
486 // | | i10 = phi(i0, i1, i2, i3) | | | | i10 = phi(i20, i2, i3) | |
487 // | |___________________________| | | |___________________________| |
488 // | / \ | | / \ |
489 // | ... ... | | ... ... |
490 // | _________ _________ | | _________ _________ |
491 // | | B2 | | B3 | | | | B2 | | B3 | |
492 // | |=========| |=========| | (back edges) | |=========| |=========| |
493 // | | i2 = .. | | i3 = .. | | | | i2 = .. | | i3 = .. | |
494 // | |_________| |_________| | | |_________| |_________| |
495 // \ / \ / \ / \ /
496 // \___/ \___/ \___/ \___/
497 //
// See the diagram above: reroutes all incoming (non-back-edge) control flow
// through a single new preheader and folds the corresponding phi inputs of
// the header into a new phi in that preheader.
void HGraph::TransformLoopToSinglePreheaderFormat(HBasicBlock* header) {
  HLoopInformation* loop_info = header->GetLoopInformation();

  HBasicBlock* preheader = new (allocator_) HBasicBlock(this, header->GetDexPc());
  AddBlock(preheader);
  preheader->AddInstruction(new (allocator_) HGoto(header->GetDexPc()));

  // If the old header has no Phis then we only need to fix the control flow.
  if (header->GetPhis().IsEmpty()) {
    FixControlForNewSinglePreheader(header, preheader);
    preheader->AddSuccessor(header);
    return;
  }

  // Find the first non-back edge block in the header's predecessors list.
  size_t first_nonbackedge_pred_pos = 0;
  bool found = false;
  for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
    HBasicBlock* predecessor = header->GetPredecessors()[pred];
    if (!loop_info->IsBackEdge(*predecessor)) {
      first_nonbackedge_pred_pos = pred;
      found = true;
      break;
    }
  }

  // A loop header always has at least one incoming (non-back-edge) predecessor.
  DCHECK(found);

  // Fix the data-flow: for each header phi, create a preheader phi that
  // collects all the incoming (non-back-edge) inputs, and make it the single
  // incoming input of the header phi.
  for (HInstructionIterator it(header->GetPhis()); !it.Done(); it.Advance()) {
    HPhi* header_phi = it.Current()->AsPhi();

    HPhi* preheader_phi = new (GetAllocator()) HPhi(GetAllocator(),
                                                    header_phi->GetRegNumber(),
                                                    0,
                                                    header_phi->GetType());
    if (header_phi->GetType() == DataType::Type::kReference) {
      preheader_phi->SetReferenceTypeInfoIfValid(header_phi->GetReferenceTypeInfo());
    }
    preheader->AddPhi(preheader_phi);

    // The first incoming input moves to the preheader phi; the header phi's
    // slot now refers to the preheader phi instead.
    HInstruction* orig_input = header_phi->InputAt(first_nonbackedge_pred_pos);
    header_phi->ReplaceInput(preheader_phi, first_nonbackedge_pred_pos);
    preheader_phi->AddInput(orig_input);

    // Move all remaining incoming (non-loop) inputs to the preheader phi,
    // removing them from the header phi.
    for (size_t input_pos = first_nonbackedge_pred_pos + 1;
         input_pos < header_phi->InputCount();
         input_pos++) {
      HInstruction* input = header_phi->InputAt(input_pos);
      HBasicBlock* pred_block = header->GetPredecessors()[input_pos];

      if (loop_info->Contains(*pred_block)) {
        // Inputs from inside the loop must come via back edges and stay put.
        DCHECK(loop_info->IsBackEdge(*pred_block));
      } else {
        preheader_phi->AddInput(input);
        // RemoveInputAt shifts later inputs down; compensate the loop index.
        header_phi->RemoveInputAt(input_pos);
        input_pos--;
      }
    }
  }

  // Fix the control-flow: splice the preheader into the first incoming edge,
  // then reroute the remaining incoming edges to the preheader.
  HBasicBlock* first_pred = header->GetPredecessors()[first_nonbackedge_pred_pos];
  preheader->InsertBetween(first_pred, header);

  FixControlForNewSinglePreheader(header, preheader);
}
565
// Normalizes a loop: single preheader, preheader at predecessor index 0,
// and an up-to-date SuspendCheck pointer in the loop information.
void HGraph::SimplifyLoop(HBasicBlock* header) {
  HLoopInformation* info = header->GetLoopInformation();

  // Make sure the loop has only one pre header. This simplifies SSA building by having
  // to just look at the pre header to know which locals are initialized at entry of the
  // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining
  // this graph.
  size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges();
  if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) {
    TransformLoopToSinglePreheaderFormat(header);
  }

  // Ensure the pre header is the first predecessor (index 0).
  OrderLoopHeaderPredecessors(header);

  HInstruction* first_instruction = header->GetFirstInstruction();
  if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) {
    // Called from DeadBlockElimination. Update SuspendCheck pointer.
    info->SetSuspendCheck(first_instruction->AsSuspendCheck());
  }
}
586
void HGraph::ComputeTryBlockInformation() {
  // Iterate in reverse post order to propagate try membership information from
  // predecessors to their successors.
  bool graph_has_try_catch = false;

  for (HBasicBlock* block : GetReversePostOrder()) {
    if (block->IsEntryBlock() || block->IsCatchBlock()) {
      // Catch blocks after simplification have only exceptional predecessors
      // and hence are never in tries.
      continue;
    }

    // Infer try membership from the first predecessor. Having simplified loops,
    // the first predecessor can never be a back edge and therefore it must have
    // been visited already and had its try membership set.
    HBasicBlock* first_predecessor = block->GetPredecessors()[0];
    DCHECK_IMPLIES(block->IsLoopHeader(),
                   !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
    const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors();
    // Remember whether any block at all is covered by a try, to set the
    // graph-level flag at the end.
    graph_has_try_catch |= try_entry != nullptr;
    if (try_entry != nullptr &&
        (block->GetTryCatchInformation() == nullptr ||
         try_entry != &block->GetTryCatchInformation()->GetTryEntry())) {
      // We are either setting try block membership for the first time or it
      // has changed.
      block->SetTryCatchInformation(new (allocator_) TryCatchInformation(*try_entry));
    }
  }

  SetHasTryCatch(graph_has_try_catch);
}
618
void HGraph::SimplifyCFG() {
  // Simplify the CFG for future analysis, and code generation:
  // (1): Split critical edges.
  // (2): Simplify loops by having only one preheader.
  // NOTE: We're appending new blocks inside the loop, so we need to use index because iterators
  // can be invalidated. We remember the initial size to avoid iterating over the new blocks.
  for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) {
    HBasicBlock* block = blocks_[block_id];
    // Removed blocks leave null entries in `blocks_`.
    if (block == nullptr) continue;
    if (block->GetSuccessors().size() > 1) {
      // Only split normal-flow edges. We cannot split exceptional edges as they
      // are synthesized (approximate real control flow), and we do not need to
      // anyway. Moves that would be inserted there are performed by the runtime.
      ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors();
      for (size_t j = 0, e = normal_successors.size(); j < e; ++j) {
        HBasicBlock* successor = normal_successors[j];
        DCHECK(!successor->IsCatchBlock());
        if (successor == exit_block_) {
          // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we
          // do not want to split because Goto->Exit is not allowed.
          DCHECK(block->IsSingleTryBoundary());
        } else if (successor->GetPredecessors().size() > 1) {
          // Multiple successors + multiple predecessors: a critical edge.
          SplitCriticalEdge(block, successor);
          // SplitCriticalEdge could have invalidated the `normal_successors`
          // ArrayRef. We must re-acquire it.
          normal_successors = block->GetNormalSuccessors();
          DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor);
          DCHECK_EQ(e, normal_successors.size());
        }
      }
    }
    if (block->IsLoopHeader()) {
      SimplifyLoop(block);
    } else if (!block->IsEntryBlock() &&
               block->GetFirstInstruction() != nullptr &&
               block->GetFirstInstruction()->IsSuspendCheck()) {
      // We are being called by the dead code elimiation pass, and what used to be
      // a loop got dismantled. Just remove the suspend check.
      block->RemoveInstruction(block->GetFirstInstruction());
    }
  }
}
661
AnalyzeLoops() const662 GraphAnalysisResult HGraph::AnalyzeLoops() const {
663 // We iterate post order to ensure we visit inner loops before outer loops.
664 // `PopulateRecursive` needs this guarantee to know whether a natural loop
665 // contains an irreducible loop.
666 for (HBasicBlock* block : GetPostOrder()) {
667 if (block->IsLoopHeader()) {
668 if (block->IsCatchBlock()) {
669 // TODO: Dealing with exceptional back edges could be tricky because
670 // they only approximate the real control flow. Bail out for now.
671 VLOG(compiler) << "Not compiled: Exceptional back edges";
672 return kAnalysisFailThrowCatchLoop;
673 }
674 block->GetLoopInformation()->Populate();
675 }
676 }
677 return kAnalysisSuccess;
678 }
679
Dump(std::ostream & os)680 void HLoopInformation::Dump(std::ostream& os) {
681 os << "header: " << header_->GetBlockId() << std::endl;
682 os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl;
683 for (HBasicBlock* block : back_edges_) {
684 os << "back edge: " << block->GetBlockId() << std::endl;
685 }
686 for (HBasicBlock* block : header_->GetPredecessors()) {
687 os << "predecessor: " << block->GetBlockId() << std::endl;
688 }
689 for (uint32_t idx : blocks_.Indexes()) {
690 os << " in loop: " << idx << std::endl;
691 }
692 }
693
694 template <class InstructionType, typename ValueType>
CreateConstant(ValueType value,ArenaSafeMap<ValueType,InstructionType * > * cache)695 InstructionType* HGraph::CreateConstant(ValueType value,
696 ArenaSafeMap<ValueType, InstructionType*>* cache) {
697 // Try to find an existing constant of the given value.
698 InstructionType* constant = nullptr;
699 auto cached_constant = cache->find(value);
700 if (cached_constant != cache->end()) {
701 constant = cached_constant->second;
702 }
703
704 // If not found or previously deleted, create and cache a new instruction.
705 // Don't bother reviving a previously deleted instruction, for simplicity.
706 if (constant == nullptr || constant->GetBlock() == nullptr) {
707 constant = new (allocator_) InstructionType(value);
708 cache->Overwrite(value, constant);
709 InsertConstant(constant);
710 }
711 return constant;
712 }
713
InsertConstant(HConstant * constant)714 void HGraph::InsertConstant(HConstant* constant) {
715 // New constants are inserted before the SuspendCheck at the bottom of the
716 // entry block. Note that this method can be called from the graph builder and
717 // the entry block therefore may not end with SuspendCheck->Goto yet.
718 HInstruction* insert_before = nullptr;
719
720 HInstruction* gota = entry_block_->GetLastInstruction();
721 if (gota != nullptr && gota->IsGoto()) {
722 HInstruction* suspend_check = gota->GetPrevious();
723 if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) {
724 insert_before = suspend_check;
725 } else {
726 insert_before = gota;
727 }
728 }
729
730 if (insert_before == nullptr) {
731 entry_block_->AddInstruction(constant);
732 } else {
733 entry_block_->InsertInstructionBefore(constant, insert_before);
734 }
735 }
736
GetNullConstant()737 HNullConstant* HGraph::GetNullConstant() {
738 // For simplicity, don't bother reviving the cached null constant if it is
739 // not null and not in a block. Otherwise, we need to clear the instruction
740 // id and/or any invariants the graph is assuming when adding new instructions.
741 if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
742 cached_null_constant_ = new (allocator_) HNullConstant();
743 cached_null_constant_->SetReferenceTypeInfo(GetInexactObjectRti());
744 InsertConstant(cached_null_constant_);
745 }
746 if (kIsDebugBuild) {
747 ScopedObjectAccess soa(Thread::Current());
748 DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
749 }
750 return cached_null_constant_;
751 }
752
// Returns the (possibly cached) HIntConstant for `value`.
HIntConstant* HGraph::GetIntConstant(int32_t value) {
  return CreateConstant(value, &cached_int_constants_);
}
756
// Returns the (possibly cached) HLongConstant for `value`.
HLongConstant* HGraph::GetLongConstant(int64_t value) {
  return CreateConstant(value, &cached_long_constants_);
}
760
// Returns the (possibly cached) HFloatConstant for `value`. The cache is
// keyed on the raw bit pattern so distinct bit patterns (e.g. NaNs, -0.0f
// vs 0.0f) map to distinct constants.
HFloatConstant* HGraph::GetFloatConstant(float value) {
  return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_);
}
764
// Returns the (possibly cached) HDoubleConstant for `value`. The cache is
// keyed on the raw bit pattern so distinct bit patterns (e.g. NaNs, -0.0
// vs 0.0) map to distinct constants.
HDoubleConstant* HGraph::GetDoubleConstant(double value) {
  return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_);
}
768
GetCurrentMethod()769 HCurrentMethod* HGraph::GetCurrentMethod() {
770 // For simplicity, don't bother reviving the cached current method if it is
771 // not null and not in a block. Otherwise, we need to clear the instruction
772 // id and/or any invariants the graph is assuming when adding new instructions.
773 if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) {
774 cached_current_method_ = new (allocator_) HCurrentMethod(
775 Is64BitInstructionSet(instruction_set_) ? DataType::Type::kInt64 : DataType::Type::kInt32,
776 entry_block_->GetDexPc());
777 if (entry_block_->GetFirstInstruction() == nullptr) {
778 entry_block_->AddInstruction(cached_current_method_);
779 } else {
780 entry_block_->InsertInstructionBefore(
781 cached_current_method_, entry_block_->GetFirstInstruction());
782 }
783 }
784 return cached_current_method_;
785 }
786
GetMethodName() const787 const char* HGraph::GetMethodName() const {
788 const dex::MethodId& method_id = dex_file_.GetMethodId(method_idx_);
789 return dex_file_.GetMethodName(method_id);
790 }
791
// Returns a human-readable description of the compiled method, optionally
// including its signature.
std::string HGraph::PrettyMethod(bool with_signature) const {
  return dex_file_.PrettyMethod(method_idx_, with_signature);
}
795
// Returns the cached integral constant of the given `type` holding `value`.
// All narrow integral types share the Int32 constant cache; Int64 uses the
// long cache. Floating-point and reference types are not handled here.
HConstant* HGraph::GetConstant(DataType::Type type, int64_t value) {
  switch (type) {
    case DataType::Type::kBool:
      DCHECK(IsUint<1>(value));
      FALLTHROUGH_INTENDED;
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
      // The value must fit in the declared bit width of the type.
      DCHECK(IsInt(DataType::Size(type) * kBitsPerByte, value));
      return GetIntConstant(static_cast<int32_t>(value));

    case DataType::Type::kInt64:
      return GetLongConstant(value);

    default:
      LOG(FATAL) << "Unsupported constant type";
      UNREACHABLE();
  }
}
817
CacheFloatConstant(HFloatConstant * constant)818 void HGraph::CacheFloatConstant(HFloatConstant* constant) {
819 int32_t value = bit_cast<int32_t, float>(constant->GetValue());
820 DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end());
821 cached_float_constants_.Overwrite(value, constant);
822 }
823
CacheDoubleConstant(HDoubleConstant * constant)824 void HGraph::CacheDoubleConstant(HDoubleConstant* constant) {
825 int64_t value = bit_cast<int64_t, double>(constant->GetValue());
826 DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end());
827 cached_double_constants_.Overwrite(value, constant);
828 }
829
// Marks `block` as a member of this loop.
void HLoopInformation::Add(HBasicBlock* block) {
  blocks_.SetBit(block->GetBlockId());
}
833
// Removes `block` from this loop's membership set.
void HLoopInformation::Remove(HBasicBlock* block) {
  blocks_.ClearBit(block->GetBlockId());
}
837
// Recursively adds `block` and its predecessors to this loop's block set.
// The walk terminates at blocks already in the set — in particular at the
// header, which Populate() marks before starting the recursion.
void HLoopInformation::PopulateRecursive(HBasicBlock* block) {
  if (blocks_.IsBitSet(block->GetBlockId())) {
    return;
  }

  // Mark before recursing so cycles through `block` terminate.
  blocks_.SetBit(block->GetBlockId());
  block->SetInLoop(this);
  if (block->IsLoopHeader()) {
    // We're visiting loops in post-order, so inner loops must have been
    // populated already.
    DCHECK(block->GetLoopInformation()->IsPopulated());
    if (block->GetLoopInformation()->IsIrreducible()) {
      // Propagate irreducibility of the inner loop to this (outer) loop.
      contains_irreducible_loop_ = true;
    }
  }
  for (HBasicBlock* predecessor : block->GetPredecessors()) {
    PopulateRecursive(predecessor);
  }
}
857
// Decides loop membership for `block` (and, recursively, its predecessors)
// when this loop is irreducible. `finalized` records blocks whose membership
// has been fully decided so they are not revisited.
void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) {
  size_t block_id = block->GetBlockId();

  // If `block` is in `finalized`, we know its membership in the loop has been
  // decided and it does not need to be revisited.
  if (finalized->IsBitSet(block_id)) {
    return;
  }

  bool is_finalized = false;
  if (block->IsLoopHeader()) {
    // If we hit a loop header in an irreducible loop, we first check if the
    // pre header of that loop belongs to the currently analyzed loop. If it does,
    // then we visit the back edges.
    // Note that we cannot use GetPreHeader, as the loop may have not been populated
    // yet.
    HBasicBlock* pre_header = block->GetPredecessors()[0];
    PopulateIrreducibleRecursive(pre_header, finalized);
    if (blocks_.IsBitSet(pre_header->GetBlockId())) {
      // The inner loop's pre-header is in this loop, so the whole inner loop
      // (header plus everything reachable backwards from its back edges) is too.
      block->SetInLoop(this);
      blocks_.SetBit(block_id);
      finalized->SetBit(block_id);
      is_finalized = true;

      HLoopInformation* info = block->GetLoopInformation();
      for (HBasicBlock* back_edge : info->GetBackEdges()) {
        PopulateIrreducibleRecursive(back_edge, finalized);
      }
    }
  } else {
    // Visit all predecessors. If one predecessor is part of the loop, this
    // block is also part of this loop.
    for (HBasicBlock* predecessor : block->GetPredecessors()) {
      PopulateIrreducibleRecursive(predecessor, finalized);
      if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) {
        block->SetInLoop(this);
        blocks_.SetBit(block_id);
        finalized->SetBit(block_id);
        is_finalized = true;
      }
    }
  }

  // All predecessors have been recursively visited. Mark finalized if not marked yet.
  if (!is_finalized) {
    finalized->SetBit(block_id);
  }
}
906
// Computes the set of blocks belonging to this loop, starting from its back
// edges, and flags irreducibility on the loop and graph as needed.
void HLoopInformation::Populate() {
  DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated";
  // Populate this loop: starting with the back edge, recursively add predecessors
  // that are not already part of that loop. Set the header as part of the loop
  // to end the recursion.
  // This is a recursive implementation of the algorithm described in
  // "Advanced Compiler Design & Implementation" (Muchnick) p192.
  HGraph* graph = header_->GetGraph();
  blocks_.SetBit(header_->GetBlockId());
  header_->SetInLoop(this);

  bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader();

  if (is_irreducible_loop) {
    // Allocate memory from local ScopedArenaAllocator.
    ScopedArenaAllocator allocator(graph->GetArenaStack());
    ArenaBitVector visited(&allocator,
                           graph->GetBlocks().size(),
                           /* expandable= */ false,
                           kArenaAllocGraphBuilder);
    // Stop marking blocks at the loop header.
    visited.SetBit(header_->GetBlockId());

    for (HBasicBlock* back_edge : GetBackEdges()) {
      PopulateIrreducibleRecursive(back_edge, &visited);
    }
  } else {
    for (HBasicBlock* back_edge : GetBackEdges()) {
      PopulateRecursive(back_edge);
    }
  }

  if (!is_irreducible_loop && graph->IsCompilingOsr()) {
    // When compiling in OSR mode, all loops in the compiled method may be entered
    // from the interpreter. We treat this OSR entry point just like an extra entry
    // to an irreducible loop, so we need to mark the method's loops as irreducible.
    // This does not apply to inlined loops which do not act as OSR entry points.
    if (suspend_check_ == nullptr) {
      // Just building the graph in OSR mode, this loop is not inlined. We never build an
      // inner graph in OSR mode as we can do OSR transition only from the outer method.
      is_irreducible_loop = true;
    } else {
      // Look at the suspend check's environment to determine if the loop was inlined.
      DCHECK(suspend_check_->HasEnvironment());
      if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) {
        is_irreducible_loop = true;
      }
    }
  }
  if (is_irreducible_loop) {
    irreducible_ = true;
    contains_irreducible_loop_ = true;
    graph->SetHasIrreducibleLoops(true);
  }
  graph->SetHasLoops(true);
}
963
PopulateInnerLoopUpwards(HLoopInformation * inner_loop)964 void HLoopInformation::PopulateInnerLoopUpwards(HLoopInformation* inner_loop) {
965 DCHECK(inner_loop->GetPreHeader()->GetLoopInformation() == this);
966 blocks_.Union(&inner_loop->blocks_);
967 HLoopInformation* outer_loop = GetPreHeader()->GetLoopInformation();
968 if (outer_loop != nullptr) {
969 outer_loop->PopulateInnerLoopUpwards(this);
970 }
971 }
972
GetPreHeader() const973 HBasicBlock* HLoopInformation::GetPreHeader() const {
974 HBasicBlock* block = header_->GetPredecessors()[0];
975 DCHECK(irreducible_ || (block == header_->GetDominator()));
976 return block;
977 }
978
// Returns whether `block` belongs to this loop.
bool HLoopInformation::Contains(const HBasicBlock& block) const {
  return blocks_.IsBitSet(block.GetBlockId());
}
982
// Returns whether this loop is nested inside (or equal to) `other`, i.e.
// whether `other` contains this loop's header.
bool HLoopInformation::IsIn(const HLoopInformation& other) const {
  return other.blocks_.IsBitSet(header_->GetBlockId());
}
986
// Returns whether `instruction` is defined in a block outside this loop.
bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const {
  return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId());
}
990
GetLifetimeEnd() const991 size_t HLoopInformation::GetLifetimeEnd() const {
992 size_t last_position = 0;
993 for (HBasicBlock* back_edge : GetBackEdges()) {
994 last_position = std::max(back_edge->GetLifetimeEnd(), last_position);
995 }
996 return last_position;
997 }
998
HasBackEdgeNotDominatedByHeader() const999 bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const {
1000 for (HBasicBlock* back_edge : GetBackEdges()) {
1001 DCHECK(back_edge->GetDominator() != nullptr);
1002 if (!header_->Dominates(back_edge)) {
1003 return true;
1004 }
1005 }
1006 return false;
1007 }
1008
DominatesAllBackEdges(HBasicBlock * block)1009 bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) {
1010 for (HBasicBlock* back_edge : GetBackEdges()) {
1011 if (!block->Dominates(back_edge)) {
1012 return false;
1013 }
1014 }
1015 return true;
1016 }
1017
1018
HasExitEdge() const1019 bool HLoopInformation::HasExitEdge() const {
1020 // Determine if this loop has at least one exit edge.
1021 HBlocksInLoopReversePostOrderIterator it_loop(*this);
1022 for (; !it_loop.Done(); it_loop.Advance()) {
1023 for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) {
1024 if (!Contains(*successor)) {
1025 return true;
1026 }
1027 }
1028 }
1029 return false;
1030 }
1031
Dominates(const HBasicBlock * other) const1032 bool HBasicBlock::Dominates(const HBasicBlock* other) const {
1033 // Walk up the dominator tree from `other`, to find out if `this`
1034 // is an ancestor.
1035 const HBasicBlock* current = other;
1036 while (current != nullptr) {
1037 if (current == this) {
1038 return true;
1039 }
1040 current = current->GetDominator();
1041 }
1042 return false;
1043 }
1044
UpdateInputsUsers(HGraph * graph,HInstruction * instruction)1045 static void UpdateInputsUsers(HGraph* graph, HInstruction* instruction) {
1046 HInputsRef inputs = instruction->GetInputs();
1047 if (inputs.size() != 0u) {
1048 ArenaAllocator* allocator = graph->GetAllocator();
1049 for (size_t i = 0; i < inputs.size(); ++i) {
1050 inputs[i]->AddUseAt(allocator, instruction, i);
1051 }
1052 }
1053 // Environment should be created later.
1054 DCHECK(!instruction->HasEnvironment());
1055 }
1056
// Swaps `initial` for `replacement` in this block's phi list: inserts the
// replacement next to it, redirects all uses, then removes the original.
void HBasicBlock::ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement) {
  DCHECK(initial->GetBlock() == this);
  InsertPhiAfter(replacement, initial);
  initial->ReplaceWith(replacement);
  RemovePhi(initial);
}
1063
// Swaps `initial` for `replacement` in this block, redirecting uses where
// applicable, then removes `initial`.
void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                                  HInstruction* replacement) {
  DCHECK(initial->GetBlock() == this);
  if (initial->IsControlFlow()) {
    // We can only replace a control flow instruction with another control flow instruction.
    DCHECK(replacement->IsControlFlow());
    DCHECK_EQ(replacement->GetId(), -1);
    DCHECK_EQ(replacement->GetType(), DataType::Type::kVoid);
    DCHECK_EQ(initial->GetBlock(), this);
    DCHECK_EQ(initial->GetType(), DataType::Type::kVoid);
    DCHECK(initial->GetUses().empty());
    DCHECK(initial->GetEnvUses().empty());
    // Control-flow instructions have no uses to transfer, so splice the
    // replacement in manually instead of going through ReplaceWith().
    replacement->SetBlock(this);
    HGraph* graph = GetGraph();
    replacement->SetId(graph->AllocateInstructionId());
    instructions_.InsertInstructionBefore(replacement, initial);
    UpdateInputsUsers(graph, replacement);
  } else {
    InsertInstructionBefore(replacement, initial);
    initial->ReplaceWith(replacement);
  }
  RemoveInstruction(initial);
}
1087
Add(HInstructionList * instruction_list,HBasicBlock * block,HInstruction * instruction)1088 static void Add(HInstructionList* instruction_list,
1089 HBasicBlock* block,
1090 HInstruction* instruction) {
1091 DCHECK(instruction->GetBlock() == nullptr);
1092 DCHECK_EQ(instruction->GetId(), -1);
1093 instruction->SetBlock(block);
1094 HGraph* graph = block->GetGraph();
1095 instruction->SetId(graph->AllocateInstructionId());
1096 UpdateInputsUsers(graph, instruction);
1097 instruction_list->AddInstruction(instruction);
1098 }
1099
// Appends a non-phi instruction to this block.
void HBasicBlock::AddInstruction(HInstruction* instruction) {
  Add(&instructions_, this, instruction);
}
1103
// Appends a phi to this block's phi list.
void HBasicBlock::AddPhi(HPhi* phi) {
  Add(&phis_, this, phi);
}
1107
// Inserts a freshly built, non-control-flow `instruction` immediately before
// `cursor`, which must already be in this block.
void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(!cursor->IsPhi());
  DCHECK(!instruction->IsPhi());
  // `instruction` must be new (no id); `cursor` must already be registered.
  DCHECK_EQ(instruction->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  DCHECK(!instruction->IsControlFlow());
  instruction->SetBlock(this);
  HGraph* graph = GetGraph();
  instruction->SetId(graph->AllocateInstructionId());
  UpdateInputsUsers(graph, instruction);
  instructions_.InsertInstructionBefore(instruction, cursor);
}
1121
// Inserts a freshly built, non-control-flow `instruction` immediately after
// `cursor`, which must already be in this block and must not terminate it.
void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(!cursor->IsPhi());
  DCHECK(!instruction->IsPhi());
  // `instruction` must be new (no id); `cursor` must already be registered.
  DCHECK_EQ(instruction->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  DCHECK(!instruction->IsControlFlow());
  // Nothing may follow a control-flow instruction.
  DCHECK(!cursor->IsControlFlow());
  instruction->SetBlock(this);
  HGraph* graph = GetGraph();
  instruction->SetId(graph->AllocateInstructionId());
  UpdateInputsUsers(graph, instruction);
  instructions_.InsertInstructionAfter(instruction, cursor);
}
1136
// Inserts a freshly built `phi` immediately after `cursor` in this block's
// phi list.
void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) {
  // `phi` must be new (no id); `cursor` must already be registered.
  DCHECK_EQ(phi->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  phi->SetBlock(this);
  HGraph* graph = GetGraph();
  phi->SetId(graph->AllocateInstructionId());
  UpdateInputsUsers(graph, phi);
  phis_.InsertInstructionAfter(phi, cursor);
}
1147
// Detaches `instruction` from `block` and unlinks it from `instruction_list`.
// With `ensure_safety`, verifies nothing uses it anymore and unregisters it
// from the use lists of its own inputs.
static void Remove(HInstructionList* instruction_list,
                   HBasicBlock* block,
                   HInstruction* instruction,
                   bool ensure_safety) {
  DCHECK_EQ(block, instruction->GetBlock());
  instruction->SetBlock(nullptr);
  instruction_list->RemoveInstruction(instruction);
  if (ensure_safety) {
    DCHECK(instruction->GetUses().empty());
    DCHECK(instruction->GetEnvUses().empty());
    RemoveAsUser(instruction);
  }
}
1161
// Removes a non-phi instruction from this block.
void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) {
  DCHECK(!instruction->IsPhi());
  Remove(&instructions_, this, instruction, ensure_safety);
}
1166
// Removes a phi from this block's phi list.
void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) {
  Remove(&phis_, this, phi, ensure_safety);
}
1170
RemoveInstructionOrPhi(HInstruction * instruction,bool ensure_safety)1171 void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) {
1172 if (instruction->IsPhi()) {
1173 RemovePhi(instruction->AsPhi(), ensure_safety);
1174 } else {
1175 RemoveInstruction(instruction, ensure_safety);
1176 }
1177 }
1178
CopyFrom(ArenaAllocator * allocator,ArrayRef<HInstruction * const> locals)1179 void HEnvironment::CopyFrom(ArenaAllocator* allocator, ArrayRef<HInstruction* const> locals) {
1180 DCHECK_EQ(locals.size(), Size());
1181 for (size_t i = 0; i < locals.size(); i++) {
1182 HInstruction* instruction = locals[i];
1183 SetRawEnvAt(i, instruction);
1184 if (instruction != nullptr) {
1185 instruction->AddEnvUseAt(allocator, this, i);
1186 }
1187 }
1188 }
1189
CopyFrom(ArenaAllocator * allocator,const HEnvironment * env)1190 void HEnvironment::CopyFrom(ArenaAllocator* allocator, const HEnvironment* env) {
1191 DCHECK_EQ(env->Size(), Size());
1192 for (size_t i = 0; i < env->Size(); i++) {
1193 HInstruction* instruction = env->GetInstructionAt(i);
1194 SetRawEnvAt(i, instruction);
1195 if (instruction != nullptr) {
1196 instruction->AddEnvUseAt(allocator, this, i);
1197 }
1198 }
1199 }
1200
CopyFromWithLoopPhiAdjustment(ArenaAllocator * allocator,HEnvironment * env,HBasicBlock * loop_header)1201 void HEnvironment::CopyFromWithLoopPhiAdjustment(ArenaAllocator* allocator,
1202 HEnvironment* env,
1203 HBasicBlock* loop_header) {
1204 DCHECK(loop_header->IsLoopHeader());
1205 for (size_t i = 0; i < env->Size(); i++) {
1206 HInstruction* instruction = env->GetInstructionAt(i);
1207 SetRawEnvAt(i, instruction);
1208 if (instruction == nullptr) {
1209 continue;
1210 }
1211 if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
1212 // At the end of the loop pre-header, the corresponding value for instruction
1213 // is the first input of the phi.
1214 HInstruction* initial = instruction->AsPhi()->InputAt(0);
1215 SetRawEnvAt(i, initial);
1216 initial->AddEnvUseAt(allocator, this, i);
1217 } else {
1218 instruction->AddEnvUseAt(allocator, this, i);
1219 }
1220 }
1221 }
1222
// Unregisters this environment from the env-use list of the value held in
// vreg slot `index`.
void HEnvironment::RemoveAsUserOfInput(size_t index) const {
  const HUserRecord<HEnvironment*>& env_use = GetVRegs()[index];
  HInstruction* user = env_use.GetInstruction();
  // The use list is singly linked, so erasure goes through the node *before*
  // the one being removed.
  auto before_env_use_node = env_use.GetBeforeUseNode();
  user->env_uses_.erase_after(before_env_use_node);
  // Records after the erased node now have stale before-use links; repair them.
  user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
}
1230
// Rebinds vreg slot `index` from its current value to `replacement`, moving
// the use node from the old value's env-use list to the replacement's.
void HEnvironment::ReplaceInput(HInstruction* replacement, size_t index) {
  const HUserRecord<HEnvironment*>& env_use_record = GetVRegs()[index];
  HInstruction* orig_instr = env_use_record.GetInstruction();

  // Replacing a value with itself would corrupt the splice below.
  DCHECK(orig_instr != replacement);

  HUseList<HEnvironment*>::iterator before_use_node = env_use_record.GetBeforeUseNode();
  // Note: fixup_end remains valid across splice_after().
  auto fixup_end = replacement->env_uses_.empty() ? replacement->env_uses_.begin()
                                                  : ++replacement->env_uses_.begin();
  // Move the single use node onto the front of the replacement's env-use list.
  replacement->env_uses_.splice_after(replacement->env_uses_.before_begin(),
                                      env_use_record.GetInstruction()->env_uses_,
                                      before_use_node);
  // Repair before-use links on both lists after the transfer.
  replacement->FixUpUserRecordsAfterEnvUseInsertion(fixup_end);
  orig_instr->FixUpUserRecordsAfterEnvUseRemoval(before_use_node);
}
1247
// Prints this instruction to `os`; with `dump_args`, also prints its
// transitive inputs as an indented tree, each instruction at most once.
std::ostream& HInstruction::Dump(std::ostream& os, bool dump_args) {
  // Note: Handle the case where the instruction has been removed from
  // the graph to support debugging output for failed gtests.
  HGraph* graph = (GetBlock() != nullptr) ? GetBlock()->GetGraph() : nullptr;
  HGraphVisualizer::DumpInstruction(&os, graph, this);
  if (dump_args) {
    // Allocate memory from local ScopedArenaAllocator.
    std::optional<MallocArenaPool> local_arena_pool;
    std::optional<ArenaStack> local_arena_stack;
    if (UNLIKELY(graph == nullptr)) {
      // No graph to borrow an arena from; build a throwaway one.
      local_arena_pool.emplace();
      local_arena_stack.emplace(&local_arena_pool.value());
    }
    ScopedArenaAllocator allocator(
        graph != nullptr ? graph->GetArenaStack() : &local_arena_stack.value());
    // Instructions that we already visited. We print each instruction only once.
    ArenaBitVector visited(&allocator,
                           (graph != nullptr) ? graph->GetCurrentInstructionId() : 0u,
                           /* expandable= */ (graph == nullptr),
                           kArenaAllocMisc);
    visited.SetBit(GetId());
    // Keep a queue of instructions with their indentations.
    ScopedArenaDeque<std::pair<HInstruction*, size_t>> queue(allocator.Adapter(kArenaAllocMisc));
    // Push inputs in reverse so they pop in source order (depth-first).
    auto add_args = [&queue](HInstruction* instruction, size_t indentation) {
      for (HInstruction* arg : ReverseRange(instruction->GetInputs())) {
        queue.emplace_front(arg, indentation);
      }
    };
    add_args(this, /*indentation=*/ 1u);
    while (!queue.empty()) {
      HInstruction* instruction;
      size_t indentation;
      std::tie(instruction, indentation) = queue.front();
      queue.pop_front();
      if (!visited.IsBitSet(instruction->GetId())) {
        visited.SetBit(instruction->GetId());
        os << '\n';
        for (size_t i = 0; i != indentation; ++i) {
          os << "  ";
        }
        HGraphVisualizer::DumpInstruction(&os, graph, instruction);
        add_args(instruction, indentation + 1u);
      }
    }
  }
  return os;
}
1295
GetNextDisregardingMoves() const1296 HInstruction* HInstruction::GetNextDisregardingMoves() const {
1297 HInstruction* next = GetNext();
1298 while (next != nullptr && next->IsParallelMove()) {
1299 next = next->GetNext();
1300 }
1301 return next;
1302 }
1303
GetPreviousDisregardingMoves() const1304 HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
1305 HInstruction* previous = GetPrevious();
1306 while (previous != nullptr && previous->IsParallelMove()) {
1307 previous = previous->GetPrevious();
1308 }
1309 return previous;
1310 }
1311
AddInstruction(HInstruction * instruction)1312 void HInstructionList::AddInstruction(HInstruction* instruction) {
1313 if (first_instruction_ == nullptr) {
1314 DCHECK(last_instruction_ == nullptr);
1315 first_instruction_ = last_instruction_ = instruction;
1316 } else {
1317 DCHECK(last_instruction_ != nullptr);
1318 last_instruction_->next_ = instruction;
1319 instruction->previous_ = last_instruction_;
1320 last_instruction_ = instruction;
1321 }
1322 }
1323
// Links `instruction` into the list immediately before `cursor`.
void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(Contains(cursor));
  if (cursor == first_instruction_) {
    // New head of the list.
    cursor->previous_ = instruction;
    instruction->next_ = cursor;
    first_instruction_ = instruction;
  } else {
    // Splice between cursor's predecessor and cursor. Order matters:
    // `instruction->previous_` is set first so the last line can use it.
    instruction->previous_ = cursor->previous_;
    instruction->next_ = cursor;
    cursor->previous_ = instruction;
    instruction->previous_->next_ = instruction;
  }
}
1337
// Links `instruction` into the list immediately after `cursor`.
void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(Contains(cursor));
  if (cursor == last_instruction_) {
    // New tail of the list.
    cursor->next_ = instruction;
    instruction->previous_ = cursor;
    last_instruction_ = instruction;
  } else {
    // Splice between cursor and cursor's successor. Order matters:
    // `instruction->next_` is set first so the last line can use it.
    instruction->next_ = cursor->next_;
    instruction->previous_ = cursor;
    cursor->next_ = instruction;
    instruction->next_->previous_ = instruction;
  }
}
1351
// Unlinks `instruction` from the list, updating head/tail as needed. Does
// not touch the instruction's own prev/next pointers.
void HInstructionList::RemoveInstruction(HInstruction* instruction) {
  // An instruction has a null prev/next exactly when it is the head/tail.
  DCHECK_EQ(instruction->previous_ == nullptr, instruction == first_instruction_);
  DCHECK_EQ(instruction->next_ == nullptr, instruction == last_instruction_);

  if (instruction == first_instruction_) {
    first_instruction_ = instruction->next_;
  } else {
    instruction->previous_->next_ = instruction->next_;
  }

  if (instruction == last_instruction_) {
    last_instruction_ = instruction->previous_;
  } else {
    instruction->next_->previous_ = instruction->previous_;
  }
}
1368
Contains(HInstruction * instruction) const1369 bool HInstructionList::Contains(HInstruction* instruction) const {
1370 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1371 if (it.Current() == instruction) {
1372 return true;
1373 }
1374 }
1375 return false;
1376 }
1377
FoundBefore(const HInstruction * instruction1,const HInstruction * instruction2) const1378 bool HInstructionList::FoundBefore(const HInstruction* instruction1,
1379 const HInstruction* instruction2) const {
1380 DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock());
1381 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1382 if (it.Current() == instruction2) {
1383 return false;
1384 }
1385 if (it.Current() == instruction1) {
1386 return true;
1387 }
1388 }
1389 LOG(FATAL) << "Did not find an order between two instructions of the same block.";
1390 UNREACHABLE();
1391 }
1392
Dominates(HInstruction * other_instruction) const1393 bool HInstruction::Dominates(HInstruction* other_instruction) const {
1394 return other_instruction == this || StrictlyDominates(other_instruction);
1395 }
1396
StrictlyDominates(HInstruction * other_instruction) const1397 bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const {
1398 if (other_instruction == this) {
1399 // An instruction does not strictly dominate itself.
1400 return false;
1401 }
1402 HBasicBlock* block = GetBlock();
1403 HBasicBlock* other_block = other_instruction->GetBlock();
1404 if (block != other_block) {
1405 return GetBlock()->Dominates(other_instruction->GetBlock());
1406 } else {
1407 // If both instructions are in the same block, ensure this
1408 // instruction comes before `other_instruction`.
1409 if (IsPhi()) {
1410 if (!other_instruction->IsPhi()) {
1411 // Phis appear before non phi-instructions so this instruction
1412 // dominates `other_instruction`.
1413 return true;
1414 } else {
1415 // There is no order among phis.
1416 LOG(FATAL) << "There is no dominance between phis of a same block.";
1417 UNREACHABLE();
1418 }
1419 } else {
1420 // `this` is not a phi.
1421 if (other_instruction->IsPhi()) {
1422 // Phis appear before non phi-instructions so this instruction
1423 // does not dominate `other_instruction`.
1424 return false;
1425 } else {
1426 // Check whether this instruction comes before
1427 // `other_instruction` in the instruction list.
1428 return block->GetInstructions().FoundBefore(this, other_instruction);
1429 }
1430 }
1431 }
1432 }
1433
// Detaches and forgets this instruction's environment, unregistering it from
// the env-use lists of all referenced values.
void HInstruction::RemoveEnvironment() {
  RemoveEnvironmentUses(this);
  environment_ = nullptr;
}
1438
// Transfers all uses and env-uses of this instruction to `other`, leaving
// this instruction unused.
void HInstruction::ReplaceWith(HInstruction* other) {
  DCHECK(other != nullptr);
  // Note: fixup_end remains valid across splice_after().
  auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin();
  // Move the whole use list onto the front of `other`'s list, then repair
  // the before-use links of the moved records.
  other->uses_.splice_after(other->uses_.before_begin(), uses_);
  other->FixUpUserRecordsAfterUseInsertion(fixup_end);

  // Note: env_fixup_end remains valid across splice_after().
  auto env_fixup_end =
      other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin();
  // Same transfer for the environment-use list.
  other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_);
  other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);

  DCHECK(uses_.empty());
  DCHECK(env_uses_.empty());
}
1455
// Replaces, with `replacement`, every use of this instruction that is
// dominated by `dominator` (strictly, if `strictly_dominated`). Phi inputs
// are replaced when the corresponding predecessor edge is dominated.
void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator,
                                          HInstruction* replacement,
                                          bool strictly_dominated) {
  HBasicBlock* dominator_block = dominator->GetBlock();
  // Empty until lazily initialized below; SizeInBits() == 0 means "not built".
  BitVectorView<size_t> visited_blocks;

  // Lazily compute the dominated blocks to faster calculation of domination afterwards.
  auto maybe_generate_visited_blocks = [&visited_blocks, this, dominator_block]() {
    if (visited_blocks.SizeInBits() != 0u) {
      // Already built.
      DCHECK_EQ(visited_blocks.SizeInBits(), GetBlock()->GetGraph()->GetBlocks().size());
      return;
    }
    HGraph* graph = GetBlock()->GetGraph();
    visited_blocks = ArenaBitVector::CreateFixedSize(
        graph->GetAllocator(), graph->GetBlocks().size(), kArenaAllocMisc);
    // BFS over the dominator tree starting at `dominator_block`; every block
    // reached is dominated by it.
    ScopedArenaAllocator allocator(graph->GetArenaStack());
    ScopedArenaQueue<const HBasicBlock*> worklist(allocator.Adapter(kArenaAllocMisc));
    worklist.push(dominator_block);

    while (!worklist.empty()) {
      const HBasicBlock* current = worklist.front();
      worklist.pop();
      visited_blocks.SetBit(current->GetBlockId());
      for (HBasicBlock* dominated : current->GetDominatedBlocks()) {
        if (visited_blocks.IsBitSet(dominated->GetBlockId())) {
          continue;
        }
        worklist.push(dominated);
      }
    }
  };

  const HUseList<HInstruction*>& uses = GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
    HInstruction* user = it->GetUser();
    HBasicBlock* block = user->GetBlock();
    size_t index = it->GetIndex();
    // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
    ++it;
    bool dominated = false;
    if (dominator_block == block) {
      // Trickier case, call the other methods.
      dominated =
          strictly_dominated ? dominator->StrictlyDominates(user) : dominator->Dominates(user);
    } else {
      // Block domination.
      maybe_generate_visited_blocks();
      dominated = visited_blocks.IsBitSet(block->GetBlockId());
    }

    if (dominated) {
      user->ReplaceInput(replacement, index);
    } else if (user->IsPhi() && !user->AsPhi()->IsCatchPhi()) {
      // If the input flows from a block dominated by `dominator`, we can replace it.
      // We do not perform this for catch phis as we don't have control flow support
      // for their inputs.
      HBasicBlock* predecessor = block->GetPredecessors()[index];
      maybe_generate_visited_blocks();
      if (visited_blocks.IsBitSet(predecessor->GetBlockId())) {
        user->ReplaceInput(replacement, index);
      }
    }
  }
}
1520
// Replaces, with `replacement`, every environment use of this instruction
// whose holder is strictly dominated by `dominator`.
void HInstruction::ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
  const HUseList<HEnvironment*>& uses = GetEnvUses();
  for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
    HEnvironment* user = it->GetUser();
    size_t index = it->GetIndex();
    // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
    ++it;
    if (dominator->StrictlyDominates(user->GetHolder())) {
      user->ReplaceInput(replacement, index);
    }
  }
}
1533
// Rebinds input slot `index` to `replacement`, moving the use node from the
// old input's use list to the replacement's. No-op if already bound to it.
void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) {
  HUserRecord<HInstruction*> input_use = InputRecordAt(index);
  if (input_use.GetInstruction() == replacement) {
    // Nothing to do.
    return;
  }
  HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
  // Note: fixup_end remains valid across splice_after().
  auto fixup_end =
      replacement->uses_.empty() ? replacement->uses_.begin() : ++replacement->uses_.begin();
  // Move the single use node onto the front of the replacement's use list.
  replacement->uses_.splice_after(replacement->uses_.before_begin(),
                                  input_use.GetInstruction()->uses_,
                                  before_use_node);
  // Repair before-use links on both lists after the transfer.
  replacement->FixUpUserRecordsAfterUseInsertion(fixup_end);
  input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
}
1550
EnvironmentSize() const1551 size_t HInstruction::EnvironmentSize() const {
1552 return HasEnvironment() ? environment_->Size() : 0;
1553 }
1554
AddInput(HInstruction * input)1555 void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
1556 DCHECK(input->GetBlock() != nullptr);
1557 inputs_.push_back(HUserRecord<HInstruction*>(input));
1558 input->AddUseAt(GetBlock()->GetGraph()->GetAllocator(), this, inputs_.size() - 1);
1559 }
1560
// Inserts `input` at position `index`, shifting later inputs back by one and
// patching the index stored in each shifted input's use node.
void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
  inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
  // Update indexes in use nodes of inputs that have been pushed further back by the insert().
  for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
    DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
    inputs_[i].GetUseNode()->SetIndex(i);
  }
  // Add the use after updating the indexes. If the `input` is already used by `this`,
  // the fixup after use insertion can use those indexes.
  input->AddUseAt(GetBlock()->GetGraph()->GetAllocator(), this, index);
}
1572
// Removes the input at `index`: unregisters `this` from that input's use list,
// erases the record, and patches the indexes of the inputs shifted forward.
void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
  RemoveAsUserOfInput(index);
  inputs_.erase(inputs_.begin() + index);
  // Update indexes in use nodes of inputs that have been pulled forward by the erase().
  for (size_t i = index, e = inputs_.size(); i < e; ++i) {
    DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u);
    inputs_[i].GetUseNode()->SetIndex(i);
  }
}
1582
// Drops all inputs of this instruction, first unregistering `this` from each
// input's use list so no dangling use nodes remain.
void HVariableInputSizeInstruction::RemoveAllInputs() {
  RemoveAsUserOfAllInputs();
  DCHECK(!HasNonEnvironmentUses());

  inputs_.clear();
  DCHECK_EQ(0u, InputCount());
}
1590
// Removes `instruction` from the inputs of every constructor fence that uses
// it, deleting any fence left with zero inputs. `instruction` itself stays in
// the graph. Returns the number of fences deleted (for statistics).
size_t HConstructorFence::RemoveConstructorFences(HInstruction* instruction) {
  DCHECK(instruction->GetBlock() != nullptr);
  // Removing constructor fences only makes sense for instructions with an object return type.
  DCHECK_EQ(DataType::Type::kReference, instruction->GetType());

  // Return how many instructions were removed for statistic purposes.
  size_t remove_count = 0;

  // Efficient implementation that simultaneously (in one pass):
  // * Scans the uses list for all constructor fences.
  // * Deletes that constructor fence from the uses list of `instruction`.
  // * Deletes `instruction` from the constructor fence's inputs.
  // * Deletes the constructor fence if it now has 0 inputs.

  const HUseList<HInstruction*>& uses = instruction->GetUses();
  // Warning: Although this is "const", we might mutate the list when calling RemoveInputAt.
  for (auto it = uses.begin(), end = uses.end(); it != end; ) {
    const HUseListNode<HInstruction*>& use_node = *it;
    HInstruction* const use_instruction = use_node.GetUser();

    // Advance the iterator immediately once we fetch the use_node.
    // Warning: If the input is removed, the current iterator becomes invalid.
    ++it;

    if (use_instruction->IsConstructorFence()) {
      HConstructorFence* ctor_fence = use_instruction->AsConstructorFence();
      size_t input_index = use_node.GetIndex();

      // Process the candidate instruction for removal
      // from the graph.

      // Constructor fence instructions are never
      // used by other instructions.
      //
      // If we wanted to make this more generic, it
      // could be a runtime if statement.
      DCHECK(!ctor_fence->HasUses());

      // A constructor fence's return type is "kPrimVoid"
      // and therefore it can't have any environment uses.
      DCHECK(!ctor_fence->HasEnvironmentUses());

      // Remove the inputs first, otherwise removing the instruction
      // will try to remove its uses while we are already removing uses
      // and this operation will fail.
      DCHECK_EQ(instruction, ctor_fence->InputAt(input_index));

      // Removing the input will also remove the `use_node`.
      // (Do not look at `use_node` after this, it will be a dangling reference).
      ctor_fence->RemoveInputAt(input_index);

      // Once all inputs are removed, the fence is considered dead and
      // is removed.
      if (ctor_fence->InputCount() == 0u) {
        ctor_fence->GetBlock()->RemoveInstruction(ctor_fence);
        ++remove_count;
      }
    }
  }

  if (kIsDebugBuild) {
    // Post-condition checks:
    // * None of the uses of `instruction` are a constructor fence.
    // * The `instruction` itself did not get removed from a block.
    for (const HUseListNode<HInstruction*>& use_node : instruction->GetUses()) {
      CHECK(!use_node.GetUser()->IsConstructorFence());
    }
    CHECK(instruction->GetBlock() != nullptr);
  }

  return remove_count;
}
1663
Merge(HConstructorFence * other)1664 void HConstructorFence::Merge(HConstructorFence* other) {
1665 // Do not delete yourself from the graph.
1666 DCHECK(this != other);
1667 // Don't try to merge with an instruction not associated with a block.
1668 DCHECK(other->GetBlock() != nullptr);
1669 // A constructor fence's return type is "kPrimVoid"
1670 // and therefore it cannot have any environment uses.
1671 DCHECK(!other->HasEnvironmentUses());
1672
1673 auto has_input = [](HInstruction* haystack, HInstruction* needle) {
1674 // Check if `haystack` has `needle` as any of its inputs.
1675 for (size_t input_count = 0; input_count < haystack->InputCount(); ++input_count) {
1676 if (haystack->InputAt(input_count) == needle) {
1677 return true;
1678 }
1679 }
1680 return false;
1681 };
1682
1683 // Add any inputs from `other` into `this` if it wasn't already an input.
1684 for (size_t input_count = 0; input_count < other->InputCount(); ++input_count) {
1685 HInstruction* other_input = other->InputAt(input_count);
1686 if (!has_input(this, other_input)) {
1687 AddInput(other_input);
1688 }
1689 }
1690
1691 other->GetBlock()->RemoveInstruction(other);
1692 }
1693
GetAssociatedAllocation(bool ignore_inputs)1694 HInstruction* HConstructorFence::GetAssociatedAllocation(bool ignore_inputs) {
1695 HInstruction* new_instance_inst = GetPrevious();
1696 // Check if the immediately preceding instruction is a new-instance/new-array.
1697 // Otherwise this fence is for protecting final fields.
1698 if (new_instance_inst != nullptr &&
1699 (new_instance_inst->IsNewInstance() || new_instance_inst->IsNewArray())) {
1700 if (ignore_inputs) {
1701 // If inputs are ignored, simply check if the predecessor is
1702 // *any* HNewInstance/HNewArray.
1703 //
1704 // Inputs are normally only ignored for prepare_for_register_allocation,
1705 // at which point *any* prior HNewInstance/Array can be considered
1706 // associated.
1707 return new_instance_inst;
1708 } else {
1709 // Normal case: There must be exactly 1 input and the previous instruction
1710 // must be that input.
1711 if (InputCount() == 1u && InputAt(0) == new_instance_inst) {
1712 return new_instance_inst;
1713 }
1714 }
1715 }
1716 return nullptr;
1717 }
1718
// Generates the virtual Accept() override for every concrete instruction,
// double-dispatching to the matching Visit##name() on the visitor.
#define DEFINE_ACCEPT(name, super) \
  void H##name::Accept(HGraphVisitor* visitor) { \
    visitor->Visit##name(this); \
  }

FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)

#undef DEFINE_ACCEPT
1727
1728 void HGraphVisitor::VisitInsertionOrder() {
1729 for (HBasicBlock* block : graph_->GetActiveBlocks()) {
1730 VisitBasicBlock(block);
1731 }
1732 }
1733
VisitReversePostOrder()1734 void HGraphVisitor::VisitReversePostOrder() {
1735 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1736 VisitBasicBlock(block);
1737 }
1738 }
1739
// Visits one block: phis first, then the remaining instructions, matching the
// order in which they appear in the block.
void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) {
  VisitPhis(block);
  VisitNonPhiInstructions(block);
}
1744
VisitPhis(HBasicBlock * block)1745 void HGraphVisitor::VisitPhis(HBasicBlock* block) {
1746 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
1747 DCHECK(it.Current()->IsPhi());
1748 VisitPhi(it.Current()->AsPhi());
1749 }
1750 }
1751
VisitNonPhiInstructions(HBasicBlock * block)1752 void HGraphVisitor::VisitNonPhiInstructions(HBasicBlock* block) {
1753 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
1754 DCHECK(!it.Current()->IsPhi());
1755 it.Current()->Accept(this);
1756 }
1757 }
1758
VisitNonPhiInstructionsHandleChanges(HBasicBlock * block)1759 void HGraphVisitor::VisitNonPhiInstructionsHandleChanges(HBasicBlock* block) {
1760 for (HInstructionIteratorHandleChanges it(block->GetInstructions()); !it.Done(); it.Advance()) {
1761 DCHECK(!it.Current()->IsPhi());
1762 it.Current()->Accept(this);
1763 }
1764 }
1765
TryStaticEvaluation() const1766 HConstant* HTypeConversion::TryStaticEvaluation() const { return TryStaticEvaluation(GetInput()); }
1767
TryStaticEvaluation(HInstruction * input) const1768 HConstant* HTypeConversion::TryStaticEvaluation(HInstruction* input) const {
1769 HGraph* graph = input->GetBlock()->GetGraph();
1770 if (input->IsIntConstant()) {
1771 int32_t value = input->AsIntConstant()->GetValue();
1772 switch (GetResultType()) {
1773 case DataType::Type::kInt8:
1774 return graph->GetIntConstant(static_cast<int8_t>(value));
1775 case DataType::Type::kUint8:
1776 return graph->GetIntConstant(static_cast<uint8_t>(value));
1777 case DataType::Type::kInt16:
1778 return graph->GetIntConstant(static_cast<int16_t>(value));
1779 case DataType::Type::kUint16:
1780 return graph->GetIntConstant(static_cast<uint16_t>(value));
1781 case DataType::Type::kInt64:
1782 return graph->GetLongConstant(static_cast<int64_t>(value));
1783 case DataType::Type::kFloat32:
1784 return graph->GetFloatConstant(static_cast<float>(value));
1785 case DataType::Type::kFloat64:
1786 return graph->GetDoubleConstant(static_cast<double>(value));
1787 default:
1788 return nullptr;
1789 }
1790 } else if (input->IsLongConstant()) {
1791 int64_t value = input->AsLongConstant()->GetValue();
1792 switch (GetResultType()) {
1793 case DataType::Type::kInt8:
1794 return graph->GetIntConstant(static_cast<int8_t>(value));
1795 case DataType::Type::kUint8:
1796 return graph->GetIntConstant(static_cast<uint8_t>(value));
1797 case DataType::Type::kInt16:
1798 return graph->GetIntConstant(static_cast<int16_t>(value));
1799 case DataType::Type::kUint16:
1800 return graph->GetIntConstant(static_cast<uint16_t>(value));
1801 case DataType::Type::kInt32:
1802 return graph->GetIntConstant(static_cast<int32_t>(value));
1803 case DataType::Type::kFloat32:
1804 return graph->GetFloatConstant(static_cast<float>(value));
1805 case DataType::Type::kFloat64:
1806 return graph->GetDoubleConstant(static_cast<double>(value));
1807 default:
1808 return nullptr;
1809 }
1810 } else if (input->IsFloatConstant()) {
1811 float value = input->AsFloatConstant()->GetValue();
1812 switch (GetResultType()) {
1813 case DataType::Type::kInt32:
1814 if (std::isnan(value))
1815 return graph->GetIntConstant(0);
1816 if (value >= static_cast<float>(kPrimIntMax))
1817 return graph->GetIntConstant(kPrimIntMax);
1818 if (value <= kPrimIntMin)
1819 return graph->GetIntConstant(kPrimIntMin);
1820 return graph->GetIntConstant(static_cast<int32_t>(value));
1821 case DataType::Type::kInt64:
1822 if (std::isnan(value))
1823 return graph->GetLongConstant(0);
1824 if (value >= static_cast<float>(kPrimLongMax))
1825 return graph->GetLongConstant(kPrimLongMax);
1826 if (value <= kPrimLongMin)
1827 return graph->GetLongConstant(kPrimLongMin);
1828 return graph->GetLongConstant(static_cast<int64_t>(value));
1829 case DataType::Type::kFloat64:
1830 return graph->GetDoubleConstant(static_cast<double>(value));
1831 default:
1832 return nullptr;
1833 }
1834 } else if (input->IsDoubleConstant()) {
1835 double value = input->AsDoubleConstant()->GetValue();
1836 switch (GetResultType()) {
1837 case DataType::Type::kInt32:
1838 if (std::isnan(value))
1839 return graph->GetIntConstant(0);
1840 if (value >= kPrimIntMax)
1841 return graph->GetIntConstant(kPrimIntMax);
1842 if (value <= kPrimLongMin)
1843 return graph->GetIntConstant(kPrimIntMin);
1844 return graph->GetIntConstant(static_cast<int32_t>(value));
1845 case DataType::Type::kInt64:
1846 if (std::isnan(value))
1847 return graph->GetLongConstant(0);
1848 if (value >= static_cast<double>(kPrimLongMax))
1849 return graph->GetLongConstant(kPrimLongMax);
1850 if (value <= kPrimLongMin)
1851 return graph->GetLongConstant(kPrimLongMin);
1852 return graph->GetLongConstant(static_cast<int64_t>(value));
1853 case DataType::Type::kFloat32:
1854 return graph->GetFloatConstant(static_cast<float>(value));
1855 default:
1856 return nullptr;
1857 }
1858 }
1859 return nullptr;
1860 }
1861
TryStaticEvaluation() const1862 HConstant* HUnaryOperation::TryStaticEvaluation() const { return TryStaticEvaluation(GetInput()); }
1863
TryStaticEvaluation(HInstruction * input) const1864 HConstant* HUnaryOperation::TryStaticEvaluation(HInstruction* input) const {
1865 if (input->IsIntConstant()) {
1866 return Evaluate(input->AsIntConstant());
1867 } else if (input->IsLongConstant()) {
1868 return Evaluate(input->AsLongConstant());
1869 } else if (kEnableFloatingPointStaticEvaluation) {
1870 if (input->IsFloatConstant()) {
1871 return Evaluate(input->AsFloatConstant());
1872 } else if (input->IsDoubleConstant()) {
1873 return Evaluate(input->AsDoubleConstant());
1874 }
1875 }
1876 return nullptr;
1877 }
1878
TryStaticEvaluation() const1879 HConstant* HBinaryOperation::TryStaticEvaluation() const {
1880 return TryStaticEvaluation(GetLeft(), GetRight());
1881 }
1882
TryStaticEvaluation(HInstruction * left,HInstruction * right) const1883 HConstant* HBinaryOperation::TryStaticEvaluation(HInstruction* left, HInstruction* right) const {
1884 if (left->IsIntConstant() && right->IsIntConstant()) {
1885 return Evaluate(left->AsIntConstant(), right->AsIntConstant());
1886 } else if (left->IsLongConstant()) {
1887 if (right->IsIntConstant()) {
1888 // The binop(long, int) case is only valid for shifts and rotations.
1889 DCHECK(IsShl() || IsShr() || IsUShr() || IsRol() || IsRor()) << DebugName();
1890 return Evaluate(left->AsLongConstant(), right->AsIntConstant());
1891 } else if (right->IsLongConstant()) {
1892 return Evaluate(left->AsLongConstant(), right->AsLongConstant());
1893 }
1894 } else if (left->IsNullConstant() && right->IsNullConstant()) {
1895 // The binop(null, null) case is only valid for equal and not-equal conditions.
1896 DCHECK(IsEqual() || IsNotEqual()) << DebugName();
1897 return Evaluate(left->AsNullConstant(), right->AsNullConstant());
1898 } else if (kEnableFloatingPointStaticEvaluation) {
1899 if (left->IsFloatConstant() && right->IsFloatConstant()) {
1900 return Evaluate(left->AsFloatConstant(), right->AsFloatConstant());
1901 } else if (left->IsDoubleConstant() && right->IsDoubleConstant()) {
1902 return Evaluate(left->AsDoubleConstant(), right->AsDoubleConstant());
1903 }
1904 }
1905 return nullptr;
1906 }
1907
GetConstantRight() const1908 HConstant* HBinaryOperation::GetConstantRight() const {
1909 if (GetRight()->IsConstant()) {
1910 return GetRight()->AsConstant();
1911 } else if (IsCommutative() && GetLeft()->IsConstant()) {
1912 return GetLeft()->AsConstant();
1913 } else {
1914 return nullptr;
1915 }
1916 }
1917
1918 // If `GetConstantRight()` returns one of the input, this returns the other
1919 // one. Otherwise it returns null.
GetLeastConstantLeft() const1920 HInstruction* HBinaryOperation::GetLeastConstantLeft() const {
1921 HInstruction* most_constant_right = GetConstantRight();
1922 if (most_constant_right == nullptr) {
1923 return nullptr;
1924 } else if (most_constant_right == GetLeft()) {
1925 return GetRight();
1926 } else {
1927 return GetLeft();
1928 }
1929 }
1930
operator <<(std::ostream & os,ComparisonBias rhs)1931 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs) {
1932 // TODO: Replace with auto-generated operator<<.
1933 switch (rhs) {
1934 case ComparisonBias::kNoBias:
1935 return os << "none";
1936 case ComparisonBias::kGtBias:
1937 return os << "gt";
1938 case ComparisonBias::kLtBias:
1939 return os << "lt";
1940 }
1941 }
1942
// Factory: allocates the concrete HCondition subclass matching `cond`
// (eq/ne, signed lt/le/gt/ge, unsigned below/above variants) in the graph's
// arena. Aborts on an unexpected condition kind.
HCondition* HCondition::Create(HGraph* graph,
                               IfCondition cond,
                               HInstruction* lhs,
                               HInstruction* rhs,
                               uint32_t dex_pc) {
  ArenaAllocator* allocator = graph->GetAllocator();
  switch (cond) {
    case kCondEQ: return new (allocator) HEqual(lhs, rhs, dex_pc);
    case kCondNE: return new (allocator) HNotEqual(lhs, rhs, dex_pc);
    case kCondLT: return new (allocator) HLessThan(lhs, rhs, dex_pc);
    case kCondLE: return new (allocator) HLessThanOrEqual(lhs, rhs, dex_pc);
    case kCondGT: return new (allocator) HGreaterThan(lhs, rhs, dex_pc);
    case kCondGE: return new (allocator) HGreaterThanOrEqual(lhs, rhs, dex_pc);
    case kCondB:  return new (allocator) HBelow(lhs, rhs, dex_pc);
    case kCondBE: return new (allocator) HBelowOrEqual(lhs, rhs, dex_pc);
    case kCondA:  return new (allocator) HAbove(lhs, rhs, dex_pc);
    case kCondAE: return new (allocator) HAboveOrEqual(lhs, rhs, dex_pc);
    default:
      LOG(FATAL) << "Unexpected condition " << cond;
      UNREACHABLE();
  }
}
1965
IsBeforeWhenDisregardMoves(HInstruction * instruction) const1966 bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
1967 return this == instruction->GetPreviousDisregardingMoves();
1968 }
1969
Equals(const HInstruction * other) const1970 bool HInstruction::Equals(const HInstruction* other) const {
1971 if (GetKind() != other->GetKind()) return false;
1972 if (GetType() != other->GetType()) return false;
1973 if (!InstructionDataEquals(other)) return false;
1974 HConstInputsRef inputs = GetInputs();
1975 HConstInputsRef other_inputs = other->GetInputs();
1976 if (inputs.size() != other_inputs.size()) return false;
1977 for (size_t i = 0; i != inputs.size(); ++i) {
1978 if (inputs[i] != other_inputs[i]) return false;
1979 }
1980
1981 DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode());
1982 return true;
1983 }
1984
// Prints the name of an instruction kind, e.g. "Add" for HInstruction::kAdd.
std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs) {
#define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
  switch (rhs) {
    FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_CASE)
    default:
      os << "Unknown instruction kind " << static_cast<int>(rhs);
      break;
  }
#undef DECLARE_CASE
  return os;
}
1996
// Streams an instruction dump without its arguments (see HInstruction::Dump).
std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs) {
  // TODO Really this should be const but that would require const-ifying
  // graph-visualizer and HGraphVisitor which are tangled up everywhere.
  return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ false);
}
2002
// Streams an instruction dump including its arguments (see HInstruction::Dump).
std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs) {
  // TODO Really this should be const but that would require const-ifying
  // graph-visualizer and HGraphVisitor which are tangled up everywhere.
  return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ true);
}
2008
// Default stream output for an instruction: the argument-free dump form.
std::ostream& operator<<(std::ostream& os, const HInstruction& rhs) {
  return os << rhs.DumpWithoutArgs();
}
2012
operator <<(std::ostream & os,const HUseList<HInstruction * > & lst)2013 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst) {
2014 os << "Instructions[";
2015 bool first = true;
2016 for (const auto& hi : lst) {
2017 if (!first) {
2018 os << ", ";
2019 }
2020 first = false;
2021 os << hi.GetUser()->DebugName() << "[id: " << hi.GetUser()->GetId()
2022 << ", blk: " << hi.GetUser()->GetBlock()->GetBlockId() << "]@" << hi.GetIndex();
2023 }
2024 os << "]";
2025 return os;
2026 }
2027
operator <<(std::ostream & os,const HUseList<HEnvironment * > & lst)2028 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst) {
2029 os << "Environments[";
2030 bool first = true;
2031 for (const auto& hi : lst) {
2032 if (!first) {
2033 os << ", ";
2034 }
2035 first = false;
2036 os << *hi.GetUser()->GetHolder() << "@" << hi.GetIndex();
2037 }
2038 os << "]";
2039 return os;
2040 }
2041
// Dumps the whole graph to `os` via the graph visualizer, in its debug
// format. `codegen` and `namer` are optional extras forwarded to the
// visualizer (codegen info and custom block naming).
std::ostream& HGraph::Dump(std::ostream& os,
                           CodeGenerator* codegen,
                           std::optional<std::reference_wrapper<const BlockNamer>> namer) {
  HGraphVisualizer vis(&os, this, codegen, namer);
  vis.DumpGraphDebug();
  return os;
}
2049
// Unlinks `this` from its current position and relinks it immediately before
// `cursor`, possibly into a different block. `this` must not be the last
// instruction of its block (checked below), so `next_` is always non-null.
void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) {
  if (do_checks) {
    DCHECK(!IsPhi());
    DCHECK(!IsControlFlow());
    DCHECK(CanBeMoved() ||
           // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization.
           IsShouldDeoptimizeFlag());
    DCHECK(!cursor->IsPhi());
  }

  // Unlink from the current position.
  next_->previous_ = previous_;
  if (previous_ != nullptr) {
    previous_->next_ = next_;
  }
  if (block_->instructions_.first_instruction_ == this) {
    block_->instructions_.first_instruction_ = next_;
  }
  DCHECK_NE(block_->instructions_.last_instruction_, this);

  // Relink just before `cursor` and adopt its block.
  previous_ = cursor->previous_;
  if (previous_ != nullptr) {
    previous_->next_ = this;
  }
  next_ = cursor;
  cursor->previous_ = this;
  block_ = cursor->block_;

  // If `cursor` was the first instruction, `this` now is.
  if (block_->instructions_.first_instruction_ == cursor) {
    block_->instructions_.first_instruction_ = this;
  }
}
2081
// Sinks/hoists this pure, input-free instruction to the closest position that
// dominates all of its users while sitting outside any loop: the common
// dominator of all user blocks, walked up out of loops, inserted before the
// first user in that block (or before its control-flow instruction).
void HInstruction::MoveBeforeFirstUserAndOutOfLoops() {
  DCHECK(!CanThrow());
  DCHECK(!HasSideEffects());
  DCHECK(!HasEnvironmentUses());
  DCHECK(HasNonEnvironmentUses());
  DCHECK(!IsPhi());  // Makes no sense for Phi.
  DCHECK_EQ(InputCount(), 0u);

  // Find the target block.
  auto uses_it = GetUses().begin();
  auto uses_end = GetUses().end();
  HBasicBlock* target_block = uses_it->GetUser()->GetBlock();
  ++uses_it;
  while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) {
    ++uses_it;
  }
  if (uses_it != uses_end) {
    // This instruction has uses in two or more blocks. Find the common dominator.
    CommonDominator finder(target_block);
    for (; uses_it != uses_end; ++uses_it) {
      finder.Update(uses_it->GetUser()->GetBlock());
    }
    target_block = finder.Get();
    DCHECK(target_block != nullptr);
  }
  // Move to the first dominator not in a loop.
  while (target_block->IsInLoop()) {
    target_block = target_block->GetDominator();
    DCHECK(target_block != nullptr);
  }

  // Find insertion position: the earliest user inside `target_block`.
  HInstruction* insert_pos = nullptr;
  for (const HUseListNode<HInstruction*>& use : GetUses()) {
    if (use.GetUser()->GetBlock() == target_block &&
        (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
      insert_pos = use.GetUser();
    }
  }
  if (insert_pos == nullptr) {
    // No user in `target_block`, insert before the control flow instruction.
    insert_pos = target_block->GetLastInstruction();
    DCHECK(insert_pos->IsControlFlow());
    // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
    if (insert_pos->IsIf()) {
      HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
      if (if_input == insert_pos->GetPrevious()) {
        insert_pos = if_input;
      }
    }
  }
  MoveBefore(insert_pos);
}
2135
// Splits this block in two just before `cursor`. Instructions from `cursor`
// onwards (and all successors) move to the new block; this block is
// terminated with a Goto to the new block, which becomes its sole successor.
// Returns the new block.
HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor, bool require_graph_not_in_ssa_form) {
  DCHECK_IMPLIES(require_graph_not_in_ssa_form, !graph_->IsInSsaForm())
      << "Support for SSA form not implemented.";
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block =
      new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
  // Hand the tail of the instruction list (from `cursor`) to the new block.
  new_block->instructions_.first_instruction_ = cursor;
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  instructions_.last_instruction_ = cursor->previous_;
  if (cursor->previous_ == nullptr) {
    instructions_.first_instruction_ = nullptr;
  } else {
    cursor->previous_->next_ = nullptr;
    cursor->previous_ = nullptr;
  }

  new_block->instructions_.SetBlockOfInstructions(new_block);
  AddInstruction(new (GetGraph()->GetAllocator()) HGoto(new_block->GetDexPc()));

  // Transfer all successor edges to the new block, then link this -> new.
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());
  AddSuccessor(new_block);

  GetGraph()->AddBlock(new_block);
  return new_block;
}
2166
// Inserts a new empty block between this block and all of its predecessors:
// every edge into `this` is redirected to the new block, which then becomes
// this block's single predecessor. Returns the new block.
HBasicBlock* HBasicBlock::CreateImmediateDominator() {
  DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
  DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented.";

  HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());

  for (HBasicBlock* predecessor : GetPredecessors()) {
    predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block;
  }
  new_block->predecessors_.swap(predecessors_);
  DCHECK(predecessors_.empty());
  AddPredecessor(new_block);

  GetGraph()->AddBlock(new_block);
  return new_block;
}
2183
// Like SplitBefore(), but tailored for inlining: no Goto is appended, the
// blocks are left unconnected (the inliner wires them up), dominated blocks
// are transferred to the new block, and the new block is not yet added to
// the graph. Returns the new block.
HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) {
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block =
      new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
  // Hand the tail of the instruction list (from `cursor`) to the new block.
  new_block->instructions_.first_instruction_ = cursor;
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  instructions_.last_instruction_ = cursor->previous_;
  if (cursor->previous_ == nullptr) {
    instructions_.first_instruction_ = nullptr;
  } else {
    cursor->previous_->next_ = nullptr;
    cursor->previous_ = nullptr;
  }

  new_block->instructions_.SetBlockOfInstructions(new_block);

  // Transfer all successor edges to the new block.
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());

  // The new block takes over as dominator of this block's dominated blocks.
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    dominated->dominator_ = new_block;
  }
  new_block->dominated_blocks_.swap(dominated_blocks_);
  DCHECK(dominated_blocks_.empty());
  return new_block;
}
2214
// Splits this block just AFTER `cursor` (which must be neither a control-flow
// instruction nor the last instruction). Instructions following `cursor`,
// all successor edges, and dominated blocks move to the new block; the blocks
// are left unconnected for the inliner to wire up. Returns the new block.
HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) {
  DCHECK(!cursor->IsControlFlow());
  DCHECK_NE(instructions_.last_instruction_, cursor);
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
  new_block->instructions_.first_instruction_ = cursor->GetNext();
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  // Sever the list after `cursor`.
  cursor->next_->previous_ = nullptr;
  cursor->next_ = nullptr;
  instructions_.last_instruction_ = cursor;

  new_block->instructions_.SetBlockOfInstructions(new_block);
  // Transfer all successor edges to the new block.
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());

  // The new block takes over as dominator of this block's dominated blocks.
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    dominated->dominator_ = new_block;
  }
  new_block->dominated_blocks_.swap(dominated_blocks_);
  DCHECK(dominated_blocks_.empty());
  return new_block;
}
2241
ComputeTryEntryOfSuccessors() const2242 const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
2243 if (EndsWithTryBoundary()) {
2244 HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
2245 if (try_boundary->IsEntry()) {
2246 DCHECK(!IsTryBlock());
2247 return try_boundary;
2248 } else {
2249 DCHECK(IsTryBlock());
2250 DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary));
2251 return nullptr;
2252 }
2253 } else if (IsTryBlock()) {
2254 return &try_catch_information_->GetTryEntry();
2255 } else {
2256 return nullptr;
2257 }
2258 }
2259
HasThrowingInstructions() const2260 bool HBasicBlock::HasThrowingInstructions() const {
2261 for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2262 if (it.Current()->CanThrow()) {
2263 return true;
2264 }
2265 }
2266 return false;
2267 }
2268
HasOnlyOneInstruction(const HBasicBlock & block)2269 static bool HasOnlyOneInstruction(const HBasicBlock& block) {
2270 return block.GetPhis().IsEmpty()
2271 && !block.GetInstructions().IsEmpty()
2272 && block.GetFirstInstruction() == block.GetLastInstruction();
2273 }
2274
IsSingleGoto() const2275 bool HBasicBlock::IsSingleGoto() const {
2276 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto();
2277 }
2278
IsSingleReturn() const2279 bool HBasicBlock::IsSingleReturn() const {
2280 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsReturn();
2281 }
2282
IsSingleReturnOrReturnVoidAllowingPhis() const2283 bool HBasicBlock::IsSingleReturnOrReturnVoidAllowingPhis() const {
2284 return (GetFirstInstruction() == GetLastInstruction()) &&
2285 (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2286 }
2287
IsSingleTryBoundary() const2288 bool HBasicBlock::IsSingleTryBoundary() const {
2289 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary();
2290 }
2291
EndsWithControlFlowInstruction() const2292 bool HBasicBlock::EndsWithControlFlowInstruction() const {
2293 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow();
2294 }
2295
EndsWithReturn() const2296 bool HBasicBlock::EndsWithReturn() const {
2297 return !GetInstructions().IsEmpty() &&
2298 (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2299 }
2300
EndsWithIf() const2301 bool HBasicBlock::EndsWithIf() const {
2302 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
2303 }
2304
EndsWithTryBoundary() const2305 bool HBasicBlock::EndsWithTryBoundary() const {
2306 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
2307 }
2308
HasSinglePhi() const2309 bool HBasicBlock::HasSinglePhi() const {
2310 return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
2311 }
2312
GetNormalSuccessors() const2313 ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const {
2314 if (EndsWithTryBoundary()) {
2315 // The normal-flow successor of HTryBoundary is always stored at index zero.
2316 DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor());
2317 return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u);
2318 } else {
2319 // All successors of blocks not ending with TryBoundary are normal.
2320 return ArrayRef<HBasicBlock* const>(successors_);
2321 }
2322 }
2323
GetExceptionalSuccessors() const2324 ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const {
2325 if (EndsWithTryBoundary()) {
2326 return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers();
2327 } else {
2328 // Blocks not ending with TryBoundary do not have exceptional successors.
2329 return ArrayRef<HBasicBlock* const>();
2330 }
2331 }
2332
HasSameExceptionHandlersAs(const HTryBoundary & other) const2333 bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
2334 ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
2335 ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();
2336
2337 size_t length = handlers1.size();
2338 if (length != handlers2.size()) {
2339 return false;
2340 }
2341
2342 // Exception handlers need to be stored in the same order.
2343 for (size_t i = 0; i < length; ++i) {
2344 if (handlers1[i] != handlers2[i]) {
2345 return false;
2346 }
2347 }
2348 return true;
2349 }
2350
CountSize() const2351 size_t HInstructionList::CountSize() const {
2352 size_t size = 0;
2353 HInstruction* current = first_instruction_;
2354 for (; current != nullptr; current = current->GetNext()) {
2355 size++;
2356 }
2357 return size;
2358 }
2359
SetBlockOfInstructions(HBasicBlock * block) const2360 void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
2361 for (HInstruction* current = first_instruction_;
2362 current != nullptr;
2363 current = current->GetNext()) {
2364 current->SetBlock(block);
2365 }
2366 }
2367
// Splices the whole of `instruction_list` into this list immediately after
// `cursor`. The statement order below is significant: `cursor->next_` is read
// twice before being overwritten. Note that `instruction_list`'s own
// first/last pointers are left untouched; the caller is expected to clear or
// discard that list afterwards.
void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == last_instruction_) {
      // Appending at the tail: the spliced list's tail becomes our tail.
      last_instruction_ = instruction_list.last_instruction_;
    } else {
      // Otherwise link the old successor of `cursor` back to the spliced tail.
      cursor->next_->previous_ = instruction_list.last_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor->next_;
    cursor->next_ = instruction_list.first_instruction_;
    instruction_list.first_instruction_->previous_ = cursor;
  }
}
2381
// Splices the whole of `instruction_list` into this list immediately before
// `cursor`. Mirror image of `AddAfter`; statement order is significant because
// `cursor->previous_` is read twice before being overwritten. The donor list's
// first/last pointers are left untouched for the caller to clean up.
void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == first_instruction_) {
      // Prepending at the head: the spliced list's head becomes our head.
      first_instruction_ = instruction_list.first_instruction_;
    } else {
      // Otherwise link the old predecessor of `cursor` to the spliced head.
      cursor->previous_->next_ = instruction_list.first_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor;
    instruction_list.first_instruction_->previous_ = cursor->previous_;
    cursor->previous_ = instruction_list.last_instruction_;
  }
}
2395
Add(const HInstructionList & instruction_list)2396 void HInstructionList::Add(const HInstructionList& instruction_list) {
2397 if (IsEmpty()) {
2398 first_instruction_ = instruction_list.first_instruction_;
2399 last_instruction_ = instruction_list.last_instruction_;
2400 } else {
2401 AddAfter(last_instruction_, instruction_list);
2402 }
2403 }
2404
// Fully disconnects this (dead) block from the graph — successors, phis,
// instructions, predecessors, loops and the dominator tree — and finally
// deletes it via HGraph::DeleteDeadEmptyBlock. The numbered steps below must
// stay in this order.
void HBasicBlock::DisconnectAndDelete() {
  // Dominators must be removed after all the blocks they dominate. This way
  // a loop header is removed last, a requirement for correct loop information
  // iteration.
  DCHECK(dominated_blocks_.empty());

  // The following steps gradually remove the block from all its dependants in
  // post order (b/27683071).

  // (1) Store a basic block that we'll use in step (5) to find loops to be updated.
  //     We need to do this before step (4) which destroys the predecessor list.
  HBasicBlock* loop_update_start = this;
  if (IsLoopHeader()) {
    HLoopInformation* loop_info = GetLoopInformation();
    // All other blocks in this loop should have been removed because the header
    // was their dominator.
    // Note that we do not remove `this` from `loop_info` as it is unreachable.
    DCHECK(!loop_info->IsIrreducible());
    DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u);
    DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId());
    // Start the loop update at the pre-header, which is outside this loop.
    loop_update_start = loop_info->GetPreHeader();
  }

  // (2) Disconnect the block from its successors and update their phis.
  DisconnectFromSuccessors();

  // (3) Remove instructions and phis. Instructions should have no remaining uses
  //     except in catch phis. If an instruction is used by a catch phi at `index`,
  //     remove `index`-th input of all phis in the catch block since they are
  //     guaranteed dead. Note that we may miss dead inputs this way but the
  //     graph will always remain consistent.
  RemoveCatchPhiUsesAndInstruction(/* building_dominator_tree = */ false);

  // (4) Disconnect the block from its predecessors and update their
  //     control-flow instructions.
  for (HBasicBlock* predecessor : predecessors_) {
    // We should not see any back edges as they would have been removed by step (3).
    DCHECK_IMPLIES(IsInLoop(), !GetLoopInformation()->IsBackEdge(*predecessor));

    HInstruction* last_instruction = predecessor->GetLastInstruction();
    if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
      // This block is the only normal-flow successor of the TryBoundary which
      // makes `predecessor` dead. Since DCE removes blocks in post order,
      // exception handlers of this TryBoundary were already visited and any
      // remaining handlers therefore must be live. We remove `predecessor` from
      // their list of predecessors.
      DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
      while (predecessor->GetSuccessors().size() > 1) {
        HBasicBlock* handler = predecessor->GetSuccessors()[1];
        DCHECK(handler->IsCatchBlock());
        predecessor->RemoveSuccessor(handler);
        handler->RemovePredecessor(predecessor);
      }
    }

    predecessor->RemoveSuccessor(this);
    uint32_t num_pred_successors = predecessor->GetSuccessors().size();
    if (num_pred_successors == 1u) {
      // If we have one successor after removing one, then we must have
      // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
      // successor. Replace those with a HGoto.
      DCHECK(last_instruction->IsIf() ||
             last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
      predecessor->RemoveInstruction(last_instruction);
      predecessor->AddInstruction(new (graph_->GetAllocator()) HGoto(last_instruction->GetDexPc()));
    } else if (num_pred_successors == 0u) {
      // The predecessor has no remaining successors and therefore must be dead.
      // We deliberately leave it without a control-flow instruction so that the
      // GraphChecker fails unless it is not removed during the pass too.
      predecessor->RemoveInstruction(last_instruction);
    } else {
      // There are multiple successors left. The removed block might be a successor
      // of a PackedSwitch which will be completely removed (perhaps replaced with
      // a Goto), or we are deleting a catch block from a TryBoundary. In either
      // case, leave `last_instruction` as is for now.
      DCHECK(last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
    }
  }
  predecessors_.clear();

  // (5) Remove the block from all loops it is included in. Skip the inner-most
  //     loop if this is the loop header (see definition of `loop_update_start`)
  //     because the loop header's predecessor list has been destroyed in step (4).
  for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(this);
    if (loop_info->IsBackEdge(*this)) {
      // If this was the last back edge of the loop, we deliberately leave the
      // loop in an inconsistent state and will fail GraphChecker unless the
      // entire loop is removed during the pass.
      loop_info->RemoveBackEdge(this);
    }
  }

  // (6) Disconnect from the dominator.
  dominator_->RemoveDominatedBlock(this);
  SetDominator(nullptr);

  // (7) Delete from the graph, update reverse post order.
  graph_->DeleteDeadEmptyBlock(this);
}
2508
// Removes this block from the predecessor lists of all its successors and
// fixes up their phis accordingly, then clears the successor list. When a
// non-empty `visited` bit vector is given (sized to the graph's block count),
// successors whose bit is unset are treated as dead and their phis are left
// untouched.
void HBasicBlock::DisconnectFromSuccessors(BitVectorView<const size_t> visited) {
  DCHECK_IMPLIES(visited.SizeInBits() != 0u, visited.SizeInBits() == graph_->GetBlocks().size());
  for (HBasicBlock* successor : successors_) {
    // Delete this block from the list of predecessors.
    size_t this_index = successor->GetPredecessorIndexOf(this);
    successor->predecessors_.erase(successor->predecessors_.begin() + this_index);

    if (visited.SizeInBits() != 0u && !visited.IsBitSet(successor->GetBlockId())) {
      // `successor` itself is dead. Therefore, there is no need to update its phis.
      continue;
    }

    DCHECK(!successor->predecessors_.empty());

    // Remove this block's entries in the successor's phis. Skips exceptional
    // successors because catch phi inputs do not correspond to predecessor
    // blocks but throwing instructions. They are removed in `RemoveCatchPhiUses`.
    if (!successor->IsCatchBlock()) {
      if (successor->predecessors_.size() == 1u) {
        // The successor has just one predecessor left. Replace phis with the only
        // remaining input. The phi had exactly two inputs, so the surviving one
        // is at index `1 - this_index`.
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          HPhi* phi = phi_it.Current()->AsPhi();
          phi->ReplaceWith(phi->InputAt(1 - this_index));
          successor->RemovePhi(phi);
        }
      } else {
        // Several predecessors remain: just drop this block's phi input slot.
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
        }
      }
    }
  }
  successors_.clear();
}
2544
// Removes all instructions and phis of this block, first purging any uses of
// each one in catch phis. Instructions are removed back-to-front so that an
// instruction is removed after its (in-block) users. When
// `building_dominator_tree` is true, input records were already removed by the
// caller, so removal runs without the usual safety bookkeeping.
void HBasicBlock::RemoveCatchPhiUsesAndInstruction(bool building_dominator_tree) {
  for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* insn = it.Current();
    RemoveCatchPhiUsesOfDeadInstruction(insn);

    // If we are building the dominator tree, we removed all input records previously.
    // `RemoveInstruction` will try to remove them again but that's not something we support and we
    // will crash. We check here since we won't be checking that in RemoveInstruction.
    if (building_dominator_tree) {
      DCHECK(insn->GetUses().empty());
      DCHECK(insn->GetEnvUses().empty());
    }
    RemoveInstruction(insn, /* ensure_safety= */ !building_dominator_tree);
  }
  // Same treatment for the phi list.
  for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
    HPhi* insn = it.Current()->AsPhi();
    RemoveCatchPhiUsesOfDeadInstruction(insn);

    // If we are building the dominator tree, we removed all input records previously.
    // `RemovePhi` will try to remove them again but that's not something we support and we
    // will crash. We check here since we won't be checking that in RemovePhi.
    if (building_dominator_tree) {
      DCHECK(insn->GetUses().empty());
      DCHECK(insn->GetEnvUses().empty());
    }
    RemovePhi(insn, /* ensure_safety= */ !building_dominator_tree);
  }
}
2573
// Drops this block's trailing control-flow instruction and appends all of
// `other`'s instructions to this block, re-tagging them and leaving `other`'s
// instruction list empty. The statement order matters: the splice must happen
// before the donor list is cleared.
void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
  DCHECK(EndsWithControlFlowInstruction());
  RemoveInstruction(GetLastInstruction());
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);
  other->instructions_.Clear();
}
2581
// Merges the single successor `other` into this block (same graph): moves its
// instructions, inherits its successors, loop membership and dominated blocks,
// then deletes `other` from the graph. Requires a 1:1 edge between the blocks
// and no phis in `other`.
void HBasicBlock::MergeWith(HBasicBlock* other) {
  DCHECK_EQ(GetGraph(), other->GetGraph());
  DCHECK(ContainsElement(dominated_blocks_, other));
  DCHECK_EQ(GetSingleSuccessor(), other);
  DCHECK_EQ(other->GetSinglePredecessor(), this);
  DCHECK(other->GetPhis().IsEmpty());

  // Move instructions from `other` to `this`.
  MergeInstructionsWith(other);

  // Remove `other` from the loops it is included in.
  for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(other);
    if (loop_info->IsBackEdge(*other)) {
      // `this` takes over `other`'s role as back edge of the loop.
      loop_info->ReplaceBackEdge(other, this);
    }
  }

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  RemoveDominatedBlock(other);
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;

  // Clear the list of predecessors of `other` in preparation of deleting it.
  other->predecessors_.clear();

  // Delete `other` from the graph. The function updates reverse post order.
  graph_->DeleteDeadEmptyBlock(other);
}
2625
// Merges `other` — the first real block of an inlined callee graph — into this
// caller block: moves its instructions, adopts its successors and dominated
// blocks, and detaches it from its (callee) graph. Unlike `MergeWith`, the two
// blocks belong to different graphs and `other` is not deleted here.
void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
  DCHECK_NE(GetGraph(), other->GetGraph());
  DCHECK(GetDominatedBlocks().empty());
  DCHECK(GetSuccessors().empty());
  DCHECK(!EndsWithControlFlowInstruction());
  DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
  DCHECK(other->GetPhis().IsEmpty());
  DCHECK(!other->IsInLoop());

  // Move instructions from `other` to `this`.
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;
  // Detach `other` from the callee graph; it will not be re-attached.
  other->graph_ = nullptr;
}
2657
// Substitutes `other` for this block in the CFG and the dominator tree:
// redirects all predecessor and successor edges and dominance links to
// `other`, then detaches this block from its dominator and graph.
void HBasicBlock::ReplaceWith(HBasicBlock* other) {
  // ReplaceSuccessor mutates our predecessor list, so loop until drained.
  while (!GetPredecessors().empty()) {
    HBasicBlock* predecessor = GetPredecessors()[0];
    predecessor->ReplaceSuccessor(this, other);
  }
  // Likewise ReplacePredecessor drains our successor list.
  while (!GetSuccessors().empty()) {
    HBasicBlock* successor = GetSuccessors()[0];
    successor->ReplacePredecessor(this, other);
  }
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    other->AddDominatedBlock(dominated);
  }
  GetDominator()->ReplaceDominatedBlock(this, other);
  other->SetDominator(GetDominator());
  dominator_ = nullptr;
  graph_ = nullptr;
}
2675
// Final removal of a block that has already been fully disconnected (no
// edges, no dominance links, no instructions, no phis): clears the exit-block
// pointer if needed, removes it from the reverse post order, nulls its slot in
// the block table and detaches it from the graph.
void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
  DCHECK_EQ(block->GetGraph(), this);
  DCHECK(block->GetSuccessors().empty());
  DCHECK(block->GetPredecessors().empty());
  DCHECK(block->GetDominatedBlocks().empty());
  DCHECK(block->GetDominator() == nullptr);
  DCHECK(block->GetInstructions().IsEmpty());
  DCHECK(block->GetPhis().IsEmpty());

  if (block->IsExitBlock()) {
    SetExitBlock(nullptr);
  }

  // Block ids of other blocks are preserved; the slot is simply nulled out.
  RemoveElement(reverse_post_order_, block);
  blocks_[block->GetBlockId()] = nullptr;
  block->SetGraph(nullptr);
}
2693
// Fixes up loop membership and try/catch information of `block`, a block newly
// added to this graph, using `reference` (an existing neighbouring block) as
// the template. If `replace_if_back_edge` is set and `reference` was a back
// edge, `block` takes over that role. `has_more_specific_try_catch_info`
// suppresses copying `reference`'s try/catch info (used when inlining a
// try-catch whose blocks carry their own info).
void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                                   HBasicBlock* reference,
                                                   bool replace_if_back_edge,
                                                   bool has_more_specific_try_catch_info) {
  if (block->IsLoopHeader()) {
    // Clear the information of which blocks are contained in that loop. Since the
    // information is stored as a bit vector based on block ids, we have to update
    // it, as those block ids were specific to the callee graph and we are now adding
    // these blocks to the caller graph.
    block->GetLoopInformation()->ClearAllBlocks();
  }

  // If not already in a loop, update the loop information.
  if (!block->IsInLoop()) {
    block->SetLoopInformation(reference->GetLoopInformation());
  }

  // If the block is in a loop, update all its outward loops.
  HLoopInformation* loop_info = block->GetLoopInformation();
  if (loop_info != nullptr) {
    for (HLoopInformationOutwardIterator loop_it(*block);
         !loop_it.Done();
         loop_it.Advance()) {
      loop_it.Current()->Add(block);
    }
    if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
      loop_info->ReplaceBackEdge(reference, block);
    }
  }

  DCHECK_IMPLIES(has_more_specific_try_catch_info, !reference->IsTryBlock())
      << "We don't allow to inline try catches inside of other try blocks.";

  // Update the TryCatchInformation, if we are not inlining a try catch.
  if (!has_more_specific_try_catch_info) {
    // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
    TryCatchInformation* try_catch_info =
        reference->IsTryBlock() ? reference->GetTryCatchInformation() : nullptr;
    block->SetTryCatchInformation(try_catch_info);
  }
}
2735
// Inlines this (callee) graph into `outer_graph` at the call site `invoke`.
// Returns the instruction producing the call's return value in the outer
// graph (nullptr for void or if nothing replaces it). Two shapes are handled:
// a trivial entry/body/exit callee whose body is spliced directly before the
// invoke, and the general multi-block case where the invoke's block is split
// and the callee's blocks are transplanted between the halves.
HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
  DCHECK(HasExitBlock()) << "Unimplemented scenario";
  // Update the environments in this graph to have the invoke's environment
  // as parent.
  {
    // Skip the entry block, we do not need to update the entry's suspend check.
    for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
      for (HInstructionIterator instr_it(block->GetInstructions());
           !instr_it.Done();
           instr_it.Advance()) {
        HInstruction* current = instr_it.Current();
        if (current->NeedsEnvironment()) {
          DCHECK(current->HasEnvironment());
          current->GetEnvironment()->SetAndCopyParentChain(
              outer_graph->GetAllocator(), invoke->GetEnvironment());
        }
      }
    }
  }

  // Propagate the callee's graph-wide properties to the caller graph.
  if (HasBoundsChecks()) {
    outer_graph->SetHasBoundsChecks(true);
  }
  if (HasLoops()) {
    outer_graph->SetHasLoops(true);
  }
  if (HasIrreducibleLoops()) {
    outer_graph->SetHasIrreducibleLoops(true);
  }
  if (HasDirectCriticalNativeCall()) {
    outer_graph->SetHasDirectCriticalNativeCall(true);
  }
  if (HasTryCatch()) {
    outer_graph->SetHasTryCatch(true);
  }
  if (HasMonitorOperations()) {
    outer_graph->SetHasMonitorOperations(true);
  }
  if (HasTraditionalSIMD()) {
    outer_graph->SetHasTraditionalSIMD(true);
  }
  if (HasPredicatedSIMD()) {
    outer_graph->SetHasPredicatedSIMD(true);
  }
  if (HasAlwaysThrowingInvokes()) {
    outer_graph->SetHasAlwaysThrowingInvokes(true);
  }

  HInstruction* return_value = nullptr;
  if (GetBlocks().size() == 3) {
    // Inliner already made sure we don't inline methods that always throw.
    DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
    // Simple case of an entry block, a body block, and an exit block.
    // Put the body block's instruction into `invoke`'s block.
    HBasicBlock* body = GetBlocks()[1];
    DCHECK(GetBlocks()[0]->IsEntryBlock());
    DCHECK(GetBlocks()[2]->IsExitBlock());
    DCHECK(!body->IsExitBlock());
    DCHECK(!body->IsInLoop());
    HInstruction* last = body->GetLastInstruction();

    // Note that we add instructions before the invoke only to simplify polymorphic inlining.
    invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
    body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());

    // Replace the invoke with the return value of the inlined graph.
    if (last->IsReturn()) {
      return_value = last->InputAt(0);
    } else {
      DCHECK(last->IsReturnVoid());
    }

    // The callee's Return/ReturnVoid was spliced in with the body; drop it.
    invoke->GetBlock()->RemoveInstruction(last);
  } else {
    // Need to inline multiple blocks. We split `invoke`'s block
    // into two blocks, merge the first block of the inlined graph into
    // the first half, and replace the exit block of the inlined graph
    // with the second half.
    ArenaAllocator* allocator = outer_graph->GetAllocator();
    HBasicBlock* at = invoke->GetBlock();
    // Note that we split before the invoke only to simplify polymorphic inlining.
    HBasicBlock* to = at->SplitBeforeForInlining(invoke);

    HBasicBlock* first = entry_block_->GetSuccessors()[0];
    DCHECK(!first->IsInLoop());
    DCHECK(first->GetTryCatchInformation() == nullptr);
    at->MergeWithInlined(first);
    exit_block_->ReplaceWith(to);

    // Update the meta information surrounding blocks:
    // (1) the graph they are now in,
    // (2) the reverse post order of that graph,
    // (3) their potential loop information, inner and outer,
    // (4) try block membership.
    // Note that we do not need to update catch phi inputs because they
    // correspond to the register file of the outer method which the inlinee
    // cannot modify.

    // We don't add the entry block, the exit block, and the first block, which
    // has been merged with `at`.
    static constexpr int kNumberOfSkippedBlocksInCallee = 3;

    // We add the `to` block.
    static constexpr int kNumberOfNewBlocksInCaller = 1;
    size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
        + kNumberOfNewBlocksInCaller;

    // Find the location of `at` in the outer graph's reverse post order. The new
    // blocks will be added after it.
    size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
    MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);

    // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
    // and (4) to the blocks that apply.
    for (HBasicBlock* current : GetReversePostOrder()) {
      if (current != exit_block_ && current != entry_block_ && current != first) {
        DCHECK(current->GetGraph() == this);
        current->SetGraph(outer_graph);
        outer_graph->AddBlock(current);
        outer_graph->reverse_post_order_[++index_of_at] = current;
        UpdateLoopAndTryInformationOfNewBlock(current,
                                              at,
                                              /* replace_if_back_edge= */ false,
                                              current->GetTryCatchInformation() != nullptr);
      }
    }

    // Do (1), (2), (3) and (4) to `to`.
    to->SetGraph(outer_graph);
    outer_graph->AddBlock(to);
    outer_graph->reverse_post_order_[++index_of_at] = to;
    // Only `to` can become a back edge, as the inlined blocks
    // are predecessors of `to`.
    UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge= */ true);

    // Update all predecessors of the exit block (now the `to` block)
    // to not `HReturn` but `HGoto` instead. Special case throwing blocks
    // to now get the outer graph exit block as successor.
    HPhi* return_value_phi = nullptr;
    bool rerun_dominance = false;
    bool rerun_loop_analysis = false;
    for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
      HBasicBlock* predecessor = to->GetPredecessors()[pred];
      HInstruction* last = predecessor->GetLastInstruction();

      // At this point we might either have:
      // A) Return/ReturnVoid/Throw as the last instruction, or
      // B) `Return/ReturnVoid/Throw->TryBoundary` as the last instruction chain

      const bool saw_try_boundary = last->IsTryBoundary();
      if (saw_try_boundary) {
        DCHECK(predecessor->IsSingleTryBoundary());
        DCHECK(!last->AsTryBoundary()->IsEntry());
        // Step behind the TryBoundary block to the block holding the
        // Return/ReturnVoid/Throw itself.
        predecessor = predecessor->GetSinglePredecessor();
        last = predecessor->GetLastInstruction();
      }

      if (last->IsThrow()) {
        if (at->IsTryBlock()) {
          DCHECK(!saw_try_boundary) << "We don't support inlining of try blocks into try blocks.";
          // Create a TryBoundary of kind:exit and point it to the Exit block.
          HBasicBlock* new_block = outer_graph->SplitEdge(predecessor, to);
          new_block->AddInstruction(
              new (allocator) HTryBoundary(HTryBoundary::BoundaryKind::kExit, last->GetDexPc()));
          new_block->ReplaceSuccessor(to, outer_graph->GetExitBlock());

          // Copy information from the predecessor.
          new_block->SetLoopInformation(predecessor->GetLoopInformation());
          TryCatchInformation* try_catch_info = predecessor->GetTryCatchInformation();
          new_block->SetTryCatchInformation(try_catch_info);
          for (HBasicBlock* xhandler :
               try_catch_info->GetTryEntry().GetBlock()->GetExceptionalSuccessors()) {
            new_block->AddSuccessor(xhandler);
          }
          DCHECK(try_catch_info->GetTryEntry().HasSameExceptionHandlersAs(
              *new_block->GetLastInstruction()->AsTryBoundary()));
        } else {
          // We either have `Throw->TryBoundary` or `Throw`. We want to point the whole chain to the
          // exit, so we recompute `predecessor`
          predecessor = to->GetPredecessors()[pred];
          predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
        }

        // `to` lost this predecessor; revisit the same index on the next pass.
        --pred;
        // We need to re-run dominance information, as the exit block now has
        // a new predecessor and potential new dominator.
        // TODO(solanes): See if it's worth it to hand-modify the domination chain instead of
        // rerunning the dominance for the whole graph.
        rerun_dominance = true;
        if (predecessor->GetLoopInformation() != nullptr) {
          // The loop information might have changed e.g. `predecessor` might not be in a loop
          // anymore. We only do this if `predecessor` has loop information as it is impossible for
          // predecessor to end up in a loop if it wasn't in one before.
          rerun_loop_analysis = true;
        }
      } else {
        if (last->IsReturnVoid()) {
          DCHECK(return_value == nullptr);
          DCHECK(return_value_phi == nullptr);
        } else {
          DCHECK(last->IsReturn());
          if (return_value_phi != nullptr) {
            return_value_phi->AddInput(last->InputAt(0));
          } else if (return_value == nullptr) {
            return_value = last->InputAt(0);
          } else {
            // There will be multiple returns.
            return_value_phi = new (allocator) HPhi(
                allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
            to->AddPhi(return_value_phi);
            return_value_phi->AddInput(return_value);
            return_value_phi->AddInput(last->InputAt(0));
            return_value = return_value_phi;
          }
        }
        // Replace the Return/ReturnVoid with a Goto falling through to `to`.
        predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
        predecessor->RemoveInstruction(last);

        if (saw_try_boundary) {
          predecessor = to->GetPredecessors()[pred];
          DCHECK(predecessor->EndsWithTryBoundary());
          DCHECK_EQ(predecessor->GetNormalSuccessors().size(), 1u);
          if (predecessor->GetSuccessors()[0]->GetPredecessors().size() > 1) {
            // Avoid a critical edge between the TryBoundary block and `to`.
            outer_graph->SplitCriticalEdge(predecessor, to);
            rerun_dominance = true;
            if (predecessor->GetLoopInformation() != nullptr) {
              rerun_loop_analysis = true;
            }
          }
        }
      }
    }
    if (rerun_loop_analysis) {
      outer_graph->RecomputeDominatorTree();
    } else if (rerun_dominance) {
      outer_graph->ClearDominanceInformation();
      outer_graph->ComputeDominanceInformation();
    }
  }

  // Walk over the entry block and:
  // - Move constants from the entry block to the outer_graph's entry block,
  // - Replace HParameterValue instructions with their real value.
  // - Remove suspend checks, that hold an environment.
  // We must do this after the other blocks have been inlined, otherwise ids of
  // constants could overlap with the inner graph.
  size_t parameter_index = 0;
  for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* current = it.Current();
    HInstruction* replacement = nullptr;
    if (current->IsNullConstant()) {
      replacement = outer_graph->GetNullConstant();
    } else if (current->IsIntConstant()) {
      replacement = outer_graph->GetIntConstant(current->AsIntConstant()->GetValue());
    } else if (current->IsLongConstant()) {
      replacement = outer_graph->GetLongConstant(current->AsLongConstant()->GetValue());
    } else if (current->IsFloatConstant()) {
      replacement = outer_graph->GetFloatConstant(current->AsFloatConstant()->GetValue());
    } else if (current->IsDoubleConstant()) {
      replacement = outer_graph->GetDoubleConstant(current->AsDoubleConstant()->GetValue());
    } else if (current->IsParameterValue()) {
      if (kIsDebugBuild &&
          invoke->IsInvokeStaticOrDirect() &&
          invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
        // Ensure we do not use the last input of `invoke`, as it
        // contains a clinit check which is not an actual argument.
        size_t last_input_index = invoke->InputCount() - 1;
        DCHECK(parameter_index != last_input_index);
      }
      // Parameters map positionally onto the invoke's arguments.
      replacement = invoke->InputAt(parameter_index++);
    } else if (current->IsCurrentMethod()) {
      replacement = outer_graph->GetCurrentMethod();
    } else {
      // It is OK to ignore MethodEntryHook for inlined functions.
      // In debug mode we don't inline and in release mode method
      // tracing is best effort so OK to ignore them.
      DCHECK(current->IsGoto() || current->IsSuspendCheck() || current->IsMethodEntryHook());
      entry_block_->RemoveInstruction(current);
    }
    if (replacement != nullptr) {
      current->ReplaceWith(replacement);
      // If the current is the return value then we need to update the latter.
      if (current == return_value) {
        DCHECK_EQ(entry_block_, return_value->GetBlock());
        return_value = replacement;
      }
    }
  }

  return return_value;
}
3027
3028 /*
3029 * Loop will be transformed to:
3030 * old_pre_header
3031 * |
3032 * if_block
3033 * / \
3034 * true_block false_block
3035 * \ /
3036 * new_pre_header
3037 * |
3038 * header
3039 */
// Inserts the diamond shown in the comment above between the loop's old
// pre-header and `header`, creating a fresh pre-header, and rewires the
// dominator tree, reverse post order and loop/try information accordingly.
// Used by bounds check elimination to host deoptimization tests.
void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
  DCHECK(header->IsLoopHeader());
  HBasicBlock* old_pre_header = header->GetDominator();

  // Need extra block to avoid critical edge.
  HBasicBlock* if_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* true_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* false_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
  AddBlock(if_block);
  AddBlock(true_block);
  AddBlock(false_block);
  AddBlock(new_pre_header);

  // Detach the old pre-header from the header; its edges are rebuilt below.
  header->ReplacePredecessor(old_pre_header, new_pre_header);
  old_pre_header->successors_.clear();
  old_pre_header->dominated_blocks_.clear();

  old_pre_header->AddSuccessor(if_block);
  if_block->AddSuccessor(true_block);  // True successor
  if_block->AddSuccessor(false_block);  // False successor
  true_block->AddSuccessor(new_pre_header);
  false_block->AddSuccessor(new_pre_header);

  // Rebuild the dominator chain: old_pre_header -> if_block -> {true, false,
  // new_pre_header} -> header.
  old_pre_header->dominated_blocks_.push_back(if_block);
  if_block->SetDominator(old_pre_header);
  if_block->dominated_blocks_.push_back(true_block);
  true_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(false_block);
  false_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(new_pre_header);
  new_pre_header->SetDominator(if_block);
  new_pre_header->dominated_blocks_.push_back(header);
  header->SetDominator(new_pre_header);

  // Fix reverse post order.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
  reverse_post_order_[index_of_header++] = if_block;
  reverse_post_order_[index_of_header++] = true_block;
  reverse_post_order_[index_of_header++] = false_block;
  reverse_post_order_[index_of_header++] = new_pre_header;

  // The pre_header can never be a back edge of a loop.
  DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
         !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
  UpdateLoopAndTryInformationOfNewBlock(
      if_block, old_pre_header, /* replace_if_back_edge= */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      true_block, old_pre_header, /* replace_if_back_edge= */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      false_block, old_pre_header, /* replace_if_back_edge= */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      new_pre_header, old_pre_header, /* replace_if_back_edge= */ false);
}
3095
3096 // Creates a new two-basic-block loop and inserts it between original loop header and
3097 // original loop exit; also adjusts dominators, post order and new LoopInformation.
HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
                                                   HBasicBlock* body,
                                                   HBasicBlock* exit) {
  DCHECK(header->IsLoopHeader());
  HLoopInformation* loop = header->GetLoopInformation();

  // Add new loop blocks.
  HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_body = new (allocator_) HBasicBlock(this, header->GetDexPc());
  AddBlock(new_pre_header);
  AddBlock(new_header);
  AddBlock(new_body);

  // Set up control flow: the new loop is spliced onto the old header's exit
  // edge, i.e. header -> new_pre_header -> new_header, with new_header
  // cycling through new_body and exiting to `exit`.
  header->ReplaceSuccessor(exit, new_pre_header);
  new_pre_header->AddSuccessor(new_header);
  new_header->AddSuccessor(exit);
  new_header->AddSuccessor(new_body);
  new_body->AddSuccessor(new_header);

  // Set up dominators.
  header->ReplaceDominatedBlock(exit, new_pre_header);
  new_pre_header->SetDominator(header);
  new_pre_header->dominated_blocks_.push_back(new_header);
  new_header->SetDominator(new_pre_header);
  new_header->dominated_blocks_.push_back(new_body);
  new_body->SetDominator(new_header);
  new_header->dominated_blocks_.push_back(exit);
  exit->SetDominator(new_header);

  // Fix reverse post order: the two header-side blocks go right after the old
  // header; the new body goes right before the old body.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 2, index_of_header);
  reverse_post_order_[++index_of_header] = new_pre_header;
  reverse_post_order_[++index_of_header] = new_header;
  size_t index_of_body = IndexOfElement(reverse_post_order_, body);
  MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
  reverse_post_order_[index_of_body] = new_body;

  // Add gotos and suspend check (client must add conditional in header).
  new_pre_header->AddInstruction(new (allocator_) HGoto());
  HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(header->GetDexPc());
  new_header->AddInstruction(suspend_check);
  new_body->AddInstruction(new (allocator_) HGoto());
  DCHECK(loop->GetSuspendCheck() != nullptr);
  suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
      loop->GetSuspendCheck()->GetEnvironment(), header);

  // Update loop information: the new inner loop gets its own populated
  // HLoopInformation; every enclosing loop adopts the three new blocks.
  new_header->AddBackEdge(new_body);
  new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
  new_header->GetLoopInformation()->Populate();
  new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation());  // outward
  HLoopInformationOutwardIterator it(*new_header);
  for (it.Advance(); !it.Done(); it.Advance()) {
    it.Current()->Add(new_pre_header);
    it.Current()->Add(new_header);
    it.Current()->Add(new_body);
  }
  return new_pre_header;
}
3160
CheckAgainstUpperBound(ReferenceTypeInfo rti,ReferenceTypeInfo upper_bound_rti)3161 static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti) {
3162 if (rti.IsValid()) {
3163 ScopedObjectAccess soa(Thread::Current());
3164 DCHECK(upper_bound_rti.IsSupertypeOf(rti))
3165 << " upper_bound_rti: " << upper_bound_rti
3166 << " rti: " << rti;
3167 DCHECK_IMPLIES(upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes(), rti.IsExact())
3168 << " upper_bound_rti: " << upper_bound_rti
3169 << " rti: " << rti;
3170 }
3171 }
3172
SetReferenceTypeInfo(ReferenceTypeInfo rti)3173 void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
3174 if (kIsDebugBuild) {
3175 DCHECK_EQ(GetType(), DataType::Type::kReference);
3176 DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
3177 if (IsBoundType()) {
3178 // Having the test here spares us from making the method virtual just for
3179 // the sake of a DCHECK.
3180 CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
3181 }
3182 }
3183 reference_type_handle_ = rti.GetTypeHandle();
3184 SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
3185 }
3186
SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti)3187 void HInstruction::SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti) {
3188 if (rti.IsValid()) {
3189 SetReferenceTypeInfo(rti);
3190 }
3191 }
3192
InstructionDataEquals(const HInstruction * other) const3193 bool HBoundType::InstructionDataEquals(const HInstruction* other) const {
3194 const HBoundType* other_bt = other->AsBoundType();
3195 ScopedObjectAccess soa(Thread::Current());
3196 return GetUpperBound().IsEqual(other_bt->GetUpperBound()) &&
3197 GetUpperCanBeNull() == other_bt->GetUpperCanBeNull() &&
3198 CanBeNull() == other_bt->CanBeNull();
3199 }
3200
SetUpperBound(const ReferenceTypeInfo & upper_bound,bool can_be_null)3201 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
3202 if (kIsDebugBuild) {
3203 DCHECK(upper_bound.IsValid());
3204 DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
3205 CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
3206 }
3207 upper_bound_ = upper_bound;
3208 SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
3209 }
3210
HasAnyEnvironmentUseBefore(HInstruction * other)3211 bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
3212 // For now, assume that instructions in different blocks may use the
3213 // environment.
3214 // TODO: Use the control flow to decide if this is true.
3215 if (GetBlock() != other->GetBlock()) {
3216 return true;
3217 }
3218
3219 // We know that we are in the same block. Walk from 'this' to 'other',
3220 // checking to see if there is any instruction with an environment.
3221 HInstruction* current = this;
3222 for (; current != other && current != nullptr; current = current->GetNext()) {
3223 // This is a conservative check, as the instruction result may not be in
3224 // the referenced environment.
3225 if (current->HasEnvironment()) {
3226 return true;
3227 }
3228 }
3229
3230 // We should have been called with 'this' before 'other' in the block.
3231 // Just confirm this.
3232 DCHECK(current != nullptr);
3233 return false;
3234 }
3235
SetIntrinsic(Intrinsics intrinsic,IntrinsicNeedsEnvironment needs_env,IntrinsicSideEffects side_effects,IntrinsicExceptions exceptions)3236 void HInvoke::SetIntrinsic(Intrinsics intrinsic,
3237 IntrinsicNeedsEnvironment needs_env,
3238 IntrinsicSideEffects side_effects,
3239 IntrinsicExceptions exceptions) {
3240 intrinsic_ = intrinsic;
3241 IntrinsicOptimizations opt(this);
3242
3243 // Adjust method's side effects from intrinsic table.
3244 switch (side_effects) {
3245 case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
3246 case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
3247 case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
3248 case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
3249 }
3250
3251 if (needs_env == kNoEnvironment) {
3252 opt.SetDoesNotNeedEnvironment();
3253 } else {
3254 // If we need an environment, that means there will be a call, which can trigger GC.
3255 SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
3256 }
3257 // Adjust method's exception status from intrinsic table.
3258 SetCanThrow(exceptions == kCanThrow);
3259 }
3260
IsStringAlloc() const3261 bool HNewInstance::IsStringAlloc() const {
3262 return GetEntrypoint() == kQuickAllocStringObject;
3263 }
3264
NeedsEnvironment() const3265 bool HInvoke::NeedsEnvironment() const {
3266 if (!IsIntrinsic()) {
3267 return true;
3268 }
3269 IntrinsicOptimizations opt(*this);
3270 return !opt.GetDoesNotNeedEnvironment();
3271 }
3272
GetDexFileForPcRelativeDexCache() const3273 const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
3274 ArtMethod* caller = GetEnvironment()->GetMethod();
3275 ScopedObjectAccess soa(Thread::Current());
3276 // `caller` is null for a top-level graph representing a method whose declaring
3277 // class was not resolved.
3278 return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
3279 }
3280
operator <<(std::ostream & os,HInvokeStaticOrDirect::ClinitCheckRequirement rhs)3281 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) {
3282 switch (rhs) {
3283 case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit:
3284 return os << "explicit";
3285 case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit:
3286 return os << "implicit";
3287 case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone:
3288 return os << "none";
3289 }
3290 }
3291
CanBeNull() const3292 bool HInvokeStaticOrDirect::CanBeNull() const {
3293 if (IsStringInit()) {
3294 return false;
3295 }
3296 return HInvoke::CanBeNull();
3297 }
3298
// Whether the invoke's result may be null. Intrinsics that are known to
// always return a non-null reference (current thread, StringBuilder/Buffer
// appends which return `this`, toString, and the boxed-type valueOf family)
// are special-cased; anything else may be null if it returns a reference.
bool HInvoke::CanBeNull() const {
  switch (GetIntrinsic()) {
    case Intrinsics::kThreadCurrentThread:
    case Intrinsics::kStringBufferAppend:
    case Intrinsics::kStringBufferToString:
    case Intrinsics::kStringBuilderAppendObject:
    case Intrinsics::kStringBuilderAppendString:
    case Intrinsics::kStringBuilderAppendCharSequence:
    case Intrinsics::kStringBuilderAppendCharArray:
    case Intrinsics::kStringBuilderAppendBoolean:
    case Intrinsics::kStringBuilderAppendChar:
    case Intrinsics::kStringBuilderAppendInt:
    case Intrinsics::kStringBuilderAppendLong:
    case Intrinsics::kStringBuilderAppendFloat:
    case Intrinsics::kStringBuilderAppendDouble:
    case Intrinsics::kStringBuilderToString:
#define DEFINE_BOXED_CASE(name, unused1, unused2, unused3, unused4) \
    case Intrinsics::k##name##ValueOf:
      BOXED_TYPES(DEFINE_BOXED_CASE)
#undef DEFINE_BOXED_CASE
      return false;
    default:
      return GetType() == DataType::Type::kReference;
  }
}
3324
CanDoImplicitNullCheckOn(HInstruction * obj) const3325 bool HInvokeVirtual::CanDoImplicitNullCheckOn(HInstruction* obj) const {
3326 if (obj != InputAt(0)) {
3327 return false;
3328 }
3329 switch (GetIntrinsic()) {
3330 case Intrinsics::kNone:
3331 return true;
3332 case Intrinsics::kReferenceRefersTo:
3333 return true;
3334 default:
3335 // TODO: Add implicit null checks in more intrinsics.
3336 return false;
3337 }
3338 }
3339
InstructionDataEquals(const HInstruction * other) const3340 bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
3341 const HLoadClass* other_load_class = other->AsLoadClass();
3342 // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
3343 // names rather than type indexes. However, we shall also have to re-think the hash code.
3344 if (type_index_ != other_load_class->type_index_ ||
3345 GetPackedFields() != other_load_class->GetPackedFields()) {
3346 return false;
3347 }
3348 switch (GetLoadKind()) {
3349 case LoadKind::kBootImageRelRo:
3350 case LoadKind::kJitBootImageAddress:
3351 case LoadKind::kJitTableAddress: {
3352 ScopedObjectAccess soa(Thread::Current());
3353 return GetClass().Get() == other_load_class->GetClass().Get();
3354 }
3355 default:
3356 DCHECK(HasTypeReference(GetLoadKind()));
3357 return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
3358 }
3359 }
3360
InstructionDataEquals(const HInstruction * other) const3361 bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
3362 const HLoadString* other_load_string = other->AsLoadString();
3363 // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
3364 // rather than their indexes. However, we shall also have to re-think the hash code.
3365 if (string_index_ != other_load_string->string_index_ ||
3366 GetPackedFields() != other_load_string->GetPackedFields()) {
3367 return false;
3368 }
3369 switch (GetLoadKind()) {
3370 case LoadKind::kBootImageRelRo:
3371 case LoadKind::kJitBootImageAddress:
3372 case LoadKind::kJitTableAddress: {
3373 ScopedObjectAccess soa(Thread::Current());
3374 return GetString().Get() == other_load_string->GetString().Get();
3375 }
3376 default:
3377 return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
3378 }
3379 }
3380
RemoveEnvironmentUsers()3381 void HInstruction::RemoveEnvironmentUsers() {
3382 for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) {
3383 HEnvironment* user = use.GetUser();
3384 user->SetRawEnvAt(use.GetIndex(), nullptr);
3385 }
3386 env_uses_.clear();
3387 }
3388
ReplaceInstrOrPhiByClone(HInstruction * instr)3389 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr) {
3390 HInstruction* clone = instr->Clone(instr->GetBlock()->GetGraph()->GetAllocator());
3391 HBasicBlock* block = instr->GetBlock();
3392
3393 if (instr->IsPhi()) {
3394 HPhi* phi = instr->AsPhi();
3395 DCHECK(!phi->HasEnvironment());
3396 HPhi* phi_clone = clone->AsPhi();
3397 block->ReplaceAndRemovePhiWith(phi, phi_clone);
3398 } else {
3399 block->ReplaceAndRemoveInstructionWith(instr, clone);
3400 if (instr->HasEnvironment()) {
3401 clone->CopyEnvironmentFrom(instr->GetEnvironment());
3402 HLoopInformation* loop_info = block->GetLoopInformation();
3403 if (instr->IsSuspendCheck() && loop_info != nullptr) {
3404 loop_info->SetSuspendCheck(clone->AsSuspendCheck());
3405 }
3406 }
3407 }
3408 return clone;
3409 }
3410
operator <<(std::ostream & os,const MoveOperands & rhs)3411 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) {
3412 os << "["
3413 << " source=" << rhs.GetSource()
3414 << " destination=" << rhs.GetDestination()
3415 << " type=" << rhs.GetType()
3416 << " instruction=";
3417 if (rhs.GetInstruction() != nullptr) {
3418 os << rhs.GetInstruction()->DebugName() << ' ' << rhs.GetInstruction()->GetId();
3419 } else {
3420 os << "null";
3421 }
3422 os << " ]";
3423 return os;
3424 }
3425
operator <<(std::ostream & os,TypeCheckKind rhs)3426 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) {
3427 switch (rhs) {
3428 case TypeCheckKind::kUnresolvedCheck:
3429 return os << "unresolved_check";
3430 case TypeCheckKind::kExactCheck:
3431 return os << "exact_check";
3432 case TypeCheckKind::kClassHierarchyCheck:
3433 return os << "class_hierarchy_check";
3434 case TypeCheckKind::kAbstractClassCheck:
3435 return os << "abstract_class_check";
3436 case TypeCheckKind::kInterfaceCheck:
3437 return os << "interface_check";
3438 case TypeCheckKind::kArrayObjectCheck:
3439 return os << "array_object_check";
3440 case TypeCheckKind::kArrayCheck:
3441 return os << "array_check";
3442 case TypeCheckKind::kBitstringCheck:
3443 return os << "bitstring_check";
3444 }
3445 }
3446
// Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
// (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits) is the largest value the field can hold.)
#define CHECK_INTRINSICS_ENUM_VALUES(Name, InvokeType, _, SideEffects, Exceptions, ...) \
  static_assert( \
      static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
      "Intrinsics enumeration space overflow.");
ART_INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
#undef CHECK_INTRINSICS_ENUM_VALUES
3454
// Function that returns whether an intrinsic needs an environment or not,
// looked up from the generated intrinsics table.
static inline IntrinsicNeedsEnvironment NeedsEnvironmentIntrinsic(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironment;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnv;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enumerators; conservative default otherwise.
  return kNeedsEnvironment;
}
3468
// Function that returns whether an intrinsic has side effects, looked up
// from the generated intrinsics table.
static inline IntrinsicSideEffects GetSideEffectsIntrinsic(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;
#define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enumerators; conservative default otherwise.
  return kAllSideEffects;
}
3482
// Function that returns whether an intrinsic can throw exceptions, looked up
// from the generated intrinsics table.
static inline IntrinsicExceptions GetExceptionsIntrinsic(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;
#define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enumerators; conservative default otherwise.
  return kCanThrow;
}
3496
SetResolvedMethod(ArtMethod * method,bool enable_intrinsic_opt)3497 void HInvoke::SetResolvedMethod(ArtMethod* method, bool enable_intrinsic_opt) {
3498 if (method != nullptr && method->IsIntrinsic() && enable_intrinsic_opt) {
3499 Intrinsics intrinsic = method->GetIntrinsic();
3500 SetIntrinsic(intrinsic,
3501 NeedsEnvironmentIntrinsic(intrinsic),
3502 GetSideEffectsIntrinsic(intrinsic),
3503 GetExceptionsIntrinsic(intrinsic));
3504 }
3505 resolved_method_ = method;
3506 }
3507
IsGEZero(HInstruction * instruction)3508 bool IsGEZero(HInstruction* instruction) {
3509 DCHECK(instruction != nullptr);
3510 if (instruction->IsArrayLength()) {
3511 return true;
3512 } else if (instruction->IsMin()) {
3513 // Instruction MIN(>=0, >=0) is >= 0.
3514 return IsGEZero(instruction->InputAt(0)) &&
3515 IsGEZero(instruction->InputAt(1));
3516 } else if (instruction->IsAbs()) {
3517 // Instruction ABS(>=0) is >= 0.
3518 // NOTE: ABS(minint) = minint prevents assuming
3519 // >= 0 without looking at the argument.
3520 return IsGEZero(instruction->InputAt(0));
3521 }
3522 int64_t value = -1;
3523 return IsInt64AndGet(instruction, &value) && value >= 0;
3524 }
3525
3526 } // namespace art
3527