// Copyright (c) 2017 The Khronos Group Inc.
// Copyright (c) 2017 Valve Corporation
// Copyright (c) 2017 LunarG Inc.
// Copyright (c) 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "source/opt/dead_branch_elim_pass.h"

#include <list>
#include <memory>
#include <vector>

#include "source/cfa.h"
#include "source/opt/ir_context.h"
#include "source/opt/struct_cfg_analysis.h"
#include "source/util/make_unique.h"

namespace spvtools {
namespace opt {
namespace {
constexpr uint32_t kBranchCondTrueLabIdInIdx = 1;
constexpr uint32_t kBranchCondFalseLabIdInIdx = 2;
}  // namespace

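// If |condId| names a boolean constant (possibly behind a chain of
// OpLogicalNot instructions), returns true and stores its value in |condVal|;
// OpConstantNull counts as false. Returns false if the condition is not a
// compile-time constant.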
bool DeadBranchElimPass::GetConstCondition(uint32_t condId, bool* condVal) {
  bool condIsConst;
  Instruction* cInst = get_def_use_mgr()->GetDef(condId);
  switch (cInst->opcode()) {
    case spv::Op::OpConstantNull:
    case spv::Op::OpConstantFalse: {
      *condVal = false;
      condIsConst = true;
    } break;
    case spv::Op::OpConstantTrue: {
      *condVal = true;
      condIsConst = true;
    } break;
    case spv::Op::OpLogicalNot: {
      bool negVal;
      condIsConst =
          GetConstCondition(cInst->GetSingleWordInOperand(0), &negVal);
      if (condIsConst) *condVal = !negVal;
    } break;
    default: { condIsConst = false; } break;
  }
  return condIsConst;
}

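// If |selId| names a 32-bit integer constant, returns true and stores its
// value in |selVal|; OpConstantNull yields 0. Wider integers and non-integer
// selectors are not folded.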
bool DeadBranchElimPass::GetConstInteger(uint32_t selId, uint32_t* selVal) {
  Instruction* sInst = get_def_use_mgr()->GetDef(selId);
  uint32_t typeId = sInst->type_id();
  Instruction* typeInst = get_def_use_mgr()->GetDef(typeId);
  if (!typeInst || (typeInst->opcode() != spv::Op::OpTypeInt)) return false;
  // TODO(greg-lunarg): Support non-32 bit ints
  if (typeInst->GetSingleWordInOperand(0) != 32) return false;
  if (sInst->opcode() == spv::Op::OpConstant) {
    *selVal = sInst->GetSingleWordInOperand(0);
    return true;
  } else if (sInst->opcode() == spv::Op::OpConstantNull) {
    *selVal = 0;
    return true;
  }
  return false;
}

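// Appends an unconditional OpBranch to |labelId| at the end of block |bp| and
// registers the new instruction with the def-use manager.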
void DeadBranchElimPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
  assert(get_def_use_mgr()->GetDef(labelId) != nullptr);
  std::unique_ptr<Instruction> newBranch(
      new Instruction(context(), spv::Op::OpBranch, 0, 0,
                      {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
  context()->AnalyzeDefUse(&*newBranch);
  context()->set_instr_block(&*newBranch, bp);
  bp->AddInstruction(std::move(newBranch));
}

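// Returns the basic block containing the instruction that defines |id|; for a
// label id this is the block itself.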
BasicBlock* DeadBranchElimPass::GetParentBlock(uint32_t id) {
  return context()->get_instr_block(get_def_use_mgr()->GetDef(id));
}

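// Marks every block reachable from the entry of |func| as live, treating a
// conditional branch or switch whose condition or selector folds to a
// constant as an unconditional jump to its single live target. The folded
// branches are collected and simplified afterwards, innermost constructs
// first. Returns true if the function was modified.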
bool DeadBranchElimPass::MarkLiveBlocks(
    Function* func, std::unordered_set<BasicBlock*>* live_blocks) {
  std::vector<std::pair<BasicBlock*, uint32_t>> conditions_to_simplify;
  std::unordered_set<BasicBlock*> blocks_with_backedge;
  std::vector<BasicBlock*> stack;
  stack.push_back(&*func->begin());
  bool modified = false;
  while (!stack.empty()) {
    BasicBlock* block = stack.back();
    stack.pop_back();

    // |live_blocks| doubles as the visited set.
    if (!live_blocks->insert(block).second) continue;

    uint32_t cont_id = block->ContinueBlockIdIfAny();
    if (cont_id != 0) {
      AddBlocksWithBackEdge(cont_id, block->id(), block->MergeBlockIdIfAny(),
                            &blocks_with_backedge);
    }

    Instruction* terminator = block->terminator();
    uint32_t live_lab_id = 0;
    // Check if the terminator has a single valid successor.
    if (terminator->opcode() == spv::Op::OpBranchConditional) {
      bool condVal;
      if (GetConstCondition(terminator->GetSingleWordInOperand(0u), &condVal)) {
        live_lab_id = terminator->GetSingleWordInOperand(
            condVal ? kBranchCondTrueLabIdInIdx : kBranchCondFalseLabIdInIdx);
      }
    } else if (terminator->opcode() == spv::Op::OpSwitch) {
      uint32_t sel_val;
      if (GetConstInteger(terminator->GetSingleWordInOperand(0u), &sel_val)) {
        // Search the switch operands for the selector value and set
        // live_lab_id to the corresponding label, using the default label
        // if the value is not found.
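        // OpSwitch in-operands are the selector, the default label, and then
        // (literal, label) pairs, so even values of |icnt| greater than 1 are
        // case literals and odd values are the matching labels.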
        uint32_t icnt = 0;
        uint32_t case_val;
        terminator->WhileEachInOperand(
            [&icnt, &case_val, &sel_val, &live_lab_id](const uint32_t* idp) {
              if (icnt == 1) {
                // Start with the default label.
                live_lab_id = *idp;
              } else if (icnt > 1) {
                if (icnt % 2 == 0) {
                  case_val = *idp;
                } else {
                  if (case_val == sel_val) {
                    live_lab_id = *idp;
                    return false;
                  }
                }
              }
              ++icnt;
              return true;
            });
      }
    }

    // Don't simplify a back edge unless it becomes a branch to the header.
    // Every loop must have exactly one back edge to the loop header, so we
    // cannot remove it.
    bool simplify = false;
    if (live_lab_id != 0) {
      if (!blocks_with_backedge.count(block)) {
        // This is not a back edge.
        simplify = true;
      } else {
        const auto& struct_cfg_analysis = context()->GetStructuredCFGAnalysis();
        uint32_t header_id = struct_cfg_analysis->ContainingLoop(block->id());
        if (live_lab_id == header_id) {
          // The new branch will be a branch to the header.
          simplify = true;
        }
      }
    }

    if (simplify) {
      conditions_to_simplify.push_back({block, live_lab_id});
      stack.push_back(GetParentBlock(live_lab_id));
    } else {
      // All successors are live.
      const auto* const_block = block;
      const_block->ForEachSuccessorLabel([&stack, this](const uint32_t label) {
        stack.push_back(GetParentBlock(label));
      });
    }
  }

  // Traverse |conditions_to_simplify| in reverse order.  This is done so that
  // we simplify nested constructs before simplifying the constructs that
  // contain them.
  for (auto b = conditions_to_simplify.rbegin();
       b != conditions_to_simplify.rend(); ++b) {
    modified |= SimplifyBranch(b->first, b->second);
  }

  return modified;
}

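// Folds the terminator of |block| to an unconditional branch to
// |live_lab_id| (or to a single-case switch when a nested break forces the
// switch to be kept), and kills, keeps, or moves the associated merge
// instruction as structured control flow requires. Returns true if the
// module was modified.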
bool DeadBranchElimPass::SimplifyBranch(BasicBlock* block,
                                        uint32_t live_lab_id) {
  Instruction* merge_inst = block->GetMergeInst();
  Instruction* terminator = block->terminator();
  if (merge_inst && merge_inst->opcode() == spv::Op::OpSelectionMerge) {
    if (merge_inst->NextNode()->opcode() == spv::Op::OpSwitch &&
        SwitchHasNestedBreak(block->id())) {
      if (terminator->NumInOperands() == 2) {
        // We cannot remove the branch, and it already has a single case, so no
        // work to do.
        return false;
      }
      // We have to keep the switch because it has a nested break, so we
      // remove all cases except for the live one.
      Instruction::OperandList new_operands;
      new_operands.push_back(terminator->GetInOperand(0));
      new_operands.push_back({SPV_OPERAND_TYPE_ID, {live_lab_id}});
      terminator->SetInOperands(std::move(new_operands));
      context()->UpdateDefUse(terminator);
    } else {
      // Check if the merge instruction is still needed because of a
      // non-nested break from the construct.  Move the merge instruction if
      // it is still needed.
      StructuredCFGAnalysis* cfg_analysis =
          context()->GetStructuredCFGAnalysis();
      Instruction* first_break = FindFirstExitFromSelectionMerge(
          live_lab_id, merge_inst->GetSingleWordInOperand(0),
          cfg_analysis->LoopMergeBlock(live_lab_id),
          cfg_analysis->LoopContinueBlock(live_lab_id),
          cfg_analysis->SwitchMergeBlock(live_lab_id));

      AddBranch(live_lab_id, block);
      context()->KillInst(terminator);
      if (first_break == nullptr) {
        context()->KillInst(merge_inst);
      } else {
        merge_inst->RemoveFromList();
        first_break->InsertBefore(std::unique_ptr<Instruction>(merge_inst));
        context()->set_instr_block(merge_inst,
                                   context()->get_instr_block(first_break));
      }
    }
  } else {
    AddBranch(live_lab_id, block);
    context()->KillInst(terminator);
  }
  return true;
}

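// For each live header block, records its merge block in
// |unreachable_merges| and its continue target in |unreachable_continues|
// (mapped back to the header) when those blocks are not themselves live.
// Such blocks must be retained, with trivial bodies, to keep the structured
// control flow valid.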
void DeadBranchElimPass::MarkUnreachableStructuredTargets(
    const std::unordered_set<BasicBlock*>& live_blocks,
    std::unordered_set<BasicBlock*>* unreachable_merges,
    std::unordered_map<BasicBlock*, BasicBlock*>* unreachable_continues) {
  for (auto block : live_blocks) {
    if (auto merge_id = block->MergeBlockIdIfAny()) {
      BasicBlock* merge_block = GetParentBlock(merge_id);
      if (!live_blocks.count(merge_block)) {
        unreachable_merges->insert(merge_block);
      }
      if (auto cont_id = block->ContinueBlockIdIfAny()) {
        BasicBlock* cont_block = GetParentBlock(cont_id);
        if (!live_blocks.count(cont_block)) {
          (*unreachable_continues)[cont_block] = block;
        }
      }
    }
  }
}

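// Rewrites the OpPhi instructions in the live blocks of |func|: entries for
// dead incoming edges are dropped, values flowing in from an unreachable
// continue block are replaced with undef, and a phi reduced to a single
// incoming value is replaced by that value. Returns true if anything changed.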
bool DeadBranchElimPass::FixPhiNodesInLiveBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto& block : *func) {
    if (live_blocks.count(&block)) {
      for (auto iter = block.begin(); iter != block.end();) {
        if (iter->opcode() != spv::Op::OpPhi) {
          break;
        }

        bool changed = false;
        bool backedge_added = false;
        Instruction* inst = &*iter;
        std::vector<Operand> operands;
        // Build a complete set of operands (not just input operands). Start
        // with type and result id operands.
        operands.push_back(inst->GetOperand(0u));
        operands.push_back(inst->GetOperand(1u));
        // Iterate through the incoming labels and determine which to keep
        // and/or modify.  If there is an unreachable continue block, there
        // will be an edge from that block to the header.  We need to keep it
        // to maintain the structured control flow.  If the header has more
        // than 2 incoming edges, then the OpPhi must have an entry for that
        // edge.  However, if there is only one other incoming edge, the OpPhi
        // can be eliminated.
        for (uint32_t i = 1; i < inst->NumInOperands(); i += 2) {
          BasicBlock* inc = GetParentBlock(inst->GetSingleWordInOperand(i));
          auto cont_iter = unreachable_continues.find(inc);
          if (cont_iter != unreachable_continues.end() &&
              cont_iter->second == &block && inst->NumInOperands() > 4) {
            if (get_def_use_mgr()
                    ->GetDef(inst->GetSingleWordInOperand(i - 1))
                    ->opcode() == spv::Op::OpUndef) {
              // Already an undef incoming value, no change necessary.
              operands.push_back(inst->GetInOperand(i - 1));
              operands.push_back(inst->GetInOperand(i));
              backedge_added = true;
            } else {
              // Replace the incoming value with undef if this phi exists in
              // the loop header. Otherwise, this edge is not live since the
              // unreachable continue block will be replaced with an
              // unconditional branch to the header only.
              operands.emplace_back(
                  SPV_OPERAND_TYPE_ID,
                  std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
              operands.push_back(inst->GetInOperand(i));
              changed = true;
              backedge_added = true;
            }
          } else if (live_blocks.count(inc) && inc->IsSuccessor(&block)) {
            // Keep live incoming edge.
            operands.push_back(inst->GetInOperand(i - 1));
            operands.push_back(inst->GetInOperand(i));
          } else {
            // Remove incoming edge.
            changed = true;
          }
        }

        if (changed) {
          modified = true;
          uint32_t continue_id = block.ContinueBlockIdIfAny();
          if (!backedge_added && continue_id != 0 &&
              unreachable_continues.count(GetParentBlock(continue_id)) &&
              operands.size() > 4) {
            // The backedge has been changed to branch from the continue block
            // instead of from a successor of the continue block. Add an entry
            // to the phi to provide an undef for the continue block. Since
            // the successor of the continue must also be unreachable
            // (dominated by the continue block), any entry for the original
            // backedge has been removed from the phi operands.
            operands.emplace_back(
                SPV_OPERAND_TYPE_ID,
                std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
            operands.emplace_back(SPV_OPERAND_TYPE_ID,
                                  std::initializer_list<uint32_t>{continue_id});
          }

          // Either replace the phi with a single value or rebuild the phi out
          // of |operands|.
          //
          // We always have type and result id operands. So this phi has a
          // single source if there are two more operands beyond those.
          if (operands.size() == 4) {
            // The first input data operand is at index 2.
            uint32_t replId = operands[2u].words[0];
            context()->KillNamesAndDecorates(inst->result_id());
            context()->ReplaceAllUsesWith(inst->result_id(), replId);
            iter = context()->KillInst(&*inst);
          } else {
            // We've rewritten the operands, so first instruct the def/use
            // manager to forget uses in the phi before we replace them. After
            // replacing the operands, update the def/use manager by
            // re-analyzing the ids used by this phi.
            get_def_use_mgr()->EraseUseRecordsOfOperandIds(inst);
            inst->ReplaceOperands(operands);
            get_def_use_mgr()->AnalyzeInstUse(inst);
            ++iter;
          }
        } else {
          ++iter;
        }
      }
    }
  }

  return modified;
}

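// Erases the blocks of |func| that are not in |live_blocks|. Unreachable
// merge blocks are kept but reduced to a label followed by OpUnreachable;
// unreachable continue targets are kept but reduced to a label followed by a
// branch back to their loop header. Returns true if anything changed.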
bool DeadBranchElimPass::EraseDeadBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_set<BasicBlock*>& unreachable_merges,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto ebi = func->begin(); ebi != func->end();) {
    if (unreachable_continues.count(&*ebi)) {
      uint32_t cont_id = unreachable_continues.find(&*ebi)->second->id();
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != spv::Op::OpBranch ||
          ebi->terminator()->GetSingleWordInOperand(0u) != cont_id) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unconditional branch to header.
        assert(unreachable_continues.count(&*ebi));
        ebi->AddInstruction(MakeUnique<Instruction>(
            context(), spv::Op::OpBranch, 0, 0,
            std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {cont_id}}}));
        get_def_use_mgr()->AnalyzeInstUse(&*ebi->tail());
        context()->set_instr_block(&*ebi->tail(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (unreachable_merges.count(&*ebi)) {
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != spv::Op::OpUnreachable) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unreachable terminator.
        ebi->AddInstruction(
            MakeUnique<Instruction>(context(), spv::Op::OpUnreachable, 0, 0,
                                    std::initializer_list<Operand>{}));
        context()->AnalyzeUses(ebi->terminator());
        context()->set_instr_block(ebi->terminator(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (!live_blocks.count(&*ebi)) {
      // Kill this block.
      KillAllInsts(&*ebi);
      ebi = ebi.Erase();
      modified = true;
    } else {
      ++ebi;
    }
  }

  return modified;
}

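// Eliminates dead branches in |func|: marks the live blocks, records the
// unreachable merge and continue blocks that must be preserved, fixes the
// phi nodes in live blocks, and finally erases the dead blocks.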
bool DeadBranchElimPass::EliminateDeadBranches(Function* func) {
  if (func->IsDeclaration()) {
    return false;
  }

  bool modified = false;
  std::unordered_set<BasicBlock*> live_blocks;
  modified |= MarkLiveBlocks(func, &live_blocks);

  std::unordered_set<BasicBlock*> unreachable_merges;
  std::unordered_map<BasicBlock*, BasicBlock*> unreachable_continues;
  MarkUnreachableStructuredTargets(live_blocks, &unreachable_merges,
                                   &unreachable_continues);
  modified |= FixPhiNodesInLiveBlocks(func, live_blocks, unreachable_continues);
  modified |= EraseDeadBlocks(func, live_blocks, unreachable_merges,
                              unreachable_continues);

  return modified;
}

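// Restores a valid block ordering after branches have been removed: blocks
// are reordered in structured order for shader modules, or by a depth-first
// preorder of the dominator tree otherwise, so that every block appears
// after the blocks that dominate it.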
void DeadBranchElimPass::FixBlockOrder() {
  context()->BuildInvalidAnalyses(IRContext::kAnalysisCFG |
                                  IRContext::kAnalysisDominatorAnalysis);
  // Reorders blocks according to DFS of dominator tree.
  ProcessFunction reorder_dominators = [this](Function* function) {
    DominatorAnalysis* dominators = context()->GetDominatorAnalysis(function);
    std::vector<BasicBlock*> blocks;
    for (auto iter = dominators->GetDomTree().begin();
         iter != dominators->GetDomTree().end(); ++iter) {
      if (iter->id() != 0) {
        blocks.push_back(iter->bb_);
      }
    }
    for (uint32_t i = 1; i < blocks.size(); ++i) {
      function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
    }
    return true;
  };

  // Reorders blocks according to structured order.
  ProcessFunction reorder_structured = [](Function* function) {
    function->ReorderBasicBlocksInStructuredOrder();
    return true;
  };

  // Structured order is more intuitive so use it where possible.
  if (context()->get_feature_mgr()->HasCapability(spv::Capability::Shader)) {
    context()->ProcessReachableCallTree(reorder_structured);
  } else {
    context()->ProcessReachableCallTree(reorder_dominators);
  }
}

Pass::Status DeadBranchElimPass::Process() {
  // Do not process if module contains OpGroupDecorate. Additional
  // support required in KillNamesAndDecorates().
  // TODO(greg-lunarg): Add support for OpGroupDecorate
  for (auto& ai : get_module()->annotations())
    if (ai.opcode() == spv::Op::OpGroupDecorate)
      return Status::SuccessWithoutChange;
  // Process all entry point functions.
  ProcessFunction pfn = [this](Function* fp) {
    return EliminateDeadBranches(fp);
  };
  bool modified = context()->ProcessReachableCallTree(pfn);
  if (modified) FixBlockOrder();
  return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
}

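// Follows the control flow from |start_block_id|, which lies inside a
// selection construct whose merge block is |merge_block_id|, and returns the
// first branch that can exit the construct other than through the enclosing
// loop or switch targets. Returns nullptr if the search reaches the merge,
// loop merge, or loop continue block without finding such a branch.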
Instruction* DeadBranchElimPass::FindFirstExitFromSelectionMerge(
    uint32_t start_block_id, uint32_t merge_block_id, uint32_t loop_merge_id,
    uint32_t loop_continue_id, uint32_t switch_merge_id) {
  // To find the "first" exit, we follow branches looking for a conditional
  // branch that is not in a nested construct and is not the header of a new
  // construct.  We follow the control flow from |start_block_id| to find the
  // first one.

  while (start_block_id != merge_block_id && start_block_id != loop_merge_id &&
         start_block_id != loop_continue_id) {
    BasicBlock* start_block = context()->get_instr_block(start_block_id);
    Instruction* branch = start_block->terminator();
    uint32_t next_block_id = 0;
    switch (branch->opcode()) {
      case spv::Op::OpBranchConditional:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // If a possible target is the |loop_merge_id| or |loop_continue_id|,
          // which are not the current merge node, then we continue the search
          // with the other target.
          for (uint32_t i = 1; i < 3; i++) {
            if (branch->GetSingleWordInOperand(i) == loop_merge_id &&
                loop_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == loop_continue_id &&
                loop_continue_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == switch_merge_id &&
                switch_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
          }

          if (next_block_id == 0) {
            return branch;
          }
        }
        break;
      case spv::Op::OpSwitch:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // A switch with no merge instruction can have at most 5 targets:
          //   a. |merge_block_id|
          //   b. |loop_merge_id|
          //   c. |loop_continue_id|
          //   d. |switch_merge_id|
          //   e. 1 block inside the current region.
          //
          // Note that because this is a switch, |merge_block_id| must equal
          // |switch_merge_id|.
          //
          // This leads to a number of cases of what to do.
          //
          // 1. Does not jump to a block inside of the current construct.  In
          // this case, there is no conditional break, so we should return
          // |nullptr|.
          //
          // 2. Jumps to |merge_block_id| and a block inside the current
          // construct.  In this case, this branch conditionally breaks to the
          // end of the current construct, so return the current branch.
          //
          // 3. Otherwise, this branch may break, but not to the current merge
          // block.  So we continue with the block that is inside the loop.
          bool found_break = false;
          for (uint32_t i = 1; i < branch->NumInOperands(); i += 2) {
            uint32_t target = branch->GetSingleWordInOperand(i);
            if (target == merge_block_id) {
              found_break = true;
            } else if (target != loop_merge_id && target != loop_continue_id) {
              next_block_id = branch->GetSingleWordInOperand(i);
            }
          }

          if (next_block_id == 0) {
            // Case 1.
            return nullptr;
          }

          if (found_break) {
            // Case 2.
            return branch;
          }

          // The fall through is case 3.
        }
        break;
      case spv::Op::OpBranch:
        // Need to check if this is the header of a loop nested in the
        // selection construct.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          next_block_id = branch->GetSingleWordInOperand(0);
        }
        break;
      default:
        return nullptr;
    }
    start_block_id = next_block_id;
  }
  return nullptr;
}

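// Adds to |blocks_with_back_edges| every block that is reachable from
// |cont_id| without passing through |header_id| or |merge_id| and that
// branches back to |header_id|.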
void DeadBranchElimPass::AddBlocksWithBackEdge(
    uint32_t cont_id, uint32_t header_id, uint32_t merge_id,
    std::unordered_set<BasicBlock*>* blocks_with_back_edges) {
  std::unordered_set<uint32_t> visited;
  visited.insert(cont_id);
  visited.insert(header_id);
  visited.insert(merge_id);

  std::vector<uint32_t> work_list;
  work_list.push_back(cont_id);

  while (!work_list.empty()) {
    uint32_t bb_id = work_list.back();
    work_list.pop_back();

    BasicBlock* bb = context()->get_instr_block(bb_id);

    bool has_back_edge = false;
    bb->ForEachSuccessorLabel([header_id, &visited, &work_list,
                               &has_back_edge](uint32_t* succ_label_id) {
      if (visited.insert(*succ_label_id).second) {
        work_list.push_back(*succ_label_id);
      }
      if (*succ_label_id == header_id) {
        has_back_edge = true;
      }
    });

    if (has_back_edge) {
      blocks_with_back_edges->insert(bb);
    }
  }
}

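// Returns true if the switch construct headed by |switch_header_id| contains
// a branch to its merge block from a non-header block inside the construct,
// i.e. a nested break that forces the switch to be kept.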
bool DeadBranchElimPass::SwitchHasNestedBreak(uint32_t switch_header_id) {
  std::vector<BasicBlock*> block_in_construct;
  BasicBlock* start_block = context()->get_instr_block(switch_header_id);
  uint32_t merge_block_id = start_block->MergeBlockIdIfAny();

  StructuredCFGAnalysis* cfg_analysis = context()->GetStructuredCFGAnalysis();
  return !get_def_use_mgr()->WhileEachUser(
      merge_block_id,
      [this, cfg_analysis, switch_header_id](Instruction* inst) {
        if (!inst->IsBranch()) {
          return true;
        }

        BasicBlock* bb = context()->get_instr_block(inst);
        if (bb->id() == switch_header_id) {
          return true;
        }
        return (cfg_analysis->ContainingConstruct(inst) == switch_header_id &&
                bb->GetMergeInst() == nullptr);
      });
}

}  // namespace opt
}  // namespace spvtools