// Copyright (c) 2017 The Khronos Group Inc.
// Copyright (c) 2017 Valve Corporation
// Copyright (c) 2017 LunarG Inc.
// Copyright (c) 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "source/opt/dead_branch_elim_pass.h"

#include <list>
#include <memory>
#include <vector>

#include "source/cfa.h"
#include "source/opt/ir_context.h"
#include "source/opt/iterator.h"
#include "source/opt/struct_cfg_analysis.h"
#include "source/util/make_unique.h"

namespace spvtools {
namespace opt {

namespace {

const uint32_t kBranchCondTrueLabIdInIdx = 1;
const uint32_t kBranchCondFalseLabIdInIdx = 2;

}  // anonymous namespace

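// If the boolean value referenced by |condId| is a compile-time constant,
// stores it in |condVal| and returns true; otherwise returns false.
// OpLogicalNot of a constant condition is folded through recursively.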
bool DeadBranchElimPass::GetConstCondition(uint32_t condId, bool* condVal) {
  bool condIsConst;
  Instruction* cInst = get_def_use_mgr()->GetDef(condId);
  switch (cInst->opcode()) {
    case SpvOpConstantFalse: {
      *condVal = false;
      condIsConst = true;
    } break;
    case SpvOpConstantTrue: {
      *condVal = true;
      condIsConst = true;
    } break;
    case SpvOpLogicalNot: {
      bool negVal;
      condIsConst =
          GetConstCondition(cInst->GetSingleWordInOperand(0), &negVal);
      if (condIsConst) *condVal = !negVal;
    } break;
    default: { condIsConst = false; } break;
  }
  return condIsConst;
}

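// If |selId| names a 32-bit integer constant (including OpConstantNull),
// stores its value in |selVal| and returns true; otherwise returns false.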
bool DeadBranchElimPass::GetConstInteger(uint32_t selId, uint32_t* selVal) {
  Instruction* sInst = get_def_use_mgr()->GetDef(selId);
  uint32_t typeId = sInst->type_id();
  Instruction* typeInst = get_def_use_mgr()->GetDef(typeId);
  if (!typeInst || (typeInst->opcode() != SpvOpTypeInt)) return false;
  // TODO(greg-lunarg): Support non-32 bit ints
  if (typeInst->GetSingleWordInOperand(0) != 32) return false;
  if (sInst->opcode() == SpvOpConstant) {
    *selVal = sInst->GetSingleWordInOperand(0);
    return true;
  } else if (sInst->opcode() == SpvOpConstantNull) {
    *selVal = 0;
    return true;
  }
  return false;
}

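// Appends an unconditional OpBranch to |labelId| at the end of block |bp| and
// registers the new instruction with the def-use and instruction-to-block
// analyses.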
void DeadBranchElimPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
  assert(get_def_use_mgr()->GetDef(labelId) != nullptr);
  std::unique_ptr<Instruction> newBranch(
      new Instruction(context(), SpvOpBranch, 0, 0,
                      {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
  context()->AnalyzeDefUse(&*newBranch);
  context()->set_instr_block(&*newBranch, bp);
  bp->AddInstruction(std::move(newBranch));
}

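// Returns the basic block containing the definition of |id| (for a label id,
// the labeled block itself).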
BasicBlock* DeadBranchElimPass::GetParentBlock(uint32_t id) {
  return context()->get_instr_block(get_def_use_mgr()->GetDef(id));
}

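// Computes the set of blocks in |func| that remain reachable once branches on
// constant conditions are folded, storing them in |live_blocks|, and
// simplifies those foldable branches. Returns true if |func| was modified.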
bool DeadBranchElimPass::MarkLiveBlocks(
    Function* func, std::unordered_set<BasicBlock*>* live_blocks) {
  std::vector<std::pair<BasicBlock*, uint32_t>> conditions_to_simplify;
  std::unordered_set<BasicBlock*> blocks_with_backedge;
  std::vector<BasicBlock*> stack;
  stack.push_back(&*func->begin());
  bool modified = false;
  while (!stack.empty()) {
    BasicBlock* block = stack.back();
    stack.pop_back();

    // |live_blocks| doubles as the visited set.
    if (!live_blocks->insert(block).second) continue;

    uint32_t cont_id = block->ContinueBlockIdIfAny();
    if (cont_id != 0) {
      AddBlocksWithBackEdge(cont_id, block->id(), block->MergeBlockIdIfAny(),
                            &blocks_with_backedge);
    }

    Instruction* terminator = block->terminator();
    uint32_t live_lab_id = 0;
    // Check if the terminator has a single valid successor.
    if (terminator->opcode() == SpvOpBranchConditional) {
      bool condVal;
      if (GetConstCondition(terminator->GetSingleWordInOperand(0u), &condVal)) {
        live_lab_id = terminator->GetSingleWordInOperand(
            condVal ? kBranchCondTrueLabIdInIdx : kBranchCondFalseLabIdInIdx);
      }
    } else if (terminator->opcode() == SpvOpSwitch) {
      uint32_t sel_val;
      if (GetConstInteger(terminator->GetSingleWordInOperand(0u), &sel_val)) {
        // Search the switch operands for the selector value and set
        // live_lab_id to the corresponding label; use the default label if
        // the value is not found.
        uint32_t icnt = 0;
        uint32_t case_val;
        terminator->WhileEachInOperand(
            [&icnt, &case_val, &sel_val, &live_lab_id](const uint32_t* idp) {
              if (icnt == 1) {
                // Start with the default label.
                live_lab_id = *idp;
              } else if (icnt > 1) {
                if (icnt % 2 == 0) {
                  case_val = *idp;
                } else {
                  if (case_val == sel_val) {
                    live_lab_id = *idp;
                    return false;
                  }
                }
              }
              ++icnt;
              return true;
            });
      }
    }

    // Don't simplify a back edge unless the new branch targets the loop
    // header: every loop must have exactly one back edge to its header, so we
    // cannot remove it.
    bool simplify = false;
    if (live_lab_id != 0) {
      if (!blocks_with_backedge.count(block)) {
        // This is not a back edge.
        simplify = true;
      } else {
        const auto& struct_cfg_analysis = context()->GetStructuredCFGAnalysis();
        uint32_t header_id = struct_cfg_analysis->ContainingLoop(block->id());
        if (live_lab_id == header_id) {
          // The new branch will be a branch to the header.
          simplify = true;
        }
      }
    }

    if (simplify) {
      conditions_to_simplify.push_back({block, live_lab_id});
      stack.push_back(GetParentBlock(live_lab_id));
    } else {
      // All successors are live.
      const auto* const_block = block;
      const_block->ForEachSuccessorLabel([&stack, this](const uint32_t label) {
        stack.push_back(GetParentBlock(label));
      });
    }
  }

  // Traverse |conditions_to_simplify| in reverse order.  This is done so that
  // we simplify nested constructs before simplifying the constructs that
  // contain them.
  for (auto b = conditions_to_simplify.rbegin();
       b != conditions_to_simplify.rend(); ++b) {
    modified |= SimplifyBranch(b->first, b->second);
  }

  return modified;
}

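// Replaces the conditional branch or switch terminating |block| with a
// simpler terminator whose only live target is |live_lab_id|, keeping or
// relocating the selection merge instruction when structured control flow
// still requires it. Returns true if |block| was modified.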
bool DeadBranchElimPass::SimplifyBranch(BasicBlock* block,
                                        uint32_t live_lab_id) {
  Instruction* merge_inst = block->GetMergeInst();
  Instruction* terminator = block->terminator();
  if (merge_inst && merge_inst->opcode() == SpvOpSelectionMerge) {
    if (merge_inst->NextNode()->opcode() == SpvOpSwitch &&
        SwitchHasNestedBreak(block->id())) {
      if (terminator->NumInOperands() == 2) {
        // We cannot remove the branch, and it already has a single case, so no
        // work to do.
        return false;
      }
      // We have to keep the switch because it has a nested break, so we
      // remove all cases except for the live one.
      Instruction::OperandList new_operands;
      new_operands.push_back(terminator->GetInOperand(0));
      new_operands.push_back({SPV_OPERAND_TYPE_ID, {live_lab_id}});
      terminator->SetInOperands(std::move(new_operands));
      context()->UpdateDefUse(terminator);
    } else {
      // Check if the merge instruction is still needed because of a
      // non-nested break from the construct.  Move the merge instruction if
      // it is still needed.
      StructuredCFGAnalysis* cfg_analysis =
          context()->GetStructuredCFGAnalysis();
      Instruction* first_break = FindFirstExitFromSelectionMerge(
          live_lab_id, merge_inst->GetSingleWordInOperand(0),
          cfg_analysis->LoopMergeBlock(live_lab_id),
          cfg_analysis->LoopContinueBlock(live_lab_id),
          cfg_analysis->SwitchMergeBlock(live_lab_id));

      AddBranch(live_lab_id, block);
      context()->KillInst(terminator);
      if (first_break == nullptr) {
        context()->KillInst(merge_inst);
      } else {
        merge_inst->RemoveFromList();
        first_break->InsertBefore(std::unique_ptr<Instruction>(merge_inst));
        context()->set_instr_block(merge_inst,
                                   context()->get_instr_block(first_break));
      }
    }
  } else {
    AddBranch(live_lab_id, block);
    context()->KillInst(terminator);
  }
  return true;
}

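// For each live header block, records its merge block in |unreachable_merges|
// if that merge block is not live, and maps its continue target to the header
// in |unreachable_continues| if the continue target is not live.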
void DeadBranchElimPass::MarkUnreachableStructuredTargets(
    const std::unordered_set<BasicBlock*>& live_blocks,
    std::unordered_set<BasicBlock*>* unreachable_merges,
    std::unordered_map<BasicBlock*, BasicBlock*>* unreachable_continues) {
  for (auto block : live_blocks) {
    if (auto merge_id = block->MergeBlockIdIfAny()) {
      BasicBlock* merge_block = GetParentBlock(merge_id);
      if (!live_blocks.count(merge_block)) {
        unreachable_merges->insert(merge_block);
      }
      if (auto cont_id = block->ContinueBlockIdIfAny()) {
        BasicBlock* cont_block = GetParentBlock(cont_id);
        if (!live_blocks.count(cont_block)) {
          (*unreachable_continues)[cont_block] = block;
        }
      }
    }
  }
}

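// Rewrites the OpPhi instructions in the live blocks of |func| so that they
// reference only live incoming edges, substituting undef values for edges
// from unreachable continue blocks that must be kept. Returns true if any phi
// was changed.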
bool DeadBranchElimPass::FixPhiNodesInLiveBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto& block : *func) {
    if (live_blocks.count(&block)) {
      for (auto iter = block.begin(); iter != block.end();) {
        if (iter->opcode() != SpvOpPhi) {
          break;
        }

        bool changed = false;
        bool backedge_added = false;
        Instruction* inst = &*iter;
        std::vector<Operand> operands;
        // Build a complete set of operands (not just input operands). Start
        // with type and result id operands.
        operands.push_back(inst->GetOperand(0u));
        operands.push_back(inst->GetOperand(1u));
        // Iterate through the incoming labels and determine which to keep
        // and/or modify.  If there is an unreachable continue block, there
        // will be an edge from that block to the header.  We need to keep it
        // to maintain the structured control flow.  If the header has more
        // than 2 incoming edges, then the OpPhi must have an entry for that
        // edge.  However, if there is only one other incoming edge, the OpPhi
        // can be eliminated.
        for (uint32_t i = 1; i < inst->NumInOperands(); i += 2) {
          BasicBlock* inc = GetParentBlock(inst->GetSingleWordInOperand(i));
          auto cont_iter = unreachable_continues.find(inc);
          if (cont_iter != unreachable_continues.end() &&
              cont_iter->second == &block && inst->NumInOperands() > 4) {
            if (get_def_use_mgr()
                    ->GetDef(inst->GetSingleWordInOperand(i - 1))
                    ->opcode() == SpvOpUndef) {
              // Already undef incoming value, no change necessary.
              operands.push_back(inst->GetInOperand(i - 1));
              operands.push_back(inst->GetInOperand(i));
              backedge_added = true;
            } else {
              // Replace incoming value with undef if this phi exists in the
              // loop header. Otherwise, this edge is not live since the
              // unreachable continue block will be replaced with an
              // unconditional branch to the header only.
              operands.emplace_back(
                  SPV_OPERAND_TYPE_ID,
                  std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
              operands.push_back(inst->GetInOperand(i));
              changed = true;
              backedge_added = true;
            }
          } else if (live_blocks.count(inc) && inc->IsSuccessor(&block)) {
            // Keep live incoming edge.
            operands.push_back(inst->GetInOperand(i - 1));
            operands.push_back(inst->GetInOperand(i));
          } else {
            // Remove incoming edge.
            changed = true;
          }
        }

        if (changed) {
          modified = true;
          uint32_t continue_id = block.ContinueBlockIdIfAny();
          if (!backedge_added && continue_id != 0 &&
              unreachable_continues.count(GetParentBlock(continue_id)) &&
              operands.size() > 4) {
            // Changed the backedge to branch from the continue block instead
            // of a successor of the continue block. Add an entry to the phi to
            // provide an undef for the continue block. Since the successor of
            // the continue must also be unreachable (dominated by the continue
            // block), any entry for the original backedge has been removed
            // from the phi operands.
            operands.emplace_back(
                SPV_OPERAND_TYPE_ID,
                std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
            operands.emplace_back(SPV_OPERAND_TYPE_ID,
                                  std::initializer_list<uint32_t>{continue_id});
          }

          // Either replace the phi with a single value or rebuild the phi out
          // of |operands|.
          //
          // We always have type and result id operands. So this phi has a
          // single source if there are two more operands beyond those.
          if (operands.size() == 4) {
            // The first input data operand is at index 2.
            uint32_t replId = operands[2u].words[0];
            context()->ReplaceAllUsesWith(inst->result_id(), replId);
            iter = context()->KillInst(&*inst);
          } else {
            // We've rewritten the operands, so first instruct the def/use
            // manager to forget uses in the phi before we replace them. After
            // replacing operands update the def/use manager by re-analyzing
            // the used ids in this phi.
            get_def_use_mgr()->EraseUseRecordsOfOperandIds(inst);
            inst->ReplaceOperands(operands);
            get_def_use_mgr()->AnalyzeInstUse(inst);
            ++iter;
          }
        } else {
          ++iter;
        }
      }
    }
  }

  return modified;
}

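// Erases the blocks of |func| that are not in |live_blocks|. Unreachable
// merge blocks and continue targets recorded in |unreachable_merges| and
// |unreachable_continues| are emptied rather than erased, since the
// structured control-flow rules still require them. Returns true if |func|
// was modified.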
bool DeadBranchElimPass::EraseDeadBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_set<BasicBlock*>& unreachable_merges,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto ebi = func->begin(); ebi != func->end();) {
    if (unreachable_continues.count(&*ebi)) {
      uint32_t cont_id = unreachable_continues.find(&*ebi)->second->id();
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != SpvOpBranch ||
          ebi->terminator()->GetSingleWordInOperand(0u) != cont_id) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unconditional branch to header.
        assert(unreachable_continues.count(&*ebi));
        ebi->AddInstruction(MakeUnique<Instruction>(
            context(), SpvOpBranch, 0, 0,
            std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {cont_id}}}));
        get_def_use_mgr()->AnalyzeInstUse(&*ebi->tail());
        context()->set_instr_block(&*ebi->tail(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (unreachable_merges.count(&*ebi)) {
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != SpvOpUnreachable) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unreachable terminator.
        ebi->AddInstruction(
            MakeUnique<Instruction>(context(), SpvOpUnreachable, 0, 0,
                                    std::initializer_list<Operand>{}));
        context()->AnalyzeUses(ebi->terminator());
        context()->set_instr_block(ebi->terminator(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (!live_blocks.count(&*ebi)) {
      // Kill this block.
      KillAllInsts(&*ebi);
      ebi = ebi.Erase();
      modified = true;
    } else {
      ++ebi;
    }
  }

  return modified;
}

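// Performs dead-branch elimination on a single function: marks the live
// blocks, repairs phi nodes, and erases the dead blocks. Returns true if
// |func| was modified.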
bool DeadBranchElimPass::EliminateDeadBranches(Function* func) {
  bool modified = false;
  std::unordered_set<BasicBlock*> live_blocks;
  modified |= MarkLiveBlocks(func, &live_blocks);

  std::unordered_set<BasicBlock*> unreachable_merges;
  std::unordered_map<BasicBlock*, BasicBlock*> unreachable_continues;
  MarkUnreachableStructuredTargets(live_blocks, &unreachable_merges,
                                   &unreachable_continues);
  modified |= FixPhiNodesInLiveBlocks(func, live_blocks, unreachable_continues);
  modified |= EraseDeadBlocks(func, live_blocks, unreachable_merges,
                              unreachable_continues);

  return modified;
}

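// Reorders the basic blocks of every reachable function so that the block
// layout remains valid after dead blocks have been removed: structured order
// for shader modules, dominator-tree order otherwise.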
void DeadBranchElimPass::FixBlockOrder() {
  context()->BuildInvalidAnalyses(IRContext::kAnalysisCFG |
                                  IRContext::kAnalysisDominatorAnalysis);
  // Reorders blocks according to a DFS of the dominator tree.
  ProcessFunction reorder_dominators = [this](Function* function) {
    DominatorAnalysis* dominators = context()->GetDominatorAnalysis(function);
    std::vector<BasicBlock*> blocks;
    for (auto iter = dominators->GetDomTree().begin();
         iter != dominators->GetDomTree().end(); ++iter) {
      if (iter->id() != 0) {
        blocks.push_back(iter->bb_);
      }
    }
    for (uint32_t i = 1; i < blocks.size(); ++i) {
      function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
    }
    return true;
  };

  // Reorders blocks according to structured order.
  ProcessFunction reorder_structured = [this](Function* function) {
    std::list<BasicBlock*> order;
    context()->cfg()->ComputeStructuredOrder(function, &*function->begin(),
                                             &order);
    std::vector<BasicBlock*> blocks;
    for (auto block : order) {
      blocks.push_back(block);
    }
    for (uint32_t i = 1; i < blocks.size(); ++i) {
      function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
    }
    return true;
  };

  // Structured order is more intuitive so use it where possible.
  if (context()->get_feature_mgr()->HasCapability(SpvCapabilityShader)) {
    context()->ProcessReachableCallTree(reorder_structured);
  } else {
    context()->ProcessReachableCallTree(reorder_dominators);
  }
}

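// Pass entry point: runs EliminateDeadBranches on every reachable function
// and fixes the block order if anything changed.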
Pass::Status DeadBranchElimPass::Process() {
  // Do not process if the module contains OpGroupDecorate. Additional
  // support is required in KillNamesAndDecorates().
  // TODO(greg-lunarg): Add support for OpGroupDecorate
  for (auto& ai : get_module()->annotations())
    if (ai.opcode() == SpvOpGroupDecorate) return Status::SuccessWithoutChange;
  // Process all entry point functions.
  ProcessFunction pfn = [this](Function* fp) {
    return EliminateDeadBranches(fp);
  };
  bool modified = context()->ProcessReachableCallTree(pfn);
  if (modified) FixBlockOrder();
  return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
}

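// Starting at |start_block_id|, walks the control flow inside the selection
// construct whose merge block is |merge_block_id| and returns the first
// branch that conditionally exits the construct, or nullptr if there is none.
// |loop_merge_id|, |loop_continue_id|, and |switch_merge_id| identify the
// merge and continue targets of the enclosing constructs.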
Instruction* DeadBranchElimPass::FindFirstExitFromSelectionMerge(
    uint32_t start_block_id, uint32_t merge_block_id, uint32_t loop_merge_id,
    uint32_t loop_continue_id, uint32_t switch_merge_id) {
  // To find the "first" exit, we follow branches looking for a conditional
  // branch that is not in a nested construct and is not the header of a new
  // construct.  We follow the control flow from |start_block_id| to find the
  // first one.

  while (start_block_id != merge_block_id && start_block_id != loop_merge_id &&
         start_block_id != loop_continue_id) {
    BasicBlock* start_block = context()->get_instr_block(start_block_id);
    Instruction* branch = start_block->terminator();
    uint32_t next_block_id = 0;
    switch (branch->opcode()) {
      case SpvOpBranchConditional:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // If a possible target is the |loop_merge_id| or |loop_continue_id|,
          // which are not the current merge node, then we continue the search
          // with the other target.
          for (uint32_t i = 1; i < 3; i++) {
            if (branch->GetSingleWordInOperand(i) == loop_merge_id &&
                loop_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == loop_continue_id &&
                loop_continue_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == switch_merge_id &&
                switch_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
          }

          if (next_block_id == 0) {
            return branch;
          }
        }
        break;
      case SpvOpSwitch:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // A switch with no merge instruction can have at most 5 targets:
          //   a. |merge_block_id|
          //   b. |loop_merge_id|
          //   c. |loop_continue_id|
          //   d. |switch_merge_id|
          //   e. 1 block inside the current region.
          //
          // Note that because this is a switch, |merge_block_id| must equal
          // |switch_merge_id|.
          //
          // This leads to a number of cases of what to do.
          //
          // 1. Does not jump to a block inside of the current construct.  In
          // this case, there is no conditional break, so we should return
          // |nullptr|.
          //
          // 2. Jumps to |merge_block_id| and a block inside the current
          // construct.  In this case, this branch conditionally breaks to the
          // end of the current construct, so return the current branch.
          //
          // 3. Otherwise, this branch may break, but not to the current merge
          // block.  So we continue with the block that is inside the loop.
          bool found_break = false;
          for (uint32_t i = 1; i < branch->NumInOperands(); i += 2) {
            uint32_t target = branch->GetSingleWordInOperand(i);
            if (target == merge_block_id) {
              found_break = true;
            } else if (target != loop_merge_id && target != loop_continue_id) {
              next_block_id = branch->GetSingleWordInOperand(i);
            }
          }

          if (next_block_id == 0) {
            // Case 1.
            return nullptr;
          }

          if (found_break) {
            // Case 2.
            return branch;
          }

          // The fall-through is case 3.
        }
        break;
      case SpvOpBranch:
        // Need to check if this is the header of a loop nested in the
        // selection construct.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          next_block_id = branch->GetSingleWordInOperand(0);
        }
        break;
      default:
        return nullptr;
    }
    start_block_id = next_block_id;
  }
  return nullptr;
}

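// Starting from the continue target |cont_id|, adds to
// |blocks_with_back_edges| every block that is reachable without passing
// through the loop header |header_id| or merge block |merge_id| and that
// branches back to the header.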
void DeadBranchElimPass::AddBlocksWithBackEdge(
    uint32_t cont_id, uint32_t header_id, uint32_t merge_id,
    std::unordered_set<BasicBlock*>* blocks_with_back_edges) {
  std::unordered_set<uint32_t> visited;
  visited.insert(cont_id);
  visited.insert(header_id);
  visited.insert(merge_id);

  std::vector<uint32_t> work_list;
  work_list.push_back(cont_id);

  while (!work_list.empty()) {
    uint32_t bb_id = work_list.back();
    work_list.pop_back();

    BasicBlock* bb = context()->get_instr_block(bb_id);

    bool has_back_edge = false;
    bb->ForEachSuccessorLabel([header_id, &visited, &work_list,
                               &has_back_edge](uint32_t* succ_label_id) {
      if (visited.insert(*succ_label_id).second) {
        work_list.push_back(*succ_label_id);
      }
      if (*succ_label_id == header_id) {
        has_back_edge = true;
      }
    });

    if (has_back_edge) {
      blocks_with_back_edges->insert(bb);
    }
  }
}

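// Returns true if the switch whose header is |switch_header_id| contains a
// nested break: a branch to the switch's merge block from a block inside the
// switch construct other than the header itself.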
bool DeadBranchElimPass::SwitchHasNestedBreak(uint32_t switch_header_id) {
  std::vector<BasicBlock*> block_in_construct;
  BasicBlock* start_block = context()->get_instr_block(switch_header_id);
  uint32_t merge_block_id = start_block->MergeBlockIdIfAny();

  StructuredCFGAnalysis* cfg_analysis = context()->GetStructuredCFGAnalysis();
  return !get_def_use_mgr()->WhileEachUser(
      merge_block_id,
      [this, cfg_analysis, switch_header_id](Instruction* inst) {
        if (!inst->IsBranch()) {
          return true;
        }

        BasicBlock* bb = context()->get_instr_block(inst);
        if (bb->id() == switch_header_id) {
          return true;
        }
        return (cfg_analysis->ContainingConstruct(inst) == switch_header_id &&
                bb->GetMergeInst() == nullptr);
      });
}

}  // namespace opt
}  // namespace spvtools