// Copyright (c) 2017 The Khronos Group Inc.
// Copyright (c) 2017 Valve Corporation
// Copyright (c) 2017 LunarG Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef SOURCE_OPT_INLINE_PASS_H_
#define SOURCE_OPT_INLINE_PASS_H_

#include <algorithm>
#include <list>
#include <memory>
#include <set>
#include <unordered_map>
#include <vector>

#include "source/opt/decoration_manager.h"
#include "source/opt/module.h"
#include "source/opt/pass.h"

namespace spvtools {
namespace opt {

// See optimizer.hpp for documentation.
class InlinePass : public Pass {
  using cbb_ptr = const BasicBlock*;

 public:
  virtual ~InlinePass() = default;

 protected:
  InlinePass();

  // Add pointer to type to module and return resultId.  Returns 0 if the type
  // could not be created.
  uint32_t AddPointerToType(uint32_t type_id, SpvStorageClass storage_class);

  // Add unconditional branch to labelId to end of block block_ptr.
  void AddBranch(uint32_t labelId, std::unique_ptr<BasicBlock>* block_ptr);

  // Add conditional branch to end of block |block_ptr|.
  void AddBranchCond(uint32_t cond_id, uint32_t true_id, uint32_t false_id,
                     std::unique_ptr<BasicBlock>* block_ptr);

  // Add loop merge with merge block merge_id and continue target continue_id
  // to end of block block_ptr.
  void AddLoopMerge(uint32_t merge_id, uint32_t continue_id,
                    std::unique_ptr<BasicBlock>* block_ptr);

  // Add store of valId to ptrId to end of block block_ptr.
  void AddStore(uint32_t ptrId, uint32_t valId,
                std::unique_ptr<BasicBlock>* block_ptr);

  // Add load of ptrId into resultId to end of block block_ptr.
  void AddLoad(uint32_t typeId, uint32_t resultId, uint32_t ptrId,
               std::unique_ptr<BasicBlock>* block_ptr);

  // Return new label.
  std::unique_ptr<Instruction> NewLabel(uint32_t label_id);

  // Returns the id for the boolean false value. Looks in the module first
  // and creates it if not found. Remembers it for future calls.  Returns 0 if
  // the value could not be created.
  uint32_t GetFalseId();

  // Map callee params to caller args
  void MapParams(Function* calleeFn, BasicBlock::iterator call_inst_itr,
                 std::unordered_map<uint32_t, uint32_t>* callee2caller);

  // Clone and map callee locals.  Return true if successful.
  bool CloneAndMapLocals(Function* calleeFn,
                         std::vector<std::unique_ptr<Instruction>>* new_vars,
                         std::unordered_map<uint32_t, uint32_t>* callee2caller);

  // Create return variable for callee clone code.  The return type of
  // |calleeFn| must not be void.  Returns the id of the return variable if
  // created.  Returns 0 if the return variable could not be created.
  uint32_t CreateReturnVar(Function* calleeFn,
                           std::vector<std::unique_ptr<Instruction>>* new_vars);

  // Return true if the instruction must be in the same block in which its
  // result is used.
  bool IsSameBlockOp(const Instruction* inst) const;

  // Clone operands which must be in the same block as their consumer
  // instructions. Look in preCallSB for instructions that need cloning.
  // Look in postCallSB for instructions already cloned. Add the cloned
  // instruction to postCallSB. Return true if successful.
  bool CloneSameBlockOps(std::unique_ptr<Instruction>* inst,
                         std::unordered_map<uint32_t, uint32_t>* postCallSB,
                         std::unordered_map<uint32_t, Instruction*>* preCallSB,
                         std::unique_ptr<BasicBlock>* block_ptr);
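  //
  // Conceptual sketch (hypothetical names, not code taken from this pass):
  // given post_block, the new block that follows the inlined code, and
  // moved_inst, an instruction that originally followed the call, the two
  // maps are used roughly as follows:
  //
  //   std::unordered_map<uint32_t, Instruction*> preCallSB;  // same-block defs before the call
  //   std::unordered_map<uint32_t, uint32_t> postCallSB;     // clones created so far
  //   if (CloneSameBlockOps(&moved_inst, &postCallSB, &preCallSB, &post_block))
  //     post_block->AddInstruction(std::move(moved_inst));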

  // Return in new_blocks the result of inlining the call at call_inst_itr
  // within its block at call_block_itr. The block at call_block_itr can
  // just be replaced with the blocks in new_blocks. Any additional branches
  // are avoided. Debug instructions are cloned along with their callee
  // instructions. Early returns are replaced by a store to a local return
  // variable and a branch to a (created) exit block where the local variable
  // is returned. Formal parameters are trivially mapped to their actual
  // parameters. Note that the first block in new_blocks retains the label
  // of the original calling block. Also note that if an exit block is
  // created, it is the last block of new_blocks.
  //
  // Also return in new_vars additional OpVariable instructions required by
  // and to be inserted into the caller function after the block at
  // call_block_itr is replaced with new_blocks.
  //
  // Returns true if successful.
  bool GenInlineCode(std::vector<std::unique_ptr<BasicBlock>>* new_blocks,
                     std::vector<std::unique_ptr<Instruction>>* new_vars,
                     BasicBlock::iterator call_inst_itr,
                     UptrVectorIterator<BasicBlock> call_block_itr);
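  //
  // Illustrative usage sketch (hypothetical helper, not code from this pass
  // or its derived passes): a caller typically checks the call site with
  // IsInlinableFunctionCall, generates the inlined code, and then splices
  // the returned blocks and variables back into the calling function:
  //
  //   bool TryInlineCall(BasicBlock::iterator call_inst_itr,
  //                      UptrVectorIterator<BasicBlock> call_block_itr) {
  //     if (!IsInlinableFunctionCall(&*call_inst_itr)) return false;
  //     std::vector<std::unique_ptr<BasicBlock>> new_blocks;
  //     std::vector<std::unique_ptr<Instruction>> new_vars;
  //     if (!GenInlineCode(&new_blocks, &new_vars, call_inst_itr,
  //                        call_block_itr)) {
  //       return false;
  //     }
  //     // Replace the block at call_block_itr with new_blocks and insert
  //     // new_vars into the calling function's entry block.
  //     return true;
  //   }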

  // Return true if |inst| is a function call that can be inlined.
  bool IsInlinableFunctionCall(const Instruction* inst);

  // Return true if |func| does not have a return that is
  // nested in a structured if, switch or loop.
  bool HasNoReturnInStructuredConstruct(Function* func);

  // Return true if |func| has no return in a loop. The current analysis
  // requires structured control flow, so return false if control flow is not
  // structured, i.e., the module is not a shader.
  bool HasNoReturnInLoop(Function* func);

  // Find all functions with multiple returns and no returns in loops
  void AnalyzeReturns(Function* func);

  // Return true if |func| is a function that can be inlined.
  bool IsInlinableFunction(Function* func);

  // Update phis in succeeding blocks to point to new last block
  void UpdateSucceedingPhis(
      std::vector<std::unique_ptr<BasicBlock>>& new_blocks);

  // Initialize state for optimization of |module|
  void InitializeInline();

  // Map from function's result id to function.
  std::unordered_map<uint32_t, Function*> id2function_;

  // Map from block's label id to block. TODO(dnovillo): This is superfluous wrt
  // CFG. It has functionality not present in CFG. Consolidate.
  std::unordered_map<uint32_t, BasicBlock*> id2block_;

  // Set of ids of functions with early return.
  std::set<uint32_t> early_return_funcs_;

  // Set of ids of functions with no returns in loop
  std::set<uint32_t> no_return_in_loop_;

  // Set of ids of inlinable functions
  std::set<uint32_t> inlinable_;

  // result id for OpConstantFalse
  uint32_t false_id_;
};

}  // namespace opt
}  // namespace spvtools

#endif  // SOURCE_OPT_INLINE_PASS_H_