// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/jump-threading.h"
#include "src/compiler/backend/code-generator-impl.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                                \
  do {                                            \
    if (FLAG_trace_turbo_jt) PrintF(__VA_ARGS__); \
  } while (false)

namespace {

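// Bookkeeping for the forwarding DFS: |result| maps each block's RPO number
// to the block it should be forwarded to, |stack| is the DFS worklist, and
// |forwarded| records whether any block was actually redirected.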
struct JumpThreadingState {
  bool forwarded;
  ZoneVector<RpoNumber>& result;
  ZoneStack<RpoNumber>& stack;

  void Clear(size_t count) { result.assign(count, unvisited()); }
  void PushIfUnvisited(RpoNumber num) {
    if (result[num.ToInt()] == unvisited()) {
      stack.push(num);
      result[num.ToInt()] = onstack();
    }
  }
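  // Record where the block on top of the stack forwards to. Four cases:
  // the block targets itself (terminate here), |to| is unvisited (recurse
  // into it), |to| is already on the stack (a cycle of empty blocks; break
  // it), or |to| already has a final target (forward to that target).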
  void Forward(RpoNumber to) {
    RpoNumber from = stack.top();
    RpoNumber to_to = result[to.ToInt()];
    bool pop = true;
    if (to == from) {
      TRACE("  xx %d\n", from.ToInt());
      result[from.ToInt()] = from;
    } else if (to_to == unvisited()) {
      TRACE("  fw %d -> %d (recurse)\n", from.ToInt(), to.ToInt());
      stack.push(to);
      result[to.ToInt()] = onstack();
      pop = false;  // recurse.
    } else if (to_to == onstack()) {
      TRACE("  fw %d -> %d (cycle)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to;  // break the cycle.
      forwarded = true;
    } else {
      TRACE("  fw %d -> %d (forward)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to_to;  // forward the block.
      forwarded = true;
    }
    if (pop) stack.pop();
  }
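  // Sentinel values stored in |result| during the DFS: -1 marks a block
  // that has not been visited yet, -2 a block currently on the stack.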
  RpoNumber unvisited() { return RpoNumber::FromInt(-1); }
  RpoNumber onstack() { return RpoNumber::FromInt(-2); }
};

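// Returns true if |block|'s single predecessor ends in a branch-and-poison
// instruction. Such a block is never treated as empty, so that the poisoning
// performed on that branch path is preserved.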
bool IsBlockWithBranchPoisoning(InstructionSequence* code,
                                InstructionBlock* block) {
  if (block->PredecessorCount() != 1) return false;
  RpoNumber pred_rpo = (block->predecessors())[0];
  const InstructionBlock* pred = code->InstructionBlockAt(pred_rpo);
  if (pred->code_start() == pred->code_end()) return false;
  Instruction* instr = code->InstructionAt(pred->code_end() - 1);
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  return mode == kFlags_branch_and_poison;
}

}  // namespace

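// Computes, for every block, the block it can be forwarded to by following
// chains of empty blocks (blocks containing only nops, a jump, or a
// shareable return). Returns true if any block was forwarded; the result is
// then applied with ApplyForwarding().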
bool JumpThreading::ComputeForwarding(Zone* local_zone,
                                      ZoneVector<RpoNumber>* result,
                                      InstructionSequence* code,
                                      bool frame_at_start) {
  ZoneStack<RpoNumber> stack(local_zone);
  JumpThreadingState state = {false, *result, stack};
  state.Clear(code->InstructionBlockCount());
  RpoNumber empty_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_deconstruct_frame_return_size;
  RpoNumber empty_no_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_no_deconstruct_frame_return_size;

  // Iterate over the blocks forward, pushing the blocks onto the stack.
  for (auto const block : code->instruction_blocks()) {
    RpoNumber current = block->rpo_number();
    state.PushIfUnvisited(current);

    // Process the stack, which implements DFS through empty blocks.
    while (!state.stack.empty()) {
      InstructionBlock* block = code->InstructionBlockAt(state.stack.top());
      // Process the instructions in a block up to a non-empty instruction.
      TRACE("jt [%d] B%d\n", static_cast<int>(stack.size()),
            block->rpo_number().ToInt());
      RpoNumber fw = block->rpo_number();
      if (!IsBlockWithBranchPoisoning(code, block)) {
        bool fallthru = true;
        for (int i = block->code_start(); i < block->code_end(); ++i) {
          Instruction* instr = code->InstructionAt(i);
          if (!instr->AreMovesRedundant()) {
            // can't skip instructions with non-redundant moves.
            TRACE("  parallel move\n");
            fallthru = false;
          } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
            // can't skip instructions with flags continuations.
            TRACE("  flags\n");
            fallthru = false;
          } else if (instr->IsNop()) {
            // skip nops.
            TRACE("  nop\n");
            continue;
          } else if (instr->arch_opcode() == kArchJmp) {
            // try to forward the jump instruction.
            TRACE("  jmp\n");
            // if this block deconstructs the frame, we can't forward it.
            // TODO(mtrofin): we can still forward if we end up building
            // the frame at start. So we should move the decision of whether
            // to build a frame or not into the register allocator, and
            // trickle it here and to the code generator.
            if (frame_at_start || !(block->must_deconstruct_frame() ||
                                    block->must_construct_frame())) {
              fw = code->InputRpo(instr, 0);
            }
            fallthru = false;
          } else if (instr->IsRet()) {
            TRACE("  ret\n");
            if (fallthru) {
              CHECK_IMPLIES(block->must_construct_frame(),
                            block->must_deconstruct_frame());
              // Only handle returns with immediate/constant operands, since
              // they must always be the same for all returns in a function.
              // Dynamic return values might use different registers at
              // different return sites and therefore cannot be shared.
              if (instr->InputAt(0)->IsImmediate()) {
                int32_t return_size =
                    ImmediateOperand::cast(instr->InputAt(0))->inline_value();
                // Instructions can be shared only for blocks that share
                // the same |must_deconstruct_frame| attribute.
                if (block->must_deconstruct_frame()) {
                  if (empty_deconstruct_frame_return_block ==
                      RpoNumber::Invalid()) {
                    empty_deconstruct_frame_return_block = block->rpo_number();
                    empty_deconstruct_frame_return_size = return_size;
                  } else if (empty_deconstruct_frame_return_size ==
                             return_size) {
                    fw = empty_deconstruct_frame_return_block;
                    block->clear_must_deconstruct_frame();
                  }
                } else {
                  if (empty_no_deconstruct_frame_return_block ==
                      RpoNumber::Invalid()) {
                    empty_no_deconstruct_frame_return_block =
                        block->rpo_number();
                    empty_no_deconstruct_frame_return_size = return_size;
                  } else if (empty_no_deconstruct_frame_return_size ==
                             return_size) {
                    fw = empty_no_deconstruct_frame_return_block;
                  }
                }
              }
            }
            fallthru = false;
          } else {
            // can't skip other instructions.
            TRACE("  other\n");
            fallthru = false;
          }
          break;
        }
        if (fallthru) {
          int next = 1 + block->rpo_number().ToInt();
          if (next < code->InstructionBlockCount())
            fw = RpoNumber::FromInt(next);
        }
      }
      state.Forward(fw);
    }
  }

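  // In debug builds, check that the DFS resolved every block: no
  // unvisited/onstack sentinel may remain in the result.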
#ifdef DEBUG
  for (RpoNumber num : *result) {
    DCHECK(num.IsValid());
  }
#endif

  if (FLAG_trace_turbo_jt) {
    for (int i = 0; i < static_cast<int>(result->size()); i++) {
      TRACE("B%d ", i);
      int to = (*result)[i].ToInt();
      if (i != to) {
        TRACE("-> B%d\n", to);
      } else {
        TRACE("\n");
      }
    }
  }

  return state.forwarded;
}

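// Rewrites the instruction sequence according to |result|: redundant jumps
// in skipped blocks are overwritten with nops, RpoNumber immediates are
// patched to point at the forwarding targets, and assembly order numbers are
// reassigned so that skipped blocks do not occupy slots.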
void JumpThreading::ApplyForwarding(Zone* local_zone,
                                    ZoneVector<RpoNumber> const& result,
                                    InstructionSequence* code) {
  if (!FLAG_turbo_jt) return;

  ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone);

  // Skip empty blocks when the previous block doesn't fall through.
  bool prev_fallthru = true;
  for (auto const block : code->instruction_blocks()) {
    RpoNumber block_rpo = block->rpo_number();
    int block_num = block_rpo.ToInt();
    RpoNumber result_rpo = result[block_num];
    skip[block_num] = !prev_fallthru && result_rpo != block_rpo;

    if (result_rpo != block_rpo) {
      // We need the handler information to be propagated, so that branch
      // targets are annotated as necessary for control flow integrity
      // checks (when enabled).
      if (code->InstructionBlockAt(block_rpo)->IsHandler()) {
        code->InstructionBlockAt(result_rpo)->MarkHandler();
      }
    }

    bool fallthru = true;
    for (int i = block->code_start(); i < block->code_end(); ++i) {
      Instruction* instr = code->InstructionAt(i);
      FlagsMode mode = FlagsModeField::decode(instr->opcode());
      if (mode == kFlags_branch || mode == kFlags_branch_and_poison) {
        fallthru = false;  // branches don't fall through to the next block.
      } else if (instr->arch_opcode() == kArchJmp ||
                 instr->arch_opcode() == kArchRet) {
        if (skip[block_num]) {
          // Overwrite a redundant jump with a nop.
          TRACE("jt-fw nop @%d\n", i);
          instr->OverwriteWithNop();
          // If this block was marked as a handler, it can be unmarked now.
          code->InstructionBlockAt(block_rpo)->UnmarkHandler();
        }
        fallthru = false;  // jumps don't fall through to the next block.
      }
    }
    prev_fallthru = fallthru;
  }

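  // Jump and branch targets live in the instruction stream as RpoNumber
  // immediates, so every immediate that refers to a forwarded block must be
  // redirected to that block's final target.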
  // Patch RPO immediates.
  InstructionSequence::Immediates& immediates = code->immediates();
  for (size_t i = 0; i < immediates.size(); i++) {
    Constant constant = immediates[i];
    if (constant.type() == Constant::kRpoNumber) {
      RpoNumber rpo = constant.ToRpoNumber();
      RpoNumber fw = result[rpo.ToInt()];
      if (fw != rpo) immediates[i] = Constant(fw);
    }
  }

  // Renumber the blocks so that IsNextInAssemblyOrder() will return true,
  // even if there are skipped blocks in-between.
  int ao = 0;
  for (auto const block : code->ao_blocks()) {
    block->set_ao_number(RpoNumber::FromInt(ao));
    if (!skip[block->rpo_number().ToInt()]) ao++;
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8