1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/bytecode-analysis.h"
6
7 #include "src/interpreter/bytecode-array-iterator.h"
8 #include "src/interpreter/bytecode-array-random-iterator.h"
9 #include "src/utils/ostreams.h"
10 #include "src/objects/objects-inl.h"
11
12 namespace v8 {
13 namespace internal {
14 namespace compiler {
15
16 using interpreter::Bytecode;
17 using interpreter::Bytecodes;
18 using interpreter::OperandType;
19
BytecodeLoopAssignments(int parameter_count,int register_count,Zone * zone)20 BytecodeLoopAssignments::BytecodeLoopAssignments(int parameter_count,
21 int register_count, Zone* zone)
22 : parameter_count_(parameter_count),
23 bit_vector_(
24 zone->New<BitVector>(parameter_count + register_count, zone)) {}
25
Add(interpreter::Register r)26 void BytecodeLoopAssignments::Add(interpreter::Register r) {
27 if (r.is_parameter()) {
28 bit_vector_->Add(r.ToParameterIndex(parameter_count_));
29 } else {
30 bit_vector_->Add(parameter_count_ + r.index());
31 }
32 }
33
AddList(interpreter::Register r,uint32_t count)34 void BytecodeLoopAssignments::AddList(interpreter::Register r, uint32_t count) {
35 if (r.is_parameter()) {
36 for (uint32_t i = 0; i < count; i++) {
37 DCHECK(interpreter::Register(r.index() + i).is_parameter());
38 bit_vector_->Add(r.ToParameterIndex(parameter_count_) + i);
39 }
40 } else {
41 for (uint32_t i = 0; i < count; i++) {
42 DCHECK(!interpreter::Register(r.index() + i).is_parameter());
43 bit_vector_->Add(parameter_count_ + r.index() + i);
44 }
45 }
46 }
47
48
Union(const BytecodeLoopAssignments & other)49 void BytecodeLoopAssignments::Union(const BytecodeLoopAssignments& other) {
50 bit_vector_->Union(*other.bit_vector_);
51 }
52
ContainsParameter(int index) const53 bool BytecodeLoopAssignments::ContainsParameter(int index) const {
54 DCHECK_GE(index, 0);
55 DCHECK_LT(index, parameter_count());
56 return bit_vector_->Contains(index);
57 }
58
ContainsLocal(int index) const59 bool BytecodeLoopAssignments::ContainsLocal(int index) const {
60 DCHECK_GE(index, 0);
61 DCHECK_LT(index, local_count());
62 return bit_vector_->Contains(parameter_count_ + index);
63 }
64
ResumeJumpTarget(int suspend_id,int target_offset,int final_target_offset)65 ResumeJumpTarget::ResumeJumpTarget(int suspend_id, int target_offset,
66 int final_target_offset)
67 : suspend_id_(suspend_id),
68 target_offset_(target_offset),
69 final_target_offset_(final_target_offset) {}
70
Leaf(int suspend_id,int target_offset)71 ResumeJumpTarget ResumeJumpTarget::Leaf(int suspend_id, int target_offset) {
72 return ResumeJumpTarget(suspend_id, target_offset, target_offset);
73 }
74
// Re-targets `next` so that the jump first goes through the given loop header
// (keeping loops reducible), while preserving the eventual resume offset.
ResumeJumpTarget ResumeJumpTarget::AtLoopHeader(int loop_header_offset,
                                                const ResumeJumpTarget& next) {
  return ResumeJumpTarget(next.suspend_id(), loop_header_offset,
                          next.target_offset());
}
80
BytecodeAnalysis(Handle<BytecodeArray> bytecode_array,Zone * zone,BailoutId osr_bailout_id,bool analyze_liveness)81 BytecodeAnalysis::BytecodeAnalysis(Handle<BytecodeArray> bytecode_array,
82 Zone* zone, BailoutId osr_bailout_id,
83 bool analyze_liveness)
84 : bytecode_array_(bytecode_array),
85 zone_(zone),
86 osr_bailout_id_(osr_bailout_id),
87 analyze_liveness_(analyze_liveness),
88 loop_stack_(zone),
89 loop_end_index_queue_(zone),
90 resume_jump_targets_(zone),
91 end_to_header_(zone),
92 header_to_info_(zone),
93 osr_entry_point_(-1),
94 liveness_map_(bytecode_array->length(), zone) {
95 Analyze();
96 }
97
98 namespace {
99
UpdateInLiveness(Bytecode bytecode,BytecodeLivenessState * in_liveness,const interpreter::BytecodeArrayAccessor & accessor)100 void UpdateInLiveness(Bytecode bytecode, BytecodeLivenessState* in_liveness,
101 const interpreter::BytecodeArrayAccessor& accessor) {
102 int num_operands = Bytecodes::NumberOfOperands(bytecode);
103 const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
104
105 // Special case Suspend and Resume to just pass through liveness.
106 if (bytecode == Bytecode::kSuspendGenerator) {
107 // The generator object has to be live.
108 in_liveness->MarkRegisterLive(accessor.GetRegisterOperand(0).index());
109 // Suspend additionally reads and returns the accumulator
110 DCHECK(Bytecodes::ReadsAccumulator(bytecode));
111 in_liveness->MarkAccumulatorLive();
112 return;
113 }
114 if (bytecode == Bytecode::kResumeGenerator) {
115 // The generator object has to be live.
116 in_liveness->MarkRegisterLive(accessor.GetRegisterOperand(0).index());
117 return;
118 }
119
120 if (Bytecodes::WritesAccumulator(bytecode)) {
121 in_liveness->MarkAccumulatorDead();
122 }
123 for (int i = 0; i < num_operands; ++i) {
124 switch (operand_types[i]) {
125 case OperandType::kRegOut: {
126 interpreter::Register r = accessor.GetRegisterOperand(i);
127 if (!r.is_parameter()) {
128 in_liveness->MarkRegisterDead(r.index());
129 }
130 break;
131 }
132 case OperandType::kRegOutList: {
133 interpreter::Register r = accessor.GetRegisterOperand(i++);
134 uint32_t reg_count = accessor.GetRegisterCountOperand(i);
135 if (!r.is_parameter()) {
136 for (uint32_t j = 0; j < reg_count; ++j) {
137 DCHECK(!interpreter::Register(r.index() + j).is_parameter());
138 in_liveness->MarkRegisterDead(r.index() + j);
139 }
140 }
141 break;
142 }
143 case OperandType::kRegOutPair: {
144 interpreter::Register r = accessor.GetRegisterOperand(i);
145 if (!r.is_parameter()) {
146 DCHECK(!interpreter::Register(r.index() + 1).is_parameter());
147 in_liveness->MarkRegisterDead(r.index());
148 in_liveness->MarkRegisterDead(r.index() + 1);
149 }
150 break;
151 }
152 case OperandType::kRegOutTriple: {
153 interpreter::Register r = accessor.GetRegisterOperand(i);
154 if (!r.is_parameter()) {
155 DCHECK(!interpreter::Register(r.index() + 1).is_parameter());
156 DCHECK(!interpreter::Register(r.index() + 2).is_parameter());
157 in_liveness->MarkRegisterDead(r.index());
158 in_liveness->MarkRegisterDead(r.index() + 1);
159 in_liveness->MarkRegisterDead(r.index() + 2);
160 }
161 break;
162 }
163 default:
164 DCHECK(!Bytecodes::IsRegisterOutputOperandType(operand_types[i]));
165 break;
166 }
167 }
168
169 if (Bytecodes::ReadsAccumulator(bytecode)) {
170 in_liveness->MarkAccumulatorLive();
171 }
172 for (int i = 0; i < num_operands; ++i) {
173 switch (operand_types[i]) {
174 case OperandType::kReg: {
175 interpreter::Register r = accessor.GetRegisterOperand(i);
176 if (!r.is_parameter()) {
177 in_liveness->MarkRegisterLive(r.index());
178 }
179 break;
180 }
181 case OperandType::kRegPair: {
182 interpreter::Register r = accessor.GetRegisterOperand(i);
183 if (!r.is_parameter()) {
184 DCHECK(!interpreter::Register(r.index() + 1).is_parameter());
185 in_liveness->MarkRegisterLive(r.index());
186 in_liveness->MarkRegisterLive(r.index() + 1);
187 }
188 break;
189 }
190 case OperandType::kRegList: {
191 interpreter::Register r = accessor.GetRegisterOperand(i++);
192 uint32_t reg_count = accessor.GetRegisterCountOperand(i);
193 if (!r.is_parameter()) {
194 for (uint32_t j = 0; j < reg_count; ++j) {
195 DCHECK(!interpreter::Register(r.index() + j).is_parameter());
196 in_liveness->MarkRegisterLive(r.index() + j);
197 }
198 }
199 break;
200 }
201 default:
202 DCHECK(!Bytecodes::IsRegisterInputOperandType(operand_types[i]));
203 break;
204 }
205 }
206 }
207
UpdateOutLiveness(Bytecode bytecode,BytecodeLivenessState * out_liveness,BytecodeLivenessState * next_bytecode_in_liveness,const interpreter::BytecodeArrayAccessor & accessor,Handle<BytecodeArray> bytecode_array,const BytecodeLivenessMap & liveness_map)208 void UpdateOutLiveness(Bytecode bytecode, BytecodeLivenessState* out_liveness,
209 BytecodeLivenessState* next_bytecode_in_liveness,
210 const interpreter::BytecodeArrayAccessor& accessor,
211 Handle<BytecodeArray> bytecode_array,
212 const BytecodeLivenessMap& liveness_map) {
213 int current_offset = accessor.current_offset();
214
215 // Special case Suspend and Resume to just pass through liveness.
216 if (bytecode == Bytecode::kSuspendGenerator ||
217 bytecode == Bytecode::kResumeGenerator) {
218 out_liveness->Union(*next_bytecode_in_liveness);
219 return;
220 }
221
222 // Update from jump target (if any). Skip loops, we update these manually in
223 // the liveness iterations.
224 if (Bytecodes::IsForwardJump(bytecode)) {
225 int target_offset = accessor.GetJumpTargetOffset();
226 out_liveness->Union(*liveness_map.GetInLiveness(target_offset));
227 } else if (Bytecodes::IsSwitch(bytecode)) {
228 for (const auto& entry : accessor.GetJumpTableTargetOffsets()) {
229 out_liveness->Union(*liveness_map.GetInLiveness(entry.target_offset));
230 }
231 }
232
233 // Update from next bytecode (unless there isn't one or this is an
234 // unconditional jump).
235 if (next_bytecode_in_liveness != nullptr &&
236 !Bytecodes::IsUnconditionalJump(bytecode)) {
237 out_liveness->Union(*next_bytecode_in_liveness);
238 }
239
240 // Update from exception handler (if any).
241 if (!interpreter::Bytecodes::IsWithoutExternalSideEffects(bytecode)) {
242 int handler_context;
243 // TODO(leszeks): We should look up this range only once per entry.
244 HandlerTable table(*bytecode_array);
245 int handler_offset =
246 table.LookupRange(current_offset, &handler_context, nullptr);
247
248 if (handler_offset != -1) {
249 bool was_accumulator_live = out_liveness->AccumulatorIsLive();
250 out_liveness->Union(*liveness_map.GetInLiveness(handler_offset));
251 out_liveness->MarkRegisterLive(handler_context);
252 if (!was_accumulator_live) {
253 // The accumulator is reset to the exception on entry into a handler,
254 // and so shouldn't be considered live coming out of this bytecode just
255 // because it's live coming into the handler. So, kill the accumulator
256 // if the handler is the only thing that made it live.
257 out_liveness->MarkAccumulatorDead();
258
259 // TODO(leszeks): Ideally the accumulator wouldn't be considered live at
260 // the start of the handler, but looking up if the current bytecode is
261 // the start of a handler is not free, so we should only do it if we
262 // decide it's necessary.
263 }
264 }
265 }
266 }
267
UpdateLiveness(Bytecode bytecode,BytecodeLiveness const & liveness,BytecodeLivenessState ** next_bytecode_in_liveness,const interpreter::BytecodeArrayAccessor & accessor,Handle<BytecodeArray> bytecode_array,const BytecodeLivenessMap & liveness_map)268 void UpdateLiveness(Bytecode bytecode, BytecodeLiveness const& liveness,
269 BytecodeLivenessState** next_bytecode_in_liveness,
270 const interpreter::BytecodeArrayAccessor& accessor,
271 Handle<BytecodeArray> bytecode_array,
272 const BytecodeLivenessMap& liveness_map) {
273 UpdateOutLiveness(bytecode, liveness.out, *next_bytecode_in_liveness,
274 accessor, bytecode_array, liveness_map);
275 liveness.in->CopyFrom(*liveness.out);
276 UpdateInLiveness(bytecode, liveness.in, accessor);
277
278 *next_bytecode_in_liveness = liveness.in;
279 }
280
UpdateAssignments(Bytecode bytecode,BytecodeLoopAssignments * assignments,const interpreter::BytecodeArrayAccessor & accessor)281 void UpdateAssignments(Bytecode bytecode, BytecodeLoopAssignments* assignments,
282 const interpreter::BytecodeArrayAccessor& accessor) {
283 int num_operands = Bytecodes::NumberOfOperands(bytecode);
284 const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
285
286 for (int i = 0; i < num_operands; ++i) {
287 switch (operand_types[i]) {
288 case OperandType::kRegOut: {
289 assignments->Add(accessor.GetRegisterOperand(i));
290 break;
291 }
292 case OperandType::kRegOutList: {
293 interpreter::Register r = accessor.GetRegisterOperand(i++);
294 uint32_t reg_count = accessor.GetRegisterCountOperand(i);
295 assignments->AddList(r, reg_count);
296 break;
297 }
298 case OperandType::kRegOutPair: {
299 assignments->AddList(accessor.GetRegisterOperand(i), 2);
300 break;
301 }
302 case OperandType::kRegOutTriple: {
303 assignments->AddList(accessor.GetRegisterOperand(i), 3);
304 break;
305 }
306 default:
307 DCHECK(!Bytecodes::IsRegisterOutputOperandType(operand_types[i]));
308 break;
309 }
310 }
311 }
312
313 } // namespace
314
Analyze()315 void BytecodeAnalysis::Analyze() {
316 loop_stack_.push({-1, nullptr});
317
318 BytecodeLivenessState* next_bytecode_in_liveness = nullptr;
319 int generator_switch_index = -1;
320 int osr_loop_end_offset = osr_bailout_id_.ToInt();
321 DCHECK_EQ(osr_loop_end_offset < 0, osr_bailout_id_.IsNone());
322
323 interpreter::BytecodeArrayRandomIterator iterator(bytecode_array(), zone());
324 for (iterator.GoToEnd(); iterator.IsValid(); --iterator) {
325 Bytecode bytecode = iterator.current_bytecode();
326 int current_offset = iterator.current_offset();
327
328 if (bytecode == Bytecode::kSwitchOnGeneratorState) {
329 DCHECK_EQ(generator_switch_index, -1);
330 generator_switch_index = iterator.current_index();
331 } else if (bytecode == Bytecode::kJumpLoop) {
332 // Every byte up to and including the last byte within the backwards jump
333 // instruction is considered part of the loop, set loop end accordingly.
334 int loop_end = current_offset + iterator.current_bytecode_size();
335 int loop_header = iterator.GetJumpTargetOffset();
336 PushLoop(loop_header, loop_end);
337
338 if (current_offset == osr_loop_end_offset) {
339 osr_entry_point_ = loop_header;
340 } else if (current_offset < osr_loop_end_offset) {
341 // Assert that we've found the osr_entry_point if we've gone past the
342 // osr_loop_end_offset. Note, we are iterating the bytecode in reverse,
343 // so the less-than in the above condition is correct.
344 DCHECK_LE(0, osr_entry_point_);
345 }
346
347 // Save the index so that we can do another pass later.
348 if (analyze_liveness_) {
349 loop_end_index_queue_.push_back(iterator.current_index());
350 }
351 }
352
353 // We have to pop from loop_stack_ if:
354 // 1) We entered the body of the loop
355 // 2) If we have a JumpLoop that jumps to itself (i.e an empty loop)
356 bool pop_current_loop = loop_stack_.size() > 1 &&
357 (bytecode != Bytecode::kJumpLoop ||
358 iterator.GetJumpTargetOffset() == current_offset);
359
360 if (pop_current_loop) {
361 LoopStackEntry& current_loop = loop_stack_.top();
362 LoopInfo* current_loop_info = current_loop.loop_info;
363
364 // TODO(leszeks): Ideally, we'd only set values that were assigned in
365 // the loop *and* are live when the loop exits. However, this requires
366 // tracking the out-liveness of *all* loop exits, which is not
367 // information we currently have.
368 UpdateAssignments(bytecode, ¤t_loop_info->assignments(), iterator);
369
370 // Update suspend counts for this loop.
371 if (bytecode == Bytecode::kSuspendGenerator) {
372 int suspend_id = iterator.GetUnsignedImmediateOperand(3);
373 int resume_offset = current_offset + iterator.current_bytecode_size();
374 current_loop_info->AddResumeTarget(
375 ResumeJumpTarget::Leaf(suspend_id, resume_offset));
376 }
377
378 // If we've reached the header of the loop, pop it off the stack.
379 if (current_offset == current_loop.header_offset) {
380 loop_stack_.pop();
381 if (loop_stack_.size() > 1) {
382 // If there is still an outer loop, propagate inner loop assignments.
383 LoopInfo* parent_loop_info = loop_stack_.top().loop_info;
384
385 parent_loop_info->assignments().Union(
386 current_loop_info->assignments());
387
388 // Also, propagate resume targets. Instead of jumping to the target
389 // itself, the outer loop will jump to this loop header for any
390 // targets that are inside the current loop, so that this loop stays
391 // reducible. Hence, a nested loop of the form:
392 //
393 // switch (#1 -> suspend1, #2 -> suspend2)
394 // loop {
395 // suspend1: suspend #1
396 // loop {
397 // suspend2: suspend #2
398 // }
399 // }
400 //
401 // becomes:
402 //
403 // switch (#1 -> loop1, #2 -> loop1)
404 // loop1: loop {
405 // switch (#1 -> suspend1, #2 -> loop2)
406 // suspend1: suspend #1
407 // loop2: loop {
408 // switch (#2 -> suspend2)
409 // suspend2: suspend #2
410 // }
411 // }
412 for (const auto& target : current_loop_info->resume_jump_targets()) {
413 parent_loop_info->AddResumeTarget(
414 ResumeJumpTarget::AtLoopHeader(current_offset, target));
415 }
416
417 } else {
418 // Otherwise, just propagate inner loop suspends to top-level.
419 for (const auto& target : current_loop_info->resume_jump_targets()) {
420 resume_jump_targets_.push_back(
421 ResumeJumpTarget::AtLoopHeader(current_offset, target));
422 }
423 }
424 }
425 } else if (bytecode == Bytecode::kSuspendGenerator) {
426 // If we're not in a loop, we still need to look for suspends.
427 // TODO(leszeks): It would be nice to de-duplicate this with the in-loop
428 // case
429 int suspend_id = iterator.GetUnsignedImmediateOperand(3);
430 int resume_offset = current_offset + iterator.current_bytecode_size();
431 resume_jump_targets_.push_back(
432 ResumeJumpTarget::Leaf(suspend_id, resume_offset));
433 }
434
435 if (analyze_liveness_) {
436 BytecodeLiveness const& liveness = liveness_map_.InitializeLiveness(
437 current_offset, bytecode_array()->register_count(), zone());
438 UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator,
439 bytecode_array(), liveness_map_);
440 }
441 }
442
443 DCHECK_EQ(loop_stack_.size(), 1u);
444 DCHECK_EQ(loop_stack_.top().header_offset, -1);
445
446 DCHECK(ResumeJumpTargetsAreValid());
447
448 if (!analyze_liveness_) return;
449
450 // At this point, every bytecode has a valid in and out liveness, except for
451 // propagating liveness across back edges (i.e. JumpLoop). Subsequent liveness
452 // analysis iterations can only add additional liveness bits that are pulled
453 // across these back edges.
454 //
455 // Furthermore, a loop header's in-liveness can only change based on any
456 // bytecodes *after* the loop end -- it cannot change as a result of the
457 // JumpLoop liveness being updated, as the only liveness bits than can be
458 // added to the loop body are those of the loop header.
459 //
460 // So, if we know that the liveness of bytecodes after a loop header won't
461 // change (e.g. because there are no loops in them, or we have already ensured
462 // those loops are valid), we can safely update the loop end and pass over the
463 // loop body, and then never have to pass over that loop end again, because we
464 // have shown that its target, the loop header, can't change from the entries
465 // after the loop, and can't change from any loop body pass.
466 //
467 // This means that in a pass, we can iterate backwards over the bytecode
468 // array, process any loops that we encounter, and on subsequent passes we can
469 // skip processing those loops (though we still have to process inner loops).
470 //
471 // Equivalently, we can queue up loop ends from back to front, and pass over
472 // the loops in that order, as this preserves both the bottom-to-top and
473 // outer-to-inner requirements.
474
475 for (int loop_end_index : loop_end_index_queue_) {
476 iterator.GoToIndex(loop_end_index);
477
478 DCHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
479
480 int header_offset = iterator.GetJumpTargetOffset();
481 int end_offset = iterator.current_offset();
482
483 BytecodeLiveness& header_liveness =
484 liveness_map_.GetLiveness(header_offset);
485 BytecodeLiveness& end_liveness = liveness_map_.GetLiveness(end_offset);
486
487 if (!end_liveness.out->UnionIsChanged(*header_liveness.in)) {
488 // Only update the loop body if the loop end liveness changed.
489 continue;
490 }
491 end_liveness.in->CopyFrom(*end_liveness.out);
492 next_bytecode_in_liveness = end_liveness.in;
493
494 // Advance into the loop body.
495 --iterator;
496 for (; iterator.current_offset() > header_offset; --iterator) {
497 Bytecode bytecode = iterator.current_bytecode();
498 int current_offset = iterator.current_offset();
499 BytecodeLiveness const& liveness =
500 liveness_map_.GetLiveness(current_offset);
501 UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator,
502 bytecode_array(), liveness_map_);
503 }
504 // Now we are at the loop header. Since the in-liveness of the header
505 // can't change, we need only to update the out-liveness.
506 UpdateOutLiveness(iterator.current_bytecode(), header_liveness.out,
507 next_bytecode_in_liveness, iterator, bytecode_array(),
508 liveness_map_);
509 }
510
511 // Process the generator switch statement separately, once the loops are done.
512 // This has to be a separate pass because the generator switch can jump into
513 // the middle of loops (and is the only kind of jump that can jump across a
514 // loop header).
515 if (generator_switch_index != -1) {
516 iterator.GoToIndex(generator_switch_index);
517 DCHECK_EQ(iterator.current_bytecode(), Bytecode::kSwitchOnGeneratorState);
518
519 int current_offset = iterator.current_offset();
520 BytecodeLiveness& switch_liveness =
521 liveness_map_.GetLiveness(current_offset);
522
523 bool any_changed = false;
524 for (const auto& entry : iterator.GetJumpTableTargetOffsets()) {
525 if (switch_liveness.out->UnionIsChanged(
526 *liveness_map_.GetInLiveness(entry.target_offset))) {
527 any_changed = true;
528 }
529 }
530
531 // If the switch liveness changed, we have to propagate it up the remaining
532 // bytecodes before it.
533 if (any_changed) {
534 switch_liveness.in->CopyFrom(*switch_liveness.out);
535 UpdateInLiveness(Bytecode::kSwitchOnGeneratorState, switch_liveness.in,
536 iterator);
537 next_bytecode_in_liveness = switch_liveness.in;
538 for (--iterator; iterator.IsValid(); --iterator) {
539 Bytecode bytecode = iterator.current_bytecode();
540 int current_offset = iterator.current_offset();
541 BytecodeLiveness const& liveness =
542 liveness_map_.GetLiveness(current_offset);
543
544 // There shouldn't be any more loops.
545 DCHECK_NE(bytecode, Bytecode::kJumpLoop);
546
547 UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator,
548 bytecode_array(), liveness_map_);
549 }
550 }
551 }
552
553 DCHECK(analyze_liveness_);
554 if (FLAG_trace_environment_liveness) {
555 StdoutStream of;
556 PrintLivenessTo(of);
557 }
558
559 DCHECK(LivenessIsValid());
560 }
561
PushLoop(int loop_header,int loop_end)562 void BytecodeAnalysis::PushLoop(int loop_header, int loop_end) {
563 DCHECK_LT(loop_header, loop_end);
564 DCHECK_LT(loop_stack_.top().header_offset, loop_header);
565 DCHECK_EQ(end_to_header_.find(loop_end), end_to_header_.end());
566 DCHECK_EQ(header_to_info_.find(loop_header), header_to_info_.end());
567
568 int parent_offset = loop_stack_.top().header_offset;
569
570 end_to_header_.insert({loop_end, loop_header});
571 auto it = header_to_info_.insert(
572 {loop_header, LoopInfo(parent_offset, bytecode_array_->parameter_count(),
573 bytecode_array_->register_count(), zone_)});
574 // Get the loop info pointer from the output of insert.
575 LoopInfo* loop_info = &it.first->second;
576
577 loop_stack_.push({loop_header, loop_info});
578 }
579
IsLoopHeader(int offset) const580 bool BytecodeAnalysis::IsLoopHeader(int offset) const {
581 return header_to_info_.find(offset) != header_to_info_.end();
582 }
583
GetLoopOffsetFor(int offset) const584 int BytecodeAnalysis::GetLoopOffsetFor(int offset) const {
585 auto loop_end_to_header = end_to_header_.upper_bound(offset);
586 // If there is no next end => offset is not in a loop.
587 if (loop_end_to_header == end_to_header_.end()) {
588 return -1;
589 }
590 // If the header precedes the offset, this is the loop
591 //
592 // .> header <--loop_end_to_header
593 // |
594 // | <--offset
595 // |
596 // `- end
597 if (loop_end_to_header->second <= offset) {
598 return loop_end_to_header->second;
599 }
600 // Otherwise there is a (potentially nested) loop after this offset.
601 //
602 // <--offset
603 //
604 // .> header
605 // |
606 // | .> header <--loop_end_to_header
607 // | |
608 // | `- end
609 // |
610 // `- end
611 // We just return the parent of the next loop (might be -1).
612 DCHECK(header_to_info_.upper_bound(offset) != header_to_info_.end());
613
614 return header_to_info_.upper_bound(offset)->second.parent_offset();
615 }
616
GetLoopInfoFor(int header_offset) const617 const LoopInfo& BytecodeAnalysis::GetLoopInfoFor(int header_offset) const {
618 DCHECK(IsLoopHeader(header_offset));
619
620 return header_to_info_.find(header_offset)->second;
621 }
622
GetInLivenessFor(int offset) const623 const BytecodeLivenessState* BytecodeAnalysis::GetInLivenessFor(
624 int offset) const {
625 if (!analyze_liveness_) return nullptr;
626
627 return liveness_map_.GetInLiveness(offset);
628 }
629
GetOutLivenessFor(int offset) const630 const BytecodeLivenessState* BytecodeAnalysis::GetOutLivenessFor(
631 int offset) const {
632 if (!analyze_liveness_) return nullptr;
633
634 return liveness_map_.GetOutLiveness(offset);
635 }
636
PrintLivenessTo(std::ostream & os) const637 std::ostream& BytecodeAnalysis::PrintLivenessTo(std::ostream& os) const {
638 interpreter::BytecodeArrayIterator iterator(bytecode_array());
639
640 for (; !iterator.done(); iterator.Advance()) {
641 int current_offset = iterator.current_offset();
642
643 const BitVector& in_liveness =
644 GetInLivenessFor(current_offset)->bit_vector();
645 const BitVector& out_liveness =
646 GetOutLivenessFor(current_offset)->bit_vector();
647
648 for (int i = 0; i < in_liveness.length(); ++i) {
649 os << (in_liveness.Contains(i) ? "L" : ".");
650 }
651 os << " -> ";
652
653 for (int i = 0; i < out_liveness.length(); ++i) {
654 os << (out_liveness.Contains(i) ? "L" : ".");
655 }
656
657 os << " | " << current_offset << ": ";
658 iterator.PrintTo(os) << std::endl;
659 }
660
661 return os;
662 }
663
664 #if DEBUG
ResumeJumpTargetsAreValid()665 bool BytecodeAnalysis::ResumeJumpTargetsAreValid() {
666 bool valid = true;
667
668 // Find the generator switch.
669 interpreter::BytecodeArrayRandomIterator iterator(bytecode_array(), zone());
670 for (iterator.GoToStart(); iterator.IsValid(); ++iterator) {
671 if (iterator.current_bytecode() == Bytecode::kSwitchOnGeneratorState) {
672 break;
673 }
674 }
675
676 // If the iterator is invalid, we've reached the end without finding the
677 // generator switch. So, ensure there are no jump targets and exit.
678 if (!iterator.IsValid()) {
679 // Check top-level.
680 if (!resume_jump_targets().empty()) {
681 PrintF(stderr,
682 "Found %zu top-level resume targets but no resume switch\n",
683 resume_jump_targets().size());
684 valid = false;
685 }
686 // Check loops.
687 for (const std::pair<const int, LoopInfo>& loop_info : header_to_info_) {
688 if (!loop_info.second.resume_jump_targets().empty()) {
689 PrintF(stderr,
690 "Found %zu resume targets at loop at offset %d, but no resume "
691 "switch\n",
692 loop_info.second.resume_jump_targets().size(), loop_info.first);
693 valid = false;
694 }
695 }
696
697 return valid;
698 }
699
700 // Otherwise, we've found the resume switch. Check that the top level jumps
701 // only to leaves and loop headers, then check that each loop header handles
702 // all the unresolved jumps, also jumping only to leaves and inner loop
703 // headers.
704
705 // First collect all required suspend ids.
706 std::map<int, int> unresolved_suspend_ids;
707 for (const interpreter::JumpTableTargetOffset& offset :
708 iterator.GetJumpTableTargetOffsets()) {
709 int suspend_id = offset.case_value;
710 int resume_offset = offset.target_offset;
711
712 unresolved_suspend_ids[suspend_id] = resume_offset;
713 }
714
715 // Check top-level.
716 if (!ResumeJumpTargetLeavesResolveSuspendIds(-1, resume_jump_targets(),
717 &unresolved_suspend_ids)) {
718 valid = false;
719 }
720 // Check loops.
721 for (const std::pair<const int, LoopInfo>& loop_info : header_to_info_) {
722 if (!ResumeJumpTargetLeavesResolveSuspendIds(
723 loop_info.first, loop_info.second.resume_jump_targets(),
724 &unresolved_suspend_ids)) {
725 valid = false;
726 }
727 }
728
729 // Check that everything is resolved.
730 if (!unresolved_suspend_ids.empty()) {
731 PrintF(stderr,
732 "Found suspend ids that are not resolved by a final leaf resume "
733 "jump:\n");
734
735 for (const std::pair<const int, int>& target : unresolved_suspend_ids) {
736 PrintF(stderr, " %d -> %d\n", target.first, target.second);
737 }
738 valid = false;
739 }
740
741 return valid;
742 }
743
ResumeJumpTargetLeavesResolveSuspendIds(int parent_offset,const ZoneVector<ResumeJumpTarget> & resume_jump_targets,std::map<int,int> * unresolved_suspend_ids)744 bool BytecodeAnalysis::ResumeJumpTargetLeavesResolveSuspendIds(
745 int parent_offset, const ZoneVector<ResumeJumpTarget>& resume_jump_targets,
746 std::map<int, int>* unresolved_suspend_ids) {
747 bool valid = true;
748 for (const ResumeJumpTarget& target : resume_jump_targets) {
749 std::map<int, int>::iterator it =
750 unresolved_suspend_ids->find(target.suspend_id());
751 if (it == unresolved_suspend_ids->end()) {
752 PrintF(
753 stderr,
754 "No unresolved suspend found for resume target with suspend id %d\n",
755 target.suspend_id());
756 valid = false;
757 continue;
758 }
759 int expected_target = it->second;
760
761 if (target.is_leaf()) {
762 // Leaves should have the expected target as their target.
763 if (target.target_offset() != expected_target) {
764 PrintF(
765 stderr,
766 "Expected leaf resume target for id %d to have target offset %d, "
767 "but had %d\n",
768 target.suspend_id(), expected_target, target.target_offset());
769 valid = false;
770 } else {
771 // Make sure we're resuming to a Resume bytecode
772 interpreter::BytecodeArrayAccessor accessor(bytecode_array(),
773 target.target_offset());
774 if (accessor.current_bytecode() != Bytecode::kResumeGenerator) {
775 PrintF(stderr,
776 "Expected resume target for id %d, offset %d, to be "
777 "ResumeGenerator, but found %s\n",
778 target.suspend_id(), target.target_offset(),
779 Bytecodes::ToString(accessor.current_bytecode()));
780
781 valid = false;
782 }
783 }
784 // We've resolved this suspend id, so erase it to make sure we don't
785 // resolve it twice.
786 unresolved_suspend_ids->erase(it);
787 } else {
788 // Non-leaves should have a direct inner loop header as their target.
789 if (!IsLoopHeader(target.target_offset())) {
790 PrintF(stderr,
791 "Expected non-leaf resume target for id %d to have a loop "
792 "header at target offset %d\n",
793 target.suspend_id(), target.target_offset());
794 valid = false;
795 } else {
796 LoopInfo loop_info = GetLoopInfoFor(target.target_offset());
797 if (loop_info.parent_offset() != parent_offset) {
798 PrintF(stderr,
799 "Expected non-leaf resume target for id %d to have a direct "
800 "inner loop at target offset %d\n",
801 target.suspend_id(), target.target_offset());
802 valid = false;
803 }
804 // If the target loop is a valid inner loop, we'll check its validity
805 // when we analyze its resume targets.
806 }
807 }
808 }
809 return valid;
810 }
811
LivenessIsValid()812 bool BytecodeAnalysis::LivenessIsValid() {
813 interpreter::BytecodeArrayRandomIterator iterator(bytecode_array(), zone());
814
815 BytecodeLivenessState previous_liveness(bytecode_array()->register_count(),
816 zone());
817
818 int invalid_offset = -1;
819 int which_invalid = -1;
820
821 BytecodeLivenessState* next_bytecode_in_liveness = nullptr;
822
823 // Ensure that there are no liveness changes if we iterate one more time.
824 for (iterator.GoToEnd(); iterator.IsValid(); --iterator) {
825 Bytecode bytecode = iterator.current_bytecode();
826
827 int current_offset = iterator.current_offset();
828
829 BytecodeLiveness& liveness = liveness_map_.GetLiveness(current_offset);
830
831 previous_liveness.CopyFrom(*liveness.out);
832
833 UpdateOutLiveness(bytecode, liveness.out, next_bytecode_in_liveness,
834 iterator, bytecode_array(), liveness_map_);
835 // UpdateOutLiveness skips kJumpLoop, so we update it manually.
836 if (bytecode == Bytecode::kJumpLoop) {
837 int target_offset = iterator.GetJumpTargetOffset();
838 liveness.out->Union(*liveness_map_.GetInLiveness(target_offset));
839 }
840
841 if (!liveness.out->Equals(previous_liveness)) {
842 // Reset the invalid liveness.
843 liveness.out->CopyFrom(previous_liveness);
844 invalid_offset = current_offset;
845 which_invalid = 1;
846 break;
847 }
848
849 previous_liveness.CopyFrom(*liveness.in);
850
851 liveness.in->CopyFrom(*liveness.out);
852 UpdateInLiveness(bytecode, liveness.in, iterator);
853
854 if (!liveness.in->Equals(previous_liveness)) {
855 // Reset the invalid liveness.
856 liveness.in->CopyFrom(previous_liveness);
857 invalid_offset = current_offset;
858 which_invalid = 0;
859 break;
860 }
861
862 next_bytecode_in_liveness = liveness.in;
863 }
864
865 // Ensure that the accumulator is not live when jumping out of a loop, or on
866 // the back-edge of a loop.
867 for (iterator.GoToStart(); iterator.IsValid() && invalid_offset == -1;
868 ++iterator) {
869 Bytecode bytecode = iterator.current_bytecode();
870 int current_offset = iterator.current_offset();
871 int loop_header = GetLoopOffsetFor(current_offset);
872
873 // We only care if we're inside a loop.
874 if (loop_header == -1) continue;
875
876 // We only care about jumps.
877 if (!Bytecodes::IsJump(bytecode)) continue;
878
879 int jump_target = iterator.GetJumpTargetOffset();
880
881 // If this is a forward jump to somewhere else in the same loop, ignore it.
882 if (Bytecodes::IsForwardJump(bytecode) &&
883 GetLoopOffsetFor(jump_target) == loop_header) {
884 continue;
885 }
886
887 // The accumulator must be dead at the start of the target of the jump.
888 if (liveness_map_.GetLiveness(jump_target).in->AccumulatorIsLive()) {
889 invalid_offset = jump_target;
890 which_invalid = 0;
891 break;
892 }
893 }
894
895 if (invalid_offset != -1) {
896 OFStream of(stderr);
897 of << "Invalid liveness:" << std::endl;
898
899 // Dump the bytecode, annotated with the liveness and marking loops.
900
901 int loop_indent = 0;
902
903 interpreter::BytecodeArrayIterator forward_iterator(bytecode_array());
904 for (; !forward_iterator.done(); forward_iterator.Advance()) {
905 int current_offset = forward_iterator.current_offset();
906 const BitVector& in_liveness =
907 GetInLivenessFor(current_offset)->bit_vector();
908 const BitVector& out_liveness =
909 GetOutLivenessFor(current_offset)->bit_vector();
910
911 for (int i = 0; i < in_liveness.length(); ++i) {
912 of << (in_liveness.Contains(i) ? 'L' : '.');
913 }
914
915 of << " | ";
916
917 for (int i = 0; i < out_liveness.length(); ++i) {
918 of << (out_liveness.Contains(i) ? 'L' : '.');
919 }
920
921 of << " : " << current_offset << " : ";
922
923 // Draw loop back edges by indentin everything between loop headers and
924 // jump loop instructions.
925 if (forward_iterator.current_bytecode() == Bytecode::kJumpLoop) {
926 loop_indent--;
927 }
928 for (int i = 0; i < loop_indent; ++i) {
929 of << "| ";
930 }
931 if (forward_iterator.current_bytecode() == Bytecode::kJumpLoop) {
932 of << "`-";
933 } else if (IsLoopHeader(current_offset)) {
934 of << ".>";
935 loop_indent++;
936 }
937 forward_iterator.PrintTo(of);
938 if (Bytecodes::IsJump(forward_iterator.current_bytecode())) {
939 of << " (@" << forward_iterator.GetJumpTargetOffset() << ")";
940 }
941 of << std::endl;
942
943 if (current_offset == invalid_offset) {
944 // Underline the invalid liveness.
945 if (which_invalid == 0) {
946 for (int i = 0; i < in_liveness.length(); ++i) {
947 of << '^';
948 }
949 for (int i = 0; i < out_liveness.length() + 3; ++i) {
950 of << ' ';
951 }
952 } else {
953 for (int i = 0; i < in_liveness.length() + 3; ++i) {
954 of << ' ';
955 }
956 for (int i = 0; i < out_liveness.length(); ++i) {
957 of << '^';
958 }
959 }
960
961 // Make sure to draw the loop indentation marks on this additional line.
962 of << " : " << current_offset << " : ";
963 for (int i = 0; i < loop_indent; ++i) {
964 of << "| ";
965 }
966
967 of << std::endl;
968 }
969 }
970 }
971
972 return invalid_offset == -1;
973 }
974 #endif
975
976 } // namespace compiler
977 } // namespace internal
978 } // namespace v8
979