// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/register-allocator-verifier.h"

#include "src/bit-vector.h"
#include "src/compiler/instruction.h"
#include "src/ostreams.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

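// Checks that none of the instruction's gap positions carries a parallel
// move yet; constraints are gathered before any moves have been inserted.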
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK_NULL(instr->GetParallelMove(inner_pos));
  }
}

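// Checks that every non-redundant move in the instruction's gaps reads from
// an allocated operand or a constant and writes to an allocated operand.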
void VerifyAllocatedGaps(const Instruction* instr, const char* caller_info) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK_WITH_MSG(
          move->source().IsAllocated() || move->source().IsConstant(),
          caller_info);
      CHECK_WITH_MSG(move->destination().IsAllocated(), caller_info);
    }
  }
}

}  // namespace

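// The verifier is constructed over the still-unallocated code: it records one
// OperandConstraint per operand of every instruction, so that the allocated
// code can later be checked against them (see VerifyAssignment and
// VerifyGapMoves).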
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK_LT(0, instr->InputCount());
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

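// Walks the now-allocated instruction sequence and checks every operand
// against the constraint recorded for it at construction time.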
void RegisterAllocatorVerifier::VerifyAssignment(const char* caller_info) {
  caller_info_ = caller_info;
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr, caller_info_);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

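// Derives the constraint for a single operand: constants and immediates
// record their value, explicit operands only their kind, and unallocated
// operands record the allocation policy together with the virtual register.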
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::REGISTER_OR_SLOT:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kRegisterOrSlotFP;
          } else {
            constraint->type_ = kRegisterOrSlot;
          }
          break;
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          DCHECK(!sequence()->IsFP(vreg));
          constraint->type_ = kRegisterOrSlotOrConstant;
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}

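// Checks one allocated operand against the constraint captured before
// allocation, e.g. that a kFixedRegister operand ended up in the expected
// register and a kSlot operand in a stack slot of the expected width.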
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK_WITH_MSG(op->IsConstant(), caller_info_);
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK_WITH_MSG(op->IsImmediate(), caller_info_);
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      return;
    case kFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      return;
    case kExplicit:
      CHECK_WITH_MSG(op->IsExplicit(), caller_info_);
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kRegisterOrSlot:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotFP:
      CHECK_WITH_MSG(op->IsFPRegister() || op->IsFPStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotOrConstant:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot() || op->IsConstant(),
                     caller_info_);
      return;
    case kSameAsFirst:
      CHECK_WITH_MSG(false, caller_info_);
      return;
  }
}

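// Applies both of an instruction's gap moves (START, then END) to this
// block's assessment map.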
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

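// Parallel moves are applied in two steps: destinations are first staged in
// map_for_moves_, so that every source is read from the state before the
// move, and only then are the staged assessments committed to map_.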
void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should have been already assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    map_[pair.first] = pair.second;
  }
  map_for_moves_.clear();
}

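// Forgets all register assessments; called when the current instruction is a
// call (see VerifyGapMoves), after which register contents cannot be relied
// on.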
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

void BlockAssessments::Print() const {
  StdoutStream os;
  for (const auto pair : map()) {
    const InstructionOperand op = pair.first;
    const Assessment* assessment = pair.second;
    // Use operator<< so we can write the assessment on the same
    // line. Since we need a register configuration, just pick
    // Turbofan for now.
    PrintableInstructionOperand wrapper = {RegisterConfiguration::Default(),
                                           op};
    os << wrapper << " : ";
    if (assessment->kind() == AssessmentKind::Final) {
      os << "v" << FinalAssessment::cast(assessment)->virtual_register();
    } else {
      os << "P";
    }
    os << std::endl;
  }
  os << std::endl;
}

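// Builds the initial assessments for a block: entry blocks start empty, a
// single-predecessor block without phis copies its predecessor's state, and
// merge points get PendingAssessments that are validated lazily on first use.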
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret = new (zone()) BlockAssessments(zone());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK_EQ(0, current_block_id.ToInt());
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc.
        // Validate this is a loop case, otherwise the CFG is malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, new (zone()) PendingAssessment(zone(), block, operand)));
        }
      }
    }
  }
  return ret;
}

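// Resolves a pending assessment: for each predecessor of the block where the
// assessment originated, checks that the operand held the expected virtual
// register (or the corresponding phi input). Checks that cannot be performed
// yet, because the predecessor is an unprocessed loop back edge, are queued
// in outstanding_assessments_.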
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    const BlockAssessments* current_assessments,
    PendingAssessment* const assessment, int virtual_register) {
  if (assessment->IsAliasOf(virtual_register)) return;

  // When validating a pending assessment, it is possible that some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list.
  // To deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = new (zone()) DelayedAssessments(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(),
                   expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, and
          // the inner one is never used other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  assessment->AddAlias(virtual_register);
}

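// Checks that, at this point in the block, the operand holds the given
// virtual register, either directly (Final) or through a pending merge
// assessment.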
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  switch (assessment->kind()) {
    case Final:
      CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(),
               virtual_register);
      break;
    case Pending: {
      PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

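// Walks the blocks in RPO order, simulating the gap moves of every
// instruction: each input use is validated against the simulated state,
// temps and call-clobbered registers are dropped, outputs are recorded as
// definitions, and assessments delayed at loop back edges are discharged
// once their target block has been processed.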
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate ||
            op_constraints[count].type_ == kExplicit) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[i].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      switch (found_op->second->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(found_op->second)->virtual_register(),
                   vreg);
          break;
        case Pending:
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    PendingAssessment::cast(found_op->second),
                                    vreg);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8