// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/register-allocator-verifier.h"

#include "src/compiler/backend/instruction.h"
#include "src/utils/bit-vector.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

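// Returns the total number of operands (inputs, outputs, and temps) an
// instruction carries.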
size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

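// Checks that no parallel moves have been attached to any of the
// instruction's gap positions yet.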
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK_NULL(instr->GetParallelMove(inner_pos));
  }
}

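// Checks that every non-redundant move in the instruction's gap positions
// reads from an allocated (or constant) operand and writes to an allocated
// one.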
void VerifyAllocatedGaps(const Instruction* instr, const char* caller_info) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK_WITH_MSG(
          move->source().IsAllocated() || move->source().IsConstant(),
          caller_info);
      CHECK_WITH_MSG(move->destination().IsAllocated(), caller_info);
    }
  }
}

}  // namespace

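// The verifier is constructed before register allocation runs; it records an
// OperandConstraint for every operand of every instruction while the
// operands are still unallocated.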
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence, const Frame* frame)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone),
      spill_slot_delta_(frame->GetTotalFrameSlotCount() -
                        frame->GetSpillSlotCount()) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK_LT(0, instr->InputCount());
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

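// Inputs may never be kSameAsFirst, and every non-immediate input must carry
// a valid virtual register.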
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

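// Temps must be genuine locations: neither kSameAsFirst nor constants or
// immediates.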
void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

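// Outputs may not be immediates and must carry a valid virtual register;
// kSameAsFirst outputs were already rewritten in the constructor.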
void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

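// Called after register allocation: re-walks the instruction sequence and
// checks every now-allocated operand against the constraint recorded for it
// at construction time.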
void RegisterAllocatorVerifier::VerifyAssignment(const char* caller_info) {
  caller_info_ = caller_info;
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr, caller_info_);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

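// Derives the OperandConstraint for one pre-allocation operand from its
// policy: constants and immediates record their value, while unallocated
// operands record their virtual register plus any fixed-location or
// register/slot requirement.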
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::REGISTER_OR_SLOT:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kRegisterOrSlotFP;
          } else {
            constraint->type_ = kRegisterOrSlot;
          }
          break;
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          DCHECK(!sequence()->IsFP(vreg));
          constraint->type_ = kRegisterOrSlotOrConstant;
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}

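// Checks one allocated operand against its recorded constraint; failures
// report caller_info_ so the failing verification phase can be identified.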
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK_WITH_MSG(op->IsConstant(), caller_info_);
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK_WITH_MSG(op->IsImmediate(), caller_info_);
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      return;
    case kFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kRegisterOrSlot:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotFP:
      CHECK_WITH_MSG(op->IsFPRegister() || op->IsFPStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotOrConstant:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot() || op->IsConstant(),
                     caller_info_);
      return;
    case kSameAsFirst:
      CHECK_WITH_MSG(false, caller_info_);
      return;
  }
}

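// Applies the parallel moves attached to the instruction's START and END gap
// positions to the block state.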
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

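// Applies one parallel move. All sources are looked up against the pre-move
// state and staged in map_for_moves_ before any destination is committed to
// map_, preserving parallel-move semantics.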
void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should already have been assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // The RHS of a parallel move should not be a stale reference.
    CHECK(!IsStaleReferenceStackSlot(move->source()));
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    // Re-insert the existing key for the new assignment so that it has the
    // correct representation (which is ignored by the canonicalizing map
    // comparator).
    InstructionOperand op = pair.first;
    map_.erase(op);
    map_.insert(pair);
    // The destination is no longer a stale reference.
    stale_ref_stack_slots().erase(op);
  }
  map_for_moves_.clear();
}

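// Drops every register assessment, e.g. across a call, which clobbers all
// registers.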
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

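// Marks every tracked tagged spill slot as stale unless the instruction's
// reference map lists it: slots missing from the map will not be updated by
// the GC across the safepoint, so later uses of them are invalid.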
void BlockAssessments::CheckReferenceMap(const ReferenceMap* reference_map) {
  // First mark all existing reference stack spill slots as stale.
  for (auto pair : map()) {
    InstructionOperand op = pair.first;
    if (op.IsStackSlot()) {
      const LocationOperand* loc_op = LocationOperand::cast(&op);
      // Only mark spill slots as stale; the reference map doesn't track
      // arguments or fixed stack slots, which are implicitly tracked by the
      // GC.
      if (CanBeTaggedOrCompressedPointer(loc_op->representation()) &&
          loc_op->index() >= spill_slot_delta()) {
        stale_ref_stack_slots().insert(op);
      }
    }
  }

  // Now remove any stack spill slots in the reference map from the list of
  // stale slots.
  for (auto ref_map_operand : reference_map->reference_operands()) {
    if (ref_map_operand.IsStackSlot()) {
      auto pair = map().find(ref_map_operand);
      CHECK(pair != map().end());
      stale_ref_stack_slots().erase(pair->first);
    }
  }
}

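// A stale reference is a tagged stack slot currently recorded in
// stale_ref_stack_slots().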
bool BlockAssessments::IsStaleReferenceStackSlot(InstructionOperand op) {
  if (!op.IsStackSlot()) return false;

  const LocationOperand* loc_op = LocationOperand::cast(&op);
  return CanBeTaggedOrCompressedPointer(loc_op->representation()) &&
         stale_ref_stack_slots().find(op) != stale_ref_stack_slots().end();
}

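// Debugging aid: prints each tracked operand with either its final virtual
// register ("vN") or "P" for a pending assessment, plus its staleness.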
void BlockAssessments::Print() const {
  StdoutStream os;
  for (const auto pair : map()) {
    const InstructionOperand op = pair.first;
    const Assessment* assessment = pair.second;
    // Use operator<< so we can write the assessment on the same line.
    os << op << " : ";
    if (assessment->kind() == AssessmentKind::Final) {
      os << "v" << FinalAssessment::cast(assessment)->virtual_register();
    } else {
      os << "P";
    }
    if (stale_ref_stack_slots().find(op) != stale_ref_stack_slots().end()) {
      os << " (stale reference)";
    }
    os << std::endl;
  }
  os << std::endl;
}

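// Computes a block's entry state: entry blocks start empty, a single
// predecessor without phis is copied wholesale, and merge blocks wrap each
// incoming operand in a PendingAssessment to be resolved at its first use.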
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret =
      zone()->New<BlockAssessments>(zone(), spill_slot_delta());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK_EQ(0, current_block_id.ToInt());
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc. Validate that this is a loop case; otherwise the CFG
        // is malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, zone()->New<PendingAssessment>(zone(), block, operand)));
        }
      }

      // Any reference stack slots that became stale in predecessors will be
      // stale here, too.
      ret->stale_ref_stack_slots().insert(
          pred_assessments->stale_ref_stack_slots().begin(),
          pred_assessments->stale_ref_stack_slots().end());
    }
  }
  return ret;
}

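// Resolves a pending assessment against the virtual register observed at a
// use. Walks back through predecessor states with an explicit work list
// (cycles guarded by a seen set); contributions from not-yet-processed
// loopback predecessors are deferred into outstanding_assessments_.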
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    const BlockAssessments* current_assessments,
    PendingAssessment* const assessment, int virtual_register) {
  if (assessment->IsAliasOf(virtual_register)) return;

  // When validating a pending assessment, it is possible that some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list.
  // To deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = zone()->New<DelayedAssessments>(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(),
                   expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, with
          // the inner one never being used other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  assessment->AddAlias(virtual_register);
}

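// Validates that reading op yields virtual_register: a Final assessment must
// match directly, while a Pending one is resolved via
// ValidatePendingAssessment.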
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  // The operand shouldn't be a stale reference stack slot.
  CHECK(!current_assessments->IsStaleReferenceStackSlot(op));

  switch (assessment->kind()) {
    case Final:
      CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(),
               virtual_register);
      break;
    case Pending: {
      PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

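// The main data-flow pass: walks the blocks in RPO order, simulating every
// gap move and validating each use and definition, then discharges any
// assessments that were delayed on loopback arcs once the jumping block has
// been processed.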
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      if (instr->HasReferenceMap()) {
        block_assessments->CheckReferenceMap(instr->reference_map());
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[count].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      // This block jumps back to the loop header; ensure that the operand
      // hasn't become a stale reference while passing through the loop body.
      CHECK(!block_assessments->IsStaleReferenceStackSlot(op));
      switch (found_op->second->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(found_op->second)->virtual_register(),
                   vreg);
          break;
        case Pending:
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    PendingAssessment::cast(found_op->second),
                                    vreg);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8