• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/register-allocator-verifier.h"
6 
7 #include "src/bit-vector.h"
8 #include "src/compiler/instruction.h"
9 #include "src/ostreams.h"
10 
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14 
15 namespace {
16 
OperandCount(const Instruction * instr)17 size_t OperandCount(const Instruction* instr) {
18   return instr->InputCount() + instr->OutputCount() + instr->TempCount();
19 }
20 
21 
VerifyEmptyGaps(const Instruction * instr)22 void VerifyEmptyGaps(const Instruction* instr) {
23   for (int i = Instruction::FIRST_GAP_POSITION;
24        i <= Instruction::LAST_GAP_POSITION; i++) {
25     Instruction::GapPosition inner_pos =
26         static_cast<Instruction::GapPosition>(i);
27     CHECK(instr->GetParallelMove(inner_pos) == nullptr);
28   }
29 }
30 
31 
VerifyAllocatedGaps(const Instruction * instr)32 void VerifyAllocatedGaps(const Instruction* instr) {
33   for (int i = Instruction::FIRST_GAP_POSITION;
34        i <= Instruction::LAST_GAP_POSITION; i++) {
35     Instruction::GapPosition inner_pos =
36         static_cast<Instruction::GapPosition>(i);
37     const ParallelMove* moves = instr->GetParallelMove(inner_pos);
38     if (moves == nullptr) continue;
39     for (const MoveOperands* move : *moves) {
40       if (move->IsRedundant()) continue;
41       CHECK(move->source().IsAllocated() || move->source().IsConstant());
42       CHECK(move->destination().IsAllocated());
43     }
44   }
45 }
46 
47 }  // namespace
48 
// Snapshots, for every instruction in |sequence|, the operand constraints
// present before register allocation. The allocator's output is later
// checked against this snapshot (see VerifyAssignment / VerifyGapMoves).
// |zone| owns all verifier allocations; |config| describes the register set.
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    // Constraints are stored flattened: inputs first, then temps, then
    // outputs; |count| indexes into that flattened array.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      // Resolve kSameAsFirst to the first input's constraint here so that
      // later checks never have to special-case it.
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}
91 
VerifyInput(const OperandConstraint & constraint)92 void RegisterAllocatorVerifier::VerifyInput(
93     const OperandConstraint& constraint) {
94   CHECK_NE(kSameAsFirst, constraint.type_);
95   if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
96     CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
97              constraint.virtual_register_);
98   }
99 }
100 
// A temp must be a register or slot: kSameAsFirst placeholders, immediates,
// explicit operands and constants are all disallowed.
void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}
108 
// An output may never be an immediate or an explicit operand, and must
// always carry a valid virtual register. (kSameAsFirst has already been
// rewritten to the first input's constraint by the constructor.)
void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}
116 
VerifyAssignment()117 void RegisterAllocatorVerifier::VerifyAssignment() {
118   CHECK(sequence()->instructions().size() == constraints()->size());
119   auto instr_it = sequence()->begin();
120   for (const auto& instr_constraint : *constraints()) {
121     const Instruction* instr = instr_constraint.instruction_;
122     // All gaps should be totally allocated at this point.
123     VerifyAllocatedGaps(instr);
124     const size_t operand_count = instr_constraint.operand_constaints_size_;
125     const OperandConstraint* op_constraints =
126         instr_constraint.operand_constraints_;
127     CHECK_EQ(instr, *instr_it);
128     CHECK(operand_count == OperandCount(instr));
129     size_t count = 0;
130     for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
131       CheckConstraint(instr->InputAt(i), &op_constraints[count]);
132     }
133     for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
134       CheckConstraint(instr->TempAt(i), &op_constraints[count]);
135     }
136     for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
137       CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
138     }
139     ++instr_it;
140   }
141 }
142 
// Translates |op| (as it looks before register allocation) into an
// OperandConstraint describing what the allocator is permitted to assign:
// the constraint kind plus, where applicable, the fixed register/slot index
// or the secondary spill slot.
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
        case UnallocatedOperand::NONE:
          // FP and general values get distinct "unconstrained" kinds so the
          // check phase still validates the register class.
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kNoneFP;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            // The value must live in the fixed register and additionally be
            // spilled to a known slot.
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          // For kSlot, value_ records log2 of the slot's element size so the
          // check phase can compare representations.
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          // Rewritten to the first input's constraint by the constructor.
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}
209 
// Verifies that the allocated operand |op| satisfies the pre-allocation
// |constraint| recorded for it: the operand kind must match, and for fixed
// constraints the concrete register code / slot index / value must match
// as well.
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK(op->IsImmediate());
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kFPRegister:
      CHECK(op->IsFPRegister());
      return;
    case kExplicit:
      CHECK(op->IsExplicit());
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK(op->IsRegister());
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK(op->IsFPRegister());
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK(op->IsStackSlot() || op->IsFPStackSlot());
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK(op->IsStackSlot() || op->IsFPStackSlot());
      // For kSlot, value_ holds log2 of the expected element size.
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kNone:
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneFP:
      CHECK(op->IsFPRegister() || op->IsFPStackSlot());
      return;
    case kSameAsFirst:
      // kSameAsFirst is eliminated in the constructor and can never reach
      // the check phase.
      CHECK(false);
      return;
  }
}
265 
PerformMoves(const Instruction * instruction)266 void BlockAssessments::PerformMoves(const Instruction* instruction) {
267   const ParallelMove* first =
268       instruction->GetParallelMove(Instruction::GapPosition::START);
269   PerformParallelMoves(first);
270   const ParallelMove* last =
271       instruction->GetParallelMove(Instruction::GapPosition::END);
272   PerformParallelMoves(last);
273 }
274 
PerformParallelMoves(const ParallelMove * moves)275 void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
276   if (moves == nullptr) return;
277 
278   CHECK(map_for_moves_.empty());
279   for (MoveOperands* move : *moves) {
280     if (move->IsEliminated() || move->IsRedundant()) continue;
281     auto it = map_.find(move->source());
282     // The RHS of a parallel move should have been already assessed.
283     CHECK(it != map_.end());
284     // The LHS of a parallel move should not have been assigned in this
285     // parallel move.
286     CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
287     // Copy the assessment to the destination.
288     map_for_moves_[move->destination()] = it->second;
289   }
290   for (auto pair : map_for_moves_) {
291     map_[pair.first] = pair.second;
292   }
293   map_for_moves_.clear();
294 }
295 
DropRegisters()296 void BlockAssessments::DropRegisters() {
297   for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
298     auto current = iterator;
299     ++iterator;
300     InstructionOperand op = current->first;
301     if (op.IsAnyRegister()) map().erase(current);
302   }
303 }
304 
Print() const305 void BlockAssessments::Print() const {
306   OFStream os(stdout);
307   for (const auto pair : map()) {
308     const InstructionOperand op = pair.first;
309     const Assessment* assessment = pair.second;
310     // Use operator<< so we can write the assessment on the same
311     // line. Since we need a register configuration, just pick
312     // Turbofan for now.
313     PrintableInstructionOperand wrapper = {RegisterConfiguration::Turbofan(),
314                                            op};
315     os << wrapper << " : ";
316     if (assessment->kind() == AssessmentKind::Final) {
317       os << "v" << FinalAssessment::cast(assessment)->virtual_register();
318     } else {
319       os << "P";
320     }
321     os << std::endl;
322   }
323   os << std::endl;
324 }
325 
// Builds the initial assessments for |block| from its predecessors:
//  - no predecessors: start with an empty map;
//  - single predecessor and no phis: inherit the predecessor's map;
//  - otherwise: mark every operand arriving from any already-processed
//    predecessor as Pending, to be finalized when it is actually used.
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret = new (zone()) BlockAssessments(zone());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK(current_block_id.ToInt() == 0);
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc.
        // Validate this is a loop case, otherwise the CFG is malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, new (zone()) PendingAssessment(block, operand)));
        }
      }
    }
  }
  return ret;
}
368 
// Resolves a Pending assessment for |op|: checks that, along every
// predecessor path into the assessment's origin block, the operand held the
// expected virtual register (|virtual_register|, or the corresponding phi
// input if the vreg is defined by a phi at the origin). On success the
// assessment is finalized in |current_assessments|.
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    BlockAssessments* current_assessments, const PendingAssessment* assessment,
    int virtual_register) {
  // When validating a pending assessment, it is possible some of the
  // assessments for the original operand (the one where the assessment was
  // created for first) are also pending. To avoid recursion, we use a work
  // list. To deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    // Pending assessments are only ever created at merge points.
    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      // For a phi, each predecessor contributes the matching phi input;
      // otherwise the same vreg is expected on every path.
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        // The predecessor has not been processed yet: this must be a loop
        // backedge. Record a delayed assessment to be checked when the
        // predecessor block is eventually committed (see VerifyGapMoves).
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = new (zone()) DelayedAssessments(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          ValidateFinalAssessment(
              block_id, current_operand, current_assessments,
              FinalAssessment::cast(contribution), expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, and
          // the inner one never being used - other than for carrying the
          // value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  // If everything checks out, we may make the assessment.
  current_assessments->map()[op] =
      new (zone()) FinalAssessment(virtual_register, assessment);
}
461 
// Checks a Final assessment for |op| against the expected
// |virtual_register|. A mismatch is still acceptable if the assessment was
// finalized from a pending one that can be re-validated for the expected
// vreg (duplicate-phi case, see below); otherwise the re-validation CHECKs.
void RegisterAllocatorVerifier::ValidateFinalAssessment(
    RpoNumber block_id, InstructionOperand op,
    BlockAssessments* current_assessments, const FinalAssessment* assessment,
    int virtual_register) {
  if (assessment->virtual_register() == virtual_register) return;
  // If we have 2 phis with the exact same operand list, and the first phi is
  // used before the second one, via the operand incoming to the block,
  // and the second one's operand is defined (via a parallel move) after the
  // use, then the original operand will be assigned to the first phi. We
  // then look at the original pending assessment to ascertain if op
  // is virtual_register.
  const PendingAssessment* old = assessment->original_pending_assessment();
  CHECK_NOT_NULL(old);
  RpoNumber old_block = old->origin()->rpo_number();
  DCHECK_LE(old_block, block_id);
  // The origin block may be the one currently being processed, whose
  // assessments have not been committed to assessments_ yet.
  BlockAssessments* old_block_assessments =
      old_block == block_id ? current_assessments : assessments_[old_block];
  ValidatePendingAssessment(old_block, op, old_block_assessments, old,
                            virtual_register);
}
482 
ValidateUse(RpoNumber block_id,BlockAssessments * current_assessments,InstructionOperand op,int virtual_register)483 void RegisterAllocatorVerifier::ValidateUse(
484     RpoNumber block_id, BlockAssessments* current_assessments,
485     InstructionOperand op, int virtual_register) {
486   auto iterator = current_assessments->map().find(op);
487   // We should have seen this operand before.
488   CHECK(iterator != current_assessments->map().end());
489   Assessment* assessment = iterator->second;
490 
491   switch (assessment->kind()) {
492     case Final:
493       ValidateFinalAssessment(block_id, op, current_assessments,
494                               FinalAssessment::cast(assessment),
495                               virtual_register);
496       break;
497     case Pending: {
498       const PendingAssessment* pending = PendingAssessment::cast(assessment);
499       ValidatePendingAssessment(block_id, op, current_assessments, pending,
500                                 virtual_register);
501       break;
502     }
503   }
504 }
505 
VerifyGapMoves()506 void RegisterAllocatorVerifier::VerifyGapMoves() {
507   CHECK(assessments_.empty());
508   CHECK(outstanding_assessments_.empty());
509   const size_t block_count = sequence()->instruction_blocks().size();
510   for (size_t block_index = 0; block_index < block_count; ++block_index) {
511     const InstructionBlock* block =
512         sequence()->instruction_blocks()[block_index];
513     BlockAssessments* block_assessments = CreateForBlock(block);
514 
515     for (int instr_index = block->code_start(); instr_index < block->code_end();
516          ++instr_index) {
517       const InstructionConstraint& instr_constraint = constraints_[instr_index];
518       const Instruction* instr = instr_constraint.instruction_;
519       block_assessments->PerformMoves(instr);
520 
521       const OperandConstraint* op_constraints =
522           instr_constraint.operand_constraints_;
523       size_t count = 0;
524       for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
525         if (op_constraints[count].type_ == kImmediate ||
526             op_constraints[count].type_ == kExplicit) {
527           continue;
528         }
529         int virtual_register = op_constraints[count].virtual_register_;
530         InstructionOperand op = *instr->InputAt(i);
531         ValidateUse(block->rpo_number(), block_assessments, op,
532                     virtual_register);
533       }
534       for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
535         block_assessments->Drop(*instr->TempAt(i));
536       }
537       if (instr->IsCall()) {
538         block_assessments->DropRegisters();
539       }
540       for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
541         int virtual_register = op_constraints[count].virtual_register_;
542         block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
543         if (op_constraints[count].type_ == kRegisterAndSlot) {
544           const AllocatedOperand* reg_op =
545               AllocatedOperand::cast(instr->OutputAt(i));
546           MachineRepresentation rep = reg_op->representation();
547           const AllocatedOperand* stack_op = AllocatedOperand::New(
548               zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
549               op_constraints[i].spilled_slot_);
550           block_assessments->AddDefinition(*stack_op, virtual_register);
551         }
552       }
553     }
554     // Now commit the assessments for this block. If there are any delayed
555     // assessments, ValidatePendingAssessment should see this block, too.
556     assessments_[block->rpo_number()] = block_assessments;
557 
558     auto todo_iter = outstanding_assessments_.find(block->rpo_number());
559     if (todo_iter == outstanding_assessments_.end()) continue;
560     DelayedAssessments* todo = todo_iter->second;
561     for (auto pair : todo->map()) {
562       InstructionOperand op = pair.first;
563       int vreg = pair.second;
564       auto found_op = block_assessments->map().find(op);
565       CHECK(found_op != block_assessments->map().end());
566       switch (found_op->second->kind()) {
567         case Final:
568           ValidateFinalAssessment(block->rpo_number(), op, block_assessments,
569                                   FinalAssessment::cast(found_op->second),
570                                   vreg);
571           break;
572         case Pending:
573           const PendingAssessment* pending =
574               PendingAssessment::cast(found_op->second);
575           ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
576                                     pending, vreg);
577           block_assessments->map()[op] =
578               new (zone()) FinalAssessment(vreg, pending);
579           break;
580       }
581     }
582   }
583 }
584 
585 }  // namespace compiler
586 }  // namespace internal
587 }  // namespace v8
588