// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/register-allocator-verifier.h"

#include "src/compiler/backend/instruction.h"
#include "src/utils/bit-vector.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

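// Returns the total number of operands (inputs, outputs, and temps) of
// |instr|, i.e. the size of the flattened constraint array built for it.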
size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

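// Checks that |instr| carries no parallel moves yet; the verifier records
// constraints before register allocation has inserted any gap moves.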
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK_NULL(instr->GetParallelMove(inner_pos));
  }
}

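// Checks that every non-redundant gap move in |instr| has been fully
// allocated: sources must be allocated operands or constants, and
// destinations must be allocated operands.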
void VerifyAllocatedGaps(const Instruction* instr, const char* caller_info) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK_WITH_MSG(
          move->source().IsAllocated() || move->source().IsConstant(),
          caller_info);
      CHECK_WITH_MSG(move->destination().IsAllocated(), caller_info);
    }
  }
}

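// Extracts the payload of an immediate operand as an int, narrowing inline
// 64-bit values and returning the table index for indexed immediates.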
int GetValue(const ImmediateOperand* imm) {
  switch (imm->type()) {
    case ImmediateOperand::INLINE_INT32:
      return imm->inline_int32_value();
    case ImmediateOperand::INLINE_INT64:
      return static_cast<int>(imm->inline_int64_value());
    case ImmediateOperand::INDEXED_RPO:
    case ImmediateOperand::INDEXED_IMM:
      return imm->indexed_value();
  }
}

}  // namespace

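// The verifier works in two phases. At construction time, before register
// allocation, it records an OperandConstraint for every operand of every
// instruction. After allocation, VerifyAssignment() checks each allocated
// operand against its recorded constraint, and VerifyGapMoves() simulates
// the inserted gap moves to check that every use still sees the value of
// the virtual register it expects.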
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence, const Frame* frame)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone),
      spill_slot_delta_(frame->GetTotalFrameSlotCount() -
                        frame->GetSpillSlotCount()) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsInput along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
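    // Constraints are stored in operand order: inputs first, then temps,
    // then outputs. VerifyAssignment() walks them in the same order.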
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsInput) {
        int input_index = op_constraints[count].value_;
        CHECK_LT(input_index, instr->InputCount());
        op_constraints[count].type_ = op_constraints[input_index].type_;
        op_constraints[count].value_ = op_constraints[input_index].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsInput, constraint.type_);
  if (constraint.type_ != kImmediate) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsInput, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

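// Phase-one check, run after register allocation: every operand of every
// instruction must satisfy the constraint recorded for it at construction
// time, and all gap moves must now be fully allocated.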
void RegisterAllocatorVerifier::VerifyAssignment(const char* caller_info) {
  caller_info_ = caller_info;
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr, caller_info_);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

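// Translates an operand's allocation policy into an OperandConstraint that
// can be checked once allocation has replaced the operand with a concrete
// location.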
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    constraint->type_ = kImmediate;
    constraint->value_ = GetValue(imm);
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::REGISTER_OR_SLOT:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kRegisterOrSlotFP;
          } else {
            constraint->type_ = kRegisterOrSlot;
          }
          break;
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          DCHECK(!sequence()->IsFP(vreg));
          constraint->type_ = kRegisterOrSlotOrConstant;
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_INPUT:
          constraint->type_ = kSameAsInput;
          constraint->value_ = unallocated->input_index();
          break;
      }
    }
  }
}

void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK_WITH_MSG(op->IsConstant(), caller_info_);
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK_WITH_MSG(op->IsImmediate(), caller_info_);
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = GetValue(imm);
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      return;
    case kFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kRegisterOrSlot:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotFP:
      CHECK_WITH_MSG(op->IsFPRegister() || op->IsFPStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotOrConstant:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot() || op->IsConstant(),
                     caller_info_);
      return;
    case kSameAsInput:
      CHECK_WITH_MSG(false, caller_info_);
      return;
  }
}

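// Applies the parallel moves at both gap positions (START, then END) of
// |instruction| to the current assessment map.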
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

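  // Gather all assessments into map_for_moves_ before committing any of
  // them, so that every source is read before any destination is written,
  // matching the semantics of a parallel move.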
  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should already have been assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // The RHS of a parallel move should not be a stale reference.
    CHECK(!IsStaleReferenceStackSlot(move->source()));
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    // Re-insert the existing key for the new assignment so that it has the
    // correct representation (which is ignored by the canonicalizing map
    // comparator).
    InstructionOperand op = pair.first;
    map_.erase(op);
    map_.insert(pair);
    // The destination is no longer a stale reference.
    stale_ref_stack_slots().erase(op);
  }
  map_for_moves_.clear();
}

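// Calls clobber all registers, so on a call every register-based assessment
// becomes invalid and is removed from the map.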
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

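// Cross-checks the tracked stack slots against a safepoint's reference map:
// any tagged spill slot the reference map does not mention is marked stale,
// since the GC will not update a pointer stored there.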
void BlockAssessments::CheckReferenceMap(const ReferenceMap* reference_map) {
  // First mark all existing reference stack spill slots as stale.
  for (auto pair : map()) {
    InstructionOperand op = pair.first;
    if (op.IsStackSlot()) {
      const LocationOperand* loc_op = LocationOperand::cast(&op);
      // Only mark spill slots as stale; the reference map doesn't track
      // arguments or fixed stack slots, which are implicitly tracked by
      // the GC.
      if (CanBeTaggedOrCompressedPointer(loc_op->representation()) &&
          loc_op->index() >= spill_slot_delta()) {
        stale_ref_stack_slots().insert(op);
      }
    }
  }

  // Now remove any stack spill slots in the reference map from the list of
  // stale slots.
  for (auto ref_map_operand : reference_map->reference_operands()) {
    if (ref_map_operand.IsStackSlot()) {
      auto pair = map().find(ref_map_operand);
      CHECK(pair != map().end());
      stale_ref_stack_slots().erase(pair->first);
    }
  }
}

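// A stack slot is a stale reference slot if it holds a tagged value but was
// absent from the most recent reference map; its contents may be a dangling
// pointer after a GC, so any later use of it is an error.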
bool BlockAssessments::IsStaleReferenceStackSlot(InstructionOperand op) {
  if (!op.IsStackSlot()) return false;

  const LocationOperand* loc_op = LocationOperand::cast(&op);
  return CanBeTaggedOrCompressedPointer(loc_op->representation()) &&
         stale_ref_stack_slots().find(op) != stale_ref_stack_slots().end();
}

void BlockAssessments::Print() const {
  StdoutStream os;
  for (const auto& pair : map()) {
    const InstructionOperand op = pair.first;
    const Assessment* assessment = pair.second;
    // Use operator<< so we can write the assessment on the same line.
    os << op << " : ";
    if (assessment->kind() == AssessmentKind::Final) {
      os << "v" << FinalAssessment::cast(assessment)->virtual_register();
    } else {
      os << "P";
    }
    if (stale_ref_stack_slots().find(op) != stale_ref_stack_slots().end()) {
      os << " (stale reference)";
    }
    os << std::endl;
  }
  os << std::endl;
}

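// Builds the initial assessments for |block| from its predecessors: with a
// single predecessor (and no phis) the predecessor's state is copied, while
// multiple predecessors yield PendingAssessments that are only resolved when
// an operand is first used.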
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret =
      zone()->New<BlockAssessments>(zone(), spill_slot_delta());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK_EQ(0, current_block_id.ToInt());
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc. Validate this is a loop case, otherwise the CFG is
        // malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, zone()->New<PendingAssessment>(zone(), block, operand)));
        }
      }

      // Any reference stack slots that became stale in predecessors will be
      // stale here.
      ret->stale_ref_stack_slots().insert(
          pred_assessments->stale_ref_stack_slots().begin(),
          pred_assessments->stale_ref_stack_slots().end());
    }
  }
  return ret;
}

void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    const BlockAssessments* current_assessments,
    PendingAssessment* const assessment, int virtual_register) {
  if (assessment->IsAliasOf(virtual_register)) return;

  // When validating a pending assessment, it is possible some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list.
  // To deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = zone()->New<DelayedAssessments>(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(),
                   expected);
          break;
        case Pending: {
          // This happens when one diamond feeds into another, and the inner
          // one is never used other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block, which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  assessment->AddAlias(virtual_register);
}

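// Checks that, at this use site in |block_id|, operand |op| currently holds
// the value of |virtual_register|, resolving any pending assessment on the
// way.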
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  // The operand shouldn't be a stale reference stack slot.
  CHECK(!current_assessments->IsStaleReferenceStackSlot(op));

  switch (assessment->kind()) {
    case Final:
      CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(),
               virtual_register);
      break;
    case Pending: {
      PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

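// Phase-two check: walks all blocks in RPO order, simulating the allocated
// gap moves and recording definitions, and validates that every use reads
// the virtual register the unallocated code expected.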
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      if (instr->HasReferenceMap()) {
        block_assessments->CheckReferenceMap(instr->reference_map());
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[count].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      // This block jumps back to the loop header; ensure the op hasn't
      // become a stale reference over the course of the loop.
      CHECK(!block_assessments->IsStaleReferenceStackSlot(op));
      switch (found_op->second->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(found_op->second)->virtual_register(),
                   vreg);
          break;
        case Pending:
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    PendingAssessment::cast(found_op->second),
                                    vreg);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8