// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/address-map.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/frames-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

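// A linked list of jump tables emitted at the very end of code generation.
// Each node holds the label bound at the start of its table and the branch
// targets the table dispatches to.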
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};

CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(nullptr),
      linkage_(linkage),
      code_(code),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kYes),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_exits_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  CreateFrameAccessState(frame);
}

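// Finalizes the frame layout and wraps the frame in a FrameAccessState,
// which tracks whether frame slots are currently addressed relative to the
// frame pointer or the stack pointer.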
void CodeGenerator::CreateFrameAccessState(Frame* frame) {
  FinishFrame(frame);
  frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
}

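// Drives the back end of the pipeline: assembles all instruction blocks,
// out-of-line code, deoptimization exits and jump tables, then packages the
// result into a Code object together with its safepoint, handler and
// deoptimization tables.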
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(masm(), StackFrame::MANUAL);

  // Emit a code line info recording start event.
  PositionsRecorder* recorder = masm()->positions_recorder();
  LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }
  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (const CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.shared_info);
    }
  }
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all unoptimized code objects of inlined
  // functions. This ensures unoptimized code is kept alive by optimized code.
  for (const CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
    }
  }

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (const InstructionBlock* block : code()->instruction_blocks()) {
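      // The first pass (deferred == 0) emits only non-deferred blocks, the
      // second pass only deferred ones, so rarely-executed code ends up
      // after the hot instruction stream.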
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Ensure lazy deopt doesn't patch handler entry points.
      if (block->IsHandler()) EnsureSpaceForLazyDeopt();
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(200);
        char* buffer_start = buffer.start();

        int next = SNPrintF(
            buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
            block->IsDeferred() ? " (deferred)" : "",
            block->needs_frame() ? "" : " (no frame)",
            block->must_construct_frame() ? " (construct frame)" : "",
            block->must_deconstruct_frame() ? " (deconstruct frame)" : "");

        buffer = buffer.SubVector(next, buffer.length());

        if (block->IsLoopHeader()) {
          next =
              SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        if (block->loop_header().IsValid()) {
          next =
              SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        SNPrintF(buffer, " --");
        masm()->RecordComment(buffer_start);
      }

      frame_access_state()->MarkHasFrame(block->needs_frame());

      masm()->bind(GetLabel(current_block_));
      if (block->must_construct_frame()) {
        AssembleConstructFrame();
        // We need to set up the root register after we assemble the prologue,
        // to avoid clobbering callee-saved registers in case of C linkage and
        // using the roots.
        // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
        if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
          masm()->InitializeRootRegister();
        }
      }

      CodeGenResult result;
      if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
        ConstantPoolUnavailableScope constant_pool_unavailable(masm());
        result = AssembleBlock(block);
      } else {
        result = AssembleBlock(block);
      }
      if (result != kSuccess) return Handle<Code>();
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Assemble all eager deoptimization exits.
  for (DeoptimizationExit* exit : deoptimization_exits_) {
    masm()->bind(exit->label());
    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER);
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());

  Handle<Code> result =
      v8::internal::CodeGenerator::MakeCodeEpilogue(masm(), info);
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetTotalFrameSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      int position = handlers_[i].handler->pos();
      HandlerTable::CatchPrediction prediction = handlers_[i].caught_locally
                                                     ? HandlerTable::CAUGHT
                                                     : HandlerTable::UNCAUGHT;
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), position, prediction);
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  // Emit a code line info recording stop event.
  void* line_info = recorder->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(
                                AbstractCode::cast(*result), line_info));

  return result;
}


bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
  return code()
      ->InstructionBlockAt(current_block_)
      ->ao_number()
      .IsNext(code()->InstructionBlockAt(block)->ao_number());
}


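// Records a safepoint at the current masm() position. Tagged values in stack
// slots become entries in the safepoint table; slots in the fixed part of
// the frame (below the spill area, e.g. context and function) are skipped
// because the GC has special knowledge of them already.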
void CodeGenerator::RecordSafepoint(ReferenceMap* references,
                                    Safepoint::Kind kind, int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  int stackSlotToSpillSlotDelta =
      frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
  for (const InstructionOperand& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = LocationOperand::cast(operand).index();
      DCHECK(index >= 0);
      // We might index values in the fixed part of the frame (i.e. the
      // closure pointer or the context pointer); these are not spill slots
      // and therefore don't work with the SafepointTable currently, but
      // we also don't need to worry about them, since the GC has special
      // knowledge about those fields anyway.
      if (index < stackSlotToSpillSlotDelta) continue;
      safepoint.DefinePointerSlot(index, zone());
    } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = LocationOperand::cast(operand).GetRegister();
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}

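// Returns true if |object| is the current context or the closure, both of
// which live in fixed slots of a JSFunction call frame and can therefore be
// reloaded from the frame instead of being materialized as constants.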
bool CodeGenerator::IsMaterializableFromFrame(Handle<HeapObject> object,
                                              int* slot_return) {
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    if (object.is_identical_to(info()->context()) && !info()->is_osr()) {
      *slot_return = Frame::kContextSlot;
      return true;
    } else if (object.is_identical_to(info()->closure())) {
      *slot_return = Frame::kJSFunctionSlot;
      return true;
    }
  }
  return false;
}


bool CodeGenerator::IsMaterializableFromRoot(
    Handle<HeapObject> object, Heap::RootListIndex* index_return) {
  const CallDescriptor* incoming_descriptor =
      linkage()->GetIncomingDescriptor();
  if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
    RootIndexMap map(isolate());
    int root_index = map.Lookup(*object);
    if (root_index != RootIndexMap::kInvalidRootIndex) {
      *index_return = static_cast<Heap::RootListIndex>(root_index);
      return true;
    }
  }
  return false;
}

CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
    const InstructionBlock* block) {
  for (int i = block->code_start(); i < block->code_end(); ++i) {
    Instruction* instr = code()->InstructionAt(i);
    CodeGenResult result = AssembleInstruction(instr, block);
    if (result != kSuccess) return result;
  }
  return kSuccess;
}

CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    Instruction* instr, const InstructionBlock* block) {
  AssembleGaps(instr);
  DCHECK_IMPLIES(
      block->must_deconstruct_frame(),
      instr != code()->InstructionAt(block->last_instruction_index()) ||
          instr->IsRet() || instr->IsJump());
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
  AssembleSourcePosition(instr);
  // Assemble architecture-specific code for the instruction.
  CodeGenResult result = AssembleArchInstruction(instr);
  if (result != kSuccess) return result;

  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  switch (mode) {
    case kFlags_branch: {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

      if (true_rpo == false_rpo) {
        // Redundant branch.
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return kSuccess;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // The true block is next; fall through to it by negating the
        // condition.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      break;
    }
    case kFlags_deoptimize: {
      // Assemble a conditional eager deoptimization after this instruction.
      InstructionOperandConverter i(this, instr);
      size_t frame_state_offset = MiscField::decode(instr->opcode());
      DeoptimizationExit* const exit =
          AddDeoptimizationExit(instr, frame_state_offset);
      Label continue_label;
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = exit->label();
      branch.false_label = &continue_label;
      branch.fallthru = true;
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      masm()->bind(&continue_label);
      break;
    }
    case kFlags_set: {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
      break;
    }
    case kFlags_none: {
      break;
    }
  }
  return kSuccess;
}


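// Forwards the instruction's source position to the positions recorder and,
// under --code-comments, emits a "-- <file>:<line>:<column> --" comment.
// A position is only recorded when it differs from the previous one.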
void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position;
  if (!code()->GetSourcePosition(instr, &source_position)) return;
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (source_position.IsUnknown()) return;
  int code_pos = source_position.raw();
  masm()->positions_recorder()->RecordPosition(code_pos);
  if (FLAG_code_comments) {
    CompilationInfo* info = this->info();
    if (!info->parse_info()) return;
    Vector<char> buffer = Vector<char>::New(256);
    int ln = Script::GetLineNumber(info->script(), code_pos);
    int cn = Script::GetColumnNumber(info->script(), code_pos);
    if (info->script()->name()->IsString()) {
      Handle<String> file(String::cast(info->script()->name()));
      base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                         file->ToCString().get(), ln, cn);
    } else {
      base::OS::SNPrintF(buffer.start(), buffer.length(),
                         "-- <unknown>:%d:%d --", ln, cn);
    }
    masm()->RecordComment(buffer.start());
  }
}


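// Lowers the parallel moves recorded in the instruction's gap positions into
// actual move code, leaving ordering and cycle breaking to the gap resolver.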
void CodeGenerator::AssembleGaps(Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != nullptr) resolver()->Resolve(move);
  }
}


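// Builds the DeoptimizationInputData attached to the generated Code object:
// the translation byte array, the literal array, OSR info, and one entry per
// deopt point (bailout id, translation index and pc offset).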
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetAstId(i, deoptimization_state->bailout_id());
    CHECK(deoptimization_states_[i]);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}


Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}


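// Records the side tables for a call that was just emitted: a safepoint for
// the GC, an exception handler entry if the call may throw, and the lazy
// deoptimization frame state if the call needs one.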
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    bool caught = flags & CallDescriptor::kHasLocalCatchHandler;
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({caught, GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetFrameStateDescriptor(instr, frame_state_offset);
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or they are immediates.
    // (The values should not live in registers because registers are
    // clobbered by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsFPStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}


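// Returns the index of |literal| in the deoptimization literal array, adding
// it on first use. The linear search keeps the array free of duplicates; the
// array is normally small, so this is cheap enough.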
int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}


FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  InstructionSequence::StateId state_id =
      InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
  return code()->GetFrameStateDescriptor(state_id);
}


void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, Translation* translation,
    InstructionOperandIterator* iter) {
  if (desc->IsNested()) {
    translation->BeginCapturedObject(static_cast<int>(desc->size()));
    for (size_t index = 0; index < desc->fields().size(); index++) {
      TranslateStateValueDescriptor(&desc->fields()[index], translation, iter);
    }
  } else if (desc->IsDuplicate()) {
    translation->DuplicateObject(static_cast<int>(desc->id()));
  } else {
    DCHECK(desc->IsPlain());
    AddTranslationForOperand(translation, iter->instruction(), iter->Advance(),
                             desc->type());
  }
}


void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  for (size_t index = 0; index < desc->GetSize(combine); index++) {
    switch (combine.kind()) {
      case OutputFrameStateCombine::kPushOutput: {
        DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
        size_t size_without_output =
            desc->GetSize(OutputFrameStateCombine::Ignore());
        // If the index is past the existing stack items in values_.
        if (index >= size_without_output) {
          // Materialize the result of the call instruction in this slot.
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - size_without_output),
              MachineType::AnyTagged());
          continue;
        }
        break;
      }
      case OutputFrameStateCombine::kPokeAt:
        // The result of the call should be placed at position
        // [index_from_top] in the stack (overwriting whatever was
        // previously there).
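        // For example, with illustrative numbers, if GetSize(combine) is 5
        // and GetOffsetToPokeAt() is 1, index_from_top below is 3, so the
        // call's output is substituted for the operand at index 3.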
        size_t index_from_top =
            desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
        if (index >= index_from_top &&
            index < index_from_top + iter->instruction()->OutputCount()) {
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - index_from_top),
              MachineType::AnyTagged());
          iter->Advance();  // We do not use this input, but we need to
                            // advance, as the input got replaced.
          continue;
        }
        break;
    }
    StateValueDescriptor* value_desc = desc->GetStateValueDescriptor();
    TranslateStateValueDescriptor(&value_desc->fields()[index], translation,
                                  iter);
  }
}


void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kTailCallerFunction:
      translation->BeginTailCallerFrame(shared_info_id);
      break;
    case FrameStateType::kConstructStub:
      translation->BeginConstructStubFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
  }

  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}


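// Serializes the frame state reachable from |instr|'s input at
// |frame_state_offset| into a Translation, registers a DeoptimizationState
// for it, and returns the deoptimization id that safepoints and deopt exits
// use to refer to it.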
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  FrameStateDescriptor* descriptor =
      GetFrameStateDescriptor(instr, frame_state_offset);
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  InstructionOperandIterator iter(instr, frame_state_offset);
  BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
                                          state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset));

  return deoptimization_id;
}


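// Emits one translation entry for |op|, picking the store variant from the
// operand's location (stack slot, FP stack slot, register, FP register or
// immediate) and the machine type of the value held there.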
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsFPStackSlot()) {
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsFPRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatRegister(converter.ToFloatRegister(op));
    }
  } else if (op->IsImmediate()) {
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        DCHECK(type == MachineType::Int32() || type == MachineType::Uint32() ||
               type.representation() == MachineRepresentation::kBit);
        constant_object =
            isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK(type.representation() == MachineRepresentation::kTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  } else {
    CHECK(false);
  }
}


void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}

DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
    Instruction* instr, size_t frame_state_offset) {
  int const deoptimization_id = BuildTranslation(
      instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
  DeoptimizationExit* const exit =
      new (zone()) DeoptimizationExit(deoptimization_id);
  deoptimization_exits_.push_back(exit);
  return exit;
}

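// A sketch of the arithmetic below, with illustrative numbers: given a frame
// of 5 total slots on an architecture that keeps the return address on the
// stack (pc_slots == 1) and a stack_param_delta of -2, the resulting delta
// is (5 - 1) + (-2) == 2 slots.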
int CodeGenerator::TailCallFrameStackSlotDelta(int stack_param_delta) {
  // Leave the PC on the stack on platforms that have that as part of their
  // ABI.
  int pc_slots = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
  int sp_slot_delta = frame_access_state()->has_frame()
                          ? (frame()->GetTotalFrameSlotCount() - pc_slots)
                          : 0;
  // Discard only slots that won't be used by new parameters.
  sp_slot_delta += stack_param_delta;
  return sp_slot_delta;
}


OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}


OutOfLineCode::~OutOfLineCode() {}

}  // namespace compiler
}  // namespace internal
}  // namespace v8