// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/address-map.h"
#include "src/base/adapters.h"
#include "src/compilation-info.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/frames-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

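// A jump table to be emitted at the end of the generated code. Tables are
// chained into a singly-linked list so GenerateCode() can emit them all at
// the end of assembly.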
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};

CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(nullptr),
      linkage_(linkage),
      code_(code),
      unwinding_info_writer_(zone()),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_exits_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1),
      optimized_out_literal_id_(-1),
      source_position_table_builder_(code->zone(),
                                     info->SourcePositionRecordingMode()) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  CreateFrameAccessState(frame);
}

Isolate* CodeGenerator::isolate() const { return info_->isolate(); }

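// Finishes the frame layout and allocates the FrameAccessState that is used
// to access frame slots during assembly.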
void CodeGenerator::CreateFrameAccessState(Frame* frame) {
  FinishFrame(frame);
  frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
}


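// Main entry point: assembles all instruction blocks, out-of-line code,
// deoptimization exits, and jump tables, then packages the result into a
// finished Code object.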
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssembleConstructFrame).
  FrameScope frame_scope(masm(), StackFrame::MANUAL);

  if (info->is_source_positions_enabled()) {
    SourcePosition source_position(info->shared_info()->start_position());
    AssembleSourcePosition(source_position);
  }

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }
  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      int index = DefineDeoptimizationLiteral(inlined.shared_info);
      inlined.RegisterInlinedFunctionId(index);
    }
  }
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all unoptimized code objects of inlined
  // functions. This ensures unoptimized code is kept alive by optimized code.
  for (const CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
    }
  }

  unwinding_info_writer_.SetNumberOfInstructionBlocks(
      code()->InstructionBlockCount());

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (const InstructionBlock* block : code()->instruction_blocks()) {
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Ensure lazy deopt doesn't patch handler entry points.
      if (block->IsHandler()) EnsureSpaceForLazyDeopt();
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      unwinding_info_writer_.BeginInstructionBlock(masm()->pc_offset(), block);
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(200);
        char* buffer_start = buffer.start();

        int next = SNPrintF(
            buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
            block->IsDeferred() ? " (deferred)" : "",
            block->needs_frame() ? "" : " (no frame)",
            block->must_construct_frame() ? " (construct frame)" : "",
            block->must_deconstruct_frame() ? " (deconstruct frame)" : "");

        buffer = buffer.SubVector(next, buffer.length());

        if (block->IsLoopHeader()) {
          next =
              SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        if (block->loop_header().IsValid()) {
          next =
              SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        SNPrintF(buffer, " --");
        masm()->RecordComment(buffer_start);
      }

      frame_access_state()->MarkHasFrame(block->needs_frame());

      masm()->bind(GetLabel(current_block_));
      if (block->must_construct_frame()) {
        AssembleConstructFrame();
        // We need to set up the root register after we assemble the prologue,
        // to avoid clobbering callee-saved registers in case of C linkage and
        // using the roots.
        // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
        if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
          masm()->InitializeRootRegister();
        }
      }

      CodeGenResult result;
      if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
        ConstantPoolUnavailableScope constant_pool_unavailable(masm());
        result = AssembleBlock(block);
      } else {
        result = AssembleBlock(block);
      }
      if (result != kSuccess) return Handle<Code>();
      unwinding_info_writer_.EndInstructionBlock(block);
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Assemble all eager deoptimization exits.
  for (DeoptimizationExit* exit : deoptimization_exits_) {
    masm()->bind(exit->label());
    AssembleDeoptimizerCall(exit->deoptimization_id(), exit->pos());
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());

  unwinding_info_writer_.Finish(masm()->pc_offset());

  Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
      masm(), unwinding_info_writer_.eh_frame_writer(), info, Handle<Object>());
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetTotalFrameSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
  Handle<ByteArray> source_positions =
      source_position_table_builder_.ToSourcePositionTable(
          isolate(), Handle<AbstractCode>::cast(result));
  result->set_source_position_table(*source_positions);

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  return result;
}


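// Returns true if {block} immediately follows the current block in assembly
// order, so that a fallthrough can be used instead of an explicit jump.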
bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
  return code()
      ->InstructionBlockAt(current_block_)
      ->ao_number()
      .IsNext(code()->InstructionBlockAt(block)->ao_number());
}


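// Records a safepoint, marking the spill slots (and, for kWithRegisters,
// the registers) in {references} that hold tagged values.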
void CodeGenerator::RecordSafepoint(ReferenceMap* references,
                                    Safepoint::Kind kind, int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  int stackSlotToSpillSlotDelta =
      frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
  for (const InstructionOperand& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = LocationOperand::cast(operand).index();
      DCHECK(index >= 0);
      // We might index values in the fixed part of the frame (i.e. the
      // closure pointer or the context pointer); these are not spill slots
      // and therefore don't work with the SafepointTable currently, but
      // we also don't need to worry about them, since the GC has special
      // knowledge about those fields anyway.
      if (index < stackSlotToSpillSlotDelta) continue;
      safepoint.DefinePointerSlot(index, zone());
    } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = LocationOperand::cast(operand).GetRegister();
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}

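// Checks whether {object} is in the root list; if so, stores its root index
// in {index_return}. Only succeeds when the incoming call descriptor allows
// the use of the root register.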
bool CodeGenerator::IsMaterializableFromRoot(
    Handle<HeapObject> object, Heap::RootListIndex* index_return) {
  const CallDescriptor* incoming_descriptor =
      linkage()->GetIncomingDescriptor();
  if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
    RootIndexMap map(isolate());
    int root_index = map.Lookup(*object);
    if (root_index != RootIndexMap::kInvalidRootIndex) {
      *index_return = static_cast<Heap::RootListIndex>(root_index);
      return true;
    }
  }
  return false;
}

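// Assembles all instructions of {block}, stopping at the first failure.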
CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
    const InstructionBlock* block) {
  for (int i = block->code_start(); i < block->code_end(); ++i) {
    Instruction* instr = code()->InstructionAt(i);
    CodeGenResult result = AssembleInstruction(instr, block);
    if (result != kSuccess) return result;
  }
  return kSuccess;
}

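// Returns true if {source} can be pushed onto the stack with one of the push
// kinds permitted by {push_type}.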
bool CodeGenerator::IsValidPush(InstructionOperand source,
                                CodeGenerator::PushTypeFlags push_type) {
  if (source.IsImmediate() &&
      ((push_type & CodeGenerator::kImmediatePush) != 0)) {
    return true;
  }
  if ((source.IsRegister() || source.IsStackSlot()) &&
      ((push_type & CodeGenerator::kScalarPush) != 0)) {
    return true;
  }
  if ((source.IsFloatRegister() || source.IsFloatStackSlot()) &&
      ((push_type & CodeGenerator::kFloat32Push) != 0)) {
    return true;
  }
  if ((source.IsDoubleRegister() || source.IsDoubleStackSlot()) &&
      ((push_type & CodeGenerator::kFloat64Push) != 0)) {
    return true;
  }
  return false;
}

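// Collects in {pushes} the gap moves of {instr} that can be turned into a
// contiguous sequence of stack pushes before a tail call. If any move reads
// from a slot that a push would clobber, the optimization is abandoned and
// {pushes} is left empty.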
void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
                                           PushTypeFlags push_type,
                                           ZoneVector<MoveOperands*>* pushes) {
  pushes->clear();
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; ++i) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
    if (parallel_move != nullptr) {
      for (auto move : *parallel_move) {
        InstructionOperand source = move->source();
        InstructionOperand destination = move->destination();
        int first_push_compatible_index =
            V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
        // If there are any moves from slots that will be overridden by pushes,
        // then the full gap resolver must be used, since moves optimized into
        // pushes don't participate in the parallel move and might clobber
        // values needed for the gap resolve.
        if (source.IsStackSlot() &&
            LocationOperand::cast(source).index() >=
                first_push_compatible_index) {
          pushes->clear();
          return;
        }
        // TODO(danno): Right now, only consider moves from the FIRST gap for
        // pushes. Theoretically, we could extract pushes for both gaps (there
        // are cases where this happens), but the logic for that would also have
        // to check to make sure that non-memory inputs to the pushes from the
        // LAST gap don't get clobbered in the FIRST gap.
        if (i == Instruction::FIRST_GAP_POSITION) {
          if (destination.IsStackSlot() &&
              LocationOperand::cast(destination).index() >=
                  first_push_compatible_index) {
            int index = LocationOperand::cast(destination).index();
            if (IsValidPush(source, push_type)) {
              if (index >= static_cast<int>(pushes->size())) {
                pushes->resize(index + 1);
              }
              (*pushes)[index] = move;
            }
          }
        }
      }
    }
  }

  // For now, only support a contiguous run of pushes at the end of the list.
  size_t push_count_upper_bound = pushes->size();
  size_t push_begin = push_count_upper_bound;
  for (auto move : base::Reversed(*pushes)) {
    if (move == nullptr) break;
    push_begin--;
  }
  size_t push_count = pushes->size() - push_begin;
  std::copy(pushes->begin() + push_begin,
            pushes->begin() + push_begin + push_count, pushes->begin());
  pushes->resize(push_count);
}

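// Assembles one instruction: resolves its gap moves, deconstructs the frame
// before a frame-exiting jump, emits the architecture-specific code, and
// then materializes any branch, deoptimization check, boolean value, or trap
// implied by the instruction's flags mode.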
CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    Instruction* instr, const InstructionBlock* block) {
  int first_unused_stack_slot;
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  if (mode != kFlags_trap) {
    AssembleSourcePosition(instr);
  }
  bool adjust_stack =
      GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
  if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
  AssembleGaps(instr);
  if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
  DCHECK_IMPLIES(
      block->must_deconstruct_frame(),
      instr != code()->InstructionAt(block->last_instruction_index()) ||
          instr->IsRet() || instr->IsJump());
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
  // Assemble architecture-specific code for the instruction.
  CodeGenResult result = AssembleArchInstruction(instr);
  if (result != kSuccess) return result;

  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  switch (mode) {
    case kFlags_branch: {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

      if (true_rpo == false_rpo) {
        // Redundant branch: both targets are the same block.
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return kSuccess;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // The true block is next; we can fall through if the condition is
        // negated.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      break;
    }
    case kFlags_deoptimize: {
      // Assemble a conditional eager deoptimization after this instruction.
      InstructionOperandConverter i(this, instr);
      size_t frame_state_offset = MiscField::decode(instr->opcode());
      DeoptimizationExit* const exit =
          AddDeoptimizationExit(instr, frame_state_offset);
      Label continue_label;
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = exit->label();
      branch.false_label = &continue_label;
      branch.fallthru = true;
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      masm()->bind(&continue_label);
      break;
    }
    case kFlags_set: {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
      break;
    }
    case kFlags_trap: {
      AssembleArchTrap(instr, condition);
      break;
    }
    case kFlags_none: {
      break;
    }
  }
  return kSuccess;
}

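// Records the source position attached to {instr}, if any. The overload
// below skips positions identical to the last one recorded and optionally
// emits the position as a code comment.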
void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position = SourcePosition::Unknown();
  if (instr->IsNop() && instr->AreMovesRedundant()) return;
  if (!code()->GetSourcePosition(instr, &source_position)) return;
  AssembleSourcePosition(source_position);
}

void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (!source_position.IsKnown()) return;
  source_position_table_builder_.AddPosition(masm()->pc_offset(),
                                             source_position, false);
  if (FLAG_code_comments) {
    CompilationInfo* info = this->info();
    if (!info->parse_info()) return;
    std::ostringstream buffer;
    buffer << "-- ";
    if (FLAG_trace_turbo) {
      buffer << source_position;
    } else {
      buffer << source_position.InliningStack(info);
    }
    buffer << " --";
    masm()->RecordComment(StrDup(buffer.str().c_str()));
  }
}

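// For tail calls, stores the first unused stack slot (encoded as the last
// input of the instruction) in {slot} and returns true; returns false for
// all other instructions.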
bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
                                                 int* slot) {
  if (instr->IsTailCall()) {
    InstructionOperandConverter g(this, instr);
    *slot = g.InputInt32(instr->InputCount() - 1);
    return true;
  } else {
    return false;
  }
}

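// Resolves the parallel moves attached to both gap positions of {instr}.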
void CodeGenerator::AssembleGaps(Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != nullptr) resolver()->Resolve(move);
  }
}

namespace {

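// Packs the inlining positions of all inlined functions into a PodArray for
// the deoptimization data; returns an empty array if nothing was inlined.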
Handle<PodArray<InliningPosition>> CreateInliningPositions(
    CompilationInfo* info) {
  const CompilationInfo::InlinedFunctionList& inlined_functions =
      info->inlined_functions();
  if (inlined_functions.size() == 0) {
    return Handle<PodArray<InliningPosition>>::cast(
        info->isolate()->factory()->empty_byte_array());
  }
  Handle<PodArray<InliningPosition>> inl_positions =
      PodArray<InliningPosition>::New(
          info->isolate(), static_cast<int>(inlined_functions.size()), TENURED);
  for (size_t i = 0; i < inlined_functions.size(); ++i) {
    inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
  }
  return inl_positions;
}

}  // namespace

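// Builds the DeoptimizationInputData for the finished code object: the
// translation byte array, the literal array, inlining positions, OSR
// information, and one entry per recorded deoptimization state.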
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::kZero);
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  Handle<PodArray<InliningPosition>> inl_pos = CreateInliningPositions(info);
  data->SetInliningPositions(*inl_pos);

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    CHECK(deoptimization_state);
    data->SetAstId(i, deoptimization_state->bailout_id());
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_state->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::kZero);
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}


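// Allocates a new jump table, prepends it to the list emitted at the end of
// assembly, and returns the label that will be bound to its start.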
Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}


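// Records per-call-site bookkeeping: the safepoint, the exception handler
// (if any), and the lazy deoptimization frame state(s) when required.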
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}


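// Returns the index of {literal} in the deoptimization literal array,
// reusing an existing entry if an identical literal is already present.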
int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}

DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  int const state_id = i.InputInt32(frame_state_offset);
  return code()->GetDeoptimizationEntry(state_id);
}

DeoptimizeKind CodeGenerator::GetDeoptimizationKind(
    int deoptimization_id) const {
  size_t const index = static_cast<size_t>(deoptimization_id);
  DCHECK_LT(index, deoptimization_states_.size());
  return deoptimization_states_[index]->kind();
}

DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
    int deoptimization_id) const {
  size_t const index = static_cast<size_t>(deoptimization_id);
  DCHECK_LT(index, deoptimization_states_.size());
  return deoptimization_states_[index]->reason();
}

void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, StateValueList* nested,
    Translation* translation, InstructionOperandIterator* iter) {
  // Note: if translation is null, we just skip the relevant instruction
  // operands.
  if (desc->IsNested()) {
    if (translation != nullptr) {
      translation->BeginCapturedObject(static_cast<int>(nested->size()));
    }
    for (auto field : *nested) {
      TranslateStateValueDescriptor(field.desc, field.nested, translation,
                                    iter);
    }
  } else if (desc->IsArguments()) {
    if (translation != nullptr) {
      translation->BeginArgumentsObject(0);
    }
  } else if (desc->IsDuplicate()) {
    if (translation != nullptr) {
      translation->DuplicateObject(static_cast<int>(desc->id()));
    }
  } else if (desc->IsPlain()) {
    InstructionOperand* op = iter->Advance();
    if (translation != nullptr) {
      AddTranslationForOperand(translation, iter->instruction(), op,
                               desc->type());
    }
  } else {
    DCHECK(desc->IsOptimizedOut());
    if (translation != nullptr) {
      if (optimized_out_literal_id_ == -1) {
        optimized_out_literal_id_ =
            DefineDeoptimizationLiteral(isolate()->factory()->optimized_out());
      }
      translation->StoreLiteral(optimized_out_literal_id_);
    }
  }
}


void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  size_t index = 0;
  StateValueList* values = desc->GetStateValueDescriptors();
  for (StateValueList::iterator it = values->begin(); it != values->end();
       ++it, ++index) {
    StateValueDescriptor* value_desc = (*it).desc;
    if (combine.kind() == OutputFrameStateCombine::kPokeAt) {
      // The result of the call should be placed at position
      // [index_from_top] in the stack (overwriting whatever was
      // previously there).
      size_t index_from_top =
          desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
      if (index >= index_from_top &&
          index < index_from_top + iter->instruction()->OutputCount()) {
        DCHECK_NOT_NULL(translation);
        AddTranslationForOperand(
            translation, iter->instruction(),
            iter->instruction()->OutputAt(index - index_from_top),
            MachineType::AnyTagged());
        // Skip the instruction operands.
        TranslateStateValueDescriptor(value_desc, (*it).nested, nullptr, iter);
        continue;
      }
    }
    TranslateStateValueDescriptor(value_desc, (*it).nested, translation, iter);
  }
  DCHECK_EQ(desc->GetSize(OutputFrameStateCombine::Ignore()), index);

  if (combine.kind() == OutputFrameStateCombine::kPushOutput) {
    DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
    for (size_t output = 0; output < combine.GetPushCount(); output++) {
      // Materialize the result of the call instruction in this slot.
      AddTranslationForOperand(translation, iter->instruction(),
                               iter->instruction()->OutputAt(output),
                               MachineType::AnyTagged());
    }
  }
}


void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // The outermost state must be added to the translation first.
  if (descriptor->outer_state() != nullptr) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kTailCallerFunction:
      translation->BeginTailCallerFrame(shared_info_id);
      break;
    case FrameStateType::kConstructStub:
      DCHECK(descriptor->bailout_id().IsValidForConstructStub());
      translation->BeginConstructStubFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kGetterStub:
      translation->BeginGetterStubFrame(shared_info_id);
      break;
    case FrameStateType::kSetterStub:
      translation->BeginSetterStubFrame(shared_info_id);
      break;
  }

  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}


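// Builds the full translation for the frame state found at
// {frame_state_offset} in {instr}, records a new DeoptimizationState, and
// returns its id.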
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  DeoptimizationEntry const& entry =
      GetDeoptimizationEntry(instr, frame_state_offset);
  FrameStateDescriptor* const descriptor = entry.descriptor();
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  InstructionOperandIterator iter(instr, frame_state_offset);
  BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
                                          state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset, entry.kind(),
      entry.reason()));

  return deoptimization_id;
}


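// Emits one translation command for {op}, dispatching on its location kind
// (stack slot, FP stack slot, register, FP register, or immediate) and the
// machine type of the value it holds.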
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK_EQ(MachineRepresentation::kTagged, type.representation());
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsFPStackSlot()) {
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else {
      CHECK_EQ(MachineRepresentation::kTagged, type.representation());
      translation->StoreRegister(converter.ToRegister(op));
    }
  } else if (op->IsFPRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
    } else {
      CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatRegister(converter.ToFloatRegister(op));
    }
  } else {
    CHECK(op->IsImmediate());
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        if (type.representation() == MachineRepresentation::kTagged) {
          // When pointers are 4 bytes, we can use int32 constants to represent
          // Smis.
          DCHECK_EQ(4, kPointerSize);
          constant_object =
              handle(reinterpret_cast<Smi*>(constant.ToInt32()), isolate());
          DCHECK(constant_object->IsSmi());
        } else if (type.representation() == MachineRepresentation::kBit) {
          if (constant.ToInt32() == 0) {
            constant_object = isolate()->factory()->false_value();
          } else {
            DCHECK_EQ(1, constant.ToInt32());
            constant_object = isolate()->factory()->true_value();
          }
        } else {
          // TODO(jarin,bmeurer): We currently pass in raw pointers to the
          // JSFunction::entry here. We should really consider fixing this.
          DCHECK(type == MachineType::Int32() ||
                 type == MachineType::Uint32() ||
                 type.representation() == MachineRepresentation::kWord32 ||
                 type.representation() == MachineRepresentation::kNone);
          DCHECK(type.representation() != MachineRepresentation::kNone ||
                 constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);
          if (type == MachineType::Uint32()) {
            constant_object =
                isolate()->factory()->NewNumberFromUint(constant.ToInt32());
          } else {
            constant_object =
                isolate()->factory()->NewNumberFromInt(constant.ToInt32());
          }
        }
        break;
      case Constant::kInt64:
        // When pointers are 8 bytes, we can use int64 constants to represent
        // Smis.
        // TODO(jarin,bmeurer): We currently pass in raw pointers to the
        // JSFunction::entry here. We should really consider fixing this.
        DCHECK(type.representation() == MachineRepresentation::kWord64 ||
               type.representation() == MachineRepresentation::kTagged);
        DCHECK_EQ(8, kPointerSize);
        constant_object =
            handle(reinterpret_cast<Smi*>(constant.ToInt64()), isolate());
        DCHECK(constant_object->IsSmi());
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
        constant_object = constant.ToHeapObject();
        break;
      default:
        UNREACHABLE();
    }
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  }
}


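// Remembers the current pc offset as the most recent lazy deopt site.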
void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}

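// Creates a deoptimization exit for a conditional eager deopt. The exit's
// label is bound and its deoptimizer call assembled after all blocks in
// GenerateCode().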
DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
    Instruction* instr, size_t frame_state_offset) {
  int const deoptimization_id = BuildTranslation(
      instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
  DeoptimizationExit* const exit = new (zone())
      DeoptimizationExit(deoptimization_id, current_source_position_);
  deoptimization_exits_.push_back(exit);
  return exit;
}

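// Out-of-line code fragments register themselves with the code generator on
// construction; GenerateCode() emits them after all regular blocks.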
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}

OutOfLineCode::~OutOfLineCode() {}

}  // namespace compiler
}  // namespace internal
}  // namespace v8