• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

5 #ifndef V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_
6 #define V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_
7 
8 
9 #include "src/ast/scopes.h"
10 #include "src/base/logging.h"
11 #include "src/crankshaft/lithium-codegen.h"
12 #include "src/crankshaft/x64/lithium-gap-resolver-x64.h"
13 #include "src/crankshaft/x64/lithium-x64.h"
14 #include "src/deoptimizer.h"
15 #include "src/safepoint-table.h"
16 #include "src/utils.h"
17 
18 namespace v8 {
19 namespace internal {
20 
21 // Forward declarations.
22 class LDeferredCode;
23 class SafepointGenerator;
24 
25 class LCodeGen: public LCodeGenBase {
26  public:
LCodeGen(LChunk * chunk,MacroAssembler * assembler,CompilationInfo * info)27   LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
28       : LCodeGenBase(chunk, assembler, info),
29         jump_table_(4, info->zone()),
30         scope_(info->scope()),
31         deferred_(8, info->zone()),
32         frame_is_built_(false),
33         safepoints_(info->zone()),
34         resolver_(this),
35         expected_safepoint_kind_(Safepoint::kSimple) {
36     PopulateDeoptimizationLiteralsWithInlinedFunctions();
37   }
38 
LookupDestination(int block_id)39   int LookupDestination(int block_id) const {
40     return chunk()->LookupDestination(block_id);
41   }
42 
IsNextEmittedBlock(int block_id)43   bool IsNextEmittedBlock(int block_id) const {
44     return LookupDestination(block_id) == GetNextEmittedBlock();
45   }
46 
NeedsEagerFrame()47   bool NeedsEagerFrame() const {
48     return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
49            !info()->IsStub() || info()->requires_frame();
50   }
NeedsDeferredFrame()51   bool NeedsDeferredFrame() const {
52     return !NeedsEagerFrame() && info()->is_deferred_calling();
53   }
54 
55   // Support for converting LOperands to assembler types.
56   Register ToRegister(LOperand* op) const;
57   XMMRegister ToDoubleRegister(LOperand* op) const;
58   bool IsInteger32Constant(LConstantOperand* op) const;
59   bool IsExternalConstant(LConstantOperand* op) const;
60   bool IsDehoistedKeyConstant(LConstantOperand* op) const;
61   bool IsSmiConstant(LConstantOperand* op) const;
62   int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
63   int32_t ToInteger32(LConstantOperand* op) const;
64   Smi* ToSmi(LConstantOperand* op) const;
65   double ToDouble(LConstantOperand* op) const;
66   ExternalReference ToExternalReference(LConstantOperand* op) const;
67   Handle<Object> ToHandle(LConstantOperand* op) const;
68   Operand ToOperand(LOperand* op) const;
69 
70   // Try to generate code for the entire chunk, but it may fail if the
71   // chunk contains constructs we cannot handle. Returns true if the
72   // code generation attempt succeeded.
73   bool GenerateCode();
74 
75   // Finish the code by setting stack height, safepoint, and bailout
76   // information on it.
77   void FinishCode(Handle<Code> code);
78 
79   // Deferred code support.
80   void DoDeferredNumberTagD(LNumberTagD* instr);
81 
82   enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
83   void DoDeferredNumberTagIU(LInstruction* instr,
84                              LOperand* value,
85                              LOperand* temp1,
86                              LOperand* temp2,
87                              IntegerSignedness signedness);
88 
89   void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
90   void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
91   void DoDeferredStackCheck(LStackCheck* instr);
92   void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
93   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
94   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
95   void DoDeferredAllocate(LAllocate* instr);
96   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
97   void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
98                                    Register object,
99                                    Register index);
100 
101 // Parallel move support.
102   void DoParallelMove(LParallelMove* move);
103   void DoGap(LGap* instr);
104 
105   // Emit frame translation commands for an environment.
106   void WriteTranslation(LEnvironment* environment, Translation* translation);
107 
108   // Declare methods that deal with the individual node types.
109 #define DECLARE_DO(type) void Do##type(L##type* node);
LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)110   LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
111 #undef DECLARE_DO
112 
113  private:
114   LPlatformChunk* chunk() const { return chunk_; }
scope()115   Scope* scope() const { return scope_; }
graph()116   HGraph* graph() const { return chunk()->graph(); }
117 
double_scratch0()118   XMMRegister double_scratch0() const { return kScratchDoubleReg; }
119 
120   void EmitClassOfTest(Label* if_true,
121                        Label* if_false,
122                        Handle<String> class_name,
123                        Register input,
124                        Register temporary,
125                        Register scratch);
126 
HasAllocatedStackSlots()127   bool HasAllocatedStackSlots() const {
128     return chunk()->HasAllocatedStackSlots();
129   }
GetStackSlotCount()130   int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
GetTotalFrameSlotCount()131   int GetTotalFrameSlotCount() const {
132     return chunk()->GetTotalFrameSlotCount();
133   }
134 
AddDeferredCode(LDeferredCode * code)135   void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
136 
137 
138   void SaveCallerDoubles();
139   void RestoreCallerDoubles();
140 
141   // Code generation passes.  Returns true if code generation should
142   // continue.
143   void GenerateBodyInstructionPre(LInstruction* instr) override;
144   void GenerateBodyInstructionPost(LInstruction* instr) override;
145   bool GeneratePrologue();
146   bool GenerateDeferredCode();
147   bool GenerateJumpTable();
148   bool GenerateSafepointTable();
149 
150   // Generates the custom OSR entrypoint and sets the osr_pc_offset.
151   void GenerateOsrPrologue();
152 
153   enum SafepointMode {
154     RECORD_SIMPLE_SAFEPOINT,
155     RECORD_SAFEPOINT_WITH_REGISTERS
156   };
157 
158   void CallCodeGeneric(Handle<Code> code,
159                        RelocInfo::Mode mode,
160                        LInstruction* instr,
161                        SafepointMode safepoint_mode,
162                        int argc);
163 
164 
165   void CallCode(Handle<Code> code,
166                 RelocInfo::Mode mode,
167                 LInstruction* instr);
168 
169   void CallRuntime(const Runtime::Function* function,
170                    int num_arguments,
171                    LInstruction* instr,
172                    SaveFPRegsMode save_doubles = kDontSaveFPRegs);
173 
CallRuntime(Runtime::FunctionId id,int num_arguments,LInstruction * instr)174   void CallRuntime(Runtime::FunctionId id,
175                    int num_arguments,
176                    LInstruction* instr) {
177     const Runtime::Function* function = Runtime::FunctionForId(id);
178     CallRuntime(function, num_arguments, instr);
179   }
180 
CallRuntime(Runtime::FunctionId id,LInstruction * instr)181   void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
182     const Runtime::Function* function = Runtime::FunctionForId(id);
183     CallRuntime(function, function->nargs, instr);
184   }
185 
186   void CallRuntimeFromDeferred(Runtime::FunctionId id,
187                                int argc,
188                                LInstruction* instr,
189                                LOperand* context);
190 
191   void LoadContextFromDeferred(LOperand* context);
192 
193   void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
194                           Register scratch2, Register scratch3);
195 
196   // Generate a direct call to a known function.  Expects the function
197   // to be in rdi.
198   void CallKnownFunction(Handle<JSFunction> function,
199                          int formal_parameter_count, int arity,
200                          bool is_tail_call, LInstruction* instr);
201 
202   void RecordSafepointWithLazyDeopt(LInstruction* instr,
203                                     SafepointMode safepoint_mode,
204                                     int argc);
205   void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
206                                             Safepoint::DeoptMode mode);
207   void DeoptimizeIf(Condition cc, LInstruction* instr,
208                     Deoptimizer::DeoptReason deopt_reason,
209                     Deoptimizer::BailoutType bailout_type);
210   void DeoptimizeIf(Condition cc, LInstruction* instr,
211                     Deoptimizer::DeoptReason deopt_reason);
212 
DeoptEveryNTimes()213   bool DeoptEveryNTimes() {
214     return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
215   }
216 
217   void AddToTranslation(LEnvironment* environment,
218                         Translation* translation,
219                         LOperand* op,
220                         bool is_tagged,
221                         bool is_uint32,
222                         int* object_index_pointer,
223                         int* dematerialized_index_pointer);
224 
225   Register ToRegister(int index) const;
226   XMMRegister ToDoubleRegister(int index) const;
227   Operand BuildFastArrayOperand(
228       LOperand* elements_pointer,
229       LOperand* key,
230       Representation key_representation,
231       ElementsKind elements_kind,
232       uint32_t base_offset);
233 
234   Operand BuildSeqStringOperand(Register string,
235                                 LOperand* index,
236                                 String::Encoding encoding);
237 
238   void EmitIntegerMathAbs(LMathAbs* instr);
239   void EmitSmiMathAbs(LMathAbs* instr);
240 
241   // Support for recording safepoint and position information.
242   void RecordSafepoint(LPointerMap* pointers,
243                        Safepoint::Kind kind,
244                        int arguments,
245                        Safepoint::DeoptMode mode);
246   void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
247   void RecordSafepoint(Safepoint::DeoptMode mode);
248   void RecordSafepointWithRegisters(LPointerMap* pointers,
249                                     int arguments,
250                                     Safepoint::DeoptMode mode);
251   void RecordAndWritePosition(int position) override;
252 
253   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
254   void EmitGoto(int block);
255 
256   // EmitBranch expects to be the last instruction of a block.
257   template<class InstrType>
258   void EmitBranch(InstrType instr, Condition cc);
259   template <class InstrType>
260   void EmitTrueBranch(InstrType instr, Condition cc);
261   template <class InstrType>
262   void EmitFalseBranch(InstrType instr, Condition cc);
263   void EmitNumberUntagD(LNumberUntagD* instr, Register input,
264                         XMMRegister result, NumberUntagDMode mode);
265 
266   // Emits optimized code for typeof x == "y".  Modifies input register.
267   // Returns the condition on which a final split to
268   // true and false label should be made, to optimize fallthrough.
269   Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
270 
271   // Emits optimized code for %_IsString(x).  Preserves input register.
272   // Returns the condition on which a final split to
273   // true and false label should be made, to optimize fallthrough.
274   Condition EmitIsString(Register input,
275                          Register temp1,
276                          Label* is_not_string,
277                          SmiCheck check_needed);
278 
279   // Emits code for pushing either a tagged constant, a (non-double)
280   // register, or a stack slot operand.
281   void EmitPushTaggedOperand(LOperand* operand);
282 
283   // Emits optimized code to deep-copy the contents of statically known
284   // object graphs (e.g. object literal boilerplate).
285   void EmitDeepCopy(Handle<JSObject> object,
286                     Register result,
287                     Register source,
288                     int* offset,
289                     AllocationSiteMode mode);
290 
291   void EnsureSpaceForLazyDeopt(int space_needed) override;
292   void DoLoadKeyedExternalArray(LLoadKeyed* instr);
293   void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
294   void DoLoadKeyedFixedArray(LLoadKeyed* instr);
295   void DoStoreKeyedExternalArray(LStoreKeyed* instr);
296   void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
297   void DoStoreKeyedFixedArray(LStoreKeyed* instr);
298 
299   template <class T>
300   void EmitVectorLoadICRegisters(T* instr);
301   template <class T>
302   void EmitVectorStoreICRegisters(T* instr);
303 
304 #ifdef _MSC_VER
305   // On windows, you may not access the stack more than one page below
306   // the most recently mapped page. To make the allocated area randomly
307   // accessible, we write an arbitrary value to each page in range
308   // rsp + offset - page_size .. rsp in turn.
309   void MakeSureStackPagesMapped(int offset);
310 #endif
311 
312   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
313   Scope* const scope_;
314   ZoneList<LDeferredCode*> deferred_;
315   bool frame_is_built_;
316 
317   // Builder that keeps track of safepoints in the code. The table
318   // itself is emitted at the end of the generated code.
319   SafepointTableBuilder safepoints_;
320 
321   // Compiler from a set of parallel moves to a sequential list of moves.
322   LGapResolver resolver_;
323 
324   Safepoint::Kind expected_safepoint_kind_;
325 
326   class PushSafepointRegistersScope final BASE_EMBEDDED {
327    public:
PushSafepointRegistersScope(LCodeGen * codegen)328     explicit PushSafepointRegistersScope(LCodeGen* codegen)
329         : codegen_(codegen) {
330       DCHECK(codegen_->info()->is_calling());
331       DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
332       codegen_->masm_->PushSafepointRegisters();
333       codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
334     }
335 
~PushSafepointRegistersScope()336     ~PushSafepointRegistersScope() {
337       DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
338       codegen_->masm_->PopSafepointRegisters();
339       codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
340     }
341 
342    private:
343     LCodeGen* codegen_;
344   };
345 
346   friend class LDeferredCode;
347   friend class LEnvironment;
348   friend class SafepointGenerator;
349   DISALLOW_COPY_AND_ASSIGN(LCodeGen);
350 };
351 
352 
353 class LDeferredCode: public ZoneObject {
354  public:
LDeferredCode(LCodeGen * codegen)355   explicit LDeferredCode(LCodeGen* codegen)
356       : codegen_(codegen),
357         external_exit_(NULL),
358         instruction_index_(codegen->current_instruction_) {
359     codegen->AddDeferredCode(this);
360   }
361 
~LDeferredCode()362   virtual ~LDeferredCode() {}
363   virtual void Generate() = 0;
364   virtual LInstruction* instr() = 0;
365 
SetExit(Label * exit)366   void SetExit(Label* exit) { external_exit_ = exit; }
entry()367   Label* entry() { return &entry_; }
exit()368   Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
done()369   Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
instruction_index()370   int instruction_index() const { return instruction_index_; }
371 
372  protected:
codegen()373   LCodeGen* codegen() const { return codegen_; }
masm()374   MacroAssembler* masm() const { return codegen_->masm(); }
375 
376  private:
377   LCodeGen* codegen_;
378   Label entry_;
379   Label exit_;
380   Label done_;
381   Label* external_exit_;
382   int instruction_index_;
383 };
384 
385 }  // namespace internal
386 }  // namespace v8
387 
388 #endif  // V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_
389