// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_

#include "src/crankshaft/arm64/lithium-arm64.h"

#include "src/ast/scopes.h"
#include "src/crankshaft/arm64/lithium-gap-resolver-arm64.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        pushed_arguments_(0) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

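  // A frame is built eagerly in the prologue when the code has spill slots,
  // calls on the main path, or otherwise requires one; code whose only calls
  // happen in deferred code builds the frame lazily, in the deferred section.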
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32(LOperand* op);
  enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
  MemOperand ToMemOperand(LOperand* op,
                          StackMode stack_mode = kCanUseStackPointer) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  template <class LI>
  Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info);

  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains some code to avoid emitting a
  // branch on the next emitted basic block where we could just fall through.
  // You shouldn't use it directly; consider one of the helpers below, such as
  // LCodeGen::EmitBranch or LCodeGen::EmitCompareAndBranch.
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which a final split to the true and false labels
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
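  // Deoptimization support. Each DeoptimizeIf* helper emits a (conditional)
  // branch out of the optimized code for the current instruction, recording
  // the given deopt_reason.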
  void DeoptimizeBranch(LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason,
                        BranchType branch_type, Register reg = NoReg,
                        int bit = -1,
                        Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LInstruction* instr, Deoptimizer::DeoptReason deopt_reason,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIf(Condition cond, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfZero(Register rt, LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
                           Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNegative(Register rt, LInstruction* instr,
                            Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfSmi(Register rt, LInstruction* instr,
                       Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
                          Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
                        LInstruction* instr,
                        Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
                           LInstruction* instr,
                           Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr);
  void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
                             Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
                          Deoptimizer::DeoptReason deopt_reason);
  void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
                            Deoptimizer::DeoptReason deopt_reason);

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int base_offset);
  MemOperand PrepareKeyedArrayOperand(Register base,
                                      Register elements,
                                      Register key,
                                      bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
                                      int base_offset);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps.  Returns true if code generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function.  Expects the function
  // to be in x1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         LInstruction* instr);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) override;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;

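  // Deoptimization jump table entries; the table itself is emitted at the end
  // of the generated code (see GenerateJumpTable()).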
  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // The number of arguments pushed onto the stack, either by this block or by
  // a predecessor.
  int pushed_arguments_;

  void RecordPushedArgumentsDelta(int delta) {
    pushed_arguments_ += delta;
    DCHECK(pushed_arguments_ >= 0);
  }

  int old_position_;

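  // RAII helper for code sequences that need a safepoint with registers: the
  // constructor saves the register state (via StoreRegistersStateStub) and
  // switches the expected safepoint kind to kWithRegisters; the destructor
  // restores the registers and switches back to kSimple.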
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr which must be saved on the stack (the call to
      // the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


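// Deferred code is emitted out of line, after the main instruction stream.
// Concrete subclasses live in lithium-codegen-arm64.cc; a typical one looks
// roughly like this (sketch, using DoDeferredNumberTagD declared above):
//
//   class DeferredNumberTagD : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
//     LInstruction* instr() override { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };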
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};


// This is the abstract class used by EmitBranchGeneric. It is used to emit
// code for conditional branching: Emit() emits code to branch when the
// condition holds, and EmitInverted() emits the branch taken when the
// inverted condition holds.
//
// For concrete examples, see the implementations in lithium-codegen-arm64.cc
// (e.g. BranchOnCondition, CompareAndBranch).
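//
// A minimal concrete generator might look roughly like this (sketch; the
// real BranchOnCondition in lithium-codegen-arm64.cc has this shape):
//
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) { }
//     void Emit(Label* label) const override { masm()->B(cond_, label); }
//     void EmitInverted(Label* label) const override {
//       if (cond_ != al) masm()->B(NegateCondition(cond_), label);
//     }
//    private:
//     Condition cond_;
//   };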
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
    : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM64_LITHIUM_CODEGEN_ARM64_H_