// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_
#define V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/arm/lithium-arm.h"
#include "src/crankshaft/arm/lithium-gap-resolver-arm.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

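  // Frame requirements (descriptive note): an "eager" frame must be set up in
  // the prologue, while a "deferred" frame is only built on the slow path when
  // deferred code needs to make a call.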
  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DwVfpRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
                                       SwVfpRegister flt_scratch,
                                       DwVfpRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
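  // Note: each entry Foo in LITHIUM_CONCRETE_INSTRUCTION_LIST thus declares a
  // handler of the form: void DoFoo(LFoo* node);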

 private:
  Scope* scope() const { return scope_; }

  Register scratch0() { return r9; }
  LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode);

  void CallCode(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallCodeGeneric(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      SafepointMode safepoint_mode,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function.  Expects the function
  // to be in r1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    DeoptimizeReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    DeoptimizeReason deopt_reason);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DwVfpRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DwVfpRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

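  // Usage sketch (illustrative only; pointer_map, argc and deopt_mode are
  // placeholder names): wrap a register-clobbering call, typically in
  // deferred code, so the matching safepoint is recorded with registers:
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     // ... emit the call ...
  //     RecordSafepointWithRegisters(pointer_map, argc, deopt_mode);
  //   }
  // The DCHECKs below ensure these scopes do not nest and that the expected
  // safepoint kind is restored on exit.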
  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      codegen_->masm_->PushSafepointRegisters();
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


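// Base class for code emitted out of line, after the main instruction stream.
// Subclasses override Generate() and instr(); the main code branches to
// entry(), and the generated deferred code jumps back to exit() when done.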
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_