// NOTE(review): removed code-browser navigation chrome ("Home / Line# /
// Scopes / Navigate / Raw / Download") that was captured along with the
// source when this file was scraped; it is not part of the header.
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_CRANKSHAFT_PPC_LITHIUM_CODEGEN_PPC_H_
6 #define V8_CRANKSHAFT_PPC_LITHIUM_CODEGEN_PPC_H_
7 
8 #include "src/ast/scopes.h"
9 #include "src/crankshaft/lithium-codegen.h"
10 #include "src/crankshaft/ppc/lithium-gap-resolver-ppc.h"
11 #include "src/crankshaft/ppc/lithium-ppc.h"
12 #include "src/deoptimizer.h"
13 #include "src/safepoint-table.h"
14 #include "src/utils.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 // Forward declarations.
20 class LDeferredCode;
21 class SafepointGenerator;
22 
23 class LCodeGen : public LCodeGenBase {
24  public:
LCodeGen(LChunk * chunk,MacroAssembler * assembler,CompilationInfo * info)25   LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
26       : LCodeGenBase(chunk, assembler, info),
27         jump_table_(4, info->zone()),
28         scope_(info->scope()),
29         deferred_(8, info->zone()),
30         frame_is_built_(false),
31         safepoints_(info->zone()),
32         resolver_(this),
33         expected_safepoint_kind_(Safepoint::kSimple) {
34     PopulateDeoptimizationLiteralsWithInlinedFunctions();
35   }
36 
37 
LookupDestination(int block_id)38   int LookupDestination(int block_id) const {
39     return chunk()->LookupDestination(block_id);
40   }
41 
IsNextEmittedBlock(int block_id)42   bool IsNextEmittedBlock(int block_id) const {
43     return LookupDestination(block_id) == GetNextEmittedBlock();
44   }
45 
NeedsEagerFrame()46   bool NeedsEagerFrame() const {
47     return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
48            !info()->IsStub() || info()->requires_frame();
49   }
NeedsDeferredFrame()50   bool NeedsDeferredFrame() const {
51     return !NeedsEagerFrame() && info()->is_deferred_calling();
52   }
53 
GetLinkRegisterState()54   LinkRegisterStatus GetLinkRegisterState() const {
55     return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
56   }
57 
58   // Support for converting LOperands to assembler types.
59   // LOperand must be a register.
60   Register ToRegister(LOperand* op) const;
61 
62   // LOperand is loaded into scratch, unless already a register.
63   Register EmitLoadRegister(LOperand* op, Register scratch);
64 
65   // LConstantOperand must be an Integer32 or Smi
66   void EmitLoadIntegerConstant(LConstantOperand* const_op, Register dst);
67 
68   // LOperand must be a double register.
69   DoubleRegister ToDoubleRegister(LOperand* op) const;
70 
71   intptr_t ToRepresentation(LConstantOperand* op,
72                             const Representation& r) const;
73   int32_t ToInteger32(LConstantOperand* op) const;
74   Smi* ToSmi(LConstantOperand* op) const;
75   double ToDouble(LConstantOperand* op) const;
76   Operand ToOperand(LOperand* op);
77   MemOperand ToMemOperand(LOperand* op) const;
78   // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
79   MemOperand ToHighMemOperand(LOperand* op) const;
80 
81   bool IsInteger32(LConstantOperand* op) const;
82   bool IsSmi(LConstantOperand* op) const;
83   Handle<Object> ToHandle(LConstantOperand* op) const;
84 
85   // Try to generate code for the entire chunk, but it may fail if the
86   // chunk contains constructs we cannot handle. Returns true if the
87   // code generation attempt succeeded.
88   bool GenerateCode();
89 
90   // Finish the code by setting stack height, safepoint, and bailout
91   // information on it.
92   void FinishCode(Handle<Code> code);
93 
94   // Deferred code support.
95   void DoDeferredNumberTagD(LNumberTagD* instr);
96 
97   enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
98   void DoDeferredNumberTagIU(LInstruction* instr, LOperand* value,
99                              LOperand* temp1, LOperand* temp2,
100                              IntegerSignedness signedness);
101 
102   void DoDeferredTaggedToI(LTaggedToI* instr);
103   void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
104   void DoDeferredStackCheck(LStackCheck* instr);
105   void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
106   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
107   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
108   void DoDeferredAllocate(LAllocate* instr);
109   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
110   void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result,
111                                    Register object, Register index);
112 
113   // Parallel move support.
114   void DoParallelMove(LParallelMove* move);
115   void DoGap(LGap* instr);
116 
117   MemOperand PrepareKeyedOperand(Register key, Register base,
118                                  bool key_is_constant, bool key_is_tagged,
119                                  int constant_key, int element_size_shift,
120                                  int base_offset);
121 
122   // Emit frame translation commands for an environment.
123   void WriteTranslation(LEnvironment* environment, Translation* translation);
124 
125 // Declare methods that deal with the individual node types.
126 #define DECLARE_DO(type) void Do##type(L##type* node);
LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)127   LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
128 #undef DECLARE_DO
129 
130  private:
131   Scope* scope() const { return scope_; }
132 
scratch0()133   Register scratch0() { return kLithiumScratch; }
double_scratch0()134   DoubleRegister double_scratch0() { return kScratchDoubleReg; }
135 
136   LInstruction* GetNextInstruction();
137 
138   void EmitClassOfTest(Label* if_true, Label* if_false,
139                        Handle<String> class_name, Register input,
140                        Register temporary, Register temporary2);
141 
HasAllocatedStackSlots()142   bool HasAllocatedStackSlots() const {
143     return chunk()->HasAllocatedStackSlots();
144   }
GetStackSlotCount()145   int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
GetTotalFrameSlotCount()146   int GetTotalFrameSlotCount() const {
147     return chunk()->GetTotalFrameSlotCount();
148   }
149 
AddDeferredCode(LDeferredCode * code)150   void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
151 
152   void SaveCallerDoubles();
153   void RestoreCallerDoubles();
154 
155   // Code generation passes.  Returns true if code generation should
156   // continue.
157   void GenerateBodyInstructionPre(LInstruction* instr) override;
158   bool GeneratePrologue();
159   bool GenerateDeferredCode();
160   bool GenerateJumpTable();
161   bool GenerateSafepointTable();
162 
163   // Generates the custom OSR entrypoint and sets the osr_pc_offset.
164   void GenerateOsrPrologue();
165 
166   enum SafepointMode {
167     RECORD_SIMPLE_SAFEPOINT,
168     RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
169   };
170 
171   void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr);
172 
173   void CallCodeGeneric(Handle<Code> code, RelocInfo::Mode mode,
174                        LInstruction* instr, SafepointMode safepoint_mode);
175 
176   void CallRuntime(const Runtime::Function* function, int num_arguments,
177                    LInstruction* instr,
178                    SaveFPRegsMode save_doubles = kDontSaveFPRegs);
179 
CallRuntime(Runtime::FunctionId id,int num_arguments,LInstruction * instr)180   void CallRuntime(Runtime::FunctionId id, int num_arguments,
181                    LInstruction* instr) {
182     const Runtime::Function* function = Runtime::FunctionForId(id);
183     CallRuntime(function, num_arguments, instr);
184   }
185 
CallRuntime(Runtime::FunctionId id,LInstruction * instr)186   void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
187     const Runtime::Function* function = Runtime::FunctionForId(id);
188     CallRuntime(function, function->nargs, instr);
189   }
190 
191   void LoadContextFromDeferred(LOperand* context);
192   void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc,
193                                LInstruction* instr, LOperand* context);
194 
195   void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
196                           Register scratch2, Register scratch3);
197 
198   // Generate a direct call to a known function.  Expects the function
199   // to be in r4.
200   void CallKnownFunction(Handle<JSFunction> function,
201                          int formal_parameter_count, int arity,
202                          bool is_tail_call, LInstruction* instr);
203 
204   void RecordSafepointWithLazyDeopt(LInstruction* instr,
205                                     SafepointMode safepoint_mode);
206 
207   void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
208                                             Safepoint::DeoptMode mode);
209   void DeoptimizeIf(Condition condition, LInstruction* instr,
210                     DeoptimizeReason deopt_reason,
211                     Deoptimizer::BailoutType bailout_type, CRegister cr = cr7);
212   void DeoptimizeIf(Condition condition, LInstruction* instr,
213                     DeoptimizeReason deopt_reason, CRegister cr = cr7);
214 
215   void AddToTranslation(LEnvironment* environment, Translation* translation,
216                         LOperand* op, bool is_tagged, bool is_uint32,
217                         int* object_index_pointer,
218                         int* dematerialized_index_pointer);
219 
220   Register ToRegister(int index) const;
221   DoubleRegister ToDoubleRegister(int index) const;
222 
223   MemOperand BuildSeqStringOperand(Register string, LOperand* index,
224                                    String::Encoding encoding);
225 
226   void EmitMathAbs(LMathAbs* instr);
227 #if V8_TARGET_ARCH_PPC64
228   void EmitInteger32MathAbs(LMathAbs* instr);
229 #endif
230 
231   // Support for recording safepoint information.
232   void RecordSafepoint(LPointerMap* pointers, Safepoint::Kind kind,
233                        int arguments, Safepoint::DeoptMode mode);
234   void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
235   void RecordSafepoint(Safepoint::DeoptMode mode);
236   void RecordSafepointWithRegisters(LPointerMap* pointers, int arguments,
237                                     Safepoint::DeoptMode mode);
238 
239   static Condition TokenToCondition(Token::Value op);
240   void EmitGoto(int block);
241 
242   // EmitBranch expects to be the last instruction of a block.
243   template <class InstrType>
244   void EmitBranch(InstrType instr, Condition condition, CRegister cr = cr7);
245   template <class InstrType>
246   void EmitTrueBranch(InstrType instr, Condition condition, CRegister cr = cr7);
247   template <class InstrType>
248   void EmitFalseBranch(InstrType instr, Condition condition,
249                        CRegister cr = cr7);
250   void EmitNumberUntagD(LNumberUntagD* instr, Register input,
251                         DoubleRegister result, NumberUntagDMode mode);
252 
253   // Emits optimized code for typeof x == "y".  Modifies input register.
254   // Returns the condition on which a final split to
255   // true and false label should be made, to optimize fallthrough.
256   Condition EmitTypeofIs(Label* true_label, Label* false_label, Register input,
257                          Handle<String> type_name);
258 
259   // Emits optimized code for %_IsString(x).  Preserves input register.
260   // Returns the condition on which a final split to
261   // true and false label should be made, to optimize fallthrough.
262   Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
263                          SmiCheck check_needed);
264 
265   // Emits optimized code to deep-copy the contents of statically known
266   // object graphs (e.g. object literal boilerplate).
267   void EmitDeepCopy(Handle<JSObject> object, Register result, Register source,
268                     int* offset, AllocationSiteMode mode);
269 
270   void EnsureSpaceForLazyDeopt(int space_needed) override;
271   void DoLoadKeyedExternalArray(LLoadKeyed* instr);
272   void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
273   void DoLoadKeyedFixedArray(LLoadKeyed* instr);
274   void DoStoreKeyedExternalArray(LStoreKeyed* instr);
275   void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
276   void DoStoreKeyedFixedArray(LStoreKeyed* instr);
277 
278   template <class T>
279   void EmitVectorLoadICRegisters(T* instr);
280 
281   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
282   Scope* const scope_;
283   ZoneList<LDeferredCode*> deferred_;
284   bool frame_is_built_;
285 
286   // Builder that keeps track of safepoints in the code. The table
287   // itself is emitted at the end of the generated code.
288   SafepointTableBuilder safepoints_;
289 
290   // Compiler from a set of parallel moves to a sequential list of moves.
291   LGapResolver resolver_;
292 
293   Safepoint::Kind expected_safepoint_kind_;
294 
295   class PushSafepointRegistersScope final BASE_EMBEDDED {
296    public:
297     explicit PushSafepointRegistersScope(LCodeGen* codegen);
298 
299     ~PushSafepointRegistersScope();
300 
301    private:
302     LCodeGen* codegen_;
303   };
304 
305   friend class LDeferredCode;
306   friend class LEnvironment;
307   friend class SafepointGenerator;
308   DISALLOW_COPY_AND_ASSIGN(LCodeGen);
309 };
310 
311 
312 class LDeferredCode : public ZoneObject {
313  public:
LDeferredCode(LCodeGen * codegen)314   explicit LDeferredCode(LCodeGen* codegen)
315       : codegen_(codegen),
316         external_exit_(NULL),
317         instruction_index_(codegen->current_instruction_) {
318     codegen->AddDeferredCode(this);
319   }
320 
~LDeferredCode()321   virtual ~LDeferredCode() {}
322   virtual void Generate() = 0;
323   virtual LInstruction* instr() = 0;
324 
SetExit(Label * exit)325   void SetExit(Label* exit) { external_exit_ = exit; }
entry()326   Label* entry() { return &entry_; }
exit()327   Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
instruction_index()328   int instruction_index() const { return instruction_index_; }
329 
330  protected:
codegen()331   LCodeGen* codegen() const { return codegen_; }
masm()332   MacroAssembler* masm() const { return codegen_->masm(); }
333 
334  private:
335   LCodeGen* codegen_;
336   Label entry_;
337   Label exit_;
338   Label* external_exit_;
339   int instruction_index_;
340 };
341 }  // namespace internal
342 }  // namespace v8
343 
344 #endif  // V8_CRANKSHAFT_PPC_LITHIUM_CODEGEN_PPC_H_
345