// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "src/deoptimizer.h"
#include "src/mips/lithium-gap-resolver-mips.h"
#include "src/mips/lithium-mips.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

23 class LCodeGen: public LCodeGenBase {
24  public:
LCodeGen(LChunk * chunk,MacroAssembler * assembler,CompilationInfo * info)25   LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
26       : LCodeGenBase(chunk, assembler, info),
27         deoptimizations_(4, info->zone()),
28         deopt_jump_table_(4, info->zone()),
29         deoptimization_literals_(8, info->zone()),
30         inlined_function_count_(0),
31         scope_(info->scope()),
32         translations_(info->zone()),
33         deferred_(8, info->zone()),
34         osr_pc_offset_(-1),
35         frame_is_built_(false),
36         safepoints_(info->zone()),
37         resolver_(this),
38         expected_safepoint_kind_(Safepoint::kSimple) {
39     PopulateDeoptimizationLiteralsWithInlinedFunctions();
40   }
41 
42 
LookupDestination(int block_id)43   int LookupDestination(int block_id) const {
44     return chunk()->LookupDestination(block_id);
45   }
46 
IsNextEmittedBlock(int block_id)47   bool IsNextEmittedBlock(int block_id) const {
48     return LookupDestination(block_id) == GetNextEmittedBlock();
49   }
50 
NeedsEagerFrame()51   bool NeedsEagerFrame() const {
52     return GetStackSlotCount() > 0 ||
53         info()->is_non_deferred_calling() ||
54         !info()->IsStub() ||
55         info()->requires_frame();
56   }
NeedsDeferredFrame()57   bool NeedsDeferredFrame() const {
58     return !NeedsEagerFrame() && info()->is_deferred_calling();
59   }
60 
GetRAState()61   RAStatus GetRAState() const {
62     return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
63   }
64 
65   // Support for converting LOperands to assembler types.
66   // LOperand must be a register.
67   Register ToRegister(LOperand* op) const;
68 
69   // LOperand is loaded into scratch, unless already a register.
70   Register EmitLoadRegister(LOperand* op, Register scratch);
71 
72   // LOperand must be a double register.
73   DoubleRegister ToDoubleRegister(LOperand* op) const;
74 
75   // LOperand is loaded into dbl_scratch, unless already a double register.
76   DoubleRegister EmitLoadDoubleRegister(LOperand* op,
77                                         FloatRegister flt_scratch,
78                                         DoubleRegister dbl_scratch);
79   int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
80   int32_t ToInteger32(LConstantOperand* op) const;
81   Smi* ToSmi(LConstantOperand* op) const;
82   double ToDouble(LConstantOperand* op) const;
83   Operand ToOperand(LOperand* op);
84   MemOperand ToMemOperand(LOperand* op) const;
85   // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
86   MemOperand ToHighMemOperand(LOperand* op) const;
87 
88   bool IsInteger32(LConstantOperand* op) const;
89   bool IsSmi(LConstantOperand* op) const;
90   Handle<Object> ToHandle(LConstantOperand* op) const;
91 
92   // Try to generate code for the entire chunk, but it may fail if the
93   // chunk contains constructs we cannot handle. Returns true if the
94   // code generation attempt succeeded.
95   bool GenerateCode();
96 
97   // Finish the code by setting stack height, safepoint, and bailout
98   // information on it.
99   void FinishCode(Handle<Code> code);
100 
101   void DoDeferredNumberTagD(LNumberTagD* instr);
102 
103   enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
104   void DoDeferredNumberTagIU(LInstruction* instr,
105                              LOperand* value,
106                              LOperand* temp1,
107                              LOperand* temp2,
108                              IntegerSignedness signedness);
109 
110   void DoDeferredTaggedToI(LTaggedToI* instr);
111   void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
112   void DoDeferredStackCheck(LStackCheck* instr);
113   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
114   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
115   void DoDeferredAllocate(LAllocate* instr);
116   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
117                                        Label* map_check);
118 
119   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
120   void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
121                                    Register result,
122                                    Register object,
123                                    Register index);
124 
125   // Parallel move support.
126   void DoParallelMove(LParallelMove* move);
127   void DoGap(LGap* instr);
128 
129   MemOperand PrepareKeyedOperand(Register key,
130                                  Register base,
131                                  bool key_is_constant,
132                                  int constant_key,
133                                  int element_size,
134                                  int shift_size,
135                                  int base_offset);
136 
137   // Emit frame translation commands for an environment.
138   void WriteTranslation(LEnvironment* environment, Translation* translation);
139 
140   // Declare methods that deal with the individual node types.
141 #define DECLARE_DO(type) void Do##type(L##type* node);
LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)142   LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
143 #undef DECLARE_DO
144 
145  private:
146   StrictMode strict_mode() const { return info()->strict_mode(); }
147 
scope()148   Scope* scope() const { return scope_; }
149 
scratch0()150   Register scratch0() { return kLithiumScratchReg; }
scratch1()151   Register scratch1() { return kLithiumScratchReg2; }
double_scratch0()152   DoubleRegister double_scratch0() { return kLithiumScratchDouble; }
153 
154   LInstruction* GetNextInstruction();
155 
156   void EmitClassOfTest(Label* if_true,
157                        Label* if_false,
158                        Handle<String> class_name,
159                        Register input,
160                        Register temporary,
161                        Register temporary2);
162 
GetStackSlotCount()163   int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
164 
AddDeferredCode(LDeferredCode * code)165   void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
166 
167   void SaveCallerDoubles();
168   void RestoreCallerDoubles();
169 
170   // Code generation passes.  Returns true if code generation should
171   // continue.
172   void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
173   bool GeneratePrologue();
174   bool GenerateDeferredCode();
175   bool GenerateDeoptJumpTable();
176   bool GenerateSafepointTable();
177 
178   // Generates the custom OSR entrypoint and sets the osr_pc_offset.
179   void GenerateOsrPrologue();
180 
181   enum SafepointMode {
182     RECORD_SIMPLE_SAFEPOINT,
183     RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
184   };
185 
186   void CallCode(Handle<Code> code,
187                 RelocInfo::Mode mode,
188                 LInstruction* instr);
189 
190   void CallCodeGeneric(Handle<Code> code,
191                        RelocInfo::Mode mode,
192                        LInstruction* instr,
193                        SafepointMode safepoint_mode);
194 
195   void CallRuntime(const Runtime::Function* function,
196                    int num_arguments,
197                    LInstruction* instr,
198                    SaveFPRegsMode save_doubles = kDontSaveFPRegs);
199 
CallRuntime(Runtime::FunctionId id,int num_arguments,LInstruction * instr)200   void CallRuntime(Runtime::FunctionId id,
201                    int num_arguments,
202                    LInstruction* instr) {
203     const Runtime::Function* function = Runtime::FunctionForId(id);
204     CallRuntime(function, num_arguments, instr);
205   }
206 
207   void LoadContextFromDeferred(LOperand* context);
208   void CallRuntimeFromDeferred(Runtime::FunctionId id,
209                                int argc,
210                                LInstruction* instr,
211                                LOperand* context);
212 
213   enum A1State {
214     A1_UNINITIALIZED,
215     A1_CONTAINS_TARGET
216   };
217 
218   // Generate a direct call to a known function.  Expects the function
219   // to be in a1.
220   void CallKnownFunction(Handle<JSFunction> function,
221                          int formal_parameter_count,
222                          int arity,
223                          LInstruction* instr,
224                          A1State a1_state);
225 
226   void RecordSafepointWithLazyDeopt(LInstruction* instr,
227                                     SafepointMode safepoint_mode);
228 
229   void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
230                                             Safepoint::DeoptMode mode);
231   void DeoptimizeIf(Condition condition,
232                     LEnvironment* environment,
233                     Deoptimizer::BailoutType bailout_type,
234                     Register src1 = zero_reg,
235                     const Operand& src2 = Operand(zero_reg));
236   void DeoptimizeIf(Condition condition,
237                     LEnvironment* environment,
238                     Register src1 = zero_reg,
239                     const Operand& src2 = Operand(zero_reg));
240 
241   void AddToTranslation(LEnvironment* environment,
242                         Translation* translation,
243                         LOperand* op,
244                         bool is_tagged,
245                         bool is_uint32,
246                         int* object_index_pointer,
247                         int* dematerialized_index_pointer);
248   void PopulateDeoptimizationData(Handle<Code> code);
249   int DefineDeoptimizationLiteral(Handle<Object> literal);
250 
251   void PopulateDeoptimizationLiteralsWithInlinedFunctions();
252 
253   Register ToRegister(int index) const;
254   DoubleRegister ToDoubleRegister(int index) const;
255 
256   MemOperand BuildSeqStringOperand(Register string,
257                                    LOperand* index,
258                                    String::Encoding encoding);
259 
260   void EmitIntegerMathAbs(LMathAbs* instr);
261 
262   // Support for recording safepoint and position information.
263   void RecordSafepoint(LPointerMap* pointers,
264                        Safepoint::Kind kind,
265                        int arguments,
266                        Safepoint::DeoptMode mode);
267   void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
268   void RecordSafepoint(Safepoint::DeoptMode mode);
269   void RecordSafepointWithRegisters(LPointerMap* pointers,
270                                     int arguments,
271                                     Safepoint::DeoptMode mode);
272   void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
273                                               int arguments,
274                                               Safepoint::DeoptMode mode);
275 
276   void RecordAndWritePosition(int position) V8_OVERRIDE;
277 
278   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
279   void EmitGoto(int block);
280 
281   // EmitBranch expects to be the last instruction of a block.
282   template<class InstrType>
283   void EmitBranch(InstrType instr,
284                   Condition condition,
285                   Register src1,
286                   const Operand& src2);
287   template<class InstrType>
288   void EmitBranchF(InstrType instr,
289                    Condition condition,
290                    FPURegister src1,
291                    FPURegister src2);
292   template<class InstrType>
293   void EmitFalseBranch(InstrType instr,
294                        Condition condition,
295                        Register src1,
296                        const Operand& src2);
297   template<class InstrType>
298   void EmitFalseBranchF(InstrType instr,
299                         Condition condition,
300                         FPURegister src1,
301                         FPURegister src2);
302   void EmitCmpI(LOperand* left, LOperand* right);
303   void EmitNumberUntagD(Register input,
304                         DoubleRegister result,
305                         bool allow_undefined_as_nan,
306                         bool deoptimize_on_minus_zero,
307                         LEnvironment* env,
308                         NumberUntagDMode mode);
309 
310   // Emits optimized code for typeof x == "y".  Modifies input register.
311   // Returns the condition on which a final split to
312   // true and false label should be made, to optimize fallthrough.
313   // Returns two registers in cmp1 and cmp2 that can be used in the
314   // Branch instruction after EmitTypeofIs.
315   Condition EmitTypeofIs(Label* true_label,
316                          Label* false_label,
317                          Register input,
318                          Handle<String> type_name,
319                          Register* cmp1,
320                          Operand* cmp2);
321 
322   // Emits optimized code for %_IsObject(x).  Preserves input register.
323   // Returns the condition on which a final split to
324   // true and false label should be made, to optimize fallthrough.
325   Condition EmitIsObject(Register input,
326                          Register temp1,
327                          Register temp2,
328                          Label* is_not_object,
329                          Label* is_object);
330 
331   // Emits optimized code for %_IsString(x).  Preserves input register.
332   // Returns the condition on which a final split to
333   // true and false label should be made, to optimize fallthrough.
334   Condition EmitIsString(Register input,
335                          Register temp1,
336                          Label* is_not_string,
337                          SmiCheck check_needed);
338 
339   // Emits optimized code for %_IsConstructCall().
340   // Caller should branch on equal condition.
341   void EmitIsConstructCall(Register temp1, Register temp2);
342 
343   // Emits optimized code to deep-copy the contents of statically known
344   // object graphs (e.g. object literal boilerplate).
345   void EmitDeepCopy(Handle<JSObject> object,
346                     Register result,
347                     Register source,
348                     int* offset,
349                     AllocationSiteMode mode);
350   // Emit optimized code for integer division.
351   // Inputs are signed.
352   // All registers are clobbered.
353   // If 'remainder' is no_reg, it is not computed.
354   void EmitSignedIntegerDivisionByConstant(Register result,
355                                            Register dividend,
356                                            int32_t divisor,
357                                            Register remainder,
358                                            Register scratch,
359                                            LEnvironment* environment);
360 
361 
362   void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
363   void DoLoadKeyedExternalArray(LLoadKeyed* instr);
364   void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
365   void DoLoadKeyedFixedArray(LLoadKeyed* instr);
366   void DoStoreKeyedExternalArray(LStoreKeyed* instr);
367   void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
368   void DoStoreKeyedFixedArray(LStoreKeyed* instr);
369 
370   ZoneList<LEnvironment*> deoptimizations_;
371   ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
372   ZoneList<Handle<Object> > deoptimization_literals_;
373   int inlined_function_count_;
374   Scope* const scope_;
375   TranslationBuffer translations_;
376   ZoneList<LDeferredCode*> deferred_;
377   int osr_pc_offset_;
378   bool frame_is_built_;
379 
380   // Builder that keeps track of safepoints in the code. The table
381   // itself is emitted at the end of the generated code.
382   SafepointTableBuilder safepoints_;
383 
384   // Compiler from a set of parallel moves to a sequential list of moves.
385   LGapResolver resolver_;
386 
387   Safepoint::Kind expected_safepoint_kind_;
388 
389   class PushSafepointRegistersScope V8_FINAL  BASE_EMBEDDED {
390    public:
PushSafepointRegistersScope(LCodeGen * codegen,Safepoint::Kind kind)391     PushSafepointRegistersScope(LCodeGen* codegen,
392                                 Safepoint::Kind kind)
393         : codegen_(codegen) {
394       ASSERT(codegen_->info()->is_calling());
395       ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
396       codegen_->expected_safepoint_kind_ = kind;
397 
398       switch (codegen_->expected_safepoint_kind_) {
399         case Safepoint::kWithRegisters: {
400           StoreRegistersStateStub stub1(codegen_->masm_->isolate(),
401                                         kDontSaveFPRegs);
402           codegen_->masm_->push(ra);
403           codegen_->masm_->CallStub(&stub1);
404           break;
405         }
406         case Safepoint::kWithRegistersAndDoubles: {
407           StoreRegistersStateStub stub2(codegen_->masm_->isolate(),
408                                         kSaveFPRegs);
409           codegen_->masm_->push(ra);
410           codegen_->masm_->CallStub(&stub2);
411           break;
412         }
413         default:
414           UNREACHABLE();
415       }
416     }
417 
~PushSafepointRegistersScope()418     ~PushSafepointRegistersScope() {
419       Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
420       ASSERT((kind & Safepoint::kWithRegisters) != 0);
421       switch (kind) {
422         case Safepoint::kWithRegisters: {
423           RestoreRegistersStateStub stub1(codegen_->masm_->isolate(),
424                                           kDontSaveFPRegs);
425           codegen_->masm_->push(ra);
426           codegen_->masm_->CallStub(&stub1);
427           break;
428         }
429         case Safepoint::kWithRegistersAndDoubles: {
430           RestoreRegistersStateStub stub2(codegen_->masm_->isolate(),
431                                           kSaveFPRegs);
432           codegen_->masm_->push(ra);
433           codegen_->masm_->CallStub(&stub2);
434           break;
435         }
436         default:
437           UNREACHABLE();
438       }
439       codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
440     }
441 
442    private:
443     LCodeGen* codegen_;
444   };
445 
446   friend class LDeferredCode;
447   friend class LEnvironment;
448   friend class SafepointGenerator;
449   DISALLOW_COPY_AND_ASSIGN(LCodeGen);
450 };


453 class LDeferredCode : public ZoneObject {
454  public:
LDeferredCode(LCodeGen * codegen)455   explicit LDeferredCode(LCodeGen* codegen)
456       : codegen_(codegen),
457         external_exit_(NULL),
458         instruction_index_(codegen->current_instruction_) {
459     codegen->AddDeferredCode(this);
460   }
461 
~LDeferredCode()462   virtual ~LDeferredCode() {}
463   virtual void Generate() = 0;
464   virtual LInstruction* instr() = 0;
465 
SetExit(Label * exit)466   void SetExit(Label* exit) { external_exit_ = exit; }
entry()467   Label* entry() { return &entry_; }
exit()468   Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
instruction_index()469   int instruction_index() const { return instruction_index_; }
470 
471  protected:
codegen()472   LCodeGen* codegen() const { return codegen_; }
masm()473   MacroAssembler* masm() const { return codegen_->masm(); }
474 
475  private:
476   LCodeGen* codegen_;
477   Label entry_;
478   Label exit_;
479   Label* external_exit_;
480   int instruction_index_;
481 };

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_