// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_
#define V8_ARM_LITHIUM_CODEGEN_ARM_H_

#include "src/arm/lithium-arm.h"

#include "src/arm/lithium-gap-resolver-arm.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DwVfpRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
                                       SwVfpRegister flt_scratch,
                                       DwVfpRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

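  // A minimal sketch of how these two entry points are typically driven,
  // assuming the usual chunk-codegen flow (the real call site lives in the
  // platform-independent lithium code and may differ):
  //
  //   LCodeGen generator(chunk, &assembler, info);
  //   if (generator.GenerateCode()) {
  //     Handle<Code> code = ...;  // assembled from the MacroAssembler
  //     generator.FinishCode(code);
  //   }
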
  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
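
  // For illustration only: each entry in LITHIUM_CONCRETE_INSTRUCTION_LIST
  // expands to a declaration of this shape (shown for a hypothetical
  // instruction "Foo"):
  //
  //   void DoFoo(LFoo* node);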

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return r9; }
  LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode);

  void CallCode(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallCodeGeneric(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      SafepointMode safepoint_mode,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  enum R1State {
    R1_UNINITIALIZED,
    R1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in r1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         R1State r1_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition condition, LEnvironment* environment);
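
  // Usage sketch only (illustrative names; the concrete call sites are in
  // lithium-codegen-arm.cc): DeoptimizeIf is typically issued right after a
  // flag-setting instruction, e.g.
  //
  //   __ cmp(value_reg, Operand(Smi::FromInt(0)));
  //   DeoptimizeIf(eq, instr->environment());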

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DwVfpRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
  void EmitNumberUntagD(Register input,
                        DwVfpRegister result,
                        bool allow_undefined_as_nan,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env,
                        NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PopSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
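
  // Typical use from deferred code, as a sketch (assuming a call made while
  // registers are live; the concrete sites are in lithium-codegen-arm.cc):
  //
  //   {
  //     PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  //     // ... call out to the runtime or a stub ...
  //   }  // Registers are popped and expected_safepoint_kind_ reset here.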

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
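
// Deferred code is usually declared as a small local subclass inside the
// instruction handler that needs it, roughly along these lines (a sketch
// based on the DoDeferredNumberTagD hook declared above; the concrete
// classes live in lithium-codegen-arm.cc):
//
//   class DeferredNumberTagD V8_FINAL : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() V8_OVERRIDE {
//       codegen()->DoDeferredNumberTagD(instr_);
//     }
//     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };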

} }  // namespace v8::internal

#endif  // V8_ARM_LITHIUM_CODEGEN_ARM_H_