// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_
#define V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/arm/lithium-arm.h"
#include "src/crankshaft/arm/lithium-gap-resolver-arm.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DwVfpRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
                                       SwVfpRegister flt_scratch,
                                       DwVfpRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);
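
  // A minimal sketch of how this codegen is typically driven (illustrative
  // only; the real driver is LChunk::Codegen() in the Lithium pipeline and
  // differs in detail):
  //
  //   LCodeGen generator(chunk, &assembler, info);
  //   if (generator.GenerateCode()) {
  //     Handle<Code> code = ...;  // assemble the buffer into a Code object
  //     generator.FinishCode(code);
  //   }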

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  LanguageMode language_mode() const { return info()->language_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return r9; }
  LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();
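
  // As a rough sketch (based on the matching lithium-codegen-arm.cc, which is
  // the authoritative source), GenerateCode() chains these passes, where
  // GenerateBody() is inherited from LCodeGenBase:
  //
  //   bool LCodeGen::GenerateCode() {
  //     return GeneratePrologue() && GenerateBody() &&
  //            GenerateDeferredCode() && GenerateJumpTable() &&
  //            GenerateSafepointTable();
  //   }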

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode);

  void CallCode(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallCodeGeneric(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      SafepointMode safepoint_mode,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function. Expects the function
  // to be in r1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DwVfpRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
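
  // For orientation, TokenToCondition maps comparison tokens to ARM condition
  // codes roughly as follows (a sketch; the authoritative mapping lives in
  // lithium-codegen-arm.cc):
  //
  //   Token::EQ / EQ_STRICT  -> eq
  //   Token::NE / NE_STRICT  -> ne
  //   Token::LT              -> is_unsigned ? lo : lt
  //   Token::GT              -> is_unsigned ? hi : gt
  //   Token::LTE             -> is_unsigned ? ls : le
  //   Token::GTE             -> is_unsigned ? hs : ge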

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DwVfpRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false labels should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false labels should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      codegen_->masm_->PushSafepointRegisters();
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_