// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_
#define V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/arm/lithium-arm.h"
#include "src/crankshaft/arm/lithium-gap-resolver-arm.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DwVfpRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
                                       SwVfpRegister flt_scratch,
                                       DwVfpRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle.  Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);
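
  // Illustrative sketch only (not part of the original header): the chunk's
  // code generation driver is assumed to use the two entry points above
  // roughly like this, where `assembler` and `info` are the MacroAssembler
  // and CompilationInfo the LCodeGen was constructed with:
  //
  //   LCodeGen generator(chunk, &assembler, info);
  //   if (generator.GenerateCode()) {
  //     Handle<Code> code = ...;  // assemble the buffer into a Code object
  //     generator.FinishCode(code);
  //   }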

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  Scope* scope() const { return scope_; }

  Register scratch0() { return r9; }
  LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();
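
  // Illustrative sketch only (an assumption about the .cc file, not a
  // declaration here): GenerateCode() is expected to chain the passes above
  // and stop at the first failure, with GenerateBody() coming from
  // LCodeGenBase:
  //
  //   return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
  //          GenerateJumpTable() && GenerateSafepointTable();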

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode);

  void CallCode(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallCodeGeneric(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      SafepointMode safepoint_mode,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function.  Expects the function
  // to be in r1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DwVfpRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
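
  // Illustrative sketch only: the DeoptimizeIf() overloads above are the
  // bailout points used by the Do* handlers.  A typical smi check (the
  // condition and deopt reason shown are assumptions, not declarations from
  // this header) might look like:
  //
  //   __ SmiTst(ToRegister(instr->value()));
  //   DeoptimizeIf(eq, instr, Deoptimizer::kSmi);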

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DwVfpRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      codegen_->masm_->PushSafepointRegisters();
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_ARM_LITHIUM_CODEGEN_ARM_H_
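
// Illustrative sketch only (such subclasses live in the corresponding .cc
// file, not in this header): deferred work is emitted by subclassing
// LDeferredCode and forwarding Generate() to one of the DoDeferred* helpers
// declared above, for example:
//
//   class DeferredNumberTagD final : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
//     LInstruction* instr() override { return instr_; }
//
//    private:
//     LNumberTagD* instr_;
//   };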