// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X87_LITHIUM_CODEGEN_X87_H_
#define V8_X87_LITHIUM_CODEGEN_X87_H_

#include "src/x87/lithium-x87.h"

#include "src/checks.h"
#include "src/deoptimizer.h"
#include "src/x87/lithium-gap-resolver-x87.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        dynamic_frame_alignment_(false),
        support_aligned_spilled_doubles_(false),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        x87_stack_(assembler),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  X87Register ToX87Register(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  // Support for non-SSE2 (x87) floating point stack handling.
  // These functions maintain the mapping of physical stack registers to our
  // virtual registers between instructions.
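  //
  // A minimal usage sketch, assuming a handler that defines virtual register
  // 'result' from 'src_operand' ('left', 'result', and 'src_operand' are
  // made-up names; the real call sequences are in lithium-codegen-x87.cc):
  //
  //   X87Mov(left, src_operand);   // load src into virtual register 'left'
  //   X87PrepareToWrite(result);   // 'result' is about to be defined
  //   /* ... emit the actual x87 arithmetic ... */
  //   X87CommitWrite(result);      // map 'result' to its physical slot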
  enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };

  void X87Mov(X87Register reg, Operand src,
      X87OperandType operand = kX87DoubleOperand);
  void X87Mov(Operand src, X87Register reg,
      X87OperandType operand = kX87DoubleOperand);

  void X87PrepareBinaryOp(
      X87Register left, X87Register right, X87Register result);

  void X87LoadForUsage(X87Register reg);
  void X87LoadForUsage(X87Register reg1, X87Register reg2);
  void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); }
  void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); }

  void X87Fxch(X87Register reg, int other_slot = 0) {
    x87_stack_.Fxch(reg, other_slot);
  }
  void X87Free(X87Register reg) {
    x87_stack_.Free(reg);
  }

  bool X87StackEmpty() {
    return x87_stack_.depth() == 0;
  }

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
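  // Deferred code registers itself here via the LDeferredCode constructor at
  // the bottom of this file. A sketch of the usual subclass shape, with
  // made-up names (DeferredFoo/LFoo/DoDeferredFoo are hypothetical; real
  // examples live in lithium-codegen-x87.cc):
  //
  //   class DeferredFoo V8_FINAL : public LDeferredCode {
  //    public:
  //     DeferredFoo(LCodeGen* codegen, LFoo* instr,
  //                 const LCodeGen::X87Stack& x87_stack)
  //         : LDeferredCode(codegen, x87_stack), instr_(instr) { }
  //     virtual void Generate() V8_OVERRIDE {
  //       codegen()->DoDeferredFoo(instr_);
  //     }
  //     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
  //    private:
  //     LFoo* instr_;
  //   };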
  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr);

  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  enum EDIState {
    EDI_UNINITIALIZED,
    EDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function. Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         EDIState edi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  X87Register ToX87Register(int index) const;
  int32_t ToRepresentation(LConstantOperand* op,
                           const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
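  // The common bailout pattern in the Do* handlers is to emit a test and
  // then DeoptimizeIf() on the failing condition with the instruction's
  // environment. A hedged sketch (the compare and register choice are made
  // up; see lithium-codegen-x87.cc for the real uses):
  //
  //   __ cmp(eax, Immediate(0));
  //   DeoptimizeIf(equal, instr->environment());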
  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagDNoSSE2(
      Register input,
      Register temp,
      X87Register res_reg,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  void X87Fld(Operand src, X87OperandType opts);
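  // The x87 stack must be brought into a canonical (flushed) state before
  // jumping to a deoptimization bailout, since the deoptimizer knows nothing
  // about our virtual-to-physical register mapping (assumed rationale; the
  // implementations of the two helpers below are in lithium-codegen-x87.cc).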
  void EmitFlushX87ForDeopt();
  void FlushX87StackIfNecessary(LInstruction* instr) {
    x87_stack_.FlushIfNecessary(instr, this);
  }
  friend class LGapResolver;

#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in the range
  // esp + offset - page_size .. esp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  bool dynamic_frame_alignment_;
  bool support_aligned_spilled_doubles_;
  int osr_pc_offset_;
  bool frame_is_built_;

  class X87Stack {
   public:
    explicit X87Stack(MacroAssembler* masm)
        : stack_depth_(0), is_mutable_(true), masm_(masm) { }
    explicit X87Stack(const X87Stack& other)
        : stack_depth_(other.stack_depth_),
          is_mutable_(false),
          masm_(other.masm_) {
      for (int i = 0; i < stack_depth_; i++) {
        stack_[i] = other.stack_[i];
      }
    }
    bool operator==(const X87Stack& other) const {
      if (stack_depth_ != other.stack_depth_) return false;
      for (int i = 0; i < stack_depth_; i++) {
        if (!stack_[i].is(other.stack_[i])) return false;
      }
      return true;
    }
    bool Contains(X87Register reg);
    void Fxch(X87Register reg, int other_slot = 0);
    void Free(X87Register reg);
    void PrepareToWrite(X87Register reg);
    void CommitWrite(X87Register reg);
    void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
    void LeavingBlock(int current_block_id, LGoto* goto_instr);
    int depth() const { return stack_depth_; }
    void pop() {
      ASSERT(is_mutable_);
      stack_depth_--;
    }
    void push(X87Register reg) {
      ASSERT(is_mutable_);
      ASSERT(stack_depth_ < X87Register::kMaxNumAllocatableRegisters);
      stack_[stack_depth_] = reg;
      stack_depth_++;
    }

    MacroAssembler* masm() const { return masm_; }
    Isolate* isolate() const { return masm_->isolate(); }

   private:
    int ArrayIndex(X87Register reg);
    int st2idx(int pos);

    X87Register stack_[X87Register::kMaxNumAllocatableRegisters];
    int stack_depth_;
    bool is_mutable_;
    MacroAssembler* masm_;
  };
  X87Stack x87_stack_;
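  // Note that copies of an X87Stack (such as the snapshot stored by
  // LDeferredCode below) are immutable: the copy constructor clears
  // is_mutable_, so push() and pop() will assert on them.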
  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      ASSERT(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen,
                         const LCodeGen::X87Stack& x87_stack)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_),
        x87_stack_(x87_stack) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }
  const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
  LCodeGen::X87Stack x87_stack_;
};

} }  // namespace v8::internal

#endif  // V8_X87_LITHIUM_CODEGEN_X87_H_