// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_S390_LITHIUM_CODEGEN_S390_H_
#define V8_CRANKSHAFT_S390_LITHIUM_CODEGEN_S390_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/s390/lithium-gap-resolver-s390.h"
#include "src/crankshaft/s390/lithium-s390.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen : public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LConstantOperand must be an Integer32 or Smi
  void EmitLoadIntegerConstant(LConstantOperand* const_op, Register dst);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  intptr_t ToRepresentation(LConstantOperand* op,
                            const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
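
  // Illustrative sketch (not part of this interface): the handlers in
  // lithium-codegen-s390.cc typically use the conversion helpers above to map
  // Lithium operands onto machine operands before emitting code. The handler
  // and macro-assembler call below are hypothetical and shown for shape only:
  //
  //   void LCodeGen::DoSomeBinaryOp(LSomeBinaryOp* instr) {
  //     Register left = ToRegister(instr->left());
  //     Operand right = ToOperand(instr->right());
  //     Register result = ToRegister(instr->result());
  //     __ AddP(result, left, right);  // exact macro-assembler call may differ
  //   }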

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr, LOperand* value,
                             LOperand* temp1, LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result,
                                   Register object, Register index);
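
  // Illustrative sketch (the real definitions live in lithium-codegen-s390.cc):
  // each DoDeferred* helper above is reached through a small LDeferredCode
  // subclass whose Generate() forwards to the helper, roughly:
  //
  //   class DeferredNumberTagD final : public LDeferredCode {
  //    public:
  //     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
  //         : LDeferredCode(codegen), instr_(instr) {}
  //     void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
  //     LInstruction* instr() override { return instr_; }
  //
  //    private:
  //     LNumberTagD* instr_;
  //   };
  //
  // The main code path branches to deferred->entry() on the slow case and
  // binds deferred->exit() where execution resumes.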

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key, Register base,
                                 bool key_is_constant, bool key_is_tagged,
                                 int constant_key, int element_size_shift,
                                 int base_offset,
                                 bool keyMaybeNegative = true);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratch; }
  DoubleRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true, Label* if_false,
                       Handle<String> class_name, Register input,
                       Register temporary, Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code, RelocInfo::Mode mode,
                       LInstruction* instr, SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function, int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id, int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc,
                               LInstruction* instr, LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in r4.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type, CRegister cr = cr7);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason, CRegister cr = cr7);
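
  // Usage sketch (assumed shape; the actual handlers are in
  // lithium-codegen-s390.cc): a handler sets the condition code and then
  // conditionally bails out of optimized code, e.g.
  //
  //   __ CmpP(scratch0(), Operand::Zero());
  //   DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero);
  //
  // Deoptimizer::kDivisionByZero stands in for whichever DeoptReason the
  // handler actually reports.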

  void AddToTranslation(LEnvironment* environment, Translation* translation,
                        LOperand* op, bool is_tagged, bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string, LOperand* index,
                                   String::Encoding encoding);

  void EmitMathAbs(LMathAbs* instr);
#if V8_TARGET_ARCH_S390X
  void EmitInteger32MathAbs(LMathAbs* instr);
#endif

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers, Safepoint::Kind kind,
                       int arguments, Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers, int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

  static Condition TokenToCondition(Token::Value op);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template <class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
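
  // Sketch of the expected call pattern (illustrative only): a Do*AndBranch
  // handler sets the condition code and then ends the block with a single
  // EmitBranch, e.g.
  //
  //   __ CmpP(ToRegister(instr->value()), Operand::Zero());
  //   EmitBranch(instr, eq);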

  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label, Label* false_label, Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object, Register result, Register source,
                    int* offset, AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};

class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_S390_LITHIUM_CODEGEN_S390_H_