// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_INSTRUCTION_SELECTOR_IMPL_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_SELECTOR_IMPL_H_

#include "src/codegen/macro-assembler.h"
#include "src/compiler/backend/instruction-selector.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/schedule.h"
#include "src/objects/tagged-index.h"

namespace v8 {
namespace internal {
namespace compiler {

struct CaseInfo {
  int32_t value;  // The case value.
  int32_t order;  // The order for lowering to comparisons (less means earlier).
  BasicBlock* branch;  // The basic block corresponding to the case value.
};

inline bool operator<(const CaseInfo& l, const CaseInfo& r) {
  return l.order < r.order;
}

// Helper struct containing data about a table or lookup switch.
class SwitchInfo {
 public:
  SwitchInfo(ZoneVector<CaseInfo> const& cases, int32_t min_value,
             int32_t max_value, BasicBlock* default_branch)
      : cases_(cases),
        min_value_(min_value),
        max_value_(max_value),
        default_branch_(default_branch) {
    if (cases.size() != 0) {
      DCHECK_LE(min_value, max_value);
      // Note that {value_range} can be 0 if {min_value} is -2^31 and
      // {max_value} is 2^31-1, so don't assume that it's non-zero below.
      value_range_ =
          1u + bit_cast<uint32_t>(max_value) - bit_cast<uint32_t>(min_value);
    } else {
      value_range_ = 0;
    }
  }

  std::vector<CaseInfo> CasesSortedByValue() const {
    std::vector<CaseInfo> result(cases_.begin(), cases_.end());
    std::stable_sort(result.begin(), result.end(),
                     [](CaseInfo a, CaseInfo b) { return a.value < b.value; });
    return result;
  }
  const ZoneVector<CaseInfo>& CasesUnsorted() const { return cases_; }
  int32_t min_value() const { return min_value_; }
  int32_t max_value() const { return max_value_; }
  size_t value_range() const { return value_range_; }
  size_t case_count() const { return cases_.size(); }
  BasicBlock* default_branch() const { return default_branch_; }

 private:
  const ZoneVector<CaseInfo>& cases_;
  int32_t min_value_;   // minimum value of {cases_}
  int32_t max_value_;   // maximum value of {cases_}
  size_t value_range_;  // |max_value - min_value| + 1
  BasicBlock* default_branch_;
};

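// For example, for cases {1, 5, 7} the constructor records min_value 1,
// max_value 7 and value_range 1 + 7 - 1 = 7, i.e. the number of entries a
// jump table spanning the whole range would need. The unsigned arithmetic
// matters: with min_value == INT32_MIN and max_value == INT32_MAX the
// expression 1u + 0x7fffffff - 0x80000000 wraps around to 0, which is why
// callers must not assume that value_range() is non-zero.
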
// A helper class for the instruction selector that simplifies construction of
// Operands. This class implements a base for architecture-specific helpers.
class OperandGenerator {
 public:
  explicit OperandGenerator(InstructionSelector* selector)
      : selector_(selector) {}

  InstructionOperand NoOutput() {
    return InstructionOperand();  // Generates an invalid operand.
  }

  InstructionOperand DefineAsRegister(Node* node) {
    return Define(node,
                  UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                     GetVReg(node)));
  }

  InstructionOperand DefineSameAsInput(Node* node, int input_index) {
    return Define(node, UnallocatedOperand(GetVReg(node), input_index));
  }

  InstructionOperand DefineSameAsFirst(Node* node) {
    return DefineSameAsInput(node, 0);
  }

  InstructionOperand DefineAsFixed(Node* node, Register reg) {
    return Define(node, UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                           reg.code(), GetVReg(node)));
  }

  template <typename FPRegType>
  InstructionOperand DefineAsFixed(Node* node, FPRegType reg) {
    return Define(node,
                  UnallocatedOperand(UnallocatedOperand::FIXED_FP_REGISTER,
                                     reg.code(), GetVReg(node)));
  }

  InstructionOperand DefineAsConstant(Node* node) {
    selector()->MarkAsDefined(node);
    int virtual_register = GetVReg(node);
    sequence()->AddConstant(virtual_register, ToConstant(node));
    return ConstantOperand(virtual_register);
  }

  InstructionOperand DefineAsLocation(Node* node, LinkageLocation location) {
    return Define(node, ToUnallocatedOperand(location, GetVReg(node)));
  }

  InstructionOperand DefineAsDualLocation(Node* node,
                                          LinkageLocation primary_location,
                                          LinkageLocation secondary_location) {
    return Define(node,
                  ToDualLocationUnallocatedOperand(
                      primary_location, secondary_location, GetVReg(node)));
  }

  InstructionOperand Use(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::NONE,
                                        UnallocatedOperand::USED_AT_START,
                                        GetVReg(node)));
  }

  InstructionOperand UseAnyAtEnd(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::REGISTER_OR_SLOT,
                                        UnallocatedOperand::USED_AT_END,
                                        GetVReg(node)));
  }

  InstructionOperand UseAny(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::REGISTER_OR_SLOT,
                                        UnallocatedOperand::USED_AT_START,
                                        GetVReg(node)));
  }

  InstructionOperand UseRegisterOrSlotOrConstant(Node* node) {
    return Use(node, UnallocatedOperand(
                         UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT,
                         UnallocatedOperand::USED_AT_START, GetVReg(node)));
  }

  InstructionOperand UseUniqueRegisterOrSlotOrConstant(Node* node) {
    return Use(node, UnallocatedOperand(
                         UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT,
                         GetVReg(node)));
  }

  InstructionOperand UseRegister(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        UnallocatedOperand::USED_AT_START,
                                        GetVReg(node)));
  }

  InstructionOperand UseUniqueSlot(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_SLOT,
                                        GetVReg(node)));
  }

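  // Operands created with UnallocatedOperand::USED_AT_START (as in most of
  // the helpers above) are only live until the start of the instruction, so
  // the register allocator is free to assign them the same register as one of
  // the instruction's outputs or temps. The UseUnique* helpers omit
  // USED_AT_START, keeping the operand live until the end of the instruction,
  // which makes them the right choice when an input must not alias any
  // output or temporary register.
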
  // Use register or operand for the node. If a register is chosen, it won't
  // alias any temporary or output registers.
  InstructionOperand UseUnique(Node* node) {
    return Use(node,
               UnallocatedOperand(UnallocatedOperand::NONE, GetVReg(node)));
  }

  // Use a unique register for the node that does not alias any temporary or
  // output registers.
  InstructionOperand UseUniqueRegister(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        GetVReg(node)));
  }

  enum class RegisterUseKind { kUseRegister, kUseUniqueRegister };
  InstructionOperand UseRegister(Node* node, RegisterUseKind unique_reg) {
    if (V8_LIKELY(unique_reg == RegisterUseKind::kUseRegister)) {
      return UseRegister(node);
    } else {
      DCHECK_EQ(unique_reg, RegisterUseKind::kUseUniqueRegister);
      return UseUniqueRegister(node);
    }
  }

  InstructionOperand UseFixed(Node* node, Register reg) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                        reg.code(), GetVReg(node)));
  }

  template <typename FPRegType>
  InstructionOperand UseFixed(Node* node, FPRegType reg) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::FIXED_FP_REGISTER,
                                        reg.code(), GetVReg(node)));
  }

  InstructionOperand UseImmediate(int immediate) {
    return sequence()->AddImmediate(Constant(immediate));
  }

  InstructionOperand UseImmediate64(int64_t immediate) {
    return sequence()->AddImmediate(Constant(immediate));
  }

  InstructionOperand UseImmediate(Node* node) {
    return sequence()->AddImmediate(ToConstant(node));
  }

  InstructionOperand UseNegatedImmediate(Node* node) {
    return sequence()->AddImmediate(ToNegatedConstant(node));
  }

  InstructionOperand UseLocation(Node* node, LinkageLocation location) {
    return Use(node, ToUnallocatedOperand(location, GetVReg(node)));
  }

  // Used to force gap moves from the from_location to the to_location
  // immediately before an instruction.
  InstructionOperand UsePointerLocation(LinkageLocation to_location,
                                        LinkageLocation from_location) {
    UnallocatedOperand casted_from_operand =
        UnallocatedOperand::cast(TempLocation(from_location));
    selector_->Emit(kArchNop, casted_from_operand);
    return ToUnallocatedOperand(to_location,
                                casted_from_operand.virtual_register());
  }

  InstructionOperand TempRegister() {
    return UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                              UnallocatedOperand::USED_AT_START,
                              sequence()->NextVirtualRegister());
  }

  int AllocateVirtualRegister() { return sequence()->NextVirtualRegister(); }

  InstructionOperand DefineSameAsFirstForVreg(int vreg) {
    return UnallocatedOperand(UnallocatedOperand::SAME_AS_INPUT, vreg);
  }

  InstructionOperand DefineAsRegistertForVreg(int vreg) {
    return UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER, vreg);
  }

  InstructionOperand UseRegisterForVreg(int vreg) {
    return UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                              UnallocatedOperand::USED_AT_START, vreg);
  }

  // The kind of register generated for memory operands. kRegister is alive
  // until the start of the operation, kUniqueRegister until the end.
  enum RegisterMode {
    kRegister,
    kUniqueRegister,
  };

  InstructionOperand UseRegisterWithMode(Node* node,
                                         RegisterMode register_mode) {
    return register_mode == kRegister ? UseRegister(node)
                                      : UseUniqueRegister(node);
  }

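  // For example, a backend lowering a store that also needs a write barrier
  // would typically pass kUniqueRegister for the address operands, since the
  // barrier code still reads them after the store itself has executed, while
  // a plain load or store can use kRegister:
  //
  //   RegisterMode mode =
  //       write_barrier_kind != kNoWriteBarrier ? kUniqueRegister : kRegister;
  //   InstructionOperand inputs[] = {g.UseRegisterWithMode(base, mode),
  //                                  g.UseRegisterWithMode(index, mode)};
  //
  // ({base}, {index} and {write_barrier_kind} stand in for whatever the
  // visiting code has at hand; the exact policy is architecture-specific.)
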
  InstructionOperand TempDoubleRegister() {
    UnallocatedOperand op = UnallocatedOperand(
        UnallocatedOperand::MUST_HAVE_REGISTER,
        UnallocatedOperand::USED_AT_START, sequence()->NextVirtualRegister());
    sequence()->MarkAsRepresentation(MachineRepresentation::kFloat64,
                                     op.virtual_register());
    return op;
  }

  InstructionOperand TempSimd128Register() {
    UnallocatedOperand op = UnallocatedOperand(
        UnallocatedOperand::MUST_HAVE_REGISTER,
        UnallocatedOperand::USED_AT_START, sequence()->NextVirtualRegister());
    sequence()->MarkAsRepresentation(MachineRepresentation::kSimd128,
                                     op.virtual_register());
    return op;
  }

  InstructionOperand TempRegister(Register reg) {
    return UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER, reg.code(),
                              InstructionOperand::kInvalidVirtualRegister);
  }

  template <typename FPRegType>
  InstructionOperand TempFpRegister(FPRegType reg) {
    UnallocatedOperand op =
        UnallocatedOperand(UnallocatedOperand::FIXED_FP_REGISTER, reg.code(),
                           sequence()->NextVirtualRegister());
    sequence()->MarkAsRepresentation(MachineRepresentation::kSimd128,
                                     op.virtual_register());
    return op;
  }

  InstructionOperand TempImmediate(int32_t imm) {
    return sequence()->AddImmediate(Constant(imm));
  }

  InstructionOperand TempLocation(LinkageLocation location) {
    return ToUnallocatedOperand(location, sequence()->NextVirtualRegister());
  }

  InstructionOperand Label(BasicBlock* block) {
    return sequence()->AddImmediate(
        Constant(RpoNumber::FromInt(block->rpo_number())));
  }

 protected:
  InstructionSelector* selector() const { return selector_; }
  InstructionSequence* sequence() const { return selector()->sequence(); }
  Zone* zone() const { return selector()->instruction_zone(); }

 private:
  int GetVReg(Node* node) const { return selector_->GetVirtualRegister(node); }

  static Constant ToConstant(const Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return Constant(OpParameter<int32_t>(node->op()));
      case IrOpcode::kInt64Constant:
        return Constant(OpParameter<int64_t>(node->op()));
      case IrOpcode::kTaggedIndexConstant: {
        // Unencoded index value.
        intptr_t value =
            static_cast<intptr_t>(OpParameter<int32_t>(node->op()));
        DCHECK(TaggedIndex::IsValid(value));
        // Generate it as 32/64-bit constant in a tagged form.
        Address tagged_index = TaggedIndex::FromIntptr(value).ptr();
        if (kSystemPointerSize == kInt32Size) {
          return Constant(static_cast<int32_t>(tagged_index));
        } else {
          return Constant(static_cast<int64_t>(tagged_index));
        }
      }
      case IrOpcode::kFloat32Constant:
        return Constant(OpParameter<float>(node->op()));
      case IrOpcode::kRelocatableInt32Constant:
      case IrOpcode::kRelocatableInt64Constant:
        return Constant(OpParameter<RelocatablePtrConstantInfo>(node->op()));
      case IrOpcode::kFloat64Constant:
      case IrOpcode::kNumberConstant:
        return Constant(OpParameter<double>(node->op()));
      case IrOpcode::kExternalConstant:
        return Constant(OpParameter<ExternalReference>(node->op()));
      case IrOpcode::kComment: {
        // We cannot use {intptr_t} here, since the Constant constructor would
        // be ambiguous on some architectures.
        using ptrsize_int_t =
            std::conditional<kSystemPointerSize == 8, int64_t, int32_t>::type;
        return Constant(reinterpret_cast<ptrsize_int_t>(
            OpParameter<const char*>(node->op())));
      }
      case IrOpcode::kHeapConstant:
        return Constant(HeapConstantOf(node->op()));
      case IrOpcode::kCompressedHeapConstant:
        return Constant(HeapConstantOf(node->op()), true);
      case IrOpcode::kDelayedStringConstant:
        return Constant(StringConstantBaseOf(node->op()));
      case IrOpcode::kDeadValue: {
        switch (DeadValueRepresentationOf(node->op())) {
          case MachineRepresentation::kBit:
          case MachineRepresentation::kWord32:
          case MachineRepresentation::kTagged:
          case MachineRepresentation::kTaggedSigned:
          case MachineRepresentation::kTaggedPointer:
          case MachineRepresentation::kCompressed:
          case MachineRepresentation::kCompressedPointer:
            return Constant(static_cast<int32_t>(0));
          case MachineRepresentation::kWord64:
            return Constant(static_cast<int64_t>(0));
          case MachineRepresentation::kFloat64:
            return Constant(static_cast<double>(0));
          case MachineRepresentation::kFloat32:
            return Constant(static_cast<float>(0));
          default:
            UNREACHABLE();
        }
        break;
      }
      default:
        break;
    }
    UNREACHABLE();
  }

  static Constant ToNegatedConstant(const Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return Constant(-OpParameter<int32_t>(node->op()));
      case IrOpcode::kInt64Constant:
        return Constant(-OpParameter<int64_t>(node->op()));
      default:
        break;
    }
    UNREACHABLE();
  }

  UnallocatedOperand Define(Node* node, UnallocatedOperand operand) {
    DCHECK_NOT_NULL(node);
    DCHECK_EQ(operand.virtual_register(), GetVReg(node));
    selector()->MarkAsDefined(node);
    return operand;
  }

  UnallocatedOperand Use(Node* node, UnallocatedOperand operand) {
    DCHECK_NOT_NULL(node);
    DCHECK_EQ(operand.virtual_register(), GetVReg(node));
    selector()->MarkAsUsed(node);
    return operand;
  }

  UnallocatedOperand ToDualLocationUnallocatedOperand(
      LinkageLocation primary_location, LinkageLocation secondary_location,
      int virtual_register) {
    // We only support the primary location being a register and the secondary
    // one a slot.
    DCHECK(primary_location.IsRegister() &&
           secondary_location.IsCalleeFrameSlot());
    int reg_id = primary_location.AsRegister();
    int slot_id = secondary_location.AsCalleeFrameSlot();
    return UnallocatedOperand(reg_id, slot_id, virtual_register);
  }

  UnallocatedOperand ToUnallocatedOperand(LinkageLocation location,
                                          int virtual_register) {
    if (location.IsAnyRegister()) {
      // any machine register.
      return UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                virtual_register);
    }
    if (location.IsCallerFrameSlot()) {
      // a location on the caller frame.
      return UnallocatedOperand(UnallocatedOperand::FIXED_SLOT,
                                location.AsCallerFrameSlot(), virtual_register);
    }
    if (location.IsCalleeFrameSlot()) {
      // a spill location on this (callee) frame.
      return UnallocatedOperand(UnallocatedOperand::FIXED_SLOT,
                                location.AsCalleeFrameSlot(), virtual_register);
    }
    // a fixed register.
    if (IsFloatingPoint(location.GetType().representation())) {
      return UnallocatedOperand(UnallocatedOperand::FIXED_FP_REGISTER,
                                location.AsRegister(), virtual_register);
    }
    return UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                              location.AsRegister(), virtual_register);
  }

  InstructionSelector* selector_;
};

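// Typical usage: an architecture-specific instruction selector derives its
// own generator from OperandGenerator and combines the Define*/Use* helpers
// when emitting instructions. Roughly (simplified from what a real backend
// such as x64 does):
//
//   void InstructionSelector::VisitWord32And(Node* node) {
//     X64OperandGenerator g(this);
//     Emit(kX64And32, g.DefineSameAsFirst(node),
//          g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(1)));
//   }
//
// Here the output reuses the first input's register (the usual x64
// two-address form), the first input must be in a register, and the second
// input may end up in a register or stack slot.
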
}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_INSTRUCTION_SELECTOR_IMPL_H_