• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/codegen/register-configuration.h"
6 #include "src/base/lazy-instance.h"
7 #include "src/codegen/cpu-features.h"
8 #include "src/codegen/register-arch.h"
9 #include "src/common/globals.h"
10 
11 namespace v8 {
12 namespace internal {
13 
namespace {

// Expands to "1 +" for each register in a register list, so applying it over
// ALLOCATABLE_*_REGISTERS and terminating with "0" yields the list length as
// a compile-time integer constant.
#define REGISTER_COUNT(R) 1 +
static const int kMaxAllocatableGeneralRegisterCount =
    ALLOCATABLE_GENERAL_REGISTERS(REGISTER_COUNT) 0;
static const int kMaxAllocatableDoubleRegisterCount =
    ALLOCATABLE_DOUBLE_REGISTERS(REGISTER_COUNT) 0;

// Register codes of every allocatable general-purpose register, in the order
// the architecture's register list declares them.
static const int kAllocatableGeneralCodes[] = {
#define REGISTER_CODE(R) kRegCode_##R,
    ALLOCATABLE_GENERAL_REGISTERS(REGISTER_CODE)};
#undef REGISTER_CODE

// Register codes of every allocatable double register.
#define REGISTER_CODE(R) kDoubleCode_##R,
static const int kAllocatableDoubleCodes[] = {
    ALLOCATABLE_DOUBLE_REGISTERS(REGISTER_CODE)};
#if V8_TARGET_ARCH_ARM
// Reduced double set for Arm CPUs without the VFP32DREGS feature.
static const int kAllocatableNoVFP32DoubleCodes[] = {
    ALLOCATABLE_NO_VFP32_DOUBLE_REGISTERS(REGISTER_CODE)};
#endif  // V8_TARGET_ARCH_ARM
#undef REGISTER_CODE

// The fixed-size code tables in RegisterConfiguration must be large enough to
// hold every register the target architecture declares.
STATIC_ASSERT(RegisterConfiguration::kMaxGeneralRegisters >=
              Register::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxFPRegisters >=
              FloatRegister::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxFPRegisters >=
              DoubleRegister::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxFPRegisters >=
              Simd128Register::kNumRegisters);
44 
45 // Callers on architectures other than Arm expect this to be be constant
46 // between build and runtime. Avoid adding variability on other platforms.
get_num_allocatable_double_registers()47 static int get_num_allocatable_double_registers() {
48   return
49 #if V8_TARGET_ARCH_IA32
50       kMaxAllocatableDoubleRegisterCount;
51 #elif V8_TARGET_ARCH_X64
52       kMaxAllocatableDoubleRegisterCount;
53 #elif V8_TARGET_ARCH_ARM
54       CpuFeatures::IsSupported(VFP32DREGS)
55           ? kMaxAllocatableDoubleRegisterCount
56           : (ALLOCATABLE_NO_VFP32_DOUBLE_REGISTERS(REGISTER_COUNT) 0);
57 #elif V8_TARGET_ARCH_ARM64
58       kMaxAllocatableDoubleRegisterCount;
59 #elif V8_TARGET_ARCH_MIPS
60       kMaxAllocatableDoubleRegisterCount;
61 #elif V8_TARGET_ARCH_MIPS64
62       kMaxAllocatableDoubleRegisterCount;
63 #elif V8_TARGET_ARCH_PPC
64       kMaxAllocatableDoubleRegisterCount;
65 #elif V8_TARGET_ARCH_PPC64
66       kMaxAllocatableDoubleRegisterCount;
67 #elif V8_TARGET_ARCH_S390
68       kMaxAllocatableDoubleRegisterCount;
69 #else
70 #error Unsupported target architecture.
71 #endif
72 }
73 
74 #undef REGISTER_COUNT
75 
76 // Callers on architectures other than Arm expect this to be be constant
77 // between build and runtime. Avoid adding variability on other platforms.
get_allocatable_double_codes()78 static const int* get_allocatable_double_codes() {
79   return
80 #if V8_TARGET_ARCH_ARM
81       CpuFeatures::IsSupported(VFP32DREGS) ? kAllocatableDoubleCodes
82                                            : kAllocatableNoVFP32DoubleCodes;
83 #else
84       kAllocatableDoubleCodes;
85 #endif
86 }
87 
88 class ArchDefaultRegisterConfiguration : public RegisterConfiguration {
89  public:
ArchDefaultRegisterConfiguration()90   ArchDefaultRegisterConfiguration()
91       : RegisterConfiguration(
92             Register::kNumRegisters, DoubleRegister::kNumRegisters,
93             kMaxAllocatableGeneralRegisterCount,
94             get_num_allocatable_double_registers(), kAllocatableGeneralCodes,
95             get_allocatable_double_codes(),
96             kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE) {
97   }
98 };
99 
100 DEFINE_LAZY_LEAKY_OBJECT_GETTER(ArchDefaultRegisterConfiguration,
101                                 GetDefaultRegisterConfiguration)
102 
103 // Allocatable registers with the masking register removed.
104 class ArchDefaultPoisoningRegisterConfiguration : public RegisterConfiguration {
105  public:
ArchDefaultPoisoningRegisterConfiguration()106   ArchDefaultPoisoningRegisterConfiguration()
107       : RegisterConfiguration(
108             Register::kNumRegisters, DoubleRegister::kNumRegisters,
109             kMaxAllocatableGeneralRegisterCount - 1,
110             get_num_allocatable_double_registers(),
111             InitializeGeneralRegisterCodes(), get_allocatable_double_codes(),
112             kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE) {
113   }
114 
115  private:
InitializeGeneralRegisterCodes()116   static const int* InitializeGeneralRegisterCodes() {
117     int filtered_index = 0;
118     for (int i = 0; i < kMaxAllocatableGeneralRegisterCount; ++i) {
119       if (kAllocatableGeneralCodes[i] != kSpeculationPoisonRegister.code()) {
120         allocatable_general_codes_[filtered_index] =
121             kAllocatableGeneralCodes[i];
122         filtered_index++;
123       }
124     }
125     DCHECK_EQ(filtered_index, kMaxAllocatableGeneralRegisterCount - 1);
126     return allocatable_general_codes_;
127   }
128 
129   static int
130       allocatable_general_codes_[kMaxAllocatableGeneralRegisterCount - 1];
131 };
132 
133 int ArchDefaultPoisoningRegisterConfiguration::allocatable_general_codes_
134     [kMaxAllocatableGeneralRegisterCount - 1];
135 
136 DEFINE_LAZY_LEAKY_OBJECT_GETTER(ArchDefaultPoisoningRegisterConfiguration,
137                                 GetDefaultPoisoningRegisterConfiguration)
138 
139 // RestrictedRegisterConfiguration uses the subset of allocatable general
140 // registers the architecture support, which results into generating assembly
141 // to use less registers. Currently, it's only used by RecordWrite code stub.
142 class RestrictedRegisterConfiguration : public RegisterConfiguration {
143  public:
RestrictedRegisterConfiguration(int num_allocatable_general_registers,std::unique_ptr<int[]> allocatable_general_register_codes,std::unique_ptr<char const * []> allocatable_general_register_names)144   RestrictedRegisterConfiguration(
145       int num_allocatable_general_registers,
146       std::unique_ptr<int[]> allocatable_general_register_codes,
147       std::unique_ptr<char const*[]> allocatable_general_register_names)
148       : RegisterConfiguration(
149             Register::kNumRegisters, DoubleRegister::kNumRegisters,
150             num_allocatable_general_registers,
151             get_num_allocatable_double_registers(),
152             allocatable_general_register_codes.get(),
153             get_allocatable_double_codes(),
154             kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE),
155         allocatable_general_register_codes_(
156             std::move(allocatable_general_register_codes)),
157         allocatable_general_register_names_(
158             std::move(allocatable_general_register_names)) {
159     for (int i = 0; i < num_allocatable_general_registers; ++i) {
160       DCHECK(
161           IsAllocatableGeneralRegister(allocatable_general_register_codes_[i]));
162     }
163   }
164 
IsAllocatableGeneralRegister(int code)165   bool IsAllocatableGeneralRegister(int code) {
166     for (int i = 0; i < kMaxAllocatableGeneralRegisterCount; ++i) {
167       if (code == kAllocatableGeneralCodes[i]) {
168         return true;
169       }
170     }
171     return false;
172   }
173 
174  private:
175   std::unique_ptr<int[]> allocatable_general_register_codes_;
176   std::unique_ptr<char const*[]> allocatable_general_register_names_;
177 };
178 
179 }  // namespace
180 
// Returns the lazily-created, process-wide default configuration (leaked,
// never destroyed).
const RegisterConfiguration* RegisterConfiguration::Default() {
  return GetDefaultRegisterConfiguration();
}
184 
// Returns the lazily-created configuration that excludes the speculation
// poison register from the allocatable general registers.
const RegisterConfiguration* RegisterConfiguration::Poisoning() {
  return GetDefaultPoisoningRegisterConfiguration();
}
188 
RestrictGeneralRegisters(RegList registers)189 const RegisterConfiguration* RegisterConfiguration::RestrictGeneralRegisters(
190     RegList registers) {
191   int num = NumRegs(registers);
192   std::unique_ptr<int[]> codes{new int[num]};
193   std::unique_ptr<char const* []> names { new char const*[num] };
194   int counter = 0;
195   for (int i = 0; i < Default()->num_allocatable_general_registers(); ++i) {
196     auto reg = Register::from_code(Default()->GetAllocatableGeneralCode(i));
197     if (reg.bit() & registers) {
198       DCHECK(counter < num);
199       codes[counter] = reg.code();
200       names[counter] = RegisterName(Register::from_code(i));
201       counter++;
202     }
203   }
204 
205   return new RestrictedRegisterConfiguration(num, std::move(codes),
206                                              std::move(names));
207 }
208 
// Derives all register tables from the allocatable general/double code lists.
// Float and simd128 register sets are computed from the double set according
// to |fp_aliasing_kind|:
//  - OVERLAP: float/simd128 codes are identical to the double codes.
//  - COMBINE: double register d aliases float registers 2d and 2d+1, and two
//    adjacent doubles (2s, 2s+1) combine into simd128 register s.
RegisterConfiguration::RegisterConfiguration(
    int num_general_registers, int num_double_registers,
    int num_allocatable_general_registers, int num_allocatable_double_registers,
    const int* allocatable_general_codes, const int* allocatable_double_codes,
    AliasingKind fp_aliasing_kind)
    : num_general_registers_(num_general_registers),
      num_float_registers_(0),
      num_double_registers_(num_double_registers),
      num_simd128_registers_(0),
      num_allocatable_general_registers_(num_allocatable_general_registers),
      num_allocatable_float_registers_(0),
      num_allocatable_double_registers_(num_allocatable_double_registers),
      num_allocatable_simd128_registers_(0),
      allocatable_general_codes_mask_(0),
      allocatable_float_codes_mask_(0),
      allocatable_double_codes_mask_(0),
      allocatable_simd128_codes_mask_(0),
      allocatable_general_codes_(allocatable_general_codes),
      allocatable_double_codes_(allocatable_double_codes),
      fp_aliasing_kind_(fp_aliasing_kind) {
  DCHECK_LE(num_general_registers_,
            RegisterConfiguration::kMaxGeneralRegisters);
  DCHECK_LE(num_double_registers_, RegisterConfiguration::kMaxFPRegisters);
  // Build bit masks of the allocatable codes for fast membership tests.
  for (int i = 0; i < num_allocatable_general_registers_; ++i) {
    allocatable_general_codes_mask_ |= (1 << allocatable_general_codes_[i]);
  }
  for (int i = 0; i < num_allocatable_double_registers_; ++i) {
    allocatable_double_codes_mask_ |= (1 << allocatable_double_codes_[i]);
  }

  if (fp_aliasing_kind_ == COMBINE) {
    // Each double yields two floats, clamped to the table capacity.
    num_float_registers_ = num_double_registers_ * 2 <= kMaxFPRegisters
                               ? num_double_registers_ * 2
                               : kMaxFPRegisters;
    num_allocatable_float_registers_ = 0;
    for (int i = 0; i < num_allocatable_double_registers_; i++) {
      int base_code = allocatable_double_codes_[i] * 2;
      // Doubles whose float aliases would exceed the table contribute none.
      if (base_code >= kMaxFPRegisters) continue;
      allocatable_float_codes_[num_allocatable_float_registers_++] = base_code;
      allocatable_float_codes_[num_allocatable_float_registers_++] =
          base_code + 1;
      allocatable_float_codes_mask_ |= (0x3 << base_code);
    }
    // A simd128 register is allocatable only when both doubles of its pair
    // appear in the (strictly increasing) allocatable double list, i.e. when
    // two consecutive entries map to the same simd128 code.
    num_simd128_registers_ = num_double_registers_ / 2;
    num_allocatable_simd128_registers_ = 0;
    int last_simd128_code = allocatable_double_codes_[0] / 2;
    for (int i = 1; i < num_allocatable_double_registers_; i++) {
      int next_simd128_code = allocatable_double_codes_[i] / 2;
      // This scheme assumes allocatable_double_codes_ are strictly increasing.
      DCHECK_GE(next_simd128_code, last_simd128_code);
      if (last_simd128_code == next_simd128_code) {
        allocatable_simd128_codes_[num_allocatable_simd128_registers_++] =
            next_simd128_code;
        allocatable_simd128_codes_mask_ |= (0x1 << next_simd128_code);
      }
      last_simd128_code = next_simd128_code;
    }
  } else {
    DCHECK(fp_aliasing_kind_ == OVERLAP);
    // Overlapping aliasing: float, double, and simd128 share codes, counts,
    // and masks one-for-one.
    num_float_registers_ = num_simd128_registers_ = num_double_registers_;
    num_allocatable_float_registers_ = num_allocatable_simd128_registers_ =
        num_allocatable_double_registers_;
    for (int i = 0; i < num_allocatable_float_registers_; ++i) {
      allocatable_float_codes_[i] = allocatable_simd128_codes_[i] =
          allocatable_double_codes_[i];
    }
    allocatable_float_codes_mask_ = allocatable_simd128_codes_mask_ =
        allocatable_double_codes_mask_;
  }
}
279 
// Assert that kFloat32, kFloat64, and kSimd128 are consecutive values, so the
// aliasing queries can use the enum distance as the log2 ratio of widths.
STATIC_ASSERT(static_cast<int>(MachineRepresentation::kSimd128) ==
              static_cast<int>(MachineRepresentation::kFloat64) + 1);
STATIC_ASSERT(static_cast<int>(MachineRepresentation::kFloat64) ==
              static_cast<int>(MachineRepresentation::kFloat32) + 1);
285 
GetAliases(MachineRepresentation rep,int index,MachineRepresentation other_rep,int * alias_base_index) const286 int RegisterConfiguration::GetAliases(MachineRepresentation rep, int index,
287                                       MachineRepresentation other_rep,
288                                       int* alias_base_index) const {
289   DCHECK(fp_aliasing_kind_ == COMBINE);
290   DCHECK(IsFloatingPoint(rep) && IsFloatingPoint(other_rep));
291   if (rep == other_rep) {
292     *alias_base_index = index;
293     return 1;
294   }
295   int rep_int = static_cast<int>(rep);
296   int other_rep_int = static_cast<int>(other_rep);
297   if (rep_int > other_rep_int) {
298     int shift = rep_int - other_rep_int;
299     int base_index = index << shift;
300     if (base_index >= kMaxFPRegisters) {
301       // Alias indices would be out of FP register range.
302       return 0;
303     }
304     *alias_base_index = base_index;
305     return 1 << shift;
306   }
307   int shift = other_rep_int - rep_int;
308   *alias_base_index = index >> shift;
309   return 1;
310 }
311 
AreAliases(MachineRepresentation rep,int index,MachineRepresentation other_rep,int other_index) const312 bool RegisterConfiguration::AreAliases(MachineRepresentation rep, int index,
313                                        MachineRepresentation other_rep,
314                                        int other_index) const {
315   DCHECK(fp_aliasing_kind_ == COMBINE);
316   DCHECK(IsFloatingPoint(rep) && IsFloatingPoint(other_rep));
317   if (rep == other_rep) {
318     return index == other_index;
319   }
320   int rep_int = static_cast<int>(rep);
321   int other_rep_int = static_cast<int>(other_rep);
322   if (rep_int > other_rep_int) {
323     int shift = rep_int - other_rep_int;
324     return index == other_index >> shift;
325   }
326   int shift = other_rep_int - rep_int;
327   return index >> shift == other_index;
328 }
329 
330 }  // namespace internal
331 }  // namespace v8
332