1 // Copyright 2017, VIXL authors
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 //   * Redistributions of source code must retain the above copyright notice,
8 //     this list of conditions and the following disclaimer.
9 //   * Redistributions in binary form must reproduce the above copyright notice,
10 //     this list of conditions and the following disclaimer in the documentation
11 //     and/or other materials provided with the distribution.
12 //   * Neither the name of ARM Limited nor the names of its contributors may be
13 //     used to endorse or promote products derived from this software without
14 //     specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20 // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 
27 #include <cstdio>
28 #include <cstring>
29 #include <string>
30 #include <thread>
31 
32 #include "test-runner.h"
33 #include "test-utils.h"
34 
35 #include "aarch64/macro-assembler-aarch64.h"
36 #include "aarch64/registers-aarch64.h"
37 #include "aarch64/simulator-aarch64.h"
38 #include "aarch64/test-utils-aarch64.h"
39 
40 #define __ masm.
41 #define TEST(name) TEST_(AARCH64_API_##name)
42 
43 
44 namespace vixl {
45 namespace aarch64 {
46 
47 // Check compiler intrinsics helpers.
48 
49 TEST(count_leading_sign_bits) {
50   class Helper {
51    public:
52     static void Check(int64_t value, int non_sign_bits) {
53       VIXL_ASSERT((0 <= non_sign_bits) && (non_sign_bits < 64));
54 
55       for (int width = 1; width <= 64; width *= 2) {
56         // Note that leading_sign_bits does not include the topmost bit.
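        // For example, checking value == 1 at width == 8 (0b00000001): there is
        // one non-sign bit, so the topmost bit is followed by 8 - 1 - 1 = 6
        // copies of the sign bit and CountLeadingSignBits(1, 8) == 6.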
57         int leading_sign_bits = width - non_sign_bits - 1;
58         if (leading_sign_bits < 0) continue;
59 
60         int64_t result = CountLeadingSignBits(value, width);
61         int64_t fallback_result = CountLeadingSignBitsFallBack(value, width);
62         VIXL_CHECK(result == leading_sign_bits);
63         VIXL_CHECK(fallback_result == leading_sign_bits);
64       }
65     }
66   };
67 
68   // Basic positive (and zero) cases. Sign bits are all zeroes.
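  // In the bit patterns below, '+' marks a leading zero (positive sign) bit
  // and '-' marks a leading one (negative sign) bit.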
69   Helper::Check(0, 0);  // 0b++++
70   Helper::Check(1, 1);  // 0b+++1
71   Helper::Check(2, 2);  // 0b++10
72   Helper::Check(3, 2);  // 0b++11
73   Helper::Check(4, 3);  // 0b+100
74 
75   // Basic negative cases. Sign bits are all ones.
76   Helper::Check(-1, 0);  // 0b----
77   Helper::Check(-2, 1);  // 0b---0
78   Helper::Check(-3, 2);  // 0b--01
79   Helper::Check(-4, 2);  // 0b--00
80   Helper::Check(-5, 3);  // 0b-011
81 
82   // Boundary conditions.
83   Helper::Check(INT8_MAX, 7);
84   Helper::Check(INT8_MIN, 7);
85   Helper::Check(static_cast<int64_t>(INT8_MAX) + 1, 8);
86   Helper::Check(static_cast<int64_t>(INT8_MIN) - 1, 8);
87 
88   Helper::Check(INT16_MAX, 15);
89   Helper::Check(INT16_MIN, 15);
90   Helper::Check(static_cast<int64_t>(INT16_MAX) + 1, 16);
91   Helper::Check(static_cast<int64_t>(INT16_MIN) - 1, 16);
92 
93   Helper::Check(INT32_MAX, 31);
94   Helper::Check(INT32_MIN, 31);
95   Helper::Check(static_cast<int64_t>(INT32_MAX) + 1, 32);
96   Helper::Check(static_cast<int64_t>(INT32_MIN) - 1, 32);
97 
98   Helper::Check(INT64_MAX, 63);
99   Helper::Check(INT64_MIN, 63);
100 
101   // Check automatic width detection.
102   VIXL_CHECK(CountLeadingSignBits(static_cast<int8_t>(42)) == 1);  // 0b00101010
103   VIXL_CHECK(CountLeadingSignBits(static_cast<int16_t>(42)) == 9);
104   VIXL_CHECK(CountLeadingSignBits(static_cast<int32_t>(42)) == 25);
105   VIXL_CHECK(CountLeadingSignBits(static_cast<int64_t>(42)) == 57);
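  // In each case the result is (width - 7): 42 (0b101010) occupies six value
  // bits, leaving width - 6 - 1 redundant sign bits below the topmost bit.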
106 }
107 
108 // Check SimFloat16 class mechanics.
109 TEST(float16_operators) {
110   ::vixl::internal::SimFloat16 f1 = kFP16DefaultNaN;
111   ::vixl::internal::SimFloat16 f2 = kFP16DefaultNaN;
112   ::vixl::internal::SimFloat16 f3 = kFP16PositiveInfinity;
113   ::vixl::internal::SimFloat16 f4 = kFP16NegativeInfinity;
114   VIXL_CHECK(!(f1 == f2));
115   VIXL_CHECK(f1 != f2);
116   VIXL_CHECK(!(f3 == f4));
117   VIXL_CHECK(f3 != f4);
118   VIXL_CHECK(::vixl::internal::SimFloat16(kFP16PositiveZero) ==
119              ::vixl::internal::SimFloat16(kFP16NegativeZero));
120   VIXL_CHECK(!(::vixl::internal::SimFloat16(kFP16PositiveZero) !=
121                ::vixl::internal::SimFloat16(kFP16NegativeZero)));
122 }
123 
124 TEST(rawbits_conversions) {
125   VIXL_CHECK(RawbitsToInt64(0x0) == 0x0);
126   VIXL_CHECK(RawbitsToInt64(0x123) == 0x123);
127   VIXL_CHECK(RawbitsToInt64(INT64_MAX) == INT64_MAX);
128   VIXL_CHECK(RawbitsToInt64(UINT64_C(0xffffffffffffffff)) == -1);
129   VIXL_CHECK(RawbitsToInt64(UINT64_C(0x8000000000000000)) == INT64_MIN);
130   VIXL_CHECK(RawbitsToInt64(UINT64_C(0x8000000000000001)) == -INT64_MAX);
131 
132   VIXL_CHECK(RawbitsToInt32(0x0) == 0x0);
133   VIXL_CHECK(RawbitsToInt32(0x123) == 0x123);
134   VIXL_CHECK(RawbitsToInt32(INT32_MAX) == INT32_MAX);
135   VIXL_CHECK(RawbitsToInt32(UINT32_C(0xffffffff)) == -1);
136   VIXL_CHECK(RawbitsToInt32(UINT32_C(0x80000000)) == INT32_MIN);
137   VIXL_CHECK(RawbitsToInt32(UINT32_C(0x80000001)) == -INT32_MAX);
138 }
139 
140 // Check moved FP constants are still accessible via the AArch64 namespace.
141 TEST(float_constants_scope) {
142   VIXL_CHECK(vixl::aarch64::kFP64PositiveInfinity ==
143              vixl::kFP64PositiveInfinity);
144   VIXL_CHECK(vixl::aarch64::kFP64NegativeInfinity ==
145              vixl::kFP64NegativeInfinity);
146   VIXL_CHECK(vixl::aarch64::kFP32PositiveInfinity ==
147              vixl::kFP32PositiveInfinity);
148   VIXL_CHECK(vixl::aarch64::kFP32NegativeInfinity ==
149              vixl::kFP32NegativeInfinity);
150   VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16PositiveInfinity) ==
151              Float16ToRawbits(vixl::kFP16PositiveInfinity));
152   VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16NegativeInfinity) ==
153              Float16ToRawbits(vixl::kFP16NegativeInfinity));
154   VIXL_CHECK(DoubleToRawbits(vixl::aarch64::kFP64DefaultNaN) ==
155              DoubleToRawbits(vixl::kFP64DefaultNaN));
156   VIXL_CHECK(FloatToRawbits(vixl::aarch64::kFP32DefaultNaN) ==
157              FloatToRawbits(vixl::kFP32DefaultNaN));
158   VIXL_CHECK(IsNaN(vixl::aarch64::kFP16DefaultNaN) ==
159              IsNaN(vixl::kFP16DefaultNaN));
160   VIXL_CHECK(vixl::aarch64::kDoubleExponentBits == vixl::kDoubleExponentBits);
161   VIXL_CHECK(vixl::aarch64::kDoubleMantissaBits == vixl::kDoubleMantissaBits);
162   VIXL_CHECK(vixl::aarch64::kFloatExponentBits == vixl::kFloatExponentBits);
163   VIXL_CHECK(vixl::aarch64::kFloatMantissaBits == vixl::kFloatMantissaBits);
164   VIXL_CHECK(vixl::aarch64::kFloat16ExponentBits == vixl::kFloat16ExponentBits);
165   VIXL_CHECK(vixl::aarch64::kFloat16MantissaBits == vixl::kFloat16MantissaBits);
166 }
167 
168 
169 TEST(register_bit) {
170   VIXL_CHECK(x0.GetBit() == (UINT64_C(1) << 0));
171   VIXL_CHECK(x1.GetBit() == (UINT64_C(1) << 1));
172   VIXL_CHECK(x10.GetBit() == (UINT64_C(1) << 10));
173 
174   // AAPCS64 definitions.
175   VIXL_CHECK(lr.GetBit() == (UINT64_C(1) << kLinkRegCode));
176 
177   // Fixed (hardware) definitions.
178   VIXL_CHECK(xzr.GetBit() == (UINT64_C(1) << kZeroRegCode));
179 
180   // Internal ABI definitions.
181   VIXL_CHECK(sp.GetBit() == (UINT64_C(1) << kSPRegInternalCode));
182   VIXL_CHECK(sp.GetBit() != xzr.GetBit());
183 
184   // xn.GetBit() == wn.GetBit() at all times, for the same n.
185   VIXL_CHECK(x0.GetBit() == w0.GetBit());
186   VIXL_CHECK(x1.GetBit() == w1.GetBit());
187   VIXL_CHECK(x10.GetBit() == w10.GetBit());
188   VIXL_CHECK(xzr.GetBit() == wzr.GetBit());
189   VIXL_CHECK(sp.GetBit() == wsp.GetBit());
190 }
191 
192 
193 TEST(noreg) {
194   VIXL_CHECK(NoReg.Is(NoVReg));
195   VIXL_CHECK(NoVReg.Is(NoReg));
196 
197   VIXL_CHECK(NoVReg.Is(NoReg));
198   VIXL_CHECK(NoReg.Is(NoVReg));
199 
200   VIXL_CHECK(NoReg.Is(NoCPUReg));
201   VIXL_CHECK(NoCPUReg.Is(NoReg));
202 
203   VIXL_CHECK(NoVReg.Is(NoCPUReg));
204   VIXL_CHECK(NoCPUReg.Is(NoVReg));
205 
206   VIXL_CHECK(NoVReg.Is(NoCPUReg));
207   VIXL_CHECK(NoCPUReg.Is(NoVReg));
208 
209   VIXL_CHECK(NoReg.IsNone());
210   VIXL_CHECK(NoVReg.IsNone());
211   VIXL_CHECK(NoCPUReg.IsNone());
212 }
213 
214 
215 TEST(constructors) {
216   // *Register(code)
217   VIXL_CHECK(WRegister(0).Is(w0));
218   VIXL_CHECK(XRegister(1).Is(x1));
219 
220   VIXL_CHECK(BRegister(2).Is(b2));
221   VIXL_CHECK(HRegister(3).Is(h3));
222   VIXL_CHECK(SRegister(4).Is(s4));
223   VIXL_CHECK(DRegister(5).Is(d5));
224   VIXL_CHECK(QRegister(6).Is(q6));
225 
226   VIXL_CHECK(ZRegister(7).Is(z7));
227   VIXL_CHECK(PRegister(8).Is(p8));
228 }
229 
230 
231 TEST(constructors_r) {
232   // Register(code, size_in_bits)
233   VIXL_CHECK(Register(0, kWRegSize).Is(w0));
234   VIXL_CHECK(Register(1, kXRegSize).Is(x1));
235 }
236 
237 
238 TEST(constructors_v) {
239   // VRegister(code)
240   VIXL_CHECK(VRegister(0).Is(v0));
241   VIXL_CHECK(VRegister(1).Is(v1));
242   VIXL_CHECK(VRegister(2).Is(v2));
243   VIXL_CHECK(VRegister(3).Is(v3));
244   VIXL_CHECK(VRegister(4).Is(v4));
245 
246   // VRegister(code, size_in_bits)
247   VIXL_CHECK(VRegister(0, kBRegSize).Is(b0));
248   VIXL_CHECK(VRegister(1, kHRegSize).Is(h1));
249   VIXL_CHECK(VRegister(2, kSRegSize).Is(s2));
250   VIXL_CHECK(VRegister(3, kDRegSize).Is(d3));
251   VIXL_CHECK(VRegister(4, kQRegSize).Is(q4));
252 
253   // VRegister(code, size_in_bits, lanes)
254   VIXL_CHECK(VRegister(0, kBRegSize, 1).Is(b0));
255   VIXL_CHECK(VRegister(1, kHRegSize, 1).Is(h1));
256   VIXL_CHECK(VRegister(2, kSRegSize, 1).Is(s2));
257   VIXL_CHECK(VRegister(3, kDRegSize, 1).Is(d3));
258   VIXL_CHECK(VRegister(4, kQRegSize, 1).Is(q4));
259 
260   VIXL_CHECK(VRegister(0, kSRegSize, 2).Is(v0.V2H()));
261 
262   VIXL_CHECK(VRegister(1, kDRegSize, 1).Is(v1.V1D()));
263   VIXL_CHECK(VRegister(2, kDRegSize, 2).Is(v2.V2S()));
264   VIXL_CHECK(VRegister(3, kDRegSize, 4).Is(v3.V4H()));
265   VIXL_CHECK(VRegister(4, kDRegSize, 8).Is(v4.V8B()));
266 
267   VIXL_CHECK(VRegister(5, kQRegSize, 2).Is(v5.V2D()));
268   VIXL_CHECK(VRegister(6, kQRegSize, 4).Is(v6.V4S()));
269   VIXL_CHECK(VRegister(7, kQRegSize, 8).Is(v7.V8H()));
270   VIXL_CHECK(VRegister(8, kQRegSize, 16).Is(v8.V16B()));
271 
272   // VRegister(code, format)
273   VIXL_CHECK(VRegister(0, kFormatB).Is(b0));
274   VIXL_CHECK(VRegister(1, kFormatH).Is(h1));
275   VIXL_CHECK(VRegister(2, kFormatS).Is(s2));
276   VIXL_CHECK(VRegister(3, kFormatD).Is(d3));
277   VIXL_CHECK(VRegister(4, kFormat8B).Is(v4.V8B()));
278   VIXL_CHECK(VRegister(5, kFormat16B).Is(v5.V16B()));
279   VIXL_CHECK(VRegister(6, kFormat2H).Is(v6.V2H()));
280   VIXL_CHECK(VRegister(7, kFormat4H).Is(v7.V4H()));
281   VIXL_CHECK(VRegister(8, kFormat8H).Is(v8.V8H()));
282   VIXL_CHECK(VRegister(9, kFormat2S).Is(v9.V2S()));
283   VIXL_CHECK(VRegister(10, kFormat4S).Is(v10.V4S()));
284   VIXL_CHECK(VRegister(11, kFormat1D).Is(v11.V1D()));
285   VIXL_CHECK(VRegister(12, kFormat2D).Is(v12.V2D()));
286 }
287 
288 
289 TEST(constructors_z) {
290   // ZRegister(code, lane_size_in_bits)
291   VIXL_CHECK(ZRegister(0, kBRegSize).Is(z0.VnB()));
292   VIXL_CHECK(ZRegister(1, kHRegSize).Is(z1.VnH()));
293   VIXL_CHECK(ZRegister(2, kSRegSize).Is(z2.VnS()));
294   VIXL_CHECK(ZRegister(3, kDRegSize).Is(z3.VnD()));
295 
296   // ZRegister(code, format)
297   VIXL_CHECK(ZRegister(0, kFormatVnB).Is(z0.VnB()));
298   VIXL_CHECK(ZRegister(1, kFormatVnH).Is(z1.VnH()));
299   VIXL_CHECK(ZRegister(2, kFormatVnS).Is(z2.VnS()));
300   VIXL_CHECK(ZRegister(3, kFormatVnD).Is(z3.VnD()));
301 }
302 
303 
304 TEST(constructors_p) {
305   // PRegisterWithLaneSize(code, lane_size_in_bits)
306   VIXL_CHECK(PRegisterWithLaneSize(0, kBRegSize).Is(p0.VnB()));
307   VIXL_CHECK(PRegisterWithLaneSize(1, kHRegSize).Is(p1.VnH()));
308   VIXL_CHECK(PRegisterWithLaneSize(2, kSRegSize).Is(p2.VnS()));
309   VIXL_CHECK(PRegisterWithLaneSize(3, kDRegSize).Is(p3.VnD()));
310 
311   // PRegisterWithLaneSize(code, format)
312   VIXL_CHECK(PRegisterWithLaneSize(0, kFormatVnB).Is(p0.VnB()));
313   VIXL_CHECK(PRegisterWithLaneSize(1, kFormatVnH).Is(p1.VnH()));
314   VIXL_CHECK(PRegisterWithLaneSize(2, kFormatVnS).Is(p2.VnS()));
315   VIXL_CHECK(PRegisterWithLaneSize(3, kFormatVnD).Is(p3.VnD()));
316 
317   VIXL_CHECK(PRegisterZ(0).Is(p0.Zeroing()));
318   VIXL_CHECK(PRegisterM(1).Is(p1.Merging()));
319 }
320 
321 
322 TEST(constructors_cpu) {
323   // CPURegister(code, size_in_bits, type)
324   VIXL_CHECK(CPURegister(0, kWRegSize, CPURegister::kRegister).Is(w0));
325   VIXL_CHECK(CPURegister(1, kXRegSize, CPURegister::kRegister).Is(x1));
326 
327   VIXL_CHECK(CPURegister(2, kBRegSize, CPURegister::kVRegister).Is(b2));
328   VIXL_CHECK(CPURegister(3, kHRegSize, CPURegister::kVRegister).Is(h3));
329   VIXL_CHECK(CPURegister(4, kSRegSize, CPURegister::kVRegister).Is(s4));
330   VIXL_CHECK(CPURegister(5, kDRegSize, CPURegister::kVRegister).Is(d5));
331   VIXL_CHECK(CPURegister(6, kQRegSize, CPURegister::kVRegister).Is(q6));
332   VIXL_CHECK(CPURegister(7, kQRegSize, CPURegister::kVRegister).Is(v7));
333 
334   VIXL_CHECK(CPURegister(0, CPURegister::kUnknownSize, CPURegister::kVRegister)
335                  .Is(z0));
336   VIXL_CHECK(CPURegister(1, CPURegister::kUnknownSize, CPURegister::kPRegister)
337                  .Is(p1));
338 }
339 
340 
341 #ifdef __aarch64__
342 static void CPURegisterByValueHelper(CPURegister reg) {
343   // Test that `reg` can be passed in one register. We'd like to use
344   // __attribute__((naked)) for this, but it isn't supported for AArch64, so
345   // generate a function using VIXL instead.
346 
347   MacroAssembler masm;
348   // CPURegister fn(int placeholder, CPURegister reg);
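  // Under AAPCS64, `placeholder` arrives in w0 and a CPURegister that fits in
  // a single register arrives in x1; the trivially-copyable return value is
  // expected in x0.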
349   // Move `reg` to its result register.
350   __ Mov(x0, x1);
351   // Clobber all other result registers.
352   __ Mov(x1, 0xfffffffffffffff1);
353   __ Mov(x2, 0xfffffffffffffff2);
354   __ Mov(x3, 0xfffffffffffffff3);
355   __ Mov(x4, 0xfffffffffffffff4);
356   __ Mov(x5, 0xfffffffffffffff5);
357   __ Mov(x6, 0xfffffffffffffff6);
358   __ Mov(x7, 0xfffffffffffffff7);
359   __ Ret();
360   masm.FinalizeCode();
361 
362   CodeBuffer* buffer = masm.GetBuffer();
363   auto fn = buffer->GetStartAddress<CPURegister (*)(int, CPURegister)>();
364   buffer->SetExecutable();
365   CPURegister out = fn(42, reg);
366 
367   VIXL_CHECK(out.Is(reg));
368 }
369 
370 
371 TEST(cpureg_by_value) {
372   VIXL_STATIC_ASSERT(sizeof(CPURegister) <= sizeof(void*));
373   // Check some arbitrary registers to try to exercise each encoding field.
374   CPURegisterByValueHelper(x0);
375   CPURegisterByValueHelper(v31.V8H());
376   CPURegisterByValueHelper(z16.VnD());
377   CPURegisterByValueHelper(p15.Merging());
378 }
379 #endif  // __aarch64__
380 
381 
382 TEST(isvalid) {
383   VIXL_CHECK(!NoReg.IsValid());
384   VIXL_CHECK(!NoVReg.IsValid());
385   VIXL_CHECK(!NoCPUReg.IsValid());
386 
387   VIXL_CHECK(x0.IsValid());
388   VIXL_CHECK(w0.IsValid());
389   VIXL_CHECK(x30.IsValid());
390   VIXL_CHECK(w30.IsValid());
391   VIXL_CHECK(xzr.IsValid());
392   VIXL_CHECK(wzr.IsValid());
393 
394   VIXL_CHECK(sp.IsValid());
395   VIXL_CHECK(wsp.IsValid());
396 
397   VIXL_CHECK(d0.IsValid());
398   VIXL_CHECK(s0.IsValid());
399   VIXL_CHECK(d31.IsValid());
400   VIXL_CHECK(s31.IsValid());
401 
402   VIXL_CHECK(x0.IsValidRegister());
403   VIXL_CHECK(w0.IsValidRegister());
404   VIXL_CHECK(xzr.IsValidRegister());
405   VIXL_CHECK(wzr.IsValidRegister());
406   VIXL_CHECK(sp.IsValidRegister());
407   VIXL_CHECK(wsp.IsValidRegister());
408   VIXL_CHECK(!x0.IsValidVRegister());
409   VIXL_CHECK(!w0.IsValidVRegister());
410   VIXL_CHECK(!xzr.IsValidVRegister());
411   VIXL_CHECK(!wzr.IsValidVRegister());
412   VIXL_CHECK(!sp.IsValidVRegister());
413   VIXL_CHECK(!wsp.IsValidVRegister());
414   VIXL_CHECK(!x0.IsValidFPRegister());
415   VIXL_CHECK(!w0.IsValidFPRegister());
416   VIXL_CHECK(!xzr.IsValidFPRegister());
417   VIXL_CHECK(!wzr.IsValidFPRegister());
418   VIXL_CHECK(!sp.IsValidFPRegister());
419   VIXL_CHECK(!wsp.IsValidFPRegister());
420 
421   VIXL_CHECK(q0.IsValidVRegister());
422   VIXL_CHECK(!q0.IsValidFPRegister());
423   VIXL_CHECK(!q0.IsValidRegister());
424 
425   VIXL_CHECK(d0.IsValidVRegister());
426   VIXL_CHECK(d0.IsValidFPRegister());
427   VIXL_CHECK(!d0.IsValidRegister());
428 
429   VIXL_CHECK(s0.IsValidVRegister());
430   VIXL_CHECK(s0.IsValidFPRegister());
431   VIXL_CHECK(!s0.IsValidRegister());
432 
433   VIXL_CHECK(h0.IsValidVRegister());
434   VIXL_CHECK(h0.IsValidFPRegister());
435   VIXL_CHECK(!h0.IsValidRegister());
436 
437   VIXL_CHECK(b0.IsValidVRegister());
438   VIXL_CHECK(!b0.IsValidFPRegister());
439   VIXL_CHECK(!b0.IsValidRegister());
440 
441   // IsValidFPRegister() is only true for scalar types.
442   VIXL_CHECK(q0.V2D().IsValidVRegister());
443   VIXL_CHECK(!q0.V2D().IsValidFPRegister());
444   VIXL_CHECK(d0.V2S().IsValidVRegister());
445   VIXL_CHECK(!d0.V2S().IsValidFPRegister());
446   VIXL_CHECK(s0.V2H().IsValidVRegister());
447   VIXL_CHECK(!s0.V2H().IsValidFPRegister());
448 }
449 
450 
451 TEST(isvalid_cpu) {
452   // As 'isvalid', but using CPURegister types where possible. This shouldn't
453   // make any difference.
454   VIXL_CHECK(!static_cast<CPURegister>(NoReg).IsValid());
455   VIXL_CHECK(!static_cast<CPURegister>(NoVReg).IsValid());
456   VIXL_CHECK(!static_cast<CPURegister>(NoCPUReg).IsValid());
457 
458   VIXL_CHECK(static_cast<CPURegister>(x0).IsValid());
459   VIXL_CHECK(static_cast<CPURegister>(w0).IsValid());
460   VIXL_CHECK(static_cast<CPURegister>(x30).IsValid());
461   VIXL_CHECK(static_cast<CPURegister>(w30).IsValid());
462   VIXL_CHECK(static_cast<CPURegister>(xzr).IsValid());
463   VIXL_CHECK(static_cast<CPURegister>(wzr).IsValid());
464 
465   VIXL_CHECK(static_cast<CPURegister>(sp).IsValid());
466   VIXL_CHECK(static_cast<CPURegister>(wsp).IsValid());
467 
468   VIXL_CHECK(static_cast<CPURegister>(d0).IsValid());
469   VIXL_CHECK(static_cast<CPURegister>(s0).IsValid());
470   VIXL_CHECK(static_cast<CPURegister>(d31).IsValid());
471   VIXL_CHECK(static_cast<CPURegister>(s31).IsValid());
472 
473   VIXL_CHECK(static_cast<CPURegister>(x0).IsValidRegister());
474   VIXL_CHECK(static_cast<CPURegister>(w0).IsValidRegister());
475   VIXL_CHECK(static_cast<CPURegister>(xzr).IsValidRegister());
476   VIXL_CHECK(static_cast<CPURegister>(wzr).IsValidRegister());
477   VIXL_CHECK(static_cast<CPURegister>(sp).IsValidRegister());
478   VIXL_CHECK(static_cast<CPURegister>(wsp).IsValidRegister());
479   VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidVRegister());
480   VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidVRegister());
481   VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidVRegister());
482   VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidVRegister());
483   VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidVRegister());
484   VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidVRegister());
485   VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidFPRegister());
486   VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidFPRegister());
487   VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidFPRegister());
488   VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidFPRegister());
489   VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidFPRegister());
490   VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidFPRegister());
491 
492   VIXL_CHECK(static_cast<CPURegister>(q0).IsValidVRegister());
493   VIXL_CHECK(!static_cast<CPURegister>(q0).IsValidFPRegister());
494   VIXL_CHECK(!static_cast<CPURegister>(q0).IsValidRegister());
495 
496   VIXL_CHECK(static_cast<CPURegister>(d0).IsValidVRegister());
497   VIXL_CHECK(static_cast<CPURegister>(d0).IsValidFPRegister());
498   VIXL_CHECK(!static_cast<CPURegister>(d0).IsValidRegister());
499 
500   VIXL_CHECK(static_cast<CPURegister>(s0).IsValidVRegister());
501   VIXL_CHECK(static_cast<CPURegister>(s0).IsValidFPRegister());
502   VIXL_CHECK(!static_cast<CPURegister>(s0).IsValidRegister());
503 
504   VIXL_CHECK(static_cast<CPURegister>(h0).IsValidVRegister());
505   VIXL_CHECK(static_cast<CPURegister>(h0).IsValidFPRegister());
506   VIXL_CHECK(!static_cast<CPURegister>(h0).IsValidRegister());
507 
508   VIXL_CHECK(static_cast<CPURegister>(b0).IsValidVRegister());
509   VIXL_CHECK(!static_cast<CPURegister>(b0).IsValidFPRegister());
510   VIXL_CHECK(!static_cast<CPURegister>(b0).IsValidRegister());
511 }
512 
513 
514 TEST(are_consecutive) {
515   VIXL_CHECK(AreConsecutive(b0, NoVReg));
516   VIXL_CHECK(AreConsecutive(b1, b2));
517   VIXL_CHECK(AreConsecutive(b3, b4, b5));
518   VIXL_CHECK(AreConsecutive(b6, b7, b8, b9));
519   VIXL_CHECK(AreConsecutive(h10, NoVReg));
520   VIXL_CHECK(AreConsecutive(h11, h12));
521   VIXL_CHECK(AreConsecutive(h13, h14, h15));
522   VIXL_CHECK(AreConsecutive(h16, h17, h18, h19));
523   VIXL_CHECK(AreConsecutive(s20, NoVReg));
524   VIXL_CHECK(AreConsecutive(s21, s22));
525   VIXL_CHECK(AreConsecutive(s23, s24, s25));
526   VIXL_CHECK(AreConsecutive(s26, s27, s28, s29));
527   VIXL_CHECK(AreConsecutive(d30, NoVReg));
528   VIXL_CHECK(AreConsecutive(d31, d0));
529   VIXL_CHECK(AreConsecutive(d1, d2, d3));
530   VIXL_CHECK(AreConsecutive(d4, d5, d6, d7));
531   VIXL_CHECK(AreConsecutive(q8, NoVReg));
532   VIXL_CHECK(AreConsecutive(q9, q10));
533   VIXL_CHECK(AreConsecutive(q11, q12, q13));
534   VIXL_CHECK(AreConsecutive(q14, q15, q16, q17));
535   VIXL_CHECK(AreConsecutive(v18, NoVReg));
536   VIXL_CHECK(AreConsecutive(v19, v20));
537   VIXL_CHECK(AreConsecutive(v21, v22, v23));
538   VIXL_CHECK(AreConsecutive(v24, v25, v26, v27));
539   VIXL_CHECK(AreConsecutive(b29, h30));
540   VIXL_CHECK(AreConsecutive(s31, d0, q1));
541   VIXL_CHECK(AreConsecutive(v2, b3, h4, s5));
542 
543   VIXL_CHECK(!AreConsecutive(b0, b2));
544   VIXL_CHECK(!AreConsecutive(h1, h0));
545   VIXL_CHECK(!AreConsecutive(s31, s1));
546   VIXL_CHECK(!AreConsecutive(d12, d12));
547   VIXL_CHECK(!AreConsecutive(q31, q1));
548 
549   VIXL_CHECK(!AreConsecutive(b0, b1, b3));
550   VIXL_CHECK(!AreConsecutive(h4, h5, h6, h6));
551   VIXL_CHECK(!AreConsecutive(d11, d13, NoVReg, d14));
552   VIXL_CHECK(!AreConsecutive(d15, d16, d18, NoVReg));
553   VIXL_CHECK(!AreConsecutive(b26, b28, NoVReg, b29));
554   VIXL_CHECK(!AreConsecutive(s28, s30, NoVReg, NoVReg));
555 
556   VIXL_CHECK(AreConsecutive(q19, NoVReg, NoVReg, q22));
557   VIXL_CHECK(AreConsecutive(v23, NoVReg, v25, NoVReg));
558   VIXL_CHECK(AreConsecutive(b26, b27, NoVReg, NoVReg));
559   VIXL_CHECK(AreConsecutive(h28, NoVReg, NoVReg, NoVReg));
560   VIXL_CHECK(AreConsecutive(s30, s31, NoVReg, s2));
561   VIXL_CHECK(AreConsecutive(d3, NoVReg, d6, d7));
562 }
563 
564 
565 TEST(sve_p_registers) {
566   enum Qualification { kNone, kZeroing, kMerging, kWithLaneSize };
567   class Helper {
568    public:
569     static Qualification GetQualification(PRegister) { return kNone; }
570     static Qualification GetQualification(PRegisterZ) { return kZeroing; }
571     static Qualification GetQualification(PRegisterM) { return kMerging; }
572     static Qualification GetQualification(PRegisterWithLaneSize) {
573       return kWithLaneSize;
574     }
575   };
576 
577   VIXL_CHECK(kNumberOfPRegisters == 16);
578   VIXL_CHECK(p0.GetCode() == 0);
579   VIXL_CHECK(p15.GetCode() == 15);
580   VIXL_CHECK(p14.VnB().GetLaneSizeInBits() == kBRegSize);
581   VIXL_CHECK(p14.VnH().GetLaneSizeInBits() == kHRegSize);
582   VIXL_CHECK(p14.VnS().GetLaneSizeInBits() == kSRegSize);
583   VIXL_CHECK(p14.VnD().GetLaneSizeInBits() == kDRegSize);
584   VIXL_CHECK(p14.VnB().GetLaneSizeInBytes() == kBRegSizeInBytes);
585   VIXL_CHECK(p14.VnH().GetLaneSizeInBytes() == kHRegSizeInBytes);
586   VIXL_CHECK(p14.VnS().GetLaneSizeInBytes() == kSRegSizeInBytes);
587   VIXL_CHECK(p14.VnD().GetLaneSizeInBytes() == kDRegSizeInBytes);
588   VIXL_CHECK(Helper::GetQualification(p1) == kNone);
589   VIXL_CHECK(Helper::GetQualification(p2.Zeroing()) == kZeroing);
590   VIXL_CHECK(Helper::GetQualification(p3.Merging()) == kMerging);
591   VIXL_CHECK(Helper::GetQualification(p4.VnB()) == kWithLaneSize);
592   VIXL_CHECK(Helper::GetQualification(p5.VnH()) == kWithLaneSize);
593   VIXL_CHECK(Helper::GetQualification(p6.VnS()) == kWithLaneSize);
594   VIXL_CHECK(Helper::GetQualification(p7.VnD()) == kWithLaneSize);
595 }
596 
597 
598 TEST(sve_z_registers) {
599   VIXL_CHECK(z0.GetCode() == 0);
600   VIXL_CHECK(z31.GetCode() == 31);
601 
602   VIXL_CHECK(z0.Is(z0));
603   VIXL_CHECK(!z0.Is(z1));
604   VIXL_CHECK(!z0.Is(v0));
605   VIXL_CHECK(!z0.Is(b0));
606   VIXL_CHECK(!z0.Is(q0));
607 
608   VIXL_CHECK(AreAliased(z5, z5));
609   VIXL_CHECK(AreAliased(z5, b5));
610   VIXL_CHECK(AreAliased(b5, z5));
611   VIXL_CHECK(AreAliased(z5, z5.B()));
612   VIXL_CHECK(AreAliased(z5, z5.VnB()));
613 
614   VIXL_CHECK(!AreAliased(z6, z7));
615   VIXL_CHECK(!AreAliased(b6, z7));
616   VIXL_CHECK(!AreAliased(x7, z7));
617 }
618 
619 
620 TEST(sve_z_registers_vs_neon) {
621   // There are three related register variants to consider in VIXL's API:
622   //
623   //    "b0": NEON: The least-significant byte of v0.
624   //    "v0.B": NEON: v0, with an unspecified number of byte-sized lanes.
625   //    "z0.B": SVE: z0, with an unspecified number of byte-sized lanes.
626   //
627   // The first two cases are indistinguishable in VIXL; both are obtained using
628   // something like `v0.B()`. This is fine for NEON because there is no
629   // ambiguity in practice; the "v0.B" form is always used with an index that
630   // makes the meaning clear.
631 
632   VIXL_ASSERT(v6.B().Is(b6));
633   VIXL_ASSERT(v7.H().Is(h7));
634   VIXL_ASSERT(v8.S().Is(s8));
635   VIXL_ASSERT(v9.D().Is(d9));
636 
637   VIXL_ASSERT(z6.B().Is(b6));
638   VIXL_ASSERT(z7.H().Is(h7));
639   VIXL_ASSERT(z8.S().Is(s8));
640   VIXL_ASSERT(z9.D().Is(d9));
641 
642   // We cannot use the same approach for SVE's "z0.B" because, for example,
643   // `Add(VRegister, ...)` and `Add(ZRegister, ...)` generate different
644   // instructions.
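  // A NEON add operates on a fixed 64- or 128-bit vector, whereas an SVE add
  // operates on a vector whose length is only known at run time, so the two
  // register types must remain distinct overloads.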
645 
646   // Test that the variants can be distinguished with `Is`.
647   VIXL_CHECK(!z6.VnB().Is(b6));
648   VIXL_CHECK(!z7.VnH().Is(h7));
649   VIXL_CHECK(!z8.VnS().Is(s8));
650   VIXL_CHECK(!z9.VnD().Is(d9));
651 
652   VIXL_CHECK(!z6.VnB().Is(v6.B()));
653   VIXL_CHECK(!z7.VnH().Is(v7.H()));
654   VIXL_CHECK(!z8.VnS().Is(v8.S()));
655   VIXL_CHECK(!z9.VnD().Is(v9.D()));
656 
657   VIXL_CHECK(!z6.VnB().Is(z6.B()));
658   VIXL_CHECK(!z7.VnH().Is(z7.H()));
659   VIXL_CHECK(!z8.VnS().Is(z8.S()));
660   VIXL_CHECK(!z9.VnD().Is(z9.D()));
661 
662   // Test that the variants can be distinguished at compile-time using
663   // overloading. VIXL's API relies on this.
664   enum Variant { kNEON, kSVE, kUnknown };
665   class Helper {
666    public:
667     static Variant GetVariant(ZRegister) { return kSVE; }
668     static Variant GetVariant(VRegister) { return kNEON; }
669     static Variant GetVariant(CPURegister) { return kUnknown; }
670   };
671   VIXL_CHECK(Helper::GetVariant(z10.VnB()) == kSVE);
672   VIXL_CHECK(Helper::GetVariant(z11.VnH()) == kSVE);
673   VIXL_CHECK(Helper::GetVariant(z12.VnS()) == kSVE);
674   VIXL_CHECK(Helper::GetVariant(z13.VnD()) == kSVE);
675 
676   VIXL_CHECK(Helper::GetVariant(v10.B()) == kNEON);
677   VIXL_CHECK(Helper::GetVariant(v11.H()) == kNEON);
678   VIXL_CHECK(Helper::GetVariant(v12.S()) == kNEON);
679   VIXL_CHECK(Helper::GetVariant(v13.D()) == kNEON);
680 
681   VIXL_CHECK(Helper::GetVariant(v10.V16B()) == kNEON);
682   VIXL_CHECK(Helper::GetVariant(v11.V8H()) == kNEON);
683   VIXL_CHECK(Helper::GetVariant(v12.V4S()) == kNEON);
684   VIXL_CHECK(Helper::GetVariant(v13.V2D()) == kNEON);
685 
686   VIXL_CHECK(Helper::GetVariant(b10) == kNEON);
687   VIXL_CHECK(Helper::GetVariant(h11) == kNEON);
688   VIXL_CHECK(Helper::GetVariant(s12) == kNEON);
689   VIXL_CHECK(Helper::GetVariant(d13) == kNEON);
690 }
691 
692 
693 TEST(move_immediate_helpers) {
694   // Using these helpers to query information (without generating code) should
695   // not crash.
696   MacroAssembler::MoveImmediateHelper(NULL, x0, 0x12345678);
697   MacroAssembler::OneInstrMoveImmediateHelper(NULL, x1, 0xabcdef);
698 }
699 
700 
701 TEST(generic_operand_helpers) {
702   GenericOperand invalid_1;
703   GenericOperand invalid_2;
704   GenericOperand reg(x3);
705   GenericOperand mem(MemOperand(sp, 8), kXRegSizeInBytes);
706 
707   VIXL_CHECK(!invalid_1.IsValid());
708   VIXL_CHECK(!invalid_2.IsValid());
709 
710   VIXL_CHECK(invalid_1.Equals(invalid_1));
711   VIXL_CHECK(invalid_2.Equals(invalid_2));
712   VIXL_CHECK(reg.Equals(reg));
713   VIXL_CHECK(mem.Equals(mem));
714 
715   VIXL_CHECK(invalid_1.Equals(invalid_2));
716   VIXL_CHECK(invalid_2.Equals(invalid_1));
717 
718   VIXL_CHECK(!invalid_1.Equals(reg));
719   VIXL_CHECK(!invalid_1.Equals(mem));
720   VIXL_CHECK(!reg.Equals(invalid_1));
721   VIXL_CHECK(!reg.Equals(invalid_2));
722   VIXL_CHECK(!reg.Equals(mem));
723   VIXL_CHECK(!mem.Equals(invalid_1));
724   VIXL_CHECK(!mem.Equals(reg));
725 }
726 
727 
728 TEST(integer_operand_is) {
729   VIXL_CHECK(IntegerOperand(0).IsZero());
730   VIXL_CHECK(!IntegerOperand(1).IsZero());
731   VIXL_CHECK(!IntegerOperand(-1).IsZero());
732 
733   VIXL_CHECK(!IntegerOperand(-0x81).IsIntN(8));
734   VIXL_CHECK(IntegerOperand(-0x80).IsIntN(8));
735   VIXL_CHECK(IntegerOperand(-1).IsIntN(8));
736   VIXL_CHECK(IntegerOperand(0).IsIntN(8));
737   VIXL_CHECK(IntegerOperand(1).IsIntN(8));
738   VIXL_CHECK(IntegerOperand(0x7f).IsIntN(8));
739   VIXL_CHECK(!IntegerOperand(0x80).IsIntN(8));
740 
741   VIXL_CHECK(!IntegerOperand(-1).IsUintN(8));
742   VIXL_CHECK(IntegerOperand(0).IsUintN(8));
743   VIXL_CHECK(IntegerOperand(1).IsUintN(8));
744   VIXL_CHECK(IntegerOperand(0xff).IsUintN(8));
745   VIXL_CHECK(!IntegerOperand(0x100).IsUintN(8));
746 
747   VIXL_CHECK(IntegerOperand(INT64_MIN).IsIntN(64));
748   VIXL_CHECK(IntegerOperand(0).IsIntN(64));
749   VIXL_CHECK(IntegerOperand(INT64_MAX).IsIntN(64));
750   VIXL_CHECK(!IntegerOperand(0x8000000000000000).IsIntN(64));
751 
752   VIXL_CHECK(!IntegerOperand(-1).IsUintN(64));
753   VIXL_CHECK(IntegerOperand(0).IsUintN(64));
754   VIXL_CHECK(IntegerOperand(UINT64_MAX).IsUintN(64));
755 
756   VIXL_CHECK(!IntegerOperand(-0x801).FitsInBits(12));
757   VIXL_CHECK(IntegerOperand(-0x800).FitsInBits(12));
758   VIXL_CHECK(IntegerOperand(0).FitsInBits(12));
759   VIXL_CHECK(IntegerOperand(0x7ff).FitsInBits(12));
760   VIXL_CHECK(IntegerOperand(0x800).FitsInBits(12));
761   VIXL_CHECK(IntegerOperand(0xfff).FitsInBits(12));
762   VIXL_CHECK(!IntegerOperand(0x1000).FitsInBits(12));
763 
764   VIXL_CHECK(!IntegerOperand(-0x8001).FitsInLane(z0.VnH()));
765   VIXL_CHECK(IntegerOperand(-0x8000).FitsInLane(z0.VnH()));
766   VIXL_CHECK(IntegerOperand(0).FitsInLane(z0.VnH()));
767   VIXL_CHECK(IntegerOperand(0x7fff).FitsInLane(z0.VnH()));
768   VIXL_CHECK(IntegerOperand(0x8000).FitsInLane(z0.VnH()));
769   VIXL_CHECK(IntegerOperand(0xffff).FitsInLane(z0.VnH()));
770   VIXL_CHECK(!IntegerOperand(0x10000).FitsInLane(z0.VnH()));
771 }
772 
773 TEST(integer_operand_as_uint) {
774   // Simple cases.
775   VIXL_CHECK(IntegerOperand(1).AsUintN(8) == 1);
776   VIXL_CHECK(IntegerOperand(1).AsUintN(16) == 1);
777   VIXL_CHECK(IntegerOperand(1).AsUintN(32) == 1);
778   VIXL_CHECK(IntegerOperand(1).AsUintN(64) == 1);
779   VIXL_CHECK(IntegerOperand(-1).AsUintN(8) == 0xff);
780   VIXL_CHECK(IntegerOperand(-1).AsUintN(16) == 0xffff);
781   VIXL_CHECK(IntegerOperand(-1).AsUintN(32) == 0xffffffff);
782   VIXL_CHECK(IntegerOperand(-1).AsUintN(64) == 0xffffffffffffffff);
783   VIXL_CHECK(IntegerOperand(0xf0).AsUintN(8) == 0xf0);
784   VIXL_CHECK(IntegerOperand(0xf420).AsUintN(16) == 0xf420);
785   VIXL_CHECK(IntegerOperand(0xf4242420).AsUintN(32) == 0xf4242420);
786   VIXL_CHECK(IntegerOperand(0xf424242424242420).AsUintN(64) ==
787              0xf424242424242420);
788 
789   // Boundary conditions for known-size types.
790   VIXL_CHECK(IntegerOperand(INT8_MIN).AsUintN(8) == 0x80);
791   VIXL_CHECK(IntegerOperand(INT8_MAX).AsUintN(8) == 0x7f);
792   VIXL_CHECK(IntegerOperand(UINT8_MAX).AsUintN(8) == 0xff);
793 
794   VIXL_CHECK(IntegerOperand(INT16_MIN).AsUintN(16) == 0x8000);
795   VIXL_CHECK(IntegerOperand(INT16_MAX).AsUintN(16) == 0x7fff);
796   VIXL_CHECK(IntegerOperand(UINT16_MAX).AsUintN(16) == 0xffff);
797 
798   VIXL_CHECK(IntegerOperand(INT32_MIN).AsUintN(32) == 0x80000000);
799   VIXL_CHECK(IntegerOperand(INT32_MAX).AsUintN(32) == 0x7fffffff);
800   VIXL_CHECK(IntegerOperand(UINT32_MAX).AsUintN(32) == 0xffffffff);
801 
802   VIXL_CHECK(IntegerOperand(INT64_MIN).AsUintN(64) == 0x8000000000000000);
803   VIXL_CHECK(IntegerOperand(INT64_MAX).AsUintN(64) == 0x7fffffffffffffff);
804   VIXL_CHECK(IntegerOperand(UINT64_MAX).AsUintN(64) == 0xffffffffffffffff);
805 }
806 
807 TEST(integer_operand_as_int) {
808   // Simple cases.
809   VIXL_CHECK(IntegerOperand(1).AsIntN(8) == 1);
810   VIXL_CHECK(IntegerOperand(1).AsIntN(16) == 1);
811   VIXL_CHECK(IntegerOperand(1).AsIntN(32) == 1);
812   VIXL_CHECK(IntegerOperand(1).AsIntN(64) == 1);
813   VIXL_CHECK(IntegerOperand(-1).AsIntN(8) == -1);
814   VIXL_CHECK(IntegerOperand(-1).AsIntN(16) == -1);
815   VIXL_CHECK(IntegerOperand(-1).AsIntN(32) == -1);
816   VIXL_CHECK(IntegerOperand(-1).AsIntN(64) == -1);
817   VIXL_CHECK(IntegerOperand(0x70).AsIntN(8) == 0x70);
818   VIXL_CHECK(IntegerOperand(0x7420).AsIntN(16) == 0x7420);
819   VIXL_CHECK(IntegerOperand(0x74242420).AsIntN(32) == 0x74242420);
820   VIXL_CHECK(IntegerOperand(0x7424242424242420).AsIntN(64) ==
821              0x7424242424242420);
822 
823   // Boundary conditions for known-size types.
824   VIXL_CHECK(IntegerOperand(UINT8_MAX).AsIntN(8) == -1);
825   VIXL_CHECK(IntegerOperand(UINT16_MAX).AsIntN(16) == -1);
826   VIXL_CHECK(IntegerOperand(UINT32_MAX).AsIntN(32) == -1);
827   VIXL_CHECK(IntegerOperand(UINT64_MAX).AsIntN(64) == -1);
828 
829   VIXL_CHECK(IntegerOperand(INT8_MAX).AsIntN(8) == INT8_MAX);
830   VIXL_CHECK(IntegerOperand(INT16_MAX).AsIntN(16) == INT16_MAX);
831   VIXL_CHECK(IntegerOperand(INT32_MAX).AsIntN(32) == INT32_MAX);
832   VIXL_CHECK(IntegerOperand(INT64_MAX).AsIntN(64) == INT64_MAX);
833 
834   VIXL_CHECK(IntegerOperand(0x80).AsIntN(8) == INT8_MIN);
835   VIXL_CHECK(IntegerOperand(0x8000).AsIntN(16) == INT16_MIN);
836   VIXL_CHECK(IntegerOperand(0x80000000).AsIntN(32) == INT32_MIN);
837   VIXL_CHECK(IntegerOperand(0x8000000000000000).AsIntN(64) == INT64_MIN);
838 }
839 
840 template <unsigned N>
841 class IntegerOperandTryEncodeShiftedIntHelper {
842  public:
843   IntegerOperandTryEncodeShiftedIntHelper() {}
844 
845   template <unsigned kShift, typename T>
846   void TestEncodable(T value, const ZRegister& zd, int64_t expected_imm) {
847     VIXL_CHECK(TestImpl<kShift>(value, zd, expected_imm));
848   }
849 
850   template <unsigned kShift, typename T>
851   void TestUnencodable(T value, const ZRegister& zd) {
852     // The `expected_imm` value is ignored, so its value is arbitrary.
853     VIXL_CHECK(!TestImpl<kShift>(value, zd, 0));
854   }
855 
856  private:
857   template <unsigned kShift, typename T>
858   bool TestImpl(T value, const ZRegister& zd, int64_t expected_imm) {
859     IntegerOperand operand(value);
860     int64_t imm = 0xdeadbeef42;
861     unsigned shift = 0xbeef43;
862     bool success =
863         operand.TryEncodeAsShiftedIntNForLane<N, kShift>(zd, &imm, &shift);
864     if (success) {
865       VIXL_CHECK(imm == expected_imm);
866       VIXL_CHECK(shift == kShift);
867     } else {
868       // Check that the outputs were unmodified.
869       VIXL_CHECK(imm == 0xdeadbeef42);
870       VIXL_CHECK(shift == 0xbeef43);
871     }
872 
873     // If kShift is 0, also check TryEncodeAsIntNForLane.
874     if (kShift == 0) {
875       int64_t unshifted_imm = 0xdeadbeef99;
876       bool unshifted_success =
877           operand.TryEncodeAsIntNForLane<N>(zd, &unshifted_imm);
878 
879       VIXL_CHECK(unshifted_success == success);
880       if (unshifted_success) {
881         VIXL_CHECK(unshifted_imm == expected_imm);
882       } else {
883         VIXL_CHECK(unshifted_imm == 0xdeadbeef99);
884       }
885     }
886 
887     return success;
888   }
889 };
890 
891 TEST(integer_operand_encode_as_intn) {
892   IntegerOperandTryEncodeShiftedIntHelper<4> int4_helper;
893   IntegerOperandTryEncodeShiftedIntHelper<8> int8_helper;
894   IntegerOperandTryEncodeShiftedIntHelper<12> int12_helper;
895 
896   // Simple cases, where the value is directly encodable.
897   int4_helper.TestEncodable<0>(-8, z0.VnH(), -8);
898   int4_helper.TestEncodable<0>(-7, z0.VnH(), -7);
899   int4_helper.TestEncodable<0>(-1, z0.VnS(), -1);
900   int4_helper.TestEncodable<0>(0, z0.VnD(), 0);
901   int4_helper.TestEncodable<0>(1, z0.VnB(), 1);
902   int4_helper.TestEncodable<0>(7, z0.VnH(), 7);
903 
904   int8_helper.TestEncodable<0>(0x7f, z0.VnB(), 0x7f);
905   int8_helper.TestEncodable<0>(0x7f, z0.VnH(), 0x7f);
906   int12_helper.TestEncodable<0>(0x7ff, z0.VnH(), 0x7ff);
907 
908   int8_helper.TestEncodable<0>(-0x80, z0.VnB(), -0x80);
909   int8_helper.TestEncodable<0>(-0x80, z0.VnH(), -0x80);
910   int12_helper.TestEncodable<0>(-0x800, z0.VnH(), -0x800);
911 
912   // Cases that are directly encodable with a shift.
913   int8_helper.TestEncodable<4>(-0x800, z0.VnH(), -0x80);
914   int8_helper.TestEncodable<4>(-0x7f0, z0.VnH(), -0x7f);
915   int8_helper.TestEncodable<4>(-0x010, z0.VnH(), -1);
916   int8_helper.TestEncodable<4>(0x000, z0.VnH(), 0);
917   int8_helper.TestEncodable<4>(0x010, z0.VnH(), 1);
918   int8_helper.TestEncodable<4>(0x7f0, z0.VnH(), 0x7f);
919 
920   // Ensure that (positive) bit representations of negative values are treated
921   // as negative values, even though their arithmetic values are unencodable.
922   int12_helper.TestEncodable<0>(0xffd6, z0.VnH(), -42);
923   int12_helper.TestEncodable<0>(0xffffffd6, z0.VnS(), -42);
924   int12_helper.TestEncodable<4>(0xfd60, z0.VnH(), -42);
925   int12_helper.TestEncodable<8>(0xffffd600, z0.VnS(), -42);
926 
927   int8_helper.TestEncodable<0>(UINT8_MAX, z0.VnB(), -1);
928   int8_helper.TestEncodable<0>(UINT16_MAX, z0.VnH(), -1);
929   int8_helper.TestEncodable<0>(UINT32_MAX, z0.VnS(), -1);
930   int8_helper.TestEncodable<0>(UINT64_MAX, z0.VnD(), -1);
931 
932   int4_helper.TestEncodable<1>(UINT8_MAX ^ 0x1, z0.VnB(), -1);
933   int4_helper.TestEncodable<2>(UINT16_MAX ^ 0x3, z0.VnH(), -1);
934   int4_helper.TestEncodable<3>(UINT32_MAX ^ 0x7, z0.VnS(), -1);
935   int4_helper.TestEncodable<4>(UINT64_MAX ^ 0xf, z0.VnD(), -1);
936 
937   // Unencodable cases.
938   int8_helper.TestUnencodable<0>(INT16_MAX, z0.VnH());
939   int8_helper.TestUnencodable<0>(INT32_MAX, z0.VnS());
940   int8_helper.TestUnencodable<0>(INT64_MAX, z0.VnD());
941 
942   int4_helper.TestUnencodable<0>(0x10, z0.VnB());
943   int4_helper.TestUnencodable<1>(0x20, z0.VnB());
944 
945   int12_helper.TestUnencodable<1>(1, z0.VnD());
946   int12_helper.TestUnencodable<12>(1, z0.VnD());
947   int12_helper.TestUnencodable<12>(0x800, z0.VnD());
948 }
949 
950 TEST(static_register_types) {
951   // [WX]Register implicitly casts to Register.
952   XRegister x_x0(0);
953   WRegister w_w0(0);
954   Register r_x0 = x_x0;
955   Register r_w0 = w_w0;
956   VIXL_CHECK(r_x0.Is(x_x0));
957   VIXL_CHECK(x_x0.Is(r_x0));
958   VIXL_CHECK(r_w0.Is(w_w0));
959   VIXL_CHECK(w_w0.Is(r_w0));
960 
961   // Register explicitly casts to [WX]Register.
962   Register r_x1(1, kXRegSize);
963   Register r_w1(1, kWRegSize);
964   XRegister x_x1(r_x1);
965   WRegister w_w1(r_w1);
966   VIXL_CHECK(r_x1.Is(x_x1));
967   VIXL_CHECK(x_x1.Is(r_x1));
968   VIXL_CHECK(r_w1.Is(w_w1));
969   VIXL_CHECK(w_w1.Is(r_w1));
970 
971   // [WX]Register implicitly casts to CPURegister.
972   XRegister x_x2(2);
973   WRegister w_w2(2);
974   CPURegister cpu_x2 = x_x2;
975   CPURegister cpu_w2 = w_w2;
976   VIXL_CHECK(cpu_x2.Is(x_x2));
977   VIXL_CHECK(x_x2.Is(cpu_x2));
978   VIXL_CHECK(cpu_w2.Is(w_w2));
979   VIXL_CHECK(w_w2.Is(cpu_w2));
980 }
981 
982 
983 TEST(operand_is_plain_register) {
984   VIXL_CHECK(Operand(x0).IsPlainRegister());
985   VIXL_CHECK(Operand(x1, LSL, 0).IsPlainRegister());
986   VIXL_CHECK(Operand(x2, LSR, 0).IsPlainRegister());
987   VIXL_CHECK(Operand(x3, ASR, 0).IsPlainRegister());
988   VIXL_CHECK(Operand(x4, ROR, 0).IsPlainRegister());
989   VIXL_CHECK(Operand(x5, UXTX).IsPlainRegister());
990   VIXL_CHECK(Operand(x6, SXTX).IsPlainRegister());
991   VIXL_CHECK(Operand(w7).IsPlainRegister());
992   VIXL_CHECK(Operand(w8, LSL, 0).IsPlainRegister());
993   VIXL_CHECK(Operand(w9, LSR, 0).IsPlainRegister());
994   VIXL_CHECK(Operand(w10, ASR, 0).IsPlainRegister());
995   VIXL_CHECK(Operand(w11, ROR, 0).IsPlainRegister());
996 
997   VIXL_CHECK(!Operand(x0, LSL, 1).IsPlainRegister());
998   VIXL_CHECK(!Operand(x1, LSR, 2).IsPlainRegister());
999   VIXL_CHECK(!Operand(x2, ASR, 3).IsPlainRegister());
1000   VIXL_CHECK(!Operand(x3, ROR, 4).IsPlainRegister());
1001   VIXL_CHECK(!Operand(x5, UXTX, 1).IsPlainRegister());
1002   VIXL_CHECK(!Operand(x6, SXTX, 2).IsPlainRegister());
1003   VIXL_CHECK(!Operand(w7, LSL, 1).IsPlainRegister());
1004   VIXL_CHECK(!Operand(w8, LSR, 2).IsPlainRegister());
1005   VIXL_CHECK(!Operand(w9, ASR, 3).IsPlainRegister());
1006   VIXL_CHECK(!Operand(w10, ROR, 4).IsPlainRegister());
1007   VIXL_CHECK(!Operand(w11, UXTB).IsPlainRegister());
1008   VIXL_CHECK(!Operand(w12, SXTB).IsPlainRegister());
1009   VIXL_CHECK(!Operand(w13, UXTH).IsPlainRegister());
1010   VIXL_CHECK(!Operand(w14, SXTH).IsPlainRegister());
1011   // UXTW and SXTW could be treated as plain registers in 32-bit contexts, but
1012   // the Operand class doesn't know the context so it has to return false.
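  // For example, `Add(w0, w1, Operand(w2, UXTW))` is equivalent to using w2
  // directly, because the source is already only 32 bits wide.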
1013   VIXL_CHECK(!Operand(w15, UXTW).IsPlainRegister());
1014   VIXL_CHECK(!Operand(w16, SXTW).IsPlainRegister());
1015 }
1016 
1017 
1018 TEST(memoperand_is_plain_register) {
1019   VIXL_CHECK(MemOperand(x0).IsPlainRegister());
1020   VIXL_CHECK(MemOperand(sp).IsPlainRegister());
1021   VIXL_CHECK(MemOperand(x1, 0).IsPlainRegister());
1022 
1023   VIXL_CHECK(!MemOperand(x2, xzr).IsPlainRegister());
1024   VIXL_CHECK(!MemOperand(x3, xzr, SXTX).IsPlainRegister());
1025   VIXL_CHECK(!MemOperand(x4, xzr, SXTX, 2).IsPlainRegister());
1026   VIXL_CHECK(!MemOperand(x5, wzr, UXTW).IsPlainRegister());
1027   VIXL_CHECK(!MemOperand(x6, wzr, UXTW, 3).IsPlainRegister());
1028 
1029   VIXL_CHECK(!MemOperand(x7, 0, PostIndex).IsPlainRegister());
1030   VIXL_CHECK(!MemOperand(x8, 0, PreIndex).IsPlainRegister());
1031   VIXL_CHECK(!MemOperand(x9, xzr, PostIndex).IsPlainRegister());
1032 
1033   VIXL_CHECK(!MemOperand(x20, 1).IsPlainRegister());
1034   VIXL_CHECK(!MemOperand(x21, x30).IsPlainRegister());
1035 }
1036 
1037 TEST(memoperand_is_plain_register_or_equivalent) {
1038   VIXL_CHECK(MemOperand(x0).IsEquivalentToPlainRegister());
1039   VIXL_CHECK(MemOperand(sp).IsEquivalentToPlainRegister());
1040   VIXL_CHECK(MemOperand(x1, 0).IsEquivalentToPlainRegister());
1041 
1042   VIXL_CHECK(MemOperand(x2, xzr).IsEquivalentToPlainRegister());
1043   VIXL_CHECK(MemOperand(x3, xzr, SXTX).IsEquivalentToPlainRegister());
1044   VIXL_CHECK(MemOperand(x4, xzr, SXTX, 2).IsEquivalentToPlainRegister());
1045   VIXL_CHECK(MemOperand(x5, wzr, UXTW).IsEquivalentToPlainRegister());
1046   VIXL_CHECK(MemOperand(x6, wzr, UXTW, 3).IsEquivalentToPlainRegister());
1047 
1048   VIXL_CHECK(MemOperand(x7, 0, PostIndex).IsEquivalentToPlainRegister());
1049   VIXL_CHECK(MemOperand(x8, 0, PreIndex).IsEquivalentToPlainRegister());
1050   VIXL_CHECK(MemOperand(x9, xzr, PostIndex).IsEquivalentToPlainRegister());
1051 
1052   VIXL_CHECK(!MemOperand(x20, 1).IsEquivalentToPlainRegister());
1053   VIXL_CHECK(!MemOperand(x21, x30).IsEquivalentToPlainRegister());
1054 }
1055 
1056 TEST(sve_memoperand_is_plain_scalar) {
1057   VIXL_CHECK(SVEMemOperand(x0).IsPlainScalar());
1058   VIXL_CHECK(SVEMemOperand(sp).IsPlainScalar());
1059   VIXL_CHECK(SVEMemOperand(x1, 0).IsPlainScalar());
1060 
1061   VIXL_CHECK(!SVEMemOperand(x2, xzr).IsPlainScalar());
1062   VIXL_CHECK(!SVEMemOperand(x4, xzr, LSL, 2).IsPlainScalar());
1063 
1064   VIXL_CHECK(!SVEMemOperand(x20, 1).IsPlainScalar());
1065   VIXL_CHECK(!SVEMemOperand(x21, x30).IsPlainScalar());
1066 
1067   VIXL_CHECK(!SVEMemOperand(x0, z1.VnD()).IsPlainScalar());
1068   VIXL_CHECK(!SVEMemOperand(x2, z3.VnS(), UXTW).IsPlainScalar());
1069   VIXL_CHECK(!SVEMemOperand(z4.VnD(), 0).IsPlainScalar());
1070 }
1071 
1072 TEST(sve_memoperand_is_scalar_or_equivalent) {
1073   VIXL_CHECK(SVEMemOperand(x0).IsEquivalentToScalar());
1074   VIXL_CHECK(SVEMemOperand(sp).IsEquivalentToScalar());
1075   VIXL_CHECK(SVEMemOperand(x1, 0).IsEquivalentToScalar());
1076 
1077   VIXL_CHECK(SVEMemOperand(x2, xzr).IsEquivalentToScalar());
1078   VIXL_CHECK(SVEMemOperand(x4, xzr, LSL, 2).IsEquivalentToScalar());
1079 
1080   VIXL_CHECK(!SVEMemOperand(x20, 1).IsEquivalentToScalar());
1081   VIXL_CHECK(!SVEMemOperand(x21, x30).IsEquivalentToScalar());
1082 
1083   VIXL_CHECK(!SVEMemOperand(x0, z1.VnD()).IsEquivalentToScalar());
1084   VIXL_CHECK(!SVEMemOperand(x2, z3.VnD(), SXTW).IsEquivalentToScalar());
1085   VIXL_CHECK(!SVEMemOperand(z4.VnD(), 0).IsEquivalentToScalar());
1086 }
1087 
1088 TEST(sve_memoperand_types) {
1089   VIXL_CHECK(SVEMemOperand(x0, 42).IsScalarPlusImmediate());
1090   VIXL_CHECK(SVEMemOperand(x1, 42, SVE_MUL_VL).IsScalarPlusImmediate());
1091   VIXL_CHECK(SVEMemOperand(x2, -42, SVE_MUL_VL).IsScalarPlusImmediate());
1092 
1093   VIXL_CHECK(SVEMemOperand(sp, x3).IsScalarPlusScalar());
1094   VIXL_CHECK(SVEMemOperand(x4, xzr).IsScalarPlusScalar());
1095   VIXL_CHECK(SVEMemOperand(x5, x6, LSL, 1).IsScalarPlusScalar());
1096 
1097   VIXL_CHECK(SVEMemOperand(x7, z0.VnD()).IsScalarPlusVector());
1098   VIXL_CHECK(SVEMemOperand(x8, z1.VnS(), SXTW).IsScalarPlusVector());
1099   VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScalarPlusVector());
1100   VIXL_CHECK(SVEMemOperand(x10, z3.VnD(), LSL, 2).IsScalarPlusVector());
1101 
1102   VIXL_CHECK(SVEMemOperand(z4.VnD(), 42).IsVectorPlusImmediate());
1103   VIXL_CHECK(SVEMemOperand(z5.VnS(), -42).IsVectorPlusImmediate());
1104 }
1105 
1106 TEST(sve_memoperand_scatter_gather) {
1107   // Single-address accesses.
1108   VIXL_CHECK(!SVEMemOperand(x0, 42).IsScatterGather());
1109   VIXL_CHECK(!SVEMemOperand(x1, 42, SVE_MUL_VL).IsScatterGather());
1110   VIXL_CHECK(!SVEMemOperand(x2, -42, SVE_MUL_VL).IsScatterGather());
1111 
1112   VIXL_CHECK(!SVEMemOperand(sp, x3).IsScatterGather());
1113   VIXL_CHECK(!SVEMemOperand(x4, xzr).IsScatterGather());
1114   VIXL_CHECK(!SVEMemOperand(x5, x6, LSL, 1).IsScatterGather());
1115 
1116   // Scatter-gather accesses.
1117   VIXL_CHECK(SVEMemOperand(x7, z0.VnD()).IsScatterGather());
1118   VIXL_CHECK(SVEMemOperand(x8, z1.VnS(), SXTW).IsScatterGather());
1119   VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScatterGather());
1120   VIXL_CHECK(SVEMemOperand(x10, z3.VnD(), LSL, 2).IsScatterGather());
1121 
1122   VIXL_CHECK(SVEMemOperand(z4.VnD(), 42).IsScatterGather());
1123   VIXL_CHECK(SVEMemOperand(z5.VnS(), -42).IsScatterGather());
1124 }
1125 
1126 TEST(scratch_scope_basic) {
1127   MacroAssembler masm;
1128   // x16 and x17 are available as scratch registers by default.
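  // (They are the AAPCS64 intra-procedure-call temporaries, IP0 and IP1, so
  // the MacroAssembler is free to clobber them.)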
1129   {
1130     UseScratchRegisterScope temps(&masm);
1131     Register temp1 = temps.AcquireW();
1132     Register temp2 = temps.AcquireX();
1133     VIXL_CHECK(temp1.Is(w16));
1134     VIXL_CHECK(temp2.Is(x17));
1135   }
1136   {
1137     UseScratchRegisterScope temps(&masm);
1138     Register temp1 = temps.AcquireRegisterOfSize(kXRegSize);
1139     Register temp2 = temps.AcquireRegisterOfSize(kWRegSize);
1140     VIXL_CHECK(temp1.Is(x16));
1141     VIXL_CHECK(temp2.Is(w17));
1142   }
1143 }
1144 
1145 TEST(scratch_scope_basic_v) {
1146   MacroAssembler masm;
1147   // v31 is the only V scratch register available by default.
1148   {
1149     UseScratchRegisterScope temps(&masm);
1150     VRegister temp = temps.AcquireH();
1151     VIXL_CHECK(temp.Is(h31));
1152   }
1153   {
1154     UseScratchRegisterScope temps(&masm);
1155     VRegister temp = temps.AcquireS();
1156     VIXL_CHECK(temp.Is(s31));
1157   }
1158   {
1159     UseScratchRegisterScope temps(&masm);
1160     VRegister temp = temps.AcquireD();
1161     VIXL_CHECK(temp.Is(d31));
1162   }
1163   {
1164     UseScratchRegisterScope temps(&masm);
1165     VRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1166     VIXL_CHECK(temp.Is(q31));
1167   }
1168   {
1169     UseScratchRegisterScope temps(&masm);
1170     VRegister temp = temps.AcquireVRegisterOfSize(kDRegSize);
1171     VIXL_CHECK(temp.Is(d31));
1172   }
1173   {
1174     UseScratchRegisterScope temps(&masm);
1175     VRegister temp = temps.AcquireVRegisterOfSize(kSRegSize);
1176     VIXL_CHECK(temp.Is(s31));
1177   }
1178 }
1179 
1180 TEST(scratch_scope_basic_z) {
1181   MacroAssembler masm;
1182   // z31 is the only Z scratch register available by default.
1183   {
1184     UseScratchRegisterScope temps(&masm);
1185     VIXL_CHECK(temps.IsAvailable(v31));
1186     VIXL_CHECK(temps.IsAvailable(z31));
1187     ZRegister temp = temps.AcquireZ();
1188     VIXL_CHECK(temp.Is(z31));
1189     // Check that allocating a Z register properly reserves the corresponding V
1190     // register.
1191     VIXL_CHECK(!temps.IsAvailable(v31));
1192     VIXL_CHECK(!temps.IsAvailable(z31));
1193   }
1194   // Check that the destructor restored the acquired register.
1195   UseScratchRegisterScope temps(&masm);
1196   VIXL_CHECK(temps.IsAvailable(v31));
1197   VIXL_CHECK(temps.IsAvailable(z31));
1198 }
1199 
1200 TEST(scratch_scope_basic_p) {
1201   MacroAssembler masm;
1202   {
1203     UseScratchRegisterScope temps(&masm);
1204     // There are no P scratch registers available by default.
1205     VIXL_CHECK(masm.GetScratchPRegisterList()->IsEmpty());
1206     temps.Include(p0, p1);
1207     VIXL_CHECK(temps.IsAvailable(p0));
1208     VIXL_CHECK(temps.IsAvailable(p1));
1209     temps.Include(p7, p8, p15);
1210     VIXL_CHECK(temps.IsAvailable(p7));
1211     VIXL_CHECK(temps.IsAvailable(p8));
1212     VIXL_CHECK(temps.IsAvailable(p15));
1213 
1214     // AcquireGoverningP() can only return p0-p7.
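    // (Most predicated SVE instructions encode their governing predicate in a
    // three-bit field, so only p0-p7 can act as governing predicates.)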
1215     VIXL_CHECK(temps.AcquireGoverningP().GetCode() <
1216                kNumberOfGoverningPRegisters);
1217     VIXL_CHECK(temps.AcquireGoverningP().GetCode() <
1218                kNumberOfGoverningPRegisters);
1219     VIXL_CHECK(temps.IsAvailable(p8));
1220     VIXL_CHECK(temps.IsAvailable(p15));
1221 
1222     // AcquireP() prefers p8-p15, ...
1223     VIXL_CHECK(temps.AcquireP().GetCode() >= kNumberOfGoverningPRegisters);
1224     VIXL_CHECK(temps.AcquireP().GetCode() >= kNumberOfGoverningPRegisters);
1225     // ... but will return p0-p7 if none of p8-p15 are available.
1226     VIXL_CHECK(temps.AcquireP().GetCode() < kNumberOfGoverningPRegisters);
1227 
1228     VIXL_CHECK(masm.GetScratchPRegisterList()->IsEmpty());
1229 
1230     // Leave some registers available so we can test the destructor.
1231     temps.Include(p3, p6, p9, p12);
1232     VIXL_CHECK(!masm.GetScratchPRegisterList()->IsEmpty());
1233   }
1234   // Check that the destructor correctly cleared the list.
1235   VIXL_CHECK(masm.GetScratchPRegisterList()->IsEmpty());
1236 }
1237 
1238 TEST(scratch_scope_include_ignored) {
1239   MacroAssembler masm;
1240   {
1241     UseScratchRegisterScope temps(&masm);
1242     // Start with an empty set of scratch registers.
1243     temps.ExcludeAll();
1244 
1245     // Including NoReg has no effect.
1246     temps.Include(NoReg);
1247     temps.Include(NoCPUReg);
1248     temps.Include(CPURegList(CPURegister::kNoRegister, 0, 0));
1249 
1250     // Including sp or zr has no effect, since they are never appropriate
1251     // scratch registers.
1252     temps.Include(sp);
1253     temps.Include(xzr, wsp);
1254     temps.Include(wzr);
1255     temps.Include(CPURegList(xzr, sp));
1256 
1257     VIXL_CHECK(masm.GetScratchRegisterList()->IsEmpty());
1258     VIXL_CHECK(masm.GetScratchVRegisterList()->IsEmpty());
1259   }
1260 }
1261 
1262 class ScratchScopeHelper {
1263  public:
1264   enum Action { kRelease, kInclude, kExclude };
1265 
1266   ScratchScopeHelper(MacroAssembler* masm,
1267                      Action action,
1268                      CPURegister::RegisterType type)
1269       : masm_(masm),
1270         action_(action),
1271         type_(type),
1272         expected_(GetGuardListFor(CPURegister::kRegister)),
1273         expected_v_(GetGuardListFor(CPURegister::kVRegister)),
1274         expected_p_(GetGuardListFor(CPURegister::kPRegister)) {
1275     *GetExpectedFor(type) = GetInitialList();
1276     masm->GetScratchRegisterList()->SetList(expected_);
1277     masm->GetScratchVRegisterList()->SetList(expected_v_);
1278     masm->GetScratchPRegisterList()->SetList(expected_p_);
1279   }
1280 
1281   // Notify the helper that the registers in `update` have been passed into
1282   // DoAction(), and assert that the MacroAssembler's scratch lists are as
1283   // expected.
1284   void RecordActionsAndCheck(RegList update) {
1285     RegList* expected = GetExpectedFor(type_);
1286     switch (action_) {
1287       case kRelease:
1288         // It isn't valid to release a register that is already available.
1289         VIXL_CHECK((*expected & update) == 0);
1290         VIXL_FALLTHROUGH();
1291       case kInclude:
1292         *expected |= update;
1293         break;
1294       case kExclude:
1295         *expected &= ~update;
1296         break;
1297     }
1298     VIXL_CHECK(masm_->GetScratchRegisterList()->GetList() == expected_);
1299     VIXL_CHECK(masm_->GetScratchVRegisterList()->GetList() == expected_v_);
1300     VIXL_CHECK(masm_->GetScratchPRegisterList()->GetList() == expected_p_);
1301   }
1302 
1303  private:
1304   RegList GetInitialList() {
1305     switch (action_) {
1306       case kRelease:
1307       case kInclude:
1308         return 0;
1309       case kExclude:
1310         return GetPotentialListFor(type_);
1311     }
1312     VIXL_UNREACHABLE();
1313     return 0;
1314   }
1315 
1316   // Return some valid, non-zero RegList suitable for use as a guard value.
1317   static RegList GetGuardListFor(CPURegister::RegisterType type) {
1318     return (0x1111111111111111 * (type + 1)) & GetPotentialListFor(type);
1319   }
1320 
1321   static RegList GetPotentialListFor(CPURegister::RegisterType type) {
1322     RegList list = CPURegList::All(type).GetList();
1323     // The zr and sp registers cannot be scratch registers.
1324     if (type == CPURegister::kRegister) list &= ~(xzr.GetBit() | sp.GetBit());
1325     return list;
1326   }
1327 
GetExpectedFor(CPURegister::RegisterType type)1328   RegList* GetExpectedFor(CPURegister::RegisterType type) {
1329     switch (type) {
1330       case CPURegister::kNoRegister:
1331         VIXL_UNREACHABLE();
1332         return NULL;
1333       case CPURegister::kRegister:
1334         return &expected_;
1335       case CPURegister::kVRegister:
1336       case CPURegister::kZRegister:
1337         return &expected_v_;
1338       case CPURegister::kPRegister:
1339         return &expected_p_;
1340     }
1341     VIXL_UNREACHABLE();
1342     return NULL;
1343   }
1344 
1345   MacroAssembler* masm_;
1346   Action action_;
1347   CPURegister::RegisterType type_;
1348 
1349   RegList expected_;
1350   RegList expected_v_;
1351   RegList expected_p_;
1352 };
1353 
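// The tests below drive the helper through each action (Include, Exclude and
// Release) for each register bank in turn: general-purpose (X/W), NEON/FP (V),
// SVE vector (Z) and SVE predicate (P) registers.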
TEST(scratch_scope_include) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kInclude,
                              CPURegister::kRegister);

    // Any suitable register type deriving from CPURegister can be included.
    temps.Include(w0);
    temps.Include(x1);
    temps.Include(WRegister(2));
    temps.Include(XRegister(3));
    temps.Include(Register(w4));
    temps.Include(Register(x5));
    temps.Include(CPURegister(w6));
    temps.Include(CPURegister(x7));
    helper.RecordActionsAndCheck(0xff);
    // Multiple registers can be included at once.
    temps.Include(x8, w9, x10);
    temps.Include(Register(w12), Register(x13), Register(w14));
    temps.Include(XRegister(16), XRegister(17), XRegister(18));
    temps.Include(WRegister(20), WRegister(21), WRegister(22));
    temps.Include(CPURegList(w24, w25, w26));
    helper.RecordActionsAndCheck(0x7777700);
    // Including a register again has no effect.
    temps.Include(Register(w26));
    temps.Include(Register(x25));
    temps.Include(CPURegister(x24));
    temps.Include(CPURegister(x22));
    temps.Include(x21, x20, w18, x17);
    temps.Include(CPURegList(x16, x14, x13, x12));
    helper.RecordActionsAndCheck(0x7777700);
  }
}

TEST(scratch_scope_exclude) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kExclude,
                              CPURegister::kRegister);

    // Any suitable register type deriving from CPURegister can be excluded.
    temps.Exclude(w0);
    temps.Exclude(x1);
    temps.Exclude(WRegister(2));
    temps.Exclude(XRegister(3));
    temps.Exclude(Register(w4));
    temps.Exclude(Register(x5));
    temps.Exclude(CPURegister(w6));
    temps.Exclude(CPURegister(x7));
    helper.RecordActionsAndCheck(0xff);
    // Multiple registers can be excluded at once.
    temps.Exclude(x8, w9, x10);
    temps.Exclude(Register(w12), Register(x13), Register(w14));
    temps.Exclude(XRegister(16), XRegister(17), XRegister(18));
    temps.Exclude(WRegister(20), WRegister(21), WRegister(22));
    temps.Exclude(CPURegList(w24, w25, w26));
    helper.RecordActionsAndCheck(0x7777700);
    // Excluding a register again has no effect.
    temps.Exclude(Register(w26));
    temps.Exclude(Register(x25));
    temps.Exclude(CPURegister(x24));
    temps.Exclude(CPURegister(x22));
    temps.Exclude(x21, x20, w18, x17);
    temps.Exclude(CPURegList(x16, x14, x13, x12));
    helper.RecordActionsAndCheck(0x7777700);
  }
}

TEST(scratch_scope_release) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kRelease,
                              CPURegister::kRegister);

    // Any suitable register type deriving from CPURegister can be released.
    temps.Release(w0);
    temps.Release(x1);
    temps.Release(WRegister(2));
    temps.Release(XRegister(3));
    temps.Release(Register(w4));
    temps.Release(Register(x5));
    temps.Release(CPURegister(w6));
    temps.Release(CPURegister(x7));
    helper.RecordActionsAndCheck(0xff);
    // It is not possible to release more than one register at a time, and it
    // is invalid to release a register that is already available.
  }
}

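// A minimal usage sketch, not part of the original suite: it relies only on
// the API exercised above, plus the documented behaviour that a
// UseScratchRegisterScope restores the scratch lists when it goes out of
// scope. Typical client code excludes registers that it manages by hand, here
// x16 and x17 (ip0/ip1), for the duration of the scope.
TEST(scratch_scope_exclude_restore_sketch) {
  MacroAssembler masm;
  RegList before = masm.GetScratchRegisterList()->GetList();
  {
    UseScratchRegisterScope temps(&masm);
    // Reserve x16/x17 so the MacroAssembler cannot use them as scratch
    // registers in this region.
    temps.Exclude(x16, x17);
    VIXL_CHECK((masm.GetScratchRegisterList()->GetList() &
                (x16.GetBit() | x17.GetBit())) == 0);
  }
  // Destroying the scope restores the original scratch list.
  VIXL_CHECK(masm.GetScratchRegisterList()->GetList() == before);
}
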
TEST(scratch_scope_include_v) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kInclude,
                              CPURegister::kVRegister);

    // Any suitable register type deriving from CPURegister can be included.
    temps.Include(b0);
    temps.Include(h1);
    temps.Include(SRegister(2));
    temps.Include(DRegister(3));
    temps.Include(VRegister(q4));
    temps.Include(VRegister(v5.V8B()));
    temps.Include(CPURegister(d6));
    temps.Include(CPURegister(v7.S4B()));
    helper.RecordActionsAndCheck(0xff);
    // Multiple registers can be included at once.
    temps.Include(b8, h9, s10);
    temps.Include(VRegister(d12), VRegister(d13), VRegister(d14));
    temps.Include(QRegister(16), QRegister(17), QRegister(18));
    temps.Include(BRegister(20), BRegister(21), BRegister(22));
    temps.Include(CPURegList(s24, s25, s26));
    helper.RecordActionsAndCheck(0x7777700);
    // Including a register again has no effect.
    temps.Include(VRegister(b26));
    temps.Include(VRegister(h25));
    temps.Include(CPURegister(s24));
    temps.Include(CPURegister(v22.V4H()));
    temps.Include(q21, d20, s18, h17);
    temps.Include(CPURegList(h16, h14, h13, h12));
    helper.RecordActionsAndCheck(0x7777700);
  }
}

TEST(scratch_scope_exclude_v) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kExclude,
                              CPURegister::kVRegister);

    // Any suitable register type deriving from CPURegister can be excluded.
    temps.Exclude(b0);
    temps.Exclude(h1);
    temps.Exclude(SRegister(2));
    temps.Exclude(DRegister(3));
    temps.Exclude(VRegister(q4));
    temps.Exclude(VRegister(v5.V8B()));
    temps.Exclude(CPURegister(d6));
    temps.Exclude(CPURegister(v7.S4B()));
    helper.RecordActionsAndCheck(0xff);
    // Multiple registers can be excluded at once.
    temps.Exclude(b8, h9, s10);
    temps.Exclude(VRegister(d12), VRegister(d13), VRegister(d14));
    temps.Exclude(QRegister(16), QRegister(17), QRegister(18));
    temps.Exclude(BRegister(20), BRegister(21), BRegister(22));
    temps.Exclude(CPURegList(s24, s25, s26));
    helper.RecordActionsAndCheck(0x7777700);
    // Excluding a register again has no effect.
    temps.Exclude(VRegister(b26));
    temps.Exclude(VRegister(h25));
    temps.Exclude(CPURegister(s24));
    temps.Exclude(CPURegister(v22.V4H()));
    temps.Exclude(q21, d20, s18, h17);
    temps.Exclude(CPURegList(h16, h14, h13, h12));
    helper.RecordActionsAndCheck(0x7777700);
  }
}

TEST(scratch_scope_release_v) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kRelease,
                              CPURegister::kVRegister);

    // Any suitable register type deriving from CPURegister can be released.
    temps.Release(b0);
    temps.Release(h1);
    temps.Release(SRegister(2));
    temps.Release(DRegister(3));
    temps.Release(VRegister(q4));
    temps.Release(VRegister(v5.V8B()));
    temps.Release(CPURegister(d6));
    temps.Release(CPURegister(v7.S4B()));
    helper.RecordActionsAndCheck(0xff);
    // It is not possible to release more than one register at a time, and it
    // is invalid to release a register that is already available.
  }
}

TEST(scratch_scope_include_z) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kInclude,
                              CPURegister::kZRegister);

    // Any suitable register type deriving from CPURegister can be included.
    temps.Include(z0);
    temps.Include(z1.VnB());
    temps.Include(ZRegister(2));
    temps.Include(ZRegister(3, kFormatVnD));
    temps.Include(CPURegister(z4));
    temps.Include(CPURegister(z5.VnH()));
    helper.RecordActionsAndCheck(0x3f);
    // Multiple registers can be included at once.
    temps.Include(z8, z9, z10.VnS());
    temps.Include(ZRegister(12), ZRegister(13, kHRegSize), z14);
    temps.Include(CPURegList(z16, z17, z18));
    helper.RecordActionsAndCheck(0x77700);
    // Including a register again has no effect.
    temps.Include(ZRegister(18));
    temps.Include(ZRegister(17, kFormatVnB));
    temps.Include(CPURegister(z16));
    temps.Include(CPURegister(z13.VnD()));
    temps.Include(z12, z10, z9.VnB(), z8);
    temps.Include(CPURegList(z5, z4, z3, z2));
    helper.RecordActionsAndCheck(0x77700);
  }
}

TEST(scratch_scope_exclude_z) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kExclude,
                              CPURegister::kZRegister);

    // Any suitable register type deriving from CPURegister can be excluded.
    temps.Exclude(z0);
    temps.Exclude(z1.VnB());
    temps.Exclude(ZRegister(2));
    temps.Exclude(ZRegister(3, kFormatVnD));
    temps.Exclude(CPURegister(z4));
    temps.Exclude(CPURegister(z5.VnH()));
    helper.RecordActionsAndCheck(0x3f);
    // Multiple registers can be excluded at once.
    temps.Exclude(z8, z9, z10.VnS());
    temps.Exclude(ZRegister(12), ZRegister(13, kHRegSize), z14);
    temps.Exclude(CPURegList(z16, z17, z18));
    helper.RecordActionsAndCheck(0x77700);
    // Excluding a register again has no effect.
    temps.Exclude(ZRegister(18));
    temps.Exclude(ZRegister(17, kFormatVnB));
    temps.Exclude(CPURegister(z16));
    temps.Exclude(CPURegister(z13.VnD()));
    temps.Exclude(z12, z10, z9.VnB(), z8);
    temps.Exclude(CPURegList(z5, z4, z3, z2));
    helper.RecordActionsAndCheck(0x77700);
  }
}

TEST(scratch_scope_release_z) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kRelease,
                              CPURegister::kZRegister);

    // Any suitable register type deriving from CPURegister can be released.
    temps.Release(z0);
    temps.Release(z1.VnB());
    temps.Release(ZRegister(2));
    temps.Release(ZRegister(3, kFormatVnD));
    temps.Release(CPURegister(z4));
    temps.Release(CPURegister(z5.VnH()));
    helper.RecordActionsAndCheck(0x3f);
    // It is not possible to release more than one register at a time, and it
    // is invalid to release a register that is already available.
  }
}

TEST(scratch_scope_include_p) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kInclude,
                              CPURegister::kPRegister);

    // Any suitable register type deriving from CPURegister can be included.
    temps.Include(p0);
    temps.Include(PRegister(1));
    temps.Include(PRegisterWithLaneSize(2, kFormatVnD));
    temps.Include(PRegisterM(3));
    temps.Include(CPURegister(PRegister(4)));
    temps.Include(CPURegister(PRegisterZ(5)));
    helper.RecordActionsAndCheck(0x3f);
    // Multiple registers can be included at once.
    temps.Include(p7, p8.Merging(), p9.VnS());
    temps.Include(PRegister(11), PRegisterWithLaneSize(12, kHRegSize));
    temps.Include(CPURegList(p15));
    helper.RecordActionsAndCheck(0x9b80);
    // Including a register again has no effect.
    temps.Include(PRegister(15));
    temps.Include(PRegisterWithLaneSize(12, kFormatVnB));
    temps.Include(CPURegister(p11));
    temps.Include(CPURegister(p9.VnD()));
    temps.Include(p8.Merging(), p7.Zeroing(), p5.VnB(), p4);
    temps.Include(CPURegList(p3, p2, p1, p0));
    helper.RecordActionsAndCheck(0x9b80);
  }
}

TEST(scratch_scope_exclude_p) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kExclude,
                              CPURegister::kPRegister);

    // Any suitable register type deriving from CPURegister can be excluded.
    temps.Exclude(p0);
    temps.Exclude(PRegister(1));
    temps.Exclude(PRegisterWithLaneSize(2, kFormatVnD));
    temps.Exclude(PRegisterM(3));
    temps.Exclude(CPURegister(PRegister(4)));
    temps.Exclude(CPURegister(PRegisterZ(5)));
    helper.RecordActionsAndCheck(0x3f);
    // Multiple registers can be excluded at once.
    temps.Exclude(p7, p8.Merging(), p9.VnS());
    temps.Exclude(PRegister(11), PRegisterWithLaneSize(12, kHRegSize));
    temps.Exclude(CPURegList(p15));
    helper.RecordActionsAndCheck(0x9b80);
    // Excluding a register again has no effect.
    temps.Exclude(PRegister(15));
    temps.Exclude(PRegisterWithLaneSize(12, kFormatVnB));
    temps.Exclude(CPURegister(p11));
    temps.Exclude(CPURegister(p9.VnD()));
    temps.Exclude(p8.Merging(), p7.Zeroing(), p5.VnB(), p4);
    temps.Exclude(CPURegList(p3, p2, p1, p0));
    helper.RecordActionsAndCheck(0x9b80);
  }
}

TEST(scratch_scope_release_p) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kRelease,
                              CPURegister::kPRegister);

    // Any suitable register type deriving from CPURegister can be released.
    temps.Release(p0);
    temps.Release(PRegister(1));
    temps.Release(PRegisterWithLaneSize(2, kFormatVnD));
    temps.Release(PRegisterM(3));
    temps.Release(CPURegister(PRegister(4)));
    temps.Release(CPURegister(PRegisterZ(5)));
    helper.RecordActionsAndCheck(0x3f);
    // It is not possible to release more than one register at a time, and it
    // is invalid to release a register that is already available.
  }
}

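// The remaining tests exercise simulator-only infrastructure (the simulated
// stack and the GCS manager), so they are compiled only when the simulator is
// included in the build.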
#ifdef VIXL_INCLUDE_SIMULATOR_AARCH64
TEST(sim_stack_default) {
  SimStack::Allocated s = SimStack().Allocate();

  // The default stack is at least 16-byte aligned.
  VIXL_CHECK(IsAligned<16>(s.GetBase()));
  VIXL_CHECK(IsAligned<16>(s.GetLimit() + 1));

  VIXL_CHECK(s.GetBase() > s.GetLimit());

  // The default guard regions are sufficient to detect at least off-by-one
  // errors.
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetBase(), 1));
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetBase() - 1, 1));
  // The limit is one below the lowest address on the stack.
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit(), 1));
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 1));

  // We need to be able to access 16-byte granules at both extremes.
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetBase() - 16, 16));
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 16));
}

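// Check a custom stack configuration: an explicit alignment, an explicit
// usable size and asymmetric guard sizes, with precise checks on the guard
// region boundaries.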
TEST(sim_stack) {
  SimStack builder;
  builder.AlignToBytesLog2(WhichPowerOf2(1024));
  builder.SetBaseGuardSize(42);
  builder.SetLimitGuardSize(2049);
  builder.SetUsableSize(2048);
  SimStack::Allocated s = builder.Allocate();

  VIXL_CHECK(IsAligned<1024>(s.GetBase()));
  VIXL_CHECK(IsAligned<1024>(s.GetLimit() + 1));

  // The stack is accessible for (limit, base), both exclusive.
  // This is checked precisely, using the base and limit modified to respect
  // alignment, so we can test the exact boundary condition.
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetBase(), 1));
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetBase() - 1, 1));
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit(), 1));
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 1));
  VIXL_CHECK((s.GetBase() - s.GetLimit() - 1) == 2048);

  // We can access the whole range (limit, base), both exclusive.
  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 2048));
  // Off-by-one.
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit(), 2048));
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit() + 1, 2049));
  // Accesses spanning whole guard regions.
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetBase() - 42, 4096));
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit() - 1280, 2048));
  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit() - 1280, 10000));
}

void AllocateAndFreeGCS() {
  Decoder d;
  Simulator s(&d);

  for (int i = 0; i < 100000; i++) {
    uint64_t gcs = s.GetGCSManager().AllocateStack();
    s.GetGCSManager().FreeStack(gcs);
  }
}

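// Allocate and free GCS (Guarded Control Stack) storage from two threads at
// once, to check that concurrent use of the simulator's GCS manager is safe.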
TEST(sim_gcs_manager) {
  std::thread t1(AllocateAndFreeGCS);
  std::thread t2(AllocateAndFreeGCS);

  t1.join();
  t2.join();
}
#endif

}  // namespace aarch64
}  // namespace vixl