// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <cstdio>
#include <cstring>
#include <string>

#include "test-runner.h"
#include "test-utils.h"
#include "aarch64/test-utils-aarch64.h"

#include "aarch64/macro-assembler-aarch64.h"

#define __ masm.
#define TEST(name) TEST_(AARCH64_API_##name)


namespace vixl {
namespace aarch64 {

// Check SimFloat16 class mechanics.
TEST(float16_operators) {
  ::vixl::internal::SimFloat16 f1 = kFP16DefaultNaN;
  ::vixl::internal::SimFloat16 f2 = kFP16DefaultNaN;
  ::vixl::internal::SimFloat16 f3 = kFP16PositiveInfinity;
  ::vixl::internal::SimFloat16 f4 = kFP16NegativeInfinity;
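  // Per IEEE 754, NaNs never compare equal (even to themselves), and opposite
  // infinities are distinct values.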
  VIXL_CHECK(!(f1 == f2));
  VIXL_CHECK(f1 != f2);
  VIXL_CHECK(!(f3 == f4));
  VIXL_CHECK(f3 != f4);
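  // Positive and negative zero compare equal, again as IEEE 754 requires.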
  VIXL_CHECK(::vixl::internal::SimFloat16(kFP16PositiveZero) ==
             ::vixl::internal::SimFloat16(kFP16NegativeZero));
  VIXL_CHECK(!(::vixl::internal::SimFloat16(kFP16PositiveZero) !=
               ::vixl::internal::SimFloat16(kFP16NegativeZero)));
}

// Check moved FP constants are still accessible via the AArch64 namespace.
TEST(float_constants_scope) {
  VIXL_CHECK(vixl::aarch64::kFP64PositiveInfinity ==
             vixl::kFP64PositiveInfinity);
  VIXL_CHECK(vixl::aarch64::kFP64NegativeInfinity ==
             vixl::kFP64NegativeInfinity);
  VIXL_CHECK(vixl::aarch64::kFP32PositiveInfinity ==
             vixl::kFP32PositiveInfinity);
  VIXL_CHECK(vixl::aarch64::kFP32NegativeInfinity ==
             vixl::kFP32NegativeInfinity);
  VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16PositiveInfinity) ==
             Float16ToRawbits(vixl::kFP16PositiveInfinity));
  VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16NegativeInfinity) ==
             Float16ToRawbits(vixl::kFP16NegativeInfinity));
  VIXL_CHECK(DoubleToRawbits(vixl::aarch64::kFP64DefaultNaN) ==
             DoubleToRawbits(vixl::kFP64DefaultNaN));
  VIXL_CHECK(FloatToRawbits(vixl::aarch64::kFP32DefaultNaN) ==
             FloatToRawbits(vixl::kFP32DefaultNaN));
  VIXL_CHECK(IsNaN(vixl::aarch64::kFP16DefaultNaN) ==
             IsNaN(vixl::kFP16DefaultNaN));
  VIXL_CHECK(vixl::aarch64::kDoubleExponentBits == vixl::kDoubleExponentBits);
  VIXL_CHECK(vixl::aarch64::kDoubleMantissaBits == vixl::kDoubleMantissaBits);
  VIXL_CHECK(vixl::aarch64::kFloatExponentBits == vixl::kFloatExponentBits);
  VIXL_CHECK(vixl::aarch64::kFloatMantissaBits == vixl::kFloatMantissaBits);
  VIXL_CHECK(vixl::aarch64::kFloat16ExponentBits == vixl::kFloat16ExponentBits);
  VIXL_CHECK(vixl::aarch64::kFloat16MantissaBits == vixl::kFloat16MantissaBits);
}


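// Each register's GetBit() is a unique single-bit mask (1 << code), suitable
// for building register-list bitmasks.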
TEST(register_bit) {
  VIXL_CHECK(x0.GetBit() == (UINT64_C(1) << 0));
  VIXL_CHECK(x1.GetBit() == (UINT64_C(1) << 1));
  VIXL_CHECK(x10.GetBit() == (UINT64_C(1) << 10));

  // AAPCS64 definitions.
  VIXL_CHECK(lr.GetBit() == (UINT64_C(1) << kLinkRegCode));

  // Fixed (hardware) definitions.
  VIXL_CHECK(xzr.GetBit() == (UINT64_C(1) << kZeroRegCode));

  // Internal ABI definitions.
  VIXL_CHECK(sp.GetBit() == (UINT64_C(1) << kSPRegInternalCode));
  VIXL_CHECK(sp.GetBit() != xzr.GetBit());

  // xn.GetBit() == wn.GetBit() at all times, for the same n.
  VIXL_CHECK(x0.GetBit() == w0.GetBit());
  VIXL_CHECK(x1.GetBit() == w1.GetBit());
  VIXL_CHECK(x10.GetBit() == w10.GetBit());
  VIXL_CHECK(xzr.GetBit() == wzr.GetBit());
  VIXL_CHECK(sp.GetBit() == wsp.GetBit());
}


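// NoReg, NoFPReg, NoVReg and NoCPUReg all denote the same "none" register, so
// they compare equal to each other regardless of their static type.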
TEST(noreg) {
  VIXL_CHECK(NoReg.Is(NoFPReg));
  VIXL_CHECK(NoFPReg.Is(NoReg));

  VIXL_CHECK(NoVReg.Is(NoReg));
  VIXL_CHECK(NoReg.Is(NoVReg));

  VIXL_CHECK(NoReg.Is(NoCPUReg));
  VIXL_CHECK(NoCPUReg.Is(NoReg));

  VIXL_CHECK(NoFPReg.Is(NoCPUReg));
  VIXL_CHECK(NoCPUReg.Is(NoFPReg));

  VIXL_CHECK(NoVReg.Is(NoCPUReg));
  VIXL_CHECK(NoCPUReg.Is(NoVReg));

  VIXL_CHECK(NoReg.IsNone());
  VIXL_CHECK(NoFPReg.IsNone());
  VIXL_CHECK(NoVReg.IsNone());
  VIXL_CHECK(NoCPUReg.IsNone());
}


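// IsValid() accepts any real register; IsValidRegister() and
// IsValidFPRegister() additionally require membership of the matching
// register bank, as the checks below demonstrate.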
TEST(isvalid) {
  VIXL_CHECK(!NoReg.IsValid());
  VIXL_CHECK(!NoFPReg.IsValid());
  VIXL_CHECK(!NoVReg.IsValid());
  VIXL_CHECK(!NoCPUReg.IsValid());

  VIXL_CHECK(x0.IsValid());
  VIXL_CHECK(w0.IsValid());
  VIXL_CHECK(x30.IsValid());
  VIXL_CHECK(w30.IsValid());
  VIXL_CHECK(xzr.IsValid());
  VIXL_CHECK(wzr.IsValid());

  VIXL_CHECK(sp.IsValid());
  VIXL_CHECK(wsp.IsValid());

  VIXL_CHECK(d0.IsValid());
  VIXL_CHECK(s0.IsValid());
  VIXL_CHECK(d31.IsValid());
  VIXL_CHECK(s31.IsValid());

  VIXL_CHECK(x0.IsValidRegister());
  VIXL_CHECK(w0.IsValidRegister());
  VIXL_CHECK(xzr.IsValidRegister());
  VIXL_CHECK(wzr.IsValidRegister());
  VIXL_CHECK(sp.IsValidRegister());
  VIXL_CHECK(wsp.IsValidRegister());
  VIXL_CHECK(!x0.IsValidFPRegister());
  VIXL_CHECK(!w0.IsValidFPRegister());
  VIXL_CHECK(!xzr.IsValidFPRegister());
  VIXL_CHECK(!wzr.IsValidFPRegister());
  VIXL_CHECK(!sp.IsValidFPRegister());
  VIXL_CHECK(!wsp.IsValidFPRegister());

  VIXL_CHECK(d0.IsValidFPRegister());
  VIXL_CHECK(s0.IsValidFPRegister());
  VIXL_CHECK(!d0.IsValidRegister());
  VIXL_CHECK(!s0.IsValidRegister());

  // Test the same as before, but using CPURegister types. This shouldn't make
  // any difference.
  VIXL_CHECK(static_cast<CPURegister>(x0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(w0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(x30).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(w30).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(wzr).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(sp).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(wsp).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(d0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(s0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(d31).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(s31).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(x0).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(w0).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(wzr).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(sp).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(wsp).IsValidRegister());
  VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidFPRegister());

  VIXL_CHECK(static_cast<CPURegister>(d0).IsValidFPRegister());
  VIXL_CHECK(static_cast<CPURegister>(s0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(d0).IsValidRegister());
  VIXL_CHECK(!static_cast<CPURegister>(s0).IsValidRegister());
}


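// AreConsecutive checks that each argument's register code is one greater
// than the previous one, wrapping from 31 back to 0. NoVReg arguments are
// skipped, but still advance the expected code by one.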
TEST(areconsecutive) {
  VIXL_CHECK(AreConsecutive(b0, NoVReg));
  VIXL_CHECK(AreConsecutive(b1, b2));
  VIXL_CHECK(AreConsecutive(b3, b4, b5));
  VIXL_CHECK(AreConsecutive(b6, b7, b8, b9));
  VIXL_CHECK(AreConsecutive(h10, NoVReg));
  VIXL_CHECK(AreConsecutive(h11, h12));
  VIXL_CHECK(AreConsecutive(h13, h14, h15));
  VIXL_CHECK(AreConsecutive(h16, h17, h18, h19));
  VIXL_CHECK(AreConsecutive(s20, NoVReg));
  VIXL_CHECK(AreConsecutive(s21, s22));
  VIXL_CHECK(AreConsecutive(s23, s24, s25));
  VIXL_CHECK(AreConsecutive(s26, s27, s28, s29));
  VIXL_CHECK(AreConsecutive(d30, NoVReg));
  VIXL_CHECK(AreConsecutive(d31, d0));
  VIXL_CHECK(AreConsecutive(d1, d2, d3));
  VIXL_CHECK(AreConsecutive(d4, d5, d6, d7));
  VIXL_CHECK(AreConsecutive(q8, NoVReg));
  VIXL_CHECK(AreConsecutive(q9, q10));
  VIXL_CHECK(AreConsecutive(q11, q12, q13));
  VIXL_CHECK(AreConsecutive(q14, q15, q16, q17));
  VIXL_CHECK(AreConsecutive(v18, NoVReg));
  VIXL_CHECK(AreConsecutive(v19, v20));
  VIXL_CHECK(AreConsecutive(v21, v22, v23));
  VIXL_CHECK(AreConsecutive(v24, v25, v26, v27));
  VIXL_CHECK(AreConsecutive(b29, h30));
  VIXL_CHECK(AreConsecutive(s31, d0, q1));
  VIXL_CHECK(AreConsecutive(v2, b3, h4, s5));

  VIXL_CHECK(!AreConsecutive(b0, b2));
  VIXL_CHECK(!AreConsecutive(h1, h0));
  VIXL_CHECK(!AreConsecutive(s31, s1));
  VIXL_CHECK(!AreConsecutive(d12, d12));
  VIXL_CHECK(!AreConsecutive(q31, q1));

  VIXL_CHECK(!AreConsecutive(b0, b1, b3));
  VIXL_CHECK(!AreConsecutive(h4, h5, h6, h6));
  VIXL_CHECK(!AreConsecutive(d11, d13, NoVReg, d14));
  VIXL_CHECK(!AreConsecutive(d15, d16, d18, NoVReg));
  VIXL_CHECK(!AreConsecutive(b26, b28, NoVReg, b29));
  VIXL_CHECK(!AreConsecutive(s28, s30, NoVReg, NoVReg));

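  // NoVReg never breaks a sequence, but the named registers around it must
  // still land on their expected codes.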
  VIXL_CHECK(AreConsecutive(q19, NoVReg, NoVReg, q22));
  VIXL_CHECK(AreConsecutive(v23, NoVReg, v25, NoVReg));
  VIXL_CHECK(AreConsecutive(b26, b27, NoVReg, NoVReg));
  VIXL_CHECK(AreConsecutive(h28, NoVReg, NoVReg, NoVReg));
  // These miss their expected codes (s31 + 2 wraps to s1, not s2; d3 + 2 is
  // d5, not d6), so they are not consecutive.
  VIXL_CHECK(!AreConsecutive(s30, s31, NoVReg, s2));
  VIXL_CHECK(!AreConsecutive(d3, NoVReg, d6, d7));
}


TEST(move_immediate_helpers) {
  // Using these helpers to query information (without generating code) should
  // not crash.
  MacroAssembler::MoveImmediateHelper(NULL, x0, 0x12345678);
  MacroAssembler::OneInstrMoveImmediateHelper(NULL, x1, 0xabcdef);
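  // For illustration only (an assumption, not exercised by this test): passing
  // a real MacroAssembler instead of NULL would make the helpers actually emit
  // the move sequence, e.g. MoveImmediateHelper(&masm, x0, 0x12345678).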
}


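// A GenericOperand wraps either a register or a stack-memory location.
// Default-constructed instances are invalid, and all invalid instances
// compare equal to each other.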
TEST(generic_operand_helpers) {
  GenericOperand invalid_1;
  GenericOperand invalid_2;
  GenericOperand reg(x3);
  GenericOperand mem(MemOperand(sp, 8), kXRegSizeInBytes);

  VIXL_CHECK(!invalid_1.IsValid());
  VIXL_CHECK(!invalid_2.IsValid());

  VIXL_CHECK(invalid_1.Equals(invalid_1));
  VIXL_CHECK(invalid_2.Equals(invalid_2));
  VIXL_CHECK(reg.Equals(reg));
  VIXL_CHECK(mem.Equals(mem));

  VIXL_CHECK(invalid_1.Equals(invalid_2));
  VIXL_CHECK(invalid_2.Equals(invalid_1));

  VIXL_CHECK(!invalid_1.Equals(reg));
  VIXL_CHECK(!invalid_1.Equals(mem));
  VIXL_CHECK(!reg.Equals(invalid_1));
  VIXL_CHECK(!reg.Equals(invalid_2));
  VIXL_CHECK(!reg.Equals(mem));
  VIXL_CHECK(!mem.Equals(invalid_1));
  VIXL_CHECK(!mem.Equals(reg));
}


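// XRegister and WRegister are statically-typed views of Register: they widen
// implicitly (to Register or CPURegister) but narrow only explicitly.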
TEST(static_register_types) {
  // [WX]Register implicitly casts to Register.
  XRegister x_x0(0);
  WRegister w_w0(0);
  Register r_x0 = x_x0;
  Register r_w0 = w_w0;
  VIXL_CHECK(r_x0.Is(x_x0));
  VIXL_CHECK(x_x0.Is(r_x0));
  VIXL_CHECK(r_w0.Is(w_w0));
  VIXL_CHECK(w_w0.Is(r_w0));

  // Register explicitly casts to [WX]Register.
  Register r_x1(1, kXRegSize);
  Register r_w1(1, kWRegSize);
  XRegister x_x1(r_x1);
  WRegister w_w1(r_w1);
  VIXL_CHECK(r_x1.Is(x_x1));
  VIXL_CHECK(x_x1.Is(r_x1));
  VIXL_CHECK(r_w1.Is(w_w1));
  VIXL_CHECK(w_w1.Is(r_w1));

  // [WX]Register implicitly casts to CPURegister.
  XRegister x_x2(2);
  WRegister w_w2(2);
  CPURegister cpu_x2 = x_x2;
  CPURegister cpu_w2 = w_w2;
  VIXL_CHECK(cpu_x2.Is(x_x2));
  VIXL_CHECK(x_x2.Is(cpu_x2));
  VIXL_CHECK(cpu_w2.Is(w_w2));
  VIXL_CHECK(w_w2.Is(cpu_w2));
}


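// An Operand counts as a "plain register" when its shift or extend cannot
// change the register's value: a shift amount of zero, or a 64-bit identity
// extend (UXTX/SXTX with no shift).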
TEST(is_plain_register) {
  VIXL_CHECK(Operand(x0).IsPlainRegister());
  VIXL_CHECK(Operand(x1, LSL, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x2, LSR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x3, ASR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x4, ROR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x5, UXTX).IsPlainRegister());
  VIXL_CHECK(Operand(x6, SXTX).IsPlainRegister());
  VIXL_CHECK(Operand(w7).IsPlainRegister());
  VIXL_CHECK(Operand(w8, LSL, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w9, LSR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w10, ASR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w11, ROR, 0).IsPlainRegister());

  VIXL_CHECK(!Operand(x0, LSL, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(x1, LSR, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(x2, ASR, 3).IsPlainRegister());
  VIXL_CHECK(!Operand(x3, ROR, 4).IsPlainRegister());
  VIXL_CHECK(!Operand(x5, UXTX, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(x6, SXTX, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(w7, LSL, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(w8, LSR, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(w9, ASR, 3).IsPlainRegister());
  VIXL_CHECK(!Operand(w10, ROR, 4).IsPlainRegister());
  VIXL_CHECK(!Operand(w11, UXTB).IsPlainRegister());
  VIXL_CHECK(!Operand(w12, SXTB).IsPlainRegister());
  VIXL_CHECK(!Operand(w13, UXTH).IsPlainRegister());
  VIXL_CHECK(!Operand(w14, SXTH).IsPlainRegister());
  // UXTW and SXTW could be treated as plain registers in 32-bit contexts, but
  // the Operand class doesn't know the context, so it has to return false.
  VIXL_CHECK(!Operand(w15, UXTW).IsPlainRegister());
  VIXL_CHECK(!Operand(w16, SXTW).IsPlainRegister());
}


}  // namespace aarch64
}  // namespace vixl