// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may
//     be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#include <cstdio>
#include <cstring>
#include <string>

#include "test-runner.h"
#include "test-utils.h"
#include "aarch64/test-utils-aarch64.h"

#include "aarch64/macro-assembler-aarch64.h"

#define __ masm.
#define TEST(name) TEST_(AARCH64_API_##name)


namespace vixl {
namespace aarch64 {

// Check compiler intrinsics helpers.
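// Both the intrinsic-based CountLeadingSignBits and the generic
// CountLeadingSignBitsFallBack are exercised here and compared against
// independently-computed expectations.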

TEST(count_leading_sign_bits) {
  class Helper {
   public:
    static void Check(int64_t value, int non_sign_bits) {
      VIXL_ASSERT((0 <= non_sign_bits) && (non_sign_bits < 64));

      for (int width = 1; width <= 64; width *= 2) {
        // Note that leading_sign_bits does not include the topmost bit.
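        // For example, 4 is 0b0100 at width 4: it has three non-sign bits,
        // so there are 4 - 3 - 1 = 0 leading sign bits below the top bit.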
        int leading_sign_bits = width - non_sign_bits - 1;
        if (leading_sign_bits < 0) continue;

        int64_t result = CountLeadingSignBits(value, width);
        int64_t fallback_result = CountLeadingSignBitsFallBack(value, width);
        VIXL_CHECK(result == leading_sign_bits);
        VIXL_CHECK(fallback_result == leading_sign_bits);
      }
    }
  };

  // Basic positive (and zero) cases. Sign bits are all zeroes.
  Helper::Check(0, 0);  // 0b++++
  Helper::Check(1, 1);  // 0b+++1
  Helper::Check(2, 2);  // 0b++10
  Helper::Check(3, 2);  // 0b++11
  Helper::Check(4, 3);  // 0b+100

  // Basic negative cases. Sign bits are all ones.
  Helper::Check(-1, 0);  // 0b----
  Helper::Check(-2, 1);  // 0b---0
  Helper::Check(-3, 2);  // 0b--01
  Helper::Check(-4, 2);  // 0b--00
  Helper::Check(-5, 3);  // 0b-011

  // Boundary conditions.
  Helper::Check(INT8_MAX, 7);
  Helper::Check(INT8_MIN, 7);
  Helper::Check(static_cast<int64_t>(INT8_MAX) + 1, 8);
  Helper::Check(static_cast<int64_t>(INT8_MIN) - 1, 8);

  Helper::Check(INT16_MAX, 15);
  Helper::Check(INT16_MIN, 15);
  Helper::Check(static_cast<int64_t>(INT16_MAX) + 1, 16);
  Helper::Check(static_cast<int64_t>(INT16_MIN) - 1, 16);

  Helper::Check(INT32_MAX, 31);
  Helper::Check(INT32_MIN, 31);
  Helper::Check(static_cast<int64_t>(INT32_MAX) + 1, 32);
  Helper::Check(static_cast<int64_t>(INT32_MIN) - 1, 32);

  Helper::Check(INT64_MAX, 63);
  Helper::Check(INT64_MIN, 63);

  // Check automatic width detection.
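  // The width is deduced from the argument's type, so widening 42 simply adds
  // the extra sign-extension bits to the count.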
  VIXL_CHECK(CountLeadingSignBits(static_cast<int8_t>(42)) == 1);  // 0b00101010
  VIXL_CHECK(CountLeadingSignBits(static_cast<int16_t>(42)) == 9);
  VIXL_CHECK(CountLeadingSignBits(static_cast<int32_t>(42)) == 25);
  VIXL_CHECK(CountLeadingSignBits(static_cast<int64_t>(42)) == 57);
}

// Check SimFloat16 class mechanics.
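// As IEEE 754 requires, NaNs compare unequal to everything (including other
// NaNs), while positive and negative zero compare equal.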
TEST(float16_operators) {
  ::vixl::internal::SimFloat16 f1 = kFP16DefaultNaN;
  ::vixl::internal::SimFloat16 f2 = kFP16DefaultNaN;
  ::vixl::internal::SimFloat16 f3 = kFP16PositiveInfinity;
  ::vixl::internal::SimFloat16 f4 = kFP16NegativeInfinity;
  VIXL_CHECK(!(f1 == f2));
  VIXL_CHECK(f1 != f2);
  VIXL_CHECK(!(f3 == f4));
  VIXL_CHECK(f3 != f4);
  VIXL_CHECK(::vixl::internal::SimFloat16(kFP16PositiveZero) ==
             ::vixl::internal::SimFloat16(kFP16NegativeZero));
  VIXL_CHECK(!(::vixl::internal::SimFloat16(kFP16PositiveZero) !=
               ::vixl::internal::SimFloat16(kFP16NegativeZero)));
}

// Check moved FP constants are still accessible via the AArch64 namespace.
TEST(float_constants_scope) {
  VIXL_CHECK(vixl::aarch64::kFP64PositiveInfinity ==
             vixl::kFP64PositiveInfinity);
  VIXL_CHECK(vixl::aarch64::kFP64NegativeInfinity ==
             vixl::kFP64NegativeInfinity);
  VIXL_CHECK(vixl::aarch64::kFP32PositiveInfinity ==
             vixl::kFP32PositiveInfinity);
  VIXL_CHECK(vixl::aarch64::kFP32NegativeInfinity ==
             vixl::kFP32NegativeInfinity);
  VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16PositiveInfinity) ==
             Float16ToRawbits(vixl::kFP16PositiveInfinity));
  VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16NegativeInfinity) ==
             Float16ToRawbits(vixl::kFP16NegativeInfinity));
  VIXL_CHECK(DoubleToRawbits(vixl::aarch64::kFP64DefaultNaN) ==
             DoubleToRawbits(vixl::kFP64DefaultNaN));
  VIXL_CHECK(FloatToRawbits(vixl::aarch64::kFP32DefaultNaN) ==
             FloatToRawbits(vixl::kFP32DefaultNaN));
  VIXL_CHECK(IsNaN(vixl::aarch64::kFP16DefaultNaN) ==
             IsNaN(vixl::kFP16DefaultNaN));
  VIXL_CHECK(vixl::aarch64::kDoubleExponentBits == vixl::kDoubleExponentBits);
  VIXL_CHECK(vixl::aarch64::kDoubleMantissaBits == vixl::kDoubleMantissaBits);
  VIXL_CHECK(vixl::aarch64::kFloatExponentBits == vixl::kFloatExponentBits);
  VIXL_CHECK(vixl::aarch64::kFloatMantissaBits == vixl::kFloatMantissaBits);
  VIXL_CHECK(vixl::aarch64::kFloat16ExponentBits == vixl::kFloat16ExponentBits);
  VIXL_CHECK(vixl::aarch64::kFloat16MantissaBits == vixl::kFloat16MantissaBits);
}

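// GetBit() returns a one-hot 64-bit mask (1 << code), suitable for building
// register lists, so distinct registers must map to distinct bits.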
TEST(register_bit) {
  VIXL_CHECK(x0.GetBit() == (UINT64_C(1) << 0));
  VIXL_CHECK(x1.GetBit() == (UINT64_C(1) << 1));
  VIXL_CHECK(x10.GetBit() == (UINT64_C(1) << 10));

  // AAPCS64 definitions.
  VIXL_CHECK(lr.GetBit() == (UINT64_C(1) << kLinkRegCode));

  // Fixed (hardware) definitions.
  VIXL_CHECK(xzr.GetBit() == (UINT64_C(1) << kZeroRegCode));

  // Internal ABI definitions.
  VIXL_CHECK(sp.GetBit() == (UINT64_C(1) << kSPRegInternalCode));
  VIXL_CHECK(sp.GetBit() != xzr.GetBit());

  // xn.GetBit() == wn.GetBit() at all times, for the same n.
  VIXL_CHECK(x0.GetBit() == w0.GetBit());
  VIXL_CHECK(x1.GetBit() == w1.GetBit());
  VIXL_CHECK(x10.GetBit() == w10.GetBit());
  VIXL_CHECK(xzr.GetBit() == wzr.GetBit());
  VIXL_CHECK(sp.GetBit() == wsp.GetBit());
}


TEST(noreg) {
  VIXL_CHECK(NoReg.Is(NoVReg));
  VIXL_CHECK(NoVReg.Is(NoReg));

  VIXL_CHECK(NoReg.Is(NoCPUReg));
  VIXL_CHECK(NoCPUReg.Is(NoReg));

  VIXL_CHECK(NoVReg.Is(NoCPUReg));
  VIXL_CHECK(NoCPUReg.Is(NoVReg));

  VIXL_CHECK(NoReg.IsNone());
  VIXL_CHECK(NoVReg.IsNone());
  VIXL_CHECK(NoCPUReg.IsNone());
}


TEST(isvalid) {
  VIXL_CHECK(!NoReg.IsValid());
  VIXL_CHECK(!NoVReg.IsValid());
  VIXL_CHECK(!NoCPUReg.IsValid());

  VIXL_CHECK(x0.IsValid());
  VIXL_CHECK(w0.IsValid());
  VIXL_CHECK(x30.IsValid());
  VIXL_CHECK(w30.IsValid());
  VIXL_CHECK(xzr.IsValid());
  VIXL_CHECK(wzr.IsValid());

  VIXL_CHECK(sp.IsValid());
  VIXL_CHECK(wsp.IsValid());

  VIXL_CHECK(d0.IsValid());
  VIXL_CHECK(s0.IsValid());
  VIXL_CHECK(d31.IsValid());
  VIXL_CHECK(s31.IsValid());

  VIXL_CHECK(x0.IsValidRegister());
  VIXL_CHECK(w0.IsValidRegister());
  VIXL_CHECK(xzr.IsValidRegister());
  VIXL_CHECK(wzr.IsValidRegister());
  VIXL_CHECK(sp.IsValidRegister());
  VIXL_CHECK(wsp.IsValidRegister());
  VIXL_CHECK(!x0.IsValidVRegister());
  VIXL_CHECK(!w0.IsValidVRegister());
  VIXL_CHECK(!xzr.IsValidVRegister());
  VIXL_CHECK(!wzr.IsValidVRegister());
  VIXL_CHECK(!sp.IsValidVRegister());
  VIXL_CHECK(!wsp.IsValidVRegister());
  VIXL_CHECK(!x0.IsValidFPRegister());
  VIXL_CHECK(!w0.IsValidFPRegister());
  VIXL_CHECK(!xzr.IsValidFPRegister());
  VIXL_CHECK(!wzr.IsValidFPRegister());
  VIXL_CHECK(!sp.IsValidFPRegister());
  VIXL_CHECK(!wsp.IsValidFPRegister());

  VIXL_CHECK(q0.IsValidVRegister());
  VIXL_CHECK(!q0.IsValidFPRegister());
  VIXL_CHECK(!q0.IsValidRegister());

  VIXL_CHECK(d0.IsValidVRegister());
  VIXL_CHECK(d0.IsValidFPRegister());
  VIXL_CHECK(!d0.IsValidRegister());

  VIXL_CHECK(s0.IsValidVRegister());
  VIXL_CHECK(s0.IsValidFPRegister());
  VIXL_CHECK(!s0.IsValidRegister());

  VIXL_CHECK(h0.IsValidVRegister());
  VIXL_CHECK(h0.IsValidFPRegister());
  VIXL_CHECK(!h0.IsValidRegister());

  VIXL_CHECK(b0.IsValidVRegister());
  VIXL_CHECK(!b0.IsValidFPRegister());
  VIXL_CHECK(!b0.IsValidRegister());

  // IsValidFPRegister() is only true for scalar types.
  VIXL_CHECK(q0.V2D().IsValidVRegister());
  VIXL_CHECK(!q0.V2D().IsValidFPRegister());
  VIXL_CHECK(d0.V2S().IsValidVRegister());
  VIXL_CHECK(!d0.V2S().IsValidFPRegister());
  VIXL_CHECK(s0.V2H().IsValidVRegister());
  VIXL_CHECK(!s0.V2H().IsValidFPRegister());
}

TEST(isvalid_cpu) {
  // As 'isvalid', but using CPURegister types where possible. This shouldn't
  // make any difference.
  VIXL_CHECK(!static_cast<CPURegister>(NoReg).IsValid());
  VIXL_CHECK(!static_cast<CPURegister>(NoVReg).IsValid());
  VIXL_CHECK(!static_cast<CPURegister>(NoCPUReg).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(x0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(w0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(x30).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(w30).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(wzr).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(sp).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(wsp).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(d0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(s0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(d31).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(s31).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(x0).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(w0).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(wzr).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(sp).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(wsp).IsValidRegister());
  VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidFPRegister());

  VIXL_CHECK(static_cast<CPURegister>(q0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(q0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(q0).IsValidRegister());

  VIXL_CHECK(static_cast<CPURegister>(d0).IsValidVRegister());
  VIXL_CHECK(static_cast<CPURegister>(d0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(d0).IsValidRegister());

  VIXL_CHECK(static_cast<CPURegister>(s0).IsValidVRegister());
  VIXL_CHECK(static_cast<CPURegister>(s0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(s0).IsValidRegister());

  VIXL_CHECK(static_cast<CPURegister>(h0).IsValidVRegister());
  VIXL_CHECK(static_cast<CPURegister>(h0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(h0).IsValidRegister());

  VIXL_CHECK(static_cast<CPURegister>(b0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(b0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(b0).IsValidRegister());
}

TEST(areconsecutive) {
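  // Consecutiveness is judged on register codes only: the lane size may vary
  // along the sequence (b29 is followed by h30), and codes wrap around at v31
  // (d31 is followed by d0).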
  VIXL_CHECK(AreConsecutive(b0, NoVReg));
  VIXL_CHECK(AreConsecutive(b1, b2));
  VIXL_CHECK(AreConsecutive(b3, b4, b5));
  VIXL_CHECK(AreConsecutive(b6, b7, b8, b9));
  VIXL_CHECK(AreConsecutive(h10, NoVReg));
  VIXL_CHECK(AreConsecutive(h11, h12));
  VIXL_CHECK(AreConsecutive(h13, h14, h15));
  VIXL_CHECK(AreConsecutive(h16, h17, h18, h19));
  VIXL_CHECK(AreConsecutive(s20, NoVReg));
  VIXL_CHECK(AreConsecutive(s21, s22));
  VIXL_CHECK(AreConsecutive(s23, s24, s25));
  VIXL_CHECK(AreConsecutive(s26, s27, s28, s29));
  VIXL_CHECK(AreConsecutive(d30, NoVReg));
  VIXL_CHECK(AreConsecutive(d31, d0));
  VIXL_CHECK(AreConsecutive(d1, d2, d3));
  VIXL_CHECK(AreConsecutive(d4, d5, d6, d7));
  VIXL_CHECK(AreConsecutive(q8, NoVReg));
  VIXL_CHECK(AreConsecutive(q9, q10));
  VIXL_CHECK(AreConsecutive(q11, q12, q13));
  VIXL_CHECK(AreConsecutive(q14, q15, q16, q17));
  VIXL_CHECK(AreConsecutive(v18, NoVReg));
  VIXL_CHECK(AreConsecutive(v19, v20));
  VIXL_CHECK(AreConsecutive(v21, v22, v23));
  VIXL_CHECK(AreConsecutive(v24, v25, v26, v27));
  VIXL_CHECK(AreConsecutive(b29, h30));
  VIXL_CHECK(AreConsecutive(s31, d0, q1));
  VIXL_CHECK(AreConsecutive(v2, b3, h4, s5));

  VIXL_CHECK(!AreConsecutive(b0, b2));
  VIXL_CHECK(!AreConsecutive(h1, h0));
  VIXL_CHECK(!AreConsecutive(s31, s1));
  VIXL_CHECK(!AreConsecutive(d12, d12));
  VIXL_CHECK(!AreConsecutive(q31, q1));

  VIXL_CHECK(!AreConsecutive(b0, b1, b3));
  VIXL_CHECK(!AreConsecutive(h4, h5, h6, h6));
  VIXL_CHECK(!AreConsecutive(d11, d13, NoVReg, d14));
  VIXL_CHECK(!AreConsecutive(d15, d16, d18, NoVReg));
  VIXL_CHECK(!AreConsecutive(b26, b28, NoVReg, b29));
  VIXL_CHECK(!AreConsecutive(s28, s30, NoVReg, NoVReg));

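  // Registers from the first NoVReg onwards are ignored, so sequences that
  // are consecutive up to that point still pass.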
  VIXL_CHECK(AreConsecutive(q19, NoVReg, NoVReg, q22));
  VIXL_CHECK(AreConsecutive(v23, NoVReg, v25, NoVReg));
  VIXL_CHECK(AreConsecutive(b26, b27, NoVReg, NoVReg));
  VIXL_CHECK(AreConsecutive(h28, NoVReg, NoVReg, NoVReg));
  VIXL_CHECK(AreConsecutive(s30, s31, NoVReg, s2));
  VIXL_CHECK(AreConsecutive(d3, NoVReg, d6, d7));
}

TEST(move_immediate_helpers) {
  // Using these helpers to query information (without generating code) should
  // not crash.
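  // Passing NULL for the MacroAssembler puts the helpers in query-only mode:
  // they compute their result (for example, whether the immediate fits in a
  // single instruction) without emitting anything.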
  MacroAssembler::MoveImmediateHelper(NULL, x0, 0x12345678);
  MacroAssembler::OneInstrMoveImmediateHelper(NULL, x1, 0xabcdef);
}


TEST(generic_operand_helpers) {
  GenericOperand invalid_1;
  GenericOperand invalid_2;
  GenericOperand reg(x3);
  GenericOperand mem(MemOperand(sp, 8), kXRegSizeInBytes);

  VIXL_CHECK(!invalid_1.IsValid());
  VIXL_CHECK(!invalid_2.IsValid());

  VIXL_CHECK(invalid_1.Equals(invalid_1));
  VIXL_CHECK(invalid_2.Equals(invalid_2));
  VIXL_CHECK(reg.Equals(reg));
  VIXL_CHECK(mem.Equals(mem));

  VIXL_CHECK(invalid_1.Equals(invalid_2));
  VIXL_CHECK(invalid_2.Equals(invalid_1));

  VIXL_CHECK(!invalid_1.Equals(reg));
  VIXL_CHECK(!invalid_1.Equals(mem));
  VIXL_CHECK(!reg.Equals(invalid_1));
  VIXL_CHECK(!reg.Equals(invalid_2));
  VIXL_CHECK(!reg.Equals(mem));
  VIXL_CHECK(!mem.Equals(invalid_1));
  VIXL_CHECK(!mem.Equals(reg));
}


TEST(static_register_types) {
  // [WX]Register implicitly casts to Register.
  XRegister x_x0(0);
  WRegister w_w0(0);
  Register r_x0 = x_x0;
  Register r_w0 = w_w0;
  VIXL_CHECK(r_x0.Is(x_x0));
  VIXL_CHECK(x_x0.Is(r_x0));
  VIXL_CHECK(r_w0.Is(w_w0));
  VIXL_CHECK(w_w0.Is(r_w0));

  // Register explicitly casts to [WX]Register.
  Register r_x1(1, kXRegSize);
  Register r_w1(1, kWRegSize);
  XRegister x_x1(r_x1);
  WRegister w_w1(r_w1);
  VIXL_CHECK(r_x1.Is(x_x1));
  VIXL_CHECK(x_x1.Is(r_x1));
  VIXL_CHECK(r_w1.Is(w_w1));
  VIXL_CHECK(w_w1.Is(r_w1));

  // [WX]Register implicitly casts to CPURegister.
  XRegister x_x2(2);
  WRegister w_w2(2);
  CPURegister cpu_x2 = x_x2;
  CPURegister cpu_w2 = w_w2;
  VIXL_CHECK(cpu_x2.Is(x_x2));
  VIXL_CHECK(x_x2.Is(cpu_x2));
  VIXL_CHECK(cpu_w2.Is(w_w2));
  VIXL_CHECK(w_w2.Is(cpu_w2));
}


TEST(is_plain_register) {
  VIXL_CHECK(Operand(x0).IsPlainRegister());
  VIXL_CHECK(Operand(x1, LSL, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x2, LSR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x3, ASR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x4, ROR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x5, UXTX).IsPlainRegister());
  VIXL_CHECK(Operand(x6, SXTX).IsPlainRegister());
  VIXL_CHECK(Operand(w7).IsPlainRegister());
  VIXL_CHECK(Operand(w8, LSL, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w9, LSR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w10, ASR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w11, ROR, 0).IsPlainRegister());

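  // A non-zero shift amount, or an extend that can change the value, means
  // the operand no longer behaves as a plain register.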
  VIXL_CHECK(!Operand(x0, LSL, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(x1, LSR, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(x2, ASR, 3).IsPlainRegister());
  VIXL_CHECK(!Operand(x3, ROR, 4).IsPlainRegister());
  VIXL_CHECK(!Operand(x5, UXTX, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(x6, SXTX, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(w7, LSL, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(w8, LSR, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(w9, ASR, 3).IsPlainRegister());
  VIXL_CHECK(!Operand(w10, ROR, 4).IsPlainRegister());
  VIXL_CHECK(!Operand(w11, UXTB).IsPlainRegister());
  VIXL_CHECK(!Operand(w12, SXTB).IsPlainRegister());
  VIXL_CHECK(!Operand(w13, UXTH).IsPlainRegister());
  VIXL_CHECK(!Operand(w14, SXTH).IsPlainRegister());
  // UXTW and SXTW could be treated as plain registers in 32-bit contexts, but
  // the Operand class doesn't know the context so it has to return false.
  VIXL_CHECK(!Operand(w15, UXTW).IsPlainRegister());
  VIXL_CHECK(!Operand(w16, SXTW).IsPlainRegister());
}


}  // namespace aarch64
}  // namespace vixl