// Copyright 2016, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#include <stdint.h>
#include <stdlib.h>

#include <algorithm>

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define STRINGIFY(x) #x
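
// Each TEST_* macro below forward-declares a target-specific test body,
// defines a trampoline for the test framework to call, registers the test
// under a "SCRATCH_"-prefixed name, and finally opens the definition of the
// test body itself. For example, `TEST_AARCH32(foo) { ... }` expands roughly
// to:
//
//   namespace aarch32 { void Test_foo_AArch32_Impl(); }
//   void Test_foo_AArch32() { aarch32::Test_foo_AArch32_Impl(); }
//   Test test_foo_AArch32("AARCH32_SCRATCH_foo", &Test_foo_AArch32);
//   void aarch32::Test_foo_AArch32_Impl() { ... }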

#define TEST_AARCH32(Name)                                                \
  namespace aarch32 {                                                     \
  void Test_##Name##_AArch32_Impl();                                      \
  }                                                                       \
  void Test_##Name##_AArch32() { aarch32::Test_##Name##_AArch32_Impl(); } \
  Test test_##Name##_AArch32(STRINGIFY(AARCH32_SCRATCH_##Name),           \
                             &Test_##Name##_AArch32);                     \
  void aarch32::Test_##Name##_AArch32_Impl()

#define TEST_AARCH64(Name)                                                \
  namespace aarch64 {                                                     \
  void Test_##Name##_AArch64_Impl();                                      \
  }                                                                       \
  void Test_##Name##_AArch64() { aarch64::Test_##Name##_AArch64_Impl(); } \
  Test test_##Name##_AArch64(STRINGIFY(AARCH64_SCRATCH_##Name),           \
                             &Test_##Name##_AArch64);                     \
  void aarch64::Test_##Name##_AArch64_Impl()
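
// SETUP() instantiates the target-specific MacroAssembler under test, and
// `__` is the usual VIXL test shorthand for invoking methods on it.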

#define SETUP() MacroAssembler masm

#define __ masm.

namespace vixl {

// UseScratchRegisterScopes must be able to nest perfectly. That is, they may
// nest, but nested scopes must not outlive less-nested scopes.
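// For example:
//
//   {
//     UseScratchRegisterScope outer(&masm);
//     {
//       UseScratchRegisterScope inner(&masm);
//     }  // `inner` is destroyed here, strictly before `outer`.
//   }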
template <typename MacroAssembler, typename UseScratchRegisterScope>
class PerfectNestingTestHelper {
 public:
  explicit PerfectNestingTestHelper(MacroAssembler* masm) : masm_(masm) {
    // Fix the random seed so that test runs are reproducible.
    uint16_t seed[3] = {4, 5, 6};
    seed48(seed);
  }

  void Run() {
    UseScratchRegisterScope* top_scope =
        masm_->GetCurrentScratchRegisterScope();
    int descendents = 0;
    // Keep running until at least kMinimumDescendentScopeCount scopes have
    // been created and destroyed.
    while (descendents < kMinimumDescendentScopeCount) descendents += Run(0);
    VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == top_scope);
  }

 private:
  int Run(int depth) {
    // As the depth increases, the probability of recursion decreases.
    // At depth = kDepthLimit, we never recurse.
    int max_children = static_cast<int>(std::abs(mrand48()) % kDepthLimit);
    int children = std::max(0, max_children - depth);
    int descendents = children;
    while (children-- > 0) {
      UseScratchRegisterScope scope(masm_);
      VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == &scope);
      descendents += Run(depth + 1);
      VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == &scope);
    }
    return descendents;
  }

  MacroAssembler* masm_;
  static const int kDepthLimit = 12;
  static const int kMinimumDescendentScopeCount = 10000;
};

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_AARCH32(perfect_nesting) {
  SETUP();
  PerfectNestingTestHelper<MacroAssembler, UseScratchRegisterScope>(&masm)
      .Run();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32

#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST_AARCH64(perfect_nesting) {
  SETUP();
  PerfectNestingTestHelper<MacroAssembler, UseScratchRegisterScope>(&masm)
      .Run();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_AARCH32(v_registers) {
  SETUP();
  {
    UseScratchRegisterScope temps(&masm);
    temps.Include(VRegisterList(q0, q1, q2, q3));

    // This test assumes that low-numbered registers are allocated first. The
    // implementation is allowed to use a different strategy; if it does, the
    // test will need to be updated.
    // TODO: Write more flexible (and thorough) tests.
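
    // As a reminder, the AArch32 register banks alias: q<n> overlaps d<2n>
    // and d<2n+1>, and d<n> (for n < 16) overlaps s<2n> and s<2n+1>, so
    // acquiring one view of a register makes every overlapping view
    // unavailable.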

    // Acquiring q0 claims all of its aliases: d0, d1 and s0-s3.
    VIXL_CHECK(q0.Is(temps.AcquireQ()));
    VIXL_CHECK(!temps.IsAvailable(q0));
    VIXL_CHECK(!temps.IsAvailable(d0));
    VIXL_CHECK(!temps.IsAvailable(d1));
    VIXL_CHECK(!temps.IsAvailable(s0));
    VIXL_CHECK(!temps.IsAvailable(s1));
    VIXL_CHECK(!temps.IsAvailable(s2));
    VIXL_CHECK(!temps.IsAvailable(s3));

    // Acquiring a D register takes d2, the low half of q1. q1 can no longer
    // be acquired whole, but d3 (and its halves s6 and s7) is still free.
    VIXL_CHECK(d2.Is(temps.AcquireV(64)));
    VIXL_CHECK(!temps.IsAvailable(q1));
    VIXL_CHECK(!temps.IsAvailable(d2));
    VIXL_CHECK(temps.IsAvailable(d3));
    VIXL_CHECK(!temps.IsAvailable(s4));
    VIXL_CHECK(!temps.IsAvailable(s5));
    VIXL_CHECK(temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));

    // Acquiring s6 claims half of d3, leaving s7 free.
    VIXL_CHECK(s6.Is(temps.AcquireS()));
    VIXL_CHECK(!temps.IsAvailable(d3));
    VIXL_CHECK(!temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));

    // q2 is the lowest-numbered Q register that is still entirely free.
    VIXL_CHECK(q2.Is(temps.AcquireV(128)));
    VIXL_CHECK(!temps.IsAvailable(q2));
    VIXL_CHECK(!temps.IsAvailable(d4));
    VIXL_CHECK(!temps.IsAvailable(d5));
    VIXL_CHECK(!temps.IsAvailable(s8));
    VIXL_CHECK(!temps.IsAvailable(s9));
    VIXL_CHECK(!temps.IsAvailable(s10));
    VIXL_CHECK(!temps.IsAvailable(s11));
    VIXL_CHECK(temps.IsAvailable(s7));

    // Acquiring d6 takes the low half of q3.
    VIXL_CHECK(d6.Is(temps.AcquireD()));
    VIXL_CHECK(!temps.IsAvailable(q3));
    VIXL_CHECK(!temps.IsAvailable(d6));
    VIXL_CHECK(temps.IsAvailable(d7));
    VIXL_CHECK(!temps.IsAvailable(s12));
    VIXL_CHECK(!temps.IsAvailable(s13));
    VIXL_CHECK(temps.IsAvailable(s14));
    VIXL_CHECK(temps.IsAvailable(s15));
    VIXL_CHECK(temps.IsAvailable(s7));

    // s7 is the lowest-numbered S register still available.
    VIXL_CHECK(s7.Is(temps.AcquireS()));
  }
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_AARCH32(include_exclude) {
  SETUP();
  {
    UseScratchRegisterScope temps(&masm);
    temps.Include(r0, r1, r2, r3);
    temps.Include(s0, s1, d1, q1);

    // Including a register makes it, and every narrower view of it,
    // available.
    VIXL_CHECK(temps.IsAvailable(r0));
    VIXL_CHECK(temps.IsAvailable(r1));
    VIXL_CHECK(temps.IsAvailable(r2));
    VIXL_CHECK(temps.IsAvailable(r3));

    VIXL_CHECK(temps.IsAvailable(s0));

    VIXL_CHECK(temps.IsAvailable(s1));

    VIXL_CHECK(temps.IsAvailable(d1));
    VIXL_CHECK(temps.IsAvailable(s2));
    VIXL_CHECK(temps.IsAvailable(s3));

    VIXL_CHECK(temps.IsAvailable(q1));
    VIXL_CHECK(temps.IsAvailable(d2));
    VIXL_CHECK(temps.IsAvailable(d3));
    VIXL_CHECK(temps.IsAvailable(s4));
    VIXL_CHECK(temps.IsAvailable(s5));
    VIXL_CHECK(temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));

    // Test local exclusion. Note that registers excluded via `local_temps`
    // are also reported as unavailable by `temps` until `local_temps` is
    // destroyed.
    {
      UseScratchRegisterScope local_temps(&masm);
      local_temps.Exclude(r1, r2);
      local_temps.Exclude(s1, q1);

      VIXL_CHECK(temps.IsAvailable(r0));
      VIXL_CHECK(!temps.IsAvailable(r1));
      VIXL_CHECK(!temps.IsAvailable(r2));
      VIXL_CHECK(temps.IsAvailable(r3));

      VIXL_CHECK(temps.IsAvailable(s0));

      VIXL_CHECK(!temps.IsAvailable(s1));

      VIXL_CHECK(temps.IsAvailable(d1));
      VIXL_CHECK(temps.IsAvailable(s2));
      VIXL_CHECK(temps.IsAvailable(s3));

      VIXL_CHECK(!temps.IsAvailable(q1));
      VIXL_CHECK(!temps.IsAvailable(d2));
      VIXL_CHECK(!temps.IsAvailable(d3));
      VIXL_CHECK(!temps.IsAvailable(s4));
      VIXL_CHECK(!temps.IsAvailable(s5));
      VIXL_CHECK(!temps.IsAvailable(s6));
      VIXL_CHECK(!temps.IsAvailable(s7));
    }
237
238 // This time, exclude part of included registers, making sure the entire
239 // register gets excluded.
240 {
241 UseScratchRegisterScope local_temps(&masm);
242 local_temps.Exclude(s2, d3);
243
244 VIXL_CHECK(temps.IsAvailable(r0));
245 VIXL_CHECK(temps.IsAvailable(r1));
246 VIXL_CHECK(temps.IsAvailable(r2));
247 VIXL_CHECK(temps.IsAvailable(r3));
248
249 VIXL_CHECK(temps.IsAvailable(s0));
250
251 VIXL_CHECK(temps.IsAvailable(s1));
252
253 // Excluding s2 should exclude d1 but not s3.
254 VIXL_CHECK(!temps.IsAvailable(d1));
255 VIXL_CHECK(!temps.IsAvailable(s2));
256 VIXL_CHECK(temps.IsAvailable(s3));
257
258 // Excluding d3 should exclude q1, s7 and s6 but not d2, s5, s4.
259 VIXL_CHECK(!temps.IsAvailable(q1));
260 VIXL_CHECK(temps.IsAvailable(d2));
261 VIXL_CHECK(!temps.IsAvailable(d3));
262 VIXL_CHECK(temps.IsAvailable(s4));
263 VIXL_CHECK(temps.IsAvailable(s5));
264 VIXL_CHECK(!temps.IsAvailable(s6));
265 VIXL_CHECK(!temps.IsAvailable(s7));
266 }

    // Make sure the initial state was restored.

    VIXL_CHECK(temps.IsAvailable(r0));
    VIXL_CHECK(temps.IsAvailable(r1));
    VIXL_CHECK(temps.IsAvailable(r2));
    VIXL_CHECK(temps.IsAvailable(r3));

    VIXL_CHECK(temps.IsAvailable(s0));

    VIXL_CHECK(temps.IsAvailable(s1));

    VIXL_CHECK(temps.IsAvailable(d1));
    VIXL_CHECK(temps.IsAvailable(s2));
    VIXL_CHECK(temps.IsAvailable(s3));

    VIXL_CHECK(temps.IsAvailable(q1));
    VIXL_CHECK(temps.IsAvailable(d2));
    VIXL_CHECK(temps.IsAvailable(d3));
    VIXL_CHECK(temps.IsAvailable(s4));
    VIXL_CHECK(temps.IsAvailable(s5));
    VIXL_CHECK(temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));
  }
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32

}  // namespace vixl