// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#include "aarch32/test-utils-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define TEST(name) TEST_(SCOPES_##name)

#ifdef VIXL_INCLUDE_TARGET_A32
#define TEST_A32(name) TEST(name)
#else
// Do not add this test to the harness.
#define TEST_A32(name) void Test##name()
#endif

#define __ masm.

namespace vixl {

// This file contains tests for code generation scopes.
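//
// In broad terms (a summary, not an exhaustive description of the scope
// classes declared in code-generation-scopes-vixl.h):
//  - CodeBufferCheckScope reserves space in the code buffer when opened, and
//    checks on destruction that the code emitted within the scope does not
//    exceed the reserved size.
//  - EmissionCheckScope additionally prevents pool emission for its duration,
//    emitting any pending pools up front when it is opened if the reserved
//    size requires it.
//  - ExactAssemblyScope additionally forbids macro instructions, so only raw
//    assembler instructions can be generated, and by default it checks that
//    exactly the reserved number of bytes was emitted.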

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_assembler_use_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_assembler_use_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64

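// A scope may also be default-constructed and opened later with Open(), or
// released before the end of its C++ lexical scope with Close(); the
// following tests exercise these entry points.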
#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64

#ifdef VIXL_INCLUDE_TARGET_AARCH32

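// Checks the current aarch32 literal pool size (in bytes) through the
// TestMacroAssembler introspection helper.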
#define ASSERT_LITERAL_POOL_SIZE_32(expected)     \
  {                                               \
    aarch32::TestMacroAssembler test(&masm);      \
    VIXL_CHECK((expected) == test.GetPoolSize()); \
  }

TEST_A32(EmissionCheckScope_emit_pool_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(PoolManager<int32_t>::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

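  // An A32 `ldrd` (literal) encodes an 8-bit immediate offset, so the literal
  // must stay within about 255 bytes of the instruction; kLessThanLdrdRange
  // is comfortably below that limit.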
  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be generated does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             kLessThanLdrdRange,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             kLdrdRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64

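// Note: GetLiteralPoolSize() reports one extra instruction of overhead even
// when the pool holds no literals (plausibly the branch emitted over the
// pool); the adjustment below is inferred from that behaviour.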
#define ASSERT_LITERAL_POOL_SIZE_64(expected)          \
  VIXL_CHECK((expected + aarch64::kInstructionSize) == \
             masm.GetLiteralPoolSize())

TEST(EmissionCheckScope_emit_pool_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be generated does not force the emission
    // of the pool.
    EmissionCheckScope scope(&masm,
                             10 * aarch64::kInstructionSize,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             aarch64::kMaxLoadLiteralRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(EmissionCheckScope_emit_pool_on_Open_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(PoolManager<int32_t>::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be generated does not force the emission
    // of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm, kLessThanLdrdRange, EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm, kLdrdRange + 1, EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_emit_pool_on_Open_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool must be generated does not force the emission
    // of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm,
               10 * aarch64::kInstructionSize,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm,
               aarch64::kMaxLoadLiteralRange + 1,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


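// The next two tests check the assembler-state flags that ExactAssemblyScope
// maintains: inside the scope only raw assembler is allowed and pools are
// blocked, and the previous state is restored on exit, including when scopes
// nest.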
#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_32) {
  aarch32::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch32::kA32InstructionSizeInBytes);
      VIXL_CHECK(masm.ArePoolsBlocked());
      VIXL_ASSERT(masm.AllowAssembler());
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    __ add(aarch32::r0, aarch32::r0, aarch32::r0);
    __ sub(aarch32::r0, aarch32::r0, aarch32::r0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_64) {
  aarch64::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch64::kInstructionSize);
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch64::kInstructionSize);
      VIXL_CHECK(masm.ArePoolsBlocked());
      VIXL_ASSERT(masm.AllowAssembler());
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ add(aarch64::x0, aarch64::x0, aarch64::x0);
    __ sub(aarch64::x0, aarch64::x0, aarch64::x0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


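// Regression tests: opening an ExactAssemblyScope must emit a pending literal
// pool immediately if the scope's reserved size could otherwise push the
// pending literals out of range.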
#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_scope_with_pools_32) {
  aarch32::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int32_t kLdrdRange = 255;
  const int32_t n_nops = (kLdrdRange / aarch32::kA32InstructionSizeInBytes) + 1;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldrd` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm,
                             n_nops * aarch32::kA32InstructionSizeInBytes);

    // Although the literal pool size must be zero at this point, we do not
    // check it here, because we want this regression test to fail while or
    // after we generate the nops.

    for (int32_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_32(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_scope_with_pools_64) {
  aarch64::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x10, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_64(8);

  const int64_t n_nops =
      aarch64::kMaxLoadLiteralRange / aarch64::kInstructionSize;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldr` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm, n_nops * aarch64::kInstructionSize);

    // Although the literal pool size must be zero at this point, we do not
    // check it here, because we want this regression test to fail while or
    // after we generate the nops.

    for (int64_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_64(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


}  // namespace vixl