// Copyright 2021 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <xnnpack/aarch32-assembler.h>
#include <xnnpack/allocator.h>
#include <xnnpack/common.h>

#include <ios>

#include "assembler-helpers.h"
#include <gtest/gtest.h>

namespace xnnpack {
namespace aarch32 {
TEST(AArch32Assembler, InstructionEncoding) {
  xnn_code_buffer b;
  xnn_allocate_code_memory(&b, XNN_DEFAULT_CODE_BUFFER_SIZE);
  Assembler a(&b);

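  // CHECK_ENCODING and EXPECT_ERROR are defined in assembler-helpers.h: the
  // former emits the instruction and compares the produced 32-bit word against
  // the expected encoding, the latter checks that the assembler records the
  // given error for an invalid operand.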
  CHECK_ENCODING(0xE086600B, a.add(r6, r11));
  CHECK_ENCODING(0xE0810002, a.add(r0, r1, r2));
  CHECK_ENCODING(0xE28A9080, a.add(r9, r10, 128));
  CHECK_ENCODING(0xE29D5008, a.adds(r5, r13, 8));

  CHECK_ENCODING(0xE2025007, a.and_(r5, r2, 7));

  CHECK_ENCODING(0xE3CC2003, a.bic(r2, r12, 3));

  CHECK_ENCODING(0xE12FFF1E, a.bx(lr));

  CHECK_ENCODING(0xE3500002, a.cmp(r0, 2));
  CHECK_ENCODING(0xE1530007, a.cmp(r3, r7));

  // Offset addressing mode.
  CHECK_ENCODING(0xE59D7060, a.ldr(r7, mem[sp, 96]));
  // Post-indexed addressing mode.
  CHECK_ENCODING(0xE490B000, a.ldr(r11, mem[r0], 0));
  CHECK_ENCODING(0xE490B060, a.ldr(r11, mem[r0], 96));
  // Offsets out of bounds.
  EXPECT_ERROR(Error::kInvalidOperand, a.ldr(r7, MemOperand(sp, 4096)));
  EXPECT_ERROR(Error::kInvalidOperand, a.ldr(r7, MemOperand(sp, -4096)));
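  // (LDR/STR encode a 12-bit immediate, so offsets must stay within +/-4095.)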

  CHECK_ENCODING(0xE1CD66D8, a.ldrd(r6, r7, mem[sp, 104]));
  CHECK_ENCODING(0xE0CD66D8, a.ldrd(r6, r7, MemOperand(sp, 104, AddressingMode::kPostIndexed)));
  EXPECT_ERROR(Error::kInvalidOperand, a.ldrd(r6, r8, mem[sp, 104]));
  EXPECT_ERROR(Error::kInvalidOperand, a.ldrd(r6, r7, mem[sp, 4096]));
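  // LDRD requires a consecutive register pair (Rt, Rt+1) and an 8-bit
  // immediate, which is why both {r6, r8} and an offset of 4096 are rejected.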

  CHECK_ENCODING(0x01A0C007, a.moveq(r12, r7));
  CHECK_ENCODING(0x31A0C003, a.movlo(r12, r3));
  CHECK_ENCODING(0x91A0A00C, a.movls(r10, r12));
  CHECK_ENCODING(0xE1A0A00C, a.mov(r10, r12));

  CHECK_ENCODING(0xE320F000, a.nop());

  CHECK_ENCODING(0xE8BD0FF0, a.pop({r4, r5, r6, r7, r8, r9, r10, r11}));
  EXPECT_ERROR(Error::kInvalidOperand, a.pop({}));
  EXPECT_ERROR(Error::kInvalidOperand, a.pop({r1}));

  CHECK_ENCODING(0xE92D0FF0, a.push({r4, r5, r6, r7, r8, r9, r10, r11}));
  EXPECT_ERROR(Error::kInvalidOperand, a.push({}));
  EXPECT_ERROR(Error::kInvalidOperand, a.push({r1}));
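  // This assembler only supports the multi-register PUSH/POP encodings, so the
  // register list needs at least two registers.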

  CHECK_ENCODING(0xF5D3F000, a.pld(MemOperand(r3, 0)));
  CHECK_ENCODING(0xF5D3F040, a.pld(MemOperand(r3, 64)));

  CHECK_ENCODING(0xE58D5068, a.str(r5, mem[sp, 104]));
  EXPECT_ERROR(Error::kInvalidOperand, a.str(r5, MemOperand(sp, 4096)));
  EXPECT_ERROR(Error::kInvalidOperand, a.str(r5, MemOperand(sp, -4096)));

  CHECK_ENCODING(0xE0487002, a.sub(r7, r8, r2));
  CHECK_ENCODING(0xE2425010, a.sub(r5, r2, 16));
  CHECK_ENCODING(0xE2525010, a.subs(r5, r2, 16));

  CHECK_ENCODING(0xE315000F, a.tst(r5, 15));

  CHECK_ENCODING(0xEEB44AC8, a.vcmpe_f32(s8, s16));

  CHECK_ENCODING(0xF3FBE646, a.vcvt_f32_s32(q15, q3));
  CHECK_ENCODING(0xF3FB6748, a.vcvt_s32_f32(q11, q4));

  CHECK_ENCODING(0xF3FB6148, a.vcvtn_s32_f32(q11, q4));

  CHECK_ENCODING(0xF3FF8C4F, a.vdup_8(q12, d15[7]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vdup_8(q12, d15[8]));
  CHECK_ENCODING(0xF3FE8C4F, a.vdup_16(q12, d15[3]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vdup_16(q12, d15[4]));
  CHECK_ENCODING(0xF3FC8C4F, a.vdup_32(q12, d15[1]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vdup_32(q12, d15[2]));
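  // A D register holds 8, 4, or 2 lanes for 8-, 16-, and 32-bit elements,
  // which bounds the valid lane indices above.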

  CHECK_ENCODING(0xF2BE04C6, a.vext_8(q0, q15, q3, 4));
  EXPECT_ERROR(Error::kInvalidOperand, a.vext_8(q0, q15, q3, 16));

  CHECK_ENCODING(0xF423070F, a.vld1_8({d0}, mem[r3]));
  CHECK_ENCODING(0xF423070D, a.vld1_8({d0}, mem[r3]++));
  CHECK_ENCODING(0xF4230A0F, a.vld1_8({d0-d1}, mem[r3]));
  CHECK_ENCODING(0xF423060F, a.vld1_8({d0-d2}, mem[r3]));
  CHECK_ENCODING(0xF423020F, a.vld1_8({d0-d3}, mem[r3]));
  CHECK_ENCODING(0xF42A4705, a.vld1_8({d4}, mem[r10], r5));
  CHECK_ENCODING(0xF4294A0D, a.vld1_8({q2}, mem[r9]++));

  CHECK_ENCODING(0xF42C178F, a.vld1_32({d1}, mem[r12]));
  CHECK_ENCODING(0xF42C178D, a.vld1_32({d1}, mem[r12]++));
  CHECK_ENCODING(0xF42C1A8D, a.vld1_32({d1-d2}, mem[r12]++));
  CHECK_ENCODING(0xF42C168D, a.vld1_32({d1-d3}, mem[r12]++));
  CHECK_ENCODING(0xF42C128D, a.vld1_32({d1-d4}, mem[r12]++));

  CHECK_ENCODING(0xF4A8780F, a.vld1_32({d7[0]}, mem[r8]));
  CHECK_ENCODING(0xF4A3488D, a.vld1_32({d4[1]}, mem[r3]++));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vld1_32({d0[2]}, mem[r3]));

  CHECK_ENCODING(0xF4294A8D, a.vld1_32({q2}, mem[r9]++));

  CHECK_ENCODING(0xF4A54C8F, a.vld1r_32({d4}, mem[r5]));
  CHECK_ENCODING(0xF4A54CAF, a.vld1r_32({d4, d5}, mem[r5]));
  CHECK_ENCODING(0xF4A54CAD, a.vld1r_32({d4, d5}, mem[r5]++));
  EXPECT_ERROR(Error::kInvalidOperand, a.vld1r_32({d4, d5}, mem[r5, 4]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vld1r_32({d4, d6}, mem[r5]));

  CHECK_ENCODING(0xECD90B08, a.vldm(mem[r9], {d16-d19}));
  CHECK_ENCODING(0xECF90B08, a.vldm(mem[r9]++, {d16-d19}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vldm(mem[r9], {d8-d0}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vldm(mem[r9], {d0-d16}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vldm(mem[r9], DRegisterList(d31, 2)));

  CHECK_ENCODING(0xEC930A01, a.vldm(mem[r3], {s0}));
  CHECK_ENCODING(0xECB30A01, a.vldm(mem[r3]++, {s0}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vldm(mem[r3], {s4-s0}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vldm(mem[r3], SRegisterList(s31, 2)));

  CHECK_ENCODING(0xEDD97A0E, a.vldr(s15, mem[r9, 56]));
  CHECK_ENCODING(0xEDD97AFF, a.vldr(s15, mem[r9, 1020]));
  CHECK_ENCODING(0xED597AFF, a.vldr(s15, mem[r9, -1020]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(s15, MemOperand(r9, 56, AddressingMode::kPostIndexed)));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(s15, mem[r9, 1024]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(s15, mem[r9, -1024]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(s15, mem[r9, 1018]));
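  // VLDR/VSTR scale an 8-bit immediate by 4: valid offsets are multiples of 4
  // in [-1020, 1020], so 1024 is out of range and 1018 is misaligned.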

  CHECK_ENCODING(0xED99FB0E, a.vldr(d15, mem[r9, 56]));
  CHECK_ENCODING(0xED99FBFF, a.vldr(d15, mem[r9, 1020]));
  CHECK_ENCODING(0xED19FBFF, a.vldr(d15, mem[r9, -1020]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(d15, MemOperand(r9, 56, AddressingMode::kPostIndexed)));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(d15, mem[r9, 1024]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(d15, mem[r9, -1024]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vldr(d15, mem[r9, 1018]));

  CHECK_ENCODING(0xF20E26C6, a.vmax_s8(q1, q15, q3));
  CHECK_ENCODING(0xF24ECFC4, a.vmax_f32(q14, q15, q2));

  CHECK_ENCODING(0xF20E26D6, a.vmin_s8(q1, q15, q3));
  CHECK_ENCODING(0xF220EFC6, a.vmin_f32(q7, q8, q3));

  CHECK_ENCODING(0xEE04AA01, a.vmla_f32(s20, s8, s2));

  CHECK_ENCODING(0xF3E80140, a.vmla_f32(q8, q4, d0[0]));
  CHECK_ENCODING(0xF3EC0160, a.vmla_f32(q8, q6, d0[1]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vmla_f32(q8, q4, d0[2]));

  CHECK_ENCODING(0xF2D9E246, a.vmlal_s16(q15, d9, d6[0]));
  CHECK_ENCODING(0xF2D8424A, a.vmlal_s16(q10, d8, d2[1]));
  CHECK_ENCODING(0xF2D88264, a.vmlal_s16(q12, d8, d4[2]));
  CHECK_ENCODING(0xF2D8626A, a.vmlal_s16(q11, d8, d2[3]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vmlal_s16(q15, d9, d6[4]));

  CHECK_ENCODING(0xEEB0EA4F, a.vmov(s28, s30));
  CHECK_ENCODING(0xF26101B1, a.vmov(d16, d17));
  CHECK_ENCODING(0xEC420B1F, a.vmov(d15, r0, r2));
  CHECK_ENCODING(0xF26041F0, a.vmov(q10, q8));

  CHECK_ENCODING(0xEEB08A49, a.vmov_f32(s16, s18));
  CHECK_ENCODING(0x5EB08A44, a.vmovpl_f32(s16, s8));
  CHECK_ENCODING(0x4EB08A64, a.vmovmi_f32(s16, s9));

  CHECK_ENCODING(0xEEB0AB48, a.vmov_f64(d10, d8));

  CHECK_ENCODING(0xF2880A10, a.vmovl_s8(q0, d0));

  CHECK_ENCODING(0xEEF1FA10, a.vmrs(APSR_nzcv, FPSCR));

  CHECK_ENCODING(0xF34E2DD2, a.vmul_f32(q9, q15, q1));

  CHECK_ENCODING(0xECBD8B10, a.vpop({d8-d15}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpop({d0-d16}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpop({d4-d0}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpop(DRegisterList(d31, 2)));

  CHECK_ENCODING(0xED2D8B10, a.vpush({d8-d15}));
  CHECK_ENCODING(0xED6D4B08, a.vpush({d20-d23}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpush({d8-d7}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpush({d0-d16}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpush(DRegisterList(d31, 2)));

  CHECK_ENCODING(0xED2D4A08, a.vpush({s8-s15}));
  CHECK_ENCODING(0xED2DAA04, a.vpush({s20-s23}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpush({s8-s2}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vpush(SRegisterList(s31, 2)));

  CHECK_ENCODING(0xF25E00D2, a.vqadd_s16(q8, q15, q1));

  CHECK_ENCODING(0xF3A82CCE, a.vqdmulh_s32(q1, q12, d14[0]));
  CHECK_ENCODING(0xF3A82CEE, a.vqdmulh_s32(q1, q12, d14[1]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vqdmulh_s32(q1, q12, d14[2]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vqdmulh_s32(q1, q12, d16[0]));

  CHECK_ENCODING(0xF3B232A6, a.vqmovn_s16(d3, q11));
  CHECK_ENCODING(0xF3F602A0, a.vqmovn_s32(d16, q8));

  CHECK_ENCODING(0xF22C247E, a.vqshl_s32(q1, q15, q6));

  CHECK_ENCODING(0xF264C560, a.vrshl_s32(q14, q8, q2));

  CHECK_ENCODING(0xFE666D41, a.vsdot_s8(q11, q3, d1[0]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vsdot_s8(q11, q3, d1[2]));

  CHECK_ENCODING(0xF40B070F, a.vst1_8({d0}, mem[r11]));
  CHECK_ENCODING(0xF40B070D, a.vst1_8({d0}, mem[r11]++));
  CHECK_ENCODING(0xF40B0707, a.vst1_8({d0}, mem[r11], r7));
  CHECK_ENCODING(0xF48B000F, a.vst1_8({d0[0]}, mem[r11]));
  CHECK_ENCODING(0xF48B00EF, a.vst1_8({d0[7]}, mem[r11]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vst1_8(d0[8], mem[r11]));

  CHECK_ENCODING(0xF40B074F, a.vst1_16({d0}, mem[r11]));
  CHECK_ENCODING(0xF40B074D, a.vst1_16({d0}, mem[r11]++));
  CHECK_ENCODING(0xF40B0747, a.vst1_16({d0}, mem[r11], r7));
  CHECK_ENCODING(0xF48B040F, a.vst1_16({d0[0]}, mem[r11]));
  CHECK_ENCODING(0xF48B04CF, a.vst1_16({d0[3]}, mem[r11]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vst1_16(d0[4], mem[r11]));

  CHECK_ENCODING(0xF44B0280, a.vst1_32({d16-d19}, mem[r11], r0));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vst1_32({d0-d4}, mem[r11], r0));
  EXPECT_ERROR(Error::kInvalidOperand, a.vst1_32({d16-d19}, mem[r11], sp));
  EXPECT_ERROR(Error::kInvalidOperand, a.vst1_32({d16-d19}, mem[r11], pc));
  CHECK_ENCODING(0xF404168F, a.vst1_32({d1-d3}, mem[r4]));
  CHECK_ENCODING(0xF44B0A8D, a.vst1_32({d16-d17}, mem[r11]++));
  CHECK_ENCODING(0xF4CB080F, a.vst1_32({d16[0]}, mem[r11]));
  // The surrounding braces are optional, but they make the call look closer to native assembly.
  CHECK_ENCODING(0xF4CB080F, a.vst1_32(d16[0], mem[r11]));
  CHECK_ENCODING(0xF4CB088F, a.vst1_32(d16[1], mem[r11]));
  EXPECT_ERROR(Error::kInvalidLaneIndex, a.vst1_32(d16[2], mem[r11]));
  CHECK_ENCODING(0xF4C6C80D, a.vst1_32({d28[0]}, mem[r6]++));

  CHECK_ENCODING(0xEC868B04, a.vstm(mem[r6], {d8-d9}));
  CHECK_ENCODING(0xECA7EB02, a.vstm(mem[r7]++, {d14}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vstm(mem[r6], {d8-d28}));
  EXPECT_ERROR(Error::kInvalidRegisterListLength, a.vstm(mem[r6], DRegisterList(d31, 2)));

  CHECK_ENCODING(0xED868A00, a.vstr(s16, mem[r6]));
  CHECK_ENCODING(0xED868A02, a.vstr(s16, mem[r6, 8]));
  CHECK_ENCODING(0xED868AFF, a.vstr(s16, mem[r6, 1020]));
  CHECK_ENCODING(0xED068AFF, a.vstr(s16, mem[r6, -1020]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vstr(s16, MemOperand(r6, 8, AddressingMode::kPostIndexed)));
  EXPECT_ERROR(Error::kInvalidOperand, a.vstr(s16, mem[r6, 1024]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vstr(s16, mem[r6, -1024]));
  EXPECT_ERROR(Error::kInvalidOperand, a.vstr(s16, mem[r6, 1018]));

  ASSERT_EQ(xnn_status_success, xnn_release_code_memory(&b));
}

TEST(AArch32Assembler, Label) {
  xnn_code_buffer b;
  xnn_allocate_code_memory(&b, XNN_DEFAULT_CODE_BUFFER_SIZE);
  Assembler a(&b);

  Label l1;
  a.add(r0, r0, r0);

  // Branch to unbound label.
  auto b1 = a.offset<uint32_t*>();
  a.beq(l1);

  a.add(r1, r1, r1);

  auto b2 = a.offset<uint32_t*>();
  a.bne(l1);

  a.add(r2, r2, r2);

  a.bind(l1);

  // Check that b1 and b2 are both patched after binding l1.
  EXPECT_INSTR(0x0A000002, *b1);
  EXPECT_INSTR(0x1A000000, *b2);
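  // B<cond> stores a signed word offset relative to PC + 8, i.e.
  // imm24 = (target - branch_address - 8) / 4, giving 2 for b1 and 0 for b2.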

  a.add(r0, r1, r2);

  // Branch to bound label.
  auto b3 = a.offset<uint32_t*>();
  a.bhi(l1);
  auto b4 = a.offset<uint32_t*>();
  a.bhs(l1);
  auto b5 = a.offset<uint32_t*>();
  a.blo(l1);
  auto b6 = a.offset<uint32_t*>();
  a.b(l1);

  EXPECT_INSTR(0x8AFFFFFD, *b3);
  EXPECT_INSTR(0x2AFFFFFC, *b4);
  EXPECT_INSTR(0x3AFFFFFB, *b5);
  EXPECT_INSTR(0xEAFFFFFA, *b6);
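  // Backward branches carry negative offsets: 0xFFFFFD is imm24 for -3 words.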

  // Binding a bound label is an error.
  a.bind(l1);
  EXPECT_ERROR(Error::kLabelAlreadyBound, a.bind(l1));

  // Check for bind failure due to too many users of label.
  Label lfail;
  a.reset();
  // An arbitrarily high number of users that we probably won't support.
  for (int i = 0; i < 1000; i++) {
    a.beq(lfail);
  }
  EXPECT_EQ(Error::kLabelHasTooManyUsers, a.error());

  ASSERT_EQ(xnn_status_success, xnn_release_code_memory(&b));
}

TEST(AArch32Assembler, Align) {
  xnn_code_buffer b;
  xnn_allocate_code_memory(&b, XNN_DEFAULT_CODE_BUFFER_SIZE);
  Assembler a(&b);

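  // align(n) pads the buffer out to an n-byte boundary; the expected code
  // sizes below track how much padding each call should add.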
  a.add(r0, r1, r2);
  a.align(4);
  EXPECT_EQ(0, reinterpret_cast<uintptr_t>(a.offset<uint32_t*>()) & 0x3);
  EXPECT_EQ(4, a.code_size_in_bytes());

  a.align(8);
  EXPECT_EQ(0, reinterpret_cast<uintptr_t>(a.offset<uint32_t*>()) & 0x7);
  EXPECT_EQ(8, a.code_size_in_bytes());

  a.add(r0, r1, r2);
  a.align(8);
  EXPECT_EQ(0, reinterpret_cast<uintptr_t>(a.offset<uint32_t*>()) & 0x7);
  EXPECT_EQ(16, a.code_size_in_bytes());

  a.add(r0, r1, r2);
  EXPECT_EQ(20, a.code_size_in_bytes());

  a.align(16);
  EXPECT_EQ(0, reinterpret_cast<uintptr_t>(a.offset<uint32_t*>()) & 0xF);
  EXPECT_EQ(32, a.code_size_in_bytes());

  a.add(r0, r1, r2);
  a.add(r0, r1, r2);
  EXPECT_EQ(40, a.code_size_in_bytes());

  a.align(16);
  EXPECT_EQ(0, reinterpret_cast<uintptr_t>(a.offset<uint32_t*>()) & 0xF);
  EXPECT_EQ(48, a.code_size_in_bytes());

  // Not a power of two.
  EXPECT_ERROR(Error::kInvalidOperand, a.align(6));
  // A power of two, but not a multiple of the instruction size.
  EXPECT_ERROR(Error::kInvalidOperand, a.align(2));

  ASSERT_EQ(xnn_status_success, xnn_release_code_memory(&b));
}

TEST(AArch32Assembler, CoreRegisterList) {
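  // A CoreRegisterList converts to a 16-bit mask with one bit per register,
  // matching the register-list field of PUSH/POP/LDM/STM.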
  EXPECT_EQ(0x3, CoreRegisterList({r0, r1}));
  EXPECT_EQ(0xFC00, CoreRegisterList({r10, r11, r12, r13, r14, r15}));

  EXPECT_FALSE(CoreRegisterList({}).has_more_than_one_register());
  EXPECT_FALSE(CoreRegisterList({r0}).has_more_than_one_register());
  EXPECT_FALSE(CoreRegisterList({r1}).has_more_than_one_register());
  EXPECT_TRUE(CoreRegisterList({r0, r1}).has_more_than_one_register());
}

TEST(AArch32Assembler, ConsecutiveRegisterList) {
  SRegisterList s_list_1 = SRegisterList(s0, s9);
  EXPECT_EQ(s_list_1.start, s0);
  EXPECT_EQ(s_list_1.length, 10);

  SRegisterList s_list_2 = {s4 - s11};
  EXPECT_EQ(s_list_2.start, s4);
  EXPECT_EQ(s_list_2.length, 8);

  DRegisterList d_list_1 = DRegisterList(d4, d5);
  EXPECT_EQ(d_list_1.start, d4);
  EXPECT_EQ(d_list_1.length, 2);

  DRegisterList d_list_2 = {d4 - d11};
  EXPECT_EQ(d_list_2.start, d4);
  EXPECT_EQ(d_list_2.length, 8);

  QRegisterList q_list_1 = {q3 - q3};
  EXPECT_EQ(q_list_1.start, q3);
  EXPECT_EQ(q_list_1.length, 1);

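  // Each Q register aliases two D registers: qN covers d(2N) and d(2N+1),
  // so a QRegisterList converts to a DRegisterList of twice the length.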
  DRegisterList d_from_q_1 = static_cast<DRegisterList>(q_list_1);
  EXPECT_EQ(d_from_q_1.start, d6);
  EXPECT_EQ(d_from_q_1.length, 2);

  QRegisterList q_list_2 = {q4 - q9};
  EXPECT_EQ(q_list_2.start, q4);
  EXPECT_EQ(q_list_2.length, 6);

  DRegisterList d_from_q_2 = static_cast<DRegisterList>(q_list_2);
  EXPECT_EQ(d_from_q_2.start, d8);
  EXPECT_EQ(d_from_q_2.length, 12);
}

TEST(AArch32Assembler, MemOperand) {
  EXPECT_EQ(MemOperand(r0, 4, AddressingMode::kOffset), (mem[r0, 4]));
}

TEST(AArch32Assembler, DRegisterLane) {
  EXPECT_EQ((DRegisterLane{2, 0}), d2[0]);
  EXPECT_EQ((DRegisterLane{2, 1}), d2[1]);
}

TEST(AArch32Assembler, CodeBufferOverflow) {
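  // A 4-byte buffer fits exactly one instruction; emitting a second one must
  // fail with kOutOfMemory instead of writing past the end.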
  xnn_code_buffer b;
  xnn_allocate_code_memory(&b, 4);
  Assembler a(&b);
  a.add(r0, r0, 2);
  EXPECT_EQ(Error::kNoError, a.error());

  a.bx(lr);
  EXPECT_EQ(Error::kOutOfMemory, a.error());

  ASSERT_EQ(xnn_status_success, xnn_release_code_memory(&b));
}

#if XNN_ARCH_ARM && XNN_PLATFORM_JIT
TEST(AArch32Assembler, JitAllocCodeBuffer) {
  typedef uint32_t (*Func)(uint32_t);

  xnn_code_buffer b;
  xnn_allocate_code_memory(&b, XNN_DEFAULT_CODE_BUFFER_SIZE);

  Assembler a(&b);
  a.add(r0, r0, 2);
  a.bx(lr);

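  // The generated code adds 2 to the first argument (r0) and returns, so
  // calling the finalized buffer with 1 should yield 3.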
  Func fn = reinterpret_cast<Func>(a.finalize());

  ASSERT_EQ(3, fn(1));

  ASSERT_EQ(xnn_status_success, xnn_release_code_memory(&b));
}
#endif  // XNN_ARCH_ARM && XNN_PLATFORM_JIT
}  // namespace aarch32
}  // namespace xnnpack