// Copyright 2019, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may
//     be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
// Test infrastructure.
//
// Tests are functions which accept no parameters and have no return values.
// The testing code should not perform an explicit return once completed.
// For example, to test the mov immediate instruction a very simple test
// would be:
//
//   TEST(mov_x0_one) {
//     SETUP();
//
//     START();
//     __ mov(x0, 1);
//     END();
//
//     if (CAN_RUN()) {
//       RUN();
//
//       ASSERT_EQUAL_64(1, x0);
//     }
//   }
//
// Within a START ... END block all registers but sp can be modified. sp has
// to be explicitly saved/restored. The END() macro replaces the function
// return so it may appear multiple times in a test if the test has multiple
// exit points.
//
// Tests requiring specific CPU features should specify exactly what they
// require using SETUP_WITH_FEATURES(...) instead of SETUP().
//
// Once the test has been run all integer and floating point registers as
// well as flags are accessible through a RegisterDump instance, see
// utils-aarch64.cc for more info on RegisterDump.
//
// We provide some helper asserts to handle common cases:
//
//   ASSERT_EQUAL_32(int32_t, int32_t)
//   ASSERT_EQUAL_FP32(float, float)
//   ASSERT_EQUAL_32(int32_t, W register)
//   ASSERT_EQUAL_FP32(float, S register)
//   ASSERT_EQUAL_64(int64_t, int64_t)
//   ASSERT_EQUAL_FP64(double, double)
//   ASSERT_EQUAL_64(int64_t, X register)
//   ASSERT_EQUAL_64(X register, X register)
//   ASSERT_EQUAL_FP64(double, D register)
//
// e.g. ASSERT_EQUAL_FP64(0.5, d30);
// (per the list above, FP64 is the form that takes a double and a D register)
//
// If more advanced computation is required before the assert then access the
// RegisterDump named core directly:
//
//   ASSERT_EQUAL_64(0x1234, core->reg_x0() & 0xffff);

namespace vixl {
namespace aarch64 {

// Within a test body, `__ insn(...)` expands to `masm.insn(...)`, so tests
// read like assembly listings.
#define __ masm.
// Register a test with the harness, prefixing the name to avoid clashes with
// tests for other architectures.
#define TEST(name) TEST_(AARCH64_ASM_##name)

#ifdef VIXL_INCLUDE_SIMULATOR_AARCH64
// Run tests with the simulator.

// Declare a MacroAssembler and a default Simulator, both starting with no
// optional CPU features enabled.
#define SETUP()        \
  MacroAssembler masm; \
  SETUP_COMMON();      \
  SETUP_COMMON_SIM()

// As SETUP(), but enable exactly the listed CPUFeatures on both the
// assembler (so it accepts the instructions) and the simulator (so it
// executes, and records, them).
#define SETUP_WITH_FEATURES(...)                 \
  MacroAssembler masm;                           \
  SETUP_COMMON();                                \
  SETUP_COMMON_SIM();                            \
  masm.SetCPUFeatures(CPUFeatures(__VA_ARGS__)); \
  simulator.SetCPUFeatures(CPUFeatures(__VA_ARGS__))

// As SETUP(), but grow the code buffer by `size` bytes and control
// position-independence via `pic`.
#define SETUP_CUSTOM(size, pic)                                  \
  MacroAssembler masm(size + CodeBuffer::kDefaultCapacity, pic); \
  SETUP_COMMON();                                                \
  SETUP_COMMON_SIM()

// As SETUP(), but forward extra constructor arguments to the Simulator
// instead of using the default-constructed one from SETUP_COMMON_SIM().
#define SETUP_CUSTOM_SIM(...)                                   \
  MacroAssembler masm;                                          \
  SETUP_COMMON();                                               \
  Simulator simulator(&simulator_decoder, stdout, __VA_ARGS__); \
  simulator.SetColouredTrace(Test::coloured_trace());           \
  simulator.SetCPUFeatures(CPUFeatures::None())

// Locals shared by every simulator-build SETUP* variant: bookkeeping flags,
// the decoder the simulator attaches to, the register dump the asserts read,
// and the offsets delimiting the test's own code within the buffer.
#define SETUP_COMMON()                                                   \
  bool queried_can_run = false;                                          \
  bool printed_sve_lane_warning = false;                                 \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */ \
  USE(queried_can_run);                                                  \
  USE(printed_sve_lane_warning);                                         \
  masm.SetCPUFeatures(CPUFeatures::None());                              \
  masm.SetGenerateSimulatorCode(true);                                   \
  Decoder simulator_decoder;                                             \
  RegisterDump core;                                                     \
  ptrdiff_t offset_after_infrastructure_start;                           \
  ptrdiff_t offset_before_infrastructure_end

// Default Simulator construction, shared by SETUP(), SETUP_WITH_FEATURES()
// and SETUP_CUSTOM().
#define SETUP_COMMON_SIM()                            \
  Simulator simulator(&simulator_decoder);            \
  simulator.SetColouredTrace(Test::coloured_trace()); \
  simulator.SetCPUFeatures(CPUFeatures::None())

// Reset the assembler and simulator, emit the prologue (push callee-saved
// registers, optionally enable simulator tracing), then record where the
// test's own code begins so DISASSEMBLE() can separate it from the
// infrastructure.
#define START()                                                               \
  masm.Reset();                                                               \
  simulator.ResetState();                                                     \
  {                                                                           \
    SimulationCPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);        \
    __ PushCalleeSavedRegisters();                                            \
  }                                                                           \
  {                                                                           \
    int trace_parameters = 0;                                                 \
    if (Test::trace_reg()) trace_parameters |= LOG_STATE;                     \
    if (Test::trace_write()) trace_parameters |= LOG_WRITE;                   \
    if (Test::trace_sim()) trace_parameters |= LOG_DISASM;                    \
    if (Test::trace_branch()) trace_parameters |= LOG_BRANCH;                 \
    if (trace_parameters != 0) {                                              \
      __ Trace(static_cast<TraceParameters>(trace_parameters), TRACE_ENABLE); \
    }                                                                         \
  }                                                                           \
  offset_after_infrastructure_start = masm.GetCursorOffset();                 \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */      \
  USE(offset_after_infrastructure_start)

// Record where the test's code ends, then emit the epilogue: disable
// tracing, dump the architectural state into `core` (which the ASSERT_*
// macros inspect), pop callee-saved registers, return, and finalize the
// buffer.
#define END()                                                            \
  offset_before_infrastructure_end = masm.GetCursorOffset();             \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */ \
  USE(offset_before_infrastructure_end);                                 \
  __ Trace(LOG_ALL, TRACE_DISABLE);                                      \
  {                                                                      \
    SimulationCPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);   \
    core.Dump(&masm);                                                    \
    __ PopCalleeSavedRegisters();                                        \
  }                                                                      \
  __ Ret();                                                              \
  masm.FinalizeCode()

// Execute the generated code in the simulator, then verify that the test
// actually exercised every CPU feature it requested.
#define RUN()                                                                  \
  RUN_WITHOUT_SEEN_FEATURE_CHECK();                                            \
  {                                                                            \
    /* We expect the test to use all of the features it requested, plus the */ \
    /* features that the infrastructure code requires. */                      \
    CPUFeatures const& expected =                                              \
        simulator.GetCPUFeatures()->With(CPUFeatures::kNEON);                  \
    CPUFeatures const& seen = simulator.GetSeenFeatures();                     \
    /* This gives three broad categories of features that we care about: */    \
    /*   1. Things both expected and seen. */                                  \
    /*   2. Things seen, but not expected. The simulator catches these. */     \
    /*   3. Things expected, but not seen. We check these here. */             \
    /* In a valid, passing test, categories 2 and 3 should be empty. */        \
    if (seen != expected) {                                                    \
      /* The Simulator should have caught anything in category 2 already. */   \
      VIXL_ASSERT(expected.Has(seen));                                         \
      /* Anything left is category 3: things expected, but not seen. This */   \
      /* is not necessarily a bug in VIXL itself, but indicates that the */    \
      /* test is less strict than it could be. */                              \
      CPUFeatures missing = expected.Without(seen);                            \
      VIXL_ASSERT(missing.Count() > 0);                                        \
      std::cout << "Error: expected to see CPUFeatures { " << missing          \
                << " }\n";                                                     \
      VIXL_ABORT();                                                            \
    }                                                                          \
  }

// Execute the generated code without the seen-feature strictness check.
// RUN() is preferred; use this only when a test legitimately cannot
// guarantee it touches every requested feature.
#define RUN_WITHOUT_SEEN_FEATURE_CHECK() \
  DISASSEMBLE();                         \
  VIXL_ASSERT(QUERIED_CAN_RUN());        \
  VIXL_ASSERT(CAN_RUN());                \
  simulator.RunFrom(masm.GetBuffer()->GetStartAddress<Instruction*>())

#else  // ifdef VIXL_INCLUDE_SIMULATOR_AARCH64.

// Native-execution variants: no Simulator or Decoder; the generated code
// runs directly on the host CPU.
#define SETUP()        \
  MacroAssembler masm; \
  SETUP_COMMON()

// As SETUP(), but enable the listed CPUFeatures on the assembler. Natively,
// CanRun() decides whether the host actually supports them.
#define SETUP_WITH_FEATURES(...) \
  MacroAssembler masm;           \
  SETUP_COMMON();                \
  masm.SetCPUFeatures(CPUFeatures(__VA_ARGS__))

// As SETUP(), but grow the code buffer by `size` bytes and control
// position-independence via `pic`.
#define SETUP_CUSTOM(size, pic)                             \
  size_t buffer_size = size + CodeBuffer::kDefaultCapacity; \
  MacroAssembler masm(buffer_size, pic);                    \
  SETUP_COMMON()

// Locals shared by every native-build SETUP* variant. Mirrors the simulator
// version, but generates native (not simulator) code and initializes the
// host CPU support code instead of a Decoder.
#define SETUP_COMMON()                                                   \
  bool queried_can_run = false;                                          \
  bool printed_sve_lane_warning = false;                                 \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */ \
  USE(queried_can_run);                                                  \
  USE(printed_sve_lane_warning);                                         \
  masm.SetCPUFeatures(CPUFeatures::None());                              \
  masm.SetGenerateSimulatorCode(false);                                  \
  RegisterDump core;                                                     \
  CPU::SetUp();                                                          \
  ptrdiff_t offset_after_infrastructure_start;                           \
  ptrdiff_t offset_before_infrastructure_end

// Native prologue: reset the assembler, push callee-saved registers, and
// record where the test's own code begins.
#define START()                                                          \
  masm.Reset();                                                          \
  {                                                                      \
    CPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);             \
    __ PushCalleeSavedRegisters();                                       \
  }                                                                      \
  offset_after_infrastructure_start = masm.GetCursorOffset();            \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */ \
  USE(offset_after_infrastructure_start)

// Native epilogue: record where the test's code ends, dump state into
// `core`, pop callee-saved registers, return and finalize.
#define END()                                                            \
  offset_before_infrastructure_end = masm.GetCursorOffset();             \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */ \
  USE(offset_before_infrastructure_end);                                 \
  {                                                                      \
    CPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);             \
    core.Dump(&masm);                                                    \
    __ PopCalleeSavedRegisters();                                        \
  }                                                                      \
  __ Ret();                                                              \
  masm.FinalizeCode()

// Execute the generated code from the memory area.
// The buffer is flipped to executable for the call and back to writable
// afterwards (W^X-style protection).
#define RUN()                                                  \
  DISASSEMBLE();                                               \
  VIXL_ASSERT(QUERIED_CAN_RUN());                              \
  VIXL_ASSERT(CAN_RUN());                                      \
  masm.GetBuffer()->SetExecutable();                           \
  ExecuteMemory(masm.GetBuffer()->GetStartAddress<byte*>(),    \
                masm.GetSizeOfCodeGenerated());                \
  masm.GetBuffer()->SetWritable()

// This just provides compatibility with VIXL_INCLUDE_SIMULATOR_AARCH64 builds.
// We cannot run seen-feature checks when running natively.
#define RUN_WITHOUT_SEEN_FEATURE_CHECK() RUN()

#endif  // ifdef VIXL_INCLUDE_SIMULATOR_AARCH64.

// True if the current environment can run code needing the assembler's CPU
// features; also records that the test asked, which RUN() asserts on so that
// no test runs unguarded.
#define CAN_RUN() CanRun(*masm.GetCPUFeatures(), &queried_can_run)
#define QUERIED_CAN_RUN() (queried_can_run)

// If --disassemble* was passed, print the test code (and optionally the
// prologue/epilogue emitted by START()/END()) to stdout.
#define DISASSEMBLE()                                                     \
  if (Test::disassemble()) {                                              \
    PrintDisassembler disasm(stdout);                                     \
    CodeBuffer* buffer = masm.GetBuffer();                                \
    Instruction* start = buffer->GetOffsetAddress<Instruction*>(          \
        offset_after_infrastructure_start);                               \
    Instruction* end = buffer->GetOffsetAddress<Instruction*>(            \
        offset_before_infrastructure_end);                                \
                                                                          \
    if (Test::disassemble_infrastructure()) {                             \
      Instruction* infra_start = buffer->GetStartAddress<Instruction*>(); \
      printf("# Infrastructure code (prologue)\n");                       \
      disasm.DisassembleBuffer(infra_start, start);                       \
      printf("# Test code\n");                                            \
    } else {                                                              \
      printf(                                                             \
          "# Warning: Omitting infrastructure code. "                     \
          "Use --disassemble to see it.\n");                              \
    }                                                                     \
                                                                          \
    disasm.DisassembleBuffer(start, end);                                 \
                                                                          \
    if (Test::disassemble_infrastructure()) {                             \
      printf("# Infrastructure code (epilogue)\n");                       \
      Instruction* infra_end = buffer->GetEndAddress<Instruction*>();     \
      disasm.DisassembleBuffer(end, infra_end);                           \
    }                                                                     \
  }

// The ASSERT_* macros below compare an expected value against the state
// captured in `core` by END(); see utils-aarch64.cc for the Equal* helpers.

#define ASSERT_EQUAL_NZCV(expected) \
  VIXL_CHECK(EqualNzcv(expected, core.flags_nzcv()))

#define ASSERT_EQUAL_REGISTERS(expected) \
  VIXL_CHECK(EqualRegisters(&expected, &core))

#define ASSERT_EQUAL_FP16(expected, result) \
  VIXL_CHECK(EqualFP16(expected, &core, result))

#define ASSERT_EQUAL_32(expected, result) \
  VIXL_CHECK(Equal32(static_cast<uint32_t>(expected), &core, result))

#define ASSERT_EQUAL_FP32(expected, result) \
  VIXL_CHECK(EqualFP32(expected, &core, result))

#define ASSERT_EQUAL_64(expected, result) \
  VIXL_CHECK(Equal64(expected, &core, result))

#define ASSERT_NOT_EQUAL_64(expected, result) \
  VIXL_CHECK(NotEqual64(expected, &core, result))

#define ASSERT_EQUAL_FP64(expected, result) \
  VIXL_CHECK(EqualFP64(expected, &core, result))

#define ASSERT_EQUAL_128(expected_h, expected_l, result) \
  VIXL_CHECK(Equal128(expected_h, expected_l, &core, result))

// NOTE(review): the extra kInstructionSize presumably accounts for an
// instruction emitted alongside the pool (e.g. the branch over it) — confirm
// against MacroAssembler::GetLiteralPoolSize().
#define ASSERT_LITERAL_POOL_SIZE(expected) \
  VIXL_CHECK((expected + kInstructionSize) == (masm.GetLiteralPoolSize()))

// NOTE(review): the trailing ';' makes this expand to two statements when
// invoked as `ASSERT_EQUAL_SVE_LANE(...);`, which would break an unbraced
// if/else — kept as-is for source compatibility.
#define ASSERT_EQUAL_SVE_LANE(expected, result, lane) \
  VIXL_CHECK(EqualSVELane(expected, &core, result, lane));

// If `expected` is scalar, check that every lane of `result` matches it.
// If `expected` is an array of N expected values, check that the first N
// lanes on `result` match. The rightmost (highest-indexed) array element maps
// to the lowest-numbered lane.
#define ASSERT_EQUAL_SVE(expected, result) \
  VIXL_CHECK(EqualSVE(expected, &core, result, &printed_sve_lane_warning))

#define ASSERT_EQUAL_MEMORY(expected, result, ...)         \
  VIXL_CHECK(EqualMemory(reinterpret_cast<void*>(expected), \
                         reinterpret_cast<void*>(result),   \
                         __VA_ARGS__))

// Run `code` and require that it throws a std::runtime_error whose message
// starts with `message`. NOTE(review): assumes a build where VIXL aborts are
// surfaced as std::runtime_error (negative-testing configuration) — confirm.
#define MUST_FAIL_WITH_MESSAGE(code, message)                   \
  {                                                             \
    bool aborted = false;                                       \
    try {                                                       \
      code;                                                     \
    } catch (const std::runtime_error& e) {                     \
      const char* expected_error = message;                     \
      size_t error_length = strlen(expected_error);             \
      if (strncmp(expected_error, e.what(), error_length) == 0) { \
        aborted = true;                                         \
      } else {                                                  \
        printf("Mismatch in error message.\n");                 \
        printf("Expected: %s\n", expected_error);               \
        printf("Found: %s\n", e.what());                        \
      }                                                         \
    }                                                           \
    VIXL_CHECK(aborted);                                        \
  }

}  // namespace aarch64
}  // namespace vixl