/external/vixl/test/aarch32/ |
D | test-utils-aarch32.cc | 46 VIXL_STATIC_ASSERT(sizeof(dump_.d_[0]) == kDRegSizeInBytes); in Dump()
D | test-utils-aarch32.cc | 76 MemOperand(dump_base, d_offset + (i * kDRegSizeInBytes))); in Dump()
|
/external/vixl/test/aarch64/ |
D | test-abi.cc | 103 CHECK_NEXT_PARAMETER_MEM(double, MemOperand(sp, 16), kDRegSizeInBytes); in TEST()
|
D | test-utils-aarch64.h | 97 VIXL_ASSERT(sizeof(dump_.d_[0]) == kDRegSizeInBytes); in RegisterDump()
|
D | test-utils-aarch64.cc | 684 DumpRegisters<VRegister>(masm, dump_base, d_offset, kDRegSizeInBytes); in Dump()
|
D | test-assembler-fp-aarch64.cc | 4652 __ Str(d0, MemOperand(x0, fbits * kDRegSizeInBytes)); in TestUScvtfHelper()
D | test-assembler-fp-aarch64.cc | 4653 __ Str(d1, MemOperand(x1, fbits * kDRegSizeInBytes)); in TestUScvtfHelper()
D | test-assembler-fp-aarch64.cc | 4654 __ Str(d2, MemOperand(x2, fbits * kDRegSizeInBytes)); in TestUScvtfHelper()
D | test-assembler-fp-aarch64.cc | 4655 __ Str(d3, MemOperand(x3, fbits * kDRegSizeInBytes)); in TestUScvtfHelper()
D | test-assembler-fp-aarch64.cc | 4663 __ Str(d0, MemOperand(x0, fbits * kDRegSizeInBytes)); in TestUScvtfHelper()
D | test-assembler-fp-aarch64.cc | 4664 __ Str(d1, MemOperand(x1, fbits * kDRegSizeInBytes)); in TestUScvtfHelper()
|
D | test-assembler-aarch64.cc | 7808 __ PeekCPURegList(list_d_2, 2 * kDRegSizeInBytes); in TEST()
D | test-assembler-aarch64.cc | 7810 __ PeekSRegList(s16.GetBit() | s17.GetBit(), 3 * kDRegSizeInBytes); in TEST()
D | test-assembler-aarch64.cc | 7895 size_stored += 4 * kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 7900 size_stored += 4 * kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 7910 size_stored += 4 * kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 11297 offset += 2 * kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 11325 offset += kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 11383 offset += kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 11517 preindex = 2 * kDRegSizeInBytes; in TEST()
D | test-assembler-aarch64.cc | 11549 preindex = kDRegSizeInBytes; in TEST() [all …]
|
D | test-assembler-sve-aarch64.cc | 330 __ Dup(v13.V8B(), b13, kDRegSizeInBytes); in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 7157 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 7194 (10 * vl) + (6 * kDRegSizeInBytes), in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 7339 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 7509 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 7706 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 7916 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 8158 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 8416 int vl_d = vl / kDRegSizeInBytes; in TEST_SVE()
D | test-assembler-sve-aarch64.cc | 10566 uint64_t dlanes = config->sve_vl_in_bytes() / kDRegSizeInBytes; in TEST_SVE() [all …]
|
D | test-api-aarch64.cc | 586 VIXL_CHECK(p14.VnD().GetLaneSizeInBytes() == kDRegSizeInBytes); in TEST()
|
D | test-assembler-neon-aarch64.cc | 2683 uint8_t src[14 * kDRegSizeInBytes]; in TEST()
D | test-assembler-neon-aarch64.cc | 2740 uint8_t src[64 + 14 * kDRegSizeInBytes]; in TEST()
|
D | test-trace-aarch64.cc | 3063 for (unsigned lane = 0; lane < (vl_in_bytes / kDRegSizeInBytes); lane++) { in TraceTestHelper()
|
/external/vixl/src/aarch64/ |
D | assembler-sve-aarch64.cc | 208 ((zm.GetLaneSizeInBytes() == kDRegSizeInBytes) && in asr()
D | assembler-sve-aarch64.cc | 209 (zd.GetLaneSizeInBytes() != kDRegSizeInBytes))); in asr()
D | assembler-sve-aarch64.cc | 283 ((zm.GetLaneSizeInBytes() == kDRegSizeInBytes) && in lsl()
D | assembler-sve-aarch64.cc | 284 (zd.GetLaneSizeInBytes() != kDRegSizeInBytes))); in lsl()
D | assembler-sve-aarch64.cc | 340 ((zm.GetLaneSizeInBytes() == kDRegSizeInBytes) && in lsr()
D | assembler-sve-aarch64.cc | 341 (zd.GetLaneSizeInBytes() != kDRegSizeInBytes))); in lsr()
D | assembler-sve-aarch64.cc | 402 VIXL_ASSERT(zd.GetLaneSizeInBytes() != kDRegSizeInBytes); in asr()
D | assembler-sve-aarch64.cc | 419 VIXL_ASSERT(zd.GetLaneSizeInBytes() != kDRegSizeInBytes); in lsl()
D | assembler-sve-aarch64.cc | 436 VIXL_ASSERT(zd.GetLaneSizeInBytes() != kDRegSizeInBytes); in lsr()
D | assembler-sve-aarch64.cc | 1685 case kDRegSizeInBytes: in fcvt() [all …]
|
D | instructions-aarch64.h | 73 const unsigned kDRegSizeInBytes = kDRegSize / 8; variable
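A minimal sketch of what this definition works out to, assuming the usual VIXL value kDRegSize = 64 (an AArch64 D register holds one 64-bit value); only kDRegSize and kDRegSizeInBytes above are taken from the header, the literal 8-byte result is inferred:

    // Sketch only, assuming kDRegSize == 64 as in instructions-aarch64.h.
    constexpr unsigned kDRegSize = 64;                    // bits (assumed value)
    constexpr unsigned kDRegSizeInBytes = kDRegSize / 8;  // 8 bytes
    static_assert(kDRegSizeInBytes == 8, "a D register is 8 bytes wide");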
|
D | instructions-aarch64.cc | 852 VIXL_STATIC_ASSERT(kXRegSizeInBytes == kDRegSizeInBytes); in CalcLSPairDataSize()
|
D | simulator-aarch64.h | 1512 (sizeof(T) == kSRegSizeInBytes) || (sizeof(T) == kDRegSizeInBytes) ||
D | simulator-aarch64.h | 1621 (sizeof(value) == kDRegSizeInBytes) ||
D | simulator-aarch64.h | 2109 case kDRegSizeInBytes:
D | simulator-aarch64.h | 2121 (GetPrintRegLaneSizeInBytes(format) == kDRegSizeInBytes)) {
D | simulator-aarch64.h | 2137 VIXL_STATIC_ASSERT(sizeof(value) == kDRegSizeInBytes);
|
D | simulator-aarch64.cc | 483 VIXL_ASSERT((GetVectorLengthInBytes() % kDRegSizeInBytes) == 0); in ResetVRegisters()
D | simulator-aarch64.cc | 484 int lane_count = GetVectorLengthInBytes() / kDRegSizeInBytes; in ResetVRegisters()
D | simulator-aarch64.cc | 1042 case kDRegSizeInBytes: in GetPrintRegisterFormatForSize()
D | simulator-aarch64.cc | 1055 case kDRegSizeInBytes: in GetPrintRegisterFormatForSize()
D | simulator-aarch64.cc | 1069 VIXL_STATIC_ASSERT(kXRegSizeInBytes == kDRegSizeInBytes); in GetPrintRegisterFormatForSize()
D | simulator-aarch64.cc | 1256 case kDRegSizeInBytes: { in PrintRegisterValueFPAnnotations()
D | simulator-aarch64.cc | 12207 instr->ExtractSignedBits(19, 16) * dwords * kDRegSizeInBytes; in VisitSVELoadAndBroadcastQOWord_ScalarPlusImm()
D | simulator-aarch64.cc | 12212 ld1(kFormatVnD, zt, i, addr + offset + (i * kDRegSizeInBytes)); in VisitSVELoadAndBroadcastQOWord_ScalarPlusImm()
|
D | assembler-aarch64.cc | 138 case kDRegSizeInBytes: in place()
D | assembler-aarch64.cc | 6236 case kDRegSizeInBytes: in StorePairOpFor()
D | assembler-aarch64.cc | 6265 case kDRegSizeInBytes: in StorePairNonTemporalOpFor()
D | assembler-aarch64.cc | 6291 case kDRegSizeInBytes: in LoadLiteralOpFor()
|
D | assembler-aarch64.h | 196 VIXL_STATIC_ASSERT(kDRegSizeInBytes == kXRegSizeInBytes); in GetSize()
D | assembler-aarch64.h | 350 case kDRegSizeInBytes: in RewriteValueInCode()
|
D | disasm-aarch64.cc | 6197 imm *= (instr->GetNEONQ() == 0) ? kDRegSizeInBytes in SubstituteRegisterField()
|
D | macro-assembler-aarch64.h | 4862 kDRegSizeInBytes); in Ld1rd()
|
/external/vixl/src/aarch32/ |
D | constants-aarch32.h | 53 const unsigned kDRegSizeInBytes = kDRegSizeInBits / 8; variable
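The AArch32 front end derives the same 8-byte value from kDRegSizeInBits; a minimal sketch, assuming kDRegSizeInBits == 64 (NEON/VFP D registers are 64 bits wide):

    // Sketch only, assuming kDRegSizeInBits == 64 as in constants-aarch32.h.
    constexpr unsigned kDRegSizeInBits = 64;                    // bits (assumed value)
    constexpr unsigned kDRegSizeInBytes = kDRegSizeInBits / 8;  // 8 bytes
    static_assert(kDRegSizeInBytes == 8, "matches the AArch64 definition");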
|