Searched refs:kBRegSizeInBytesLog2 (Results 1 – 9 of 9) sorted by relevance
/external/vixl/src/aarch64/
D | instructions-aarch64.cc
    628   case kBRegSizeInBytesLog2: {    in GetSVEImmLogical()
    704   return kBRegSizeInBytesLog2;    in GetSVEBitwiseImmLaneSizeInBytesLog2()

D | macro-assembler-sve-aarch64.cc
    1307  SVELoadStore1Helper(kBRegSizeInBytesLog2,    in Ld1b()
    1351  SVELoadStore1Helper(kBRegSizeInBytesLog2,    in Ld1sb()
    1384  SVELoadStore1Helper(kBRegSizeInBytesLog2,    in St1b()
    1428  SVELoadFFHelper(kBRegSizeInBytesLog2,        in Ldff1b()
    1472  SVELoadFFHelper(kBRegSizeInBytesLog2,        in Ldff1sb()

D | registers-aarch64.h
    461   return kBRegSizeInBytesLog2;    in DecodeSizeInBytesLog2()

D | instructions-aarch64.h
    54    const unsigned kBRegSizeInBytesLog2 = kBRegSizeLog2 - 3;    variable

D | disasm-aarch64.cc
    4432  if (instr->GetSVESize() == kBRegSizeInBytesLog2) {              in VisitSVEFPUnaryOp()
    5631  if ((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&    in Disassemble_ZdT_ZnTb()
    5666  if ((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&    in DisassembleSVEShiftLeftImm()
    5679  if ((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&    in DisassembleSVEShiftRightImm()

D | assembler-sve-aarch64.cc
    4161  SVELd1BroadcastHelper(kBRegSizeInBytesLog2, zt, pg, addr, false);    in ld1rb()
    4204  SVELd1BroadcastHelper(kBRegSizeInBytesLog2, zt, pg, addr, true);     in ld1rsb()
    4612  VIXL_ASSERT(addr.GetShiftAmount() == kBRegSizeInBytesLog2);          in SVEContiguousPrefetchScalarPlusVectorHelper()

D | simulator-aarch64.h
    844   VIXL_ASSERT(msize_in_bytes_log2 >= static_cast<int>(kBRegSizeInBytesLog2));    in SetMsizeInBytesLog2()

D | simulator-aarch64.cc
    2496  VIXL_ASSERT((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&    in SimulateSVENarrow()

/external/vixl/test/aarch64/
D | test-assembler-sve-aarch64.cc
    7923  int64_t offset = -(1 << kBRegSizeInBytesLog2) * vl;    in TEST_SVE()
    8423  int64_t offset = -(1 << kBRegSizeInBytesLog2) * vl;    in TEST_SVE()

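Note: the instructions-aarch64.h:54 hit above is the definition of the constant. Below is a minimal standalone C++ sketch, not vixl source; the kBRegSize and kBRegSizeLog2 values are assumptions mirroring the usual vixl convention that a B register/lane is 8 bits, used only to show why kBRegSizeInBytesLog2 evaluates to 0.

#include <cstdio>

// Assumed values (not taken from the search results): a B lane is 8 bits.
const unsigned kBRegSize = 8;                             // bits
const unsigned kBRegSizeLog2 = 3;                         // log2(8)
// Definition as in the instructions-aarch64.h hit above:
const unsigned kBRegSizeInBytesLog2 = kBRegSizeLog2 - 3;  // 3 - 3 == 0

int main() {
  // A B lane is 1 byte, so its log2 byte size is 0; this is the smallest
  // lane-size value the SVE load/store helpers listed above are called with.
  std::printf("B lane: %u bits, %u byte(s), log2(bytes) = %u\n",
              kBRegSize, 1u << kBRegSizeInBytesLog2, kBRegSizeInBytesLog2);
  return 0;
}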