
Searched refs:SVEMemOperand (Results 1 – 13 of 13) sorted by relevance

/external/vixl/test/aarch64/
test-disasm-sve-aarch64.cc
77 COMPARE_PREFIX(adr(z19.VnD(), SVEMemOperand(z22.VnD(), z11.VnD(), SXTW)), in TEST()
79 COMPARE_PREFIX(adr(z19.VnD(), SVEMemOperand(z22.VnD(), z11.VnD(), SXTW, 1)), in TEST()
81 COMPARE_PREFIX(adr(z19.VnD(), SVEMemOperand(z22.VnD(), z11.VnD(), SXTW, 2)), in TEST()
83 COMPARE_PREFIX(adr(z19.VnD(), SVEMemOperand(z22.VnD(), z11.VnD(), SXTW, 3)), in TEST()
85 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW)), in TEST()
87 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 1)), in TEST()
89 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 2)), in TEST()
91 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 3)), in TEST()
93 COMPARE_PREFIX(adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS())), in TEST()
95 COMPARE_PREFIX(adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS(), LSL, 1)), in TEST()
[all …]
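
Note: these disassembly tests drive the SVE ADR (vector address calculation) forms, where both base and offset are Z registers and the offset may be sign- or zero-extended (SXTW/UXTW) and shifted. A minimal sketch of emitting the same forms through the MacroAssembler; the wrapper function and include path are ours, not part of VIXL:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch: SVE ADR computes per-lane addresses entirely in Z registers.
    void EmitAdrExamples(MacroAssembler& masm) {
      // Doubleword lanes: base plus extended offset, shift amounts 0-3.
      masm.Adr(z19.VnD(), SVEMemOperand(z22.VnD(), z11.VnD(), SXTW));
      masm.Adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 3));
      // Word lanes: base plus offset, optionally left-shifted.
      masm.Adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS(), LSL, 1));
    }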

test-api-aarch64.cc
1056 VIXL_CHECK(SVEMemOperand(x0).IsPlainScalar()); in TEST()
1057 VIXL_CHECK(SVEMemOperand(sp).IsPlainScalar()); in TEST()
1058 VIXL_CHECK(SVEMemOperand(x1, 0).IsPlainScalar()); in TEST()
1060 VIXL_CHECK(!SVEMemOperand(x2, xzr).IsPlainScalar()); in TEST()
1061 VIXL_CHECK(!SVEMemOperand(x4, xzr, LSL, 2).IsPlainScalar()); in TEST()
1063 VIXL_CHECK(!SVEMemOperand(x20, 1).IsPlainScalar()); in TEST()
1064 VIXL_CHECK(!SVEMemOperand(x21, x30).IsPlainScalar()); in TEST()
1066 VIXL_CHECK(!SVEMemOperand(x0, z1.VnD()).IsPlainScalar()); in TEST()
1067 VIXL_CHECK(!SVEMemOperand(x2, z3.VnS(), UXTW).IsPlainScalar()); in TEST()
1068 VIXL_CHECK(!SVEMemOperand(z4.VnD(), 0).IsPlainScalar()); in TEST()
[all …]
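
Note: the API tests above probe SVEMemOperand's classification predicates; only a bare X-register base, optionally with a zero immediate, counts as a plain scalar. A standalone sketch of the same checks (the main() harness is ours; VIXL_CHECK and the headers come from VIXL, with include paths per its source layout):

    #include "globals-vixl.h"
    #include "aarch64/operands-aarch64.h"

    using namespace vixl::aarch64;

    int main() {
      // A bare scalar base, with or without a zero immediate, is plain scalar.
      VIXL_CHECK(SVEMemOperand(x0).IsPlainScalar());
      VIXL_CHECK(SVEMemOperand(x1, 0).IsPlainScalar());
      // Any index register, non-zero immediate, or vector operand is not.
      VIXL_CHECK(!SVEMemOperand(x2, xzr).IsPlainScalar());
      VIXL_CHECK(!SVEMemOperand(x20, 1).IsPlainScalar());
      VIXL_CHECK(!SVEMemOperand(x0, z1.VnD()).IsPlainScalar());
      VIXL_CHECK(!SVEMemOperand(z4.VnD(), 0).IsPlainScalar());
      return 0;
    }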

test-assembler-sve-aarch64.cc
137 masm->Ldr(pd, SVEMemOperand(temp)); in Initialise()
5554 __ CalculateSVEAddress(x0, SVEMemOperand(x28)); in TEST_SVE()
5555 __ CalculateSVEAddress(x1, SVEMemOperand(x28, 0)); in TEST_SVE()
5556 __ CalculateSVEAddress(x2, SVEMemOperand(x28, 0, SVE_MUL_VL)); in TEST_SVE()
5557 __ CalculateSVEAddress(x3, SVEMemOperand(x28, 0, SVE_MUL_VL), 3); in TEST_SVE()
5558 __ CalculateSVEAddress(x4, SVEMemOperand(x28, xzr)); in TEST_SVE()
5559 __ CalculateSVEAddress(x5, SVEMemOperand(x28, xzr, LSL, 42)); in TEST_SVE()
5564 __ CalculateSVEAddress(x6, SVEMemOperand(x28, 42)); in TEST_SVE()
5565 __ CalculateSVEAddress(x7, SVEMemOperand(x28, -42)); in TEST_SVE()
5567 __ CalculateSVEAddress(x8, SVEMemOperand(x28, 31, SVE_MUL_VL), 0); in TEST_SVE()
[all …]
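
Note: CalculateSVEAddress resolves an SVEMemOperand into a plain address in an X register. The tests above suggest the optional trailing argument is a log2 divisor applied to the vector length, e.g. 3 when offsets are in units of VL/8, as for predicate registers. A sketch mirroring those calls; the wrapper and the comments are our reading of the tests:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch: materialise SVE effective addresses into X registers.
    void ResolveSVEAddresses(MacroAssembler& masm) {
      // x0 = x28: a plain scalar base resolves to itself.
      masm.CalculateSVEAddress(x0, SVEMemOperand(x28));
      // x2 = x28 + 0 * VL: a vector-length-scaled immediate offset.
      masm.CalculateSVEAddress(x2, SVEMemOperand(x28, 0, SVE_MUL_VL));
      // Trailing 3: offset apparently scaled by VL / (1 << 3), i.e.
      // predicate-register units (our reading of the tests above).
      masm.CalculateSVEAddress(x3, SVEMemOperand(x28, 0, SVE_MUL_VL), 3);
    }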

test-trace-aarch64.cc
2751 __ str(p12.VnD(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2752 __ str(p13.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2753 __ str(p14.VnH(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2754 __ str(p15.VnB(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2755 __ ldr(p8.VnD(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2756 __ ldr(p9.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2757 __ ldr(p10.VnH(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2758 __ ldr(p11.VnB(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2760 __ str(z0.VnD(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
2761 __ str(z1.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
[all …]
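
Note: the trace test spills predicate and vector registers at immediate offsets tagged SVE_MUL_VL, i.e. scaled by the transfer size (VL bytes for a Z register, VL/8 bytes for a P register), so slot 11 for each register type lands eleven transfer-sizes past x0. A sketch of the same pattern via the MacroAssembler overloads; we drop the lane-size suffixes on the assumption that the LDR/STR forms involved carry no element size:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch: spill and reload SVE state at vector-length-scaled offsets.
    void SpillAndReload(MacroAssembler& masm) {
      masm.Str(p12, SVEMemOperand(x0, 11, SVE_MUL_VL));  // slot 11, VL/8 units
      masm.Ldr(p8, SVEMemOperand(x0, 11, SVE_MUL_VL));
      masm.Str(z0, SVEMemOperand(x0, 11, SVE_MUL_VL));   // slot 11, VL units
      masm.Ldr(z1, SVEMemOperand(x0, 11, SVE_MUL_VL));
    }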

test-utils-aarch64.h
537 const SVEMemOperand& addr, in CalculateSVEAddress()
542 void CalculateSVEAddress(const Register& xd, const SVEMemOperand& addr) { in CalculateSVEAddress()

test-utils-aarch64.cc
614 __ Str(reg, SVEMemOperand(dump)); in DumpRegisters()

/external/vixl/src/aarch64/
macro-assembler-sve-aarch64.cc
305 const SVEMemOperand& addr, in CalculateSVEAddress()
1093 const SVEMemOperand& addr, in SVELoadBroadcastImmHelper()
1106 (this->*fn)(zt, pg, SVEMemOperand(scratch)); in SVELoadBroadcastImmHelper()
1111 const SVEMemOperand& addr, in SVELoadStoreScalarImmHelper()
1126 (this->*fn)(rt, SVEMemOperand(addr.GetScalarBase())); in SVELoadStoreScalarImmHelper()
1134 (this->*fn)(rt, SVEMemOperand(scratch)); in SVELoadStoreScalarImmHelper()
1141 const SVEMemOperand& addr, in SVELoadStoreScalarImmHelper()
1162 (this->*fn)(zt, pg, SVEMemOperand(addr.GetScalarBase())); in SVELoadStoreScalarImmHelper()
1177 (this->*fn)(zt, pg, SVEMemOperand(scratch)); in SVELoadStoreScalarImmHelper()
1184 const SVEMemOperand& addr, in SVELoadStore1Helper()
[all …]
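
Note: the helpers above share one fallback pattern: if the operand's immediate fits the instruction's encodable range it is passed through; otherwise the address is computed into a scratch register and the access is reissued as SVEMemOperand(scratch). A sketch of what that means for a caller; the example is ours, and Ld1d's scalar-plus-immediate range of -8 to 7 vector lengths is per the SVE ISA rather than this listing:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch: the MacroAssembler accepts offsets the raw instruction cannot
    // encode. 100 * VL is outside ld1d's -8..7 immediate range, so the
    // scratch-register fallback implemented by the helpers above kicks in.
    void OutOfRangeOffset(MacroAssembler& masm, const PRegisterZ& pg) {
      masm.Ld1d(z0.VnD(), pg, SVEMemOperand(x0, 100, SVE_MUL_VL));
    }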

assembler-aarch64.h
3613 void adr(const ZRegister& zd, const SVEMemOperand& addr);
4528 const SVEMemOperand& addr);
4533 const SVEMemOperand& addr);
4538 const SVEMemOperand& addr);
4543 const SVEMemOperand& addr);
4550 const SVEMemOperand& addr);
4555 const SVEMemOperand& addr);
4560 const SVEMemOperand& addr);
4565 const SVEMemOperand& addr);
4570 const SVEMemOperand& addr);
[all …]

macro-assembler-aarch64.h
3492 void Adr(const ZRegister& zd, const SVEMemOperand& addr) { in Adr()
4803 const SVEMemOperand& addr);
4806 const SVEMemOperand& addr);
4809 const SVEMemOperand& addr);
4812 const SVEMemOperand& addr);
4815 const SVEMemOperand& addr) { in Ld1rb()
4825 const SVEMemOperand& addr) { in Ld1rh()
4835 const SVEMemOperand& addr) { in Ld1rw()
4845 const SVEMemOperand& addr) { in Ld1rd()
4855 const SVEMemOperand& addr);
[all …]
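
Note: Ld1rb/Ld1rh/Ld1rw/Ld1rd are load-and-broadcast forms: a single element is loaded from the scalar-plus-immediate address and replicated into every active lane. A minimal sketch (the wrapper and the offsets are ours):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch: each Ld1r* loads one element from [x0 + imm] and replicates
    // it across the active lanes of the destination.
    void BroadcastLoads(MacroAssembler& masm, const PRegisterZ& pg) {
      masm.Ld1rb(z0.VnB(), pg, SVEMemOperand(x0, 0));  // byte to all B lanes
      masm.Ld1rw(z1.VnS(), pg, SVEMemOperand(x0, 4));  // word to all S lanes
      masm.Ld1rd(z2.VnD(), pg, SVEMemOperand(x0, 8));  // dword to all D lanes
    }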

assembler-sve-aarch64.cc
50 void Assembler::adr(const ZRegister& zd, const SVEMemOperand& addr) { in adr()
3802 const SVEMemOperand& addr, in SVELdSt1Helper()
3816 const SVEMemOperand& addr, in SVELdSt234Helper()
3830 const SVEMemOperand& addr, in SVELd1Helper()
3868 const SVEMemOperand& addr, in SVELdff1Helper()
3893 SVEMemOperand addr_scalar_plus_scalar(addr.GetScalarBase(), xzr); in SVELdff1Helper()
3916 const SVEMemOperand& addr, in SVEScatterGatherHelper()
4024 const SVEMemOperand& addr) { in SVELd234Helper()
4047 const SVEMemOperand& addr) { \
4055 const SVEMemOperand& addr) { \
[all …]
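
Note: line 3893 is worth a look: first-fault loads have no scalar-plus-immediate encoding in SVE, so SVELdff1Helper rewrites a zero-immediate operand as scalar plus scalar with xzr as the index. Under that reading, these two macro calls should produce the same encoding (the example is ours):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch: both calls should emit the scalar-plus-scalar form, since
    // ldff1 has no immediate form and a zero offset is rewritten with xzr.
    void EquivalentFirstFaultLoads(MacroAssembler& masm, const PRegisterZ& pg) {
      masm.Ldff1b(z0.VnB(), pg, SVEMemOperand(x0));
      masm.Ldff1b(z0.VnB(), pg, SVEMemOperand(x0, xzr));
    }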

operands-aarch64.h
474 class SVEMemOperand {
477 explicit SVEMemOperand(ZRegister base, uint64_t offset = 0)
494 explicit SVEMemOperand(Register base,
508 SVEMemOperand(Register base, CPURegister offset) in SVEMemOperand() function
523 SVEMemOperand(Register base, ZRegister offset, M mod) in SVEMemOperand() function
539 SVEMemOperand(Register base, CPURegister offset, M mod, unsigned shift_amount) in SVEMemOperand() function
550 SVEMemOperand(ZRegister base,
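
Note: the constructors above cover SVE's addressing shapes. A hedged enumeration, one operand per shape, verified only with IsValid() since that predicate is confirmed below; the harness and headers are ours:

    #include "globals-vixl.h"
    #include "aarch64/operands-aarch64.h"

    using namespace vixl::aarch64;

    int main() {
      VIXL_CHECK(SVEMemOperand(x0).IsValid());                    // scalar base
      VIXL_CHECK(SVEMemOperand(x0, 16).IsValid());                // scalar + imm
      VIXL_CHECK(SVEMemOperand(x0, 2, SVE_MUL_VL).IsValid());     // imm * VL
      VIXL_CHECK(SVEMemOperand(x0, x1).IsValid());                // scalar + scalar
      VIXL_CHECK(SVEMemOperand(x0, x1, LSL, 3).IsValid());        // shifted index
      VIXL_CHECK(SVEMemOperand(x0, z1.VnD(), SXTW, 2).IsValid()); // gather
      VIXL_CHECK(SVEMemOperand(z2.VnD(), 8).IsValid());           // vector + imm
      return 0;
    }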

operands-aarch64.cc
376 bool SVEMemOperand::IsValid() const { in IsValid()
413 bool SVEMemOperand::IsEquivalentToScalar() const { in IsEquivalentToScalar()
425 bool SVEMemOperand::IsPlainRegister() const { in IsPlainRegister()
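
Note: IsEquivalentToScalar captures operands that behave like a bare scalar base without being one, e.g. an explicit xzr index, matching the checks in test-api-aarch64.cc. A small sketch (the harness is ours):

    #include "globals-vixl.h"
    #include "aarch64/operands-aarch64.h"

    using namespace vixl::aarch64;

    int main() {
      // An xzr index leaves the address equal to the base, so the operand
      // is equivalent to a plain scalar without being one.
      SVEMemOperand op(x2, xzr);
      VIXL_CHECK(op.IsValid());
      VIXL_CHECK(!op.IsPlainScalar());
      VIXL_CHECK(op.IsEquivalentToScalar());
      return 0;
    }
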
/external/vixl/examples/aarch64/
sve-strlen.cc
54 __ Ldff1b(z0.VnB(), all_true.Zeroing(), SVEMemOperand(x0, len)); in GenerateSVEStrlen()
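
Note: the strlen example scans a string one vector at a time without risking a fault past its end: Ldff1b loads as many bytes as it safely can, and the first-fault register (FFR) records which lanes actually loaded. A condensed sketch of the idiom; the register choices, label, and loop skeleton are ours, and sve-strlen.cc has the complete version:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Condensed first-fault scan idiom, after sve-strlen.cc:
    // x0 = string base, x1 = running offset.
    void FirstFaultScanSketch(MacroAssembler& masm) {
      Label loop;
      masm.Mov(x1, 0);
      masm.Ptrue(p0.VnB());
      masm.Setffr();  // mark all lanes "safe" in the FFR
      masm.Bind(&loop);
      // Load up to VL bytes from x0 + x1; lanes that would fault are left
      // unloaded, and the FFR records how far the load actually got.
      masm.Ldff1b(z0.VnB(), p0.Zeroing(), SVEMemOperand(x0, x1));
      masm.Rdffrs(p1.VnB(), p0.Zeroing());  // which lanes loaded?
      // ... search the loaded lanes for '\0', advance x1, branch to loop ...
    }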