/external/vixl/test/aarch64/
D | test-api-movprfx-aarch64.cc
     95 __ asr(z2.VnS(), p2.Merging(), z2.VnS(), z2.VnS()); in TEST()
    106 __ movprfx(z0.VnS(), p6.Zeroing(), z6.VnS()); in TEST()
    107 __ bic(z0.VnS(), p6.Merging(), z0.VnS(), z0.VnS()); in TEST()
    113 __ clastb(z7.VnS(), p7, z7.VnS(), z7.VnS()); in TEST()
    121 __ movprfx(z14.VnS(), p6.Zeroing(), z3.VnS()); in TEST()
    122 __ cnot(z14.VnS(), p6.Merging(), z14.VnS()); in TEST()
    133 __ movprfx(z4.VnS(), p1.Zeroing(), z22.VnS()); in TEST()
    134 __ lsl(z4.VnS(), p1.Merging(), z4.VnS(), z4.VnS()); in TEST()
    157 __ movprfx(z17.VnS(), p4.Zeroing(), z29.VnS()); in TEST()
    158 __ mad(z17.VnS(), p4.Merging(), z17.VnS(), z23.VnS()); in TEST()
    [all …]
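The pairings above from test-api-movprfx appear to be the illegal cases: the movprfx destination shows up again as a source operand of the destructive instruction that follows, which the architecture treats as unpredictable and which VIXL's API checks are expected to reject. For contrast, a minimal sketch of a valid pairing (not taken from the test; EmitPrefixedShift is a hypothetical helper, and the raw lowercase calls are assumed to need an ExactAssemblyScope when going through the MacroAssembler):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl;
    using namespace vixl::aarch64;

    void EmitPrefixedShift(MacroAssembler* masm) {
      // Assumes the caller enabled CPUFeatures::kSVE on this MacroAssembler.
      ExactAssemblyScope scope(masm, 2 * kInstructionSize);
      // Prefix: active lanes of z6.s are copied to z0.s, inactive lanes zeroed.
      masm->movprfx(z0.VnS(), p6.Zeroing(), z6.VnS());
      // Destructive op: same destination, same predicate, same lane size, and
      // z0 appears only as the destructive source, so the pairing is valid.
      masm->lsl(z0.VnS(), p6.Merging(), z0.VnS(), z1.VnS());
    }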
D | test-disasm-sve-aarch64.cc
     53 COMPARE_PREFIX(mla(z1.VnS(), p7.Merging(), z1.VnS(), z0.VnS()), in TEST()
     57 COMPARE_PREFIX(lsr(z1.VnS(), z0.VnS(), 32), "lsr z1.s, z0.s, #32"); in TEST()
     93 COMPARE_PREFIX(adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS())), in TEST()
     95 COMPARE_PREFIX(adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS(), LSL, 1)), in TEST()
     97 COMPARE_PREFIX(adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS(), LSL, 2)), in TEST()
     99 COMPARE_PREFIX(adr(z8.VnS(), SVEMemOperand(z16.VnS(), z16.VnS(), LSL, 3)), in TEST()
    167 COMPARE_PREFIX(dupm(z15.VnS(), 0x7f007f00), "dupm z15.h, #0x7f00"); in TEST()
    175 COMPARE_PREFIX(eon(z31.VnS(), z31.VnS(), 0x1ffe), in TEST()
    181 COMPARE_MACRO(Mov(z11.VnS(), 0xe0000003), "mov z11.s, #0xe0000003"); in TEST()
    190 COMPARE_PREFIX(dupm(z0.VnS(), 0xfe00), "dupm z0.s, #0xfe00"); in TEST()
    [all …]
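COMPARE_PREFIX and COMPARE_MACRO in test-disasm-sve emit one instruction (or one macro expansion) and compare its disassembly with the quoted string, which is where the ".s" suffix chosen by VnS() becomes visible, and where the dupm case above is printed with the smaller repeating element instead. A rough standalone equivalent, hedged: the buffer access and decoder plumbing below is how I recall the VIXL API, not code lifted from the test.

    #include <iostream>

    #include "aarch64/disasm-aarch64.h"
    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl;
    using namespace vixl::aarch64;

    int main() {
      MacroAssembler masm;
      masm.GetCPUFeatures()->Combine(CPUFeatures::kSVE);
      {
        ExactAssemblyScope scope(&masm, kInstructionSize);
        masm.lsr(z1.VnS(), z0.VnS(), 32);  // expect "lsr z1.s, z0.s, #32"
      }
      masm.FinalizeCode();

      // Feed the single emitted instruction through the disassembler.
      Decoder decoder;
      Disassembler disasm;
      decoder.AppendVisitor(&disasm);
      decoder.Decode(masm.GetBuffer()->GetStartAddress<Instruction*>());
      std::cout << disasm.GetOutput() << std::endl;
      return 0;
    }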
D | test-assembler-sve-aarch64.cc
    230 __ Insr(z2.VnS(), -42); // 0xffffffd6 in TEST_SVE()
    231 __ Insr(z2.VnS(), 0xfedcba98); // 0xfedcba98 in TEST_SVE()
    276 Initialise(&masm, p2.VnS(), p2_inputs); in TEST_SVE()
    291 Initialise(&masm, p5.VnS(), p5_inputs); in TEST_SVE()
    321 ASSERT_EQUAL_SVE_LANE(p2_inputs[i], p2.VnS(), lane); in TEST_SVE()
    334 ASSERT_EQUAL_SVE(p5_inputs, p5.VnS()); in TEST_SVE()
    603 __ Index(z0.VnS(), 0x11111110, 1); in TEST_SVE()
    604 __ Lastb(x13, p1, z0.VnS()); in TEST_SVE()
    605 __ Lasta(x14, p2, z0.VnS()); in TEST_SVE()
    606 __ Lastb(x18, p4, z0.VnS()); in TEST_SVE()
    [all …]
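The assembler tests execute the generated code and then check register state, so the lane-size effects of VnS() show up as values: Insr on an .s view sign-extends -42 to the 32-bit lane 0xffffffd6, and Lastb/Lasta read one 32-bit lane back into a general-purpose register. An emission-only sketch using the same macro calls (EmitLaneExample is a hypothetical name):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    void EmitLaneExample(MacroAssembler* masm) {
      // Assumes the caller enabled CPUFeatures::kSVE on this MacroAssembler.
      // z0.s = {0x11111110, 0x11111111, 0x11111112, ...} across the vector.
      masm->Index(z0.VnS(), 0x11111110, 1);
      // Shift all lanes up by one element and insert -42 at lane 0; as a
      // 32-bit lane it reads back as 0xffffffd6.
      masm->Insr(z0.VnS(), -42);
      // x13 = last active 32-bit lane of z0 under p1, zero-extended.
      masm->Lastb(x13, p1, z0.VnS());
    }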
D | test-api-aarch64.cc
    292 VIXL_CHECK(ZRegister(2, kSRegSize).Is(z2.VnS())); in TEST()
    298 VIXL_CHECK(ZRegister(2, kFormatVnS).Is(z2.VnS())); in TEST()
    307 VIXL_CHECK(PRegisterWithLaneSize(2, kSRegSize).Is(p2.VnS())); in TEST()
    313 VIXL_CHECK(PRegisterWithLaneSize(2, kFormatVnS).Is(p2.VnS())); in TEST()
    581 VIXL_CHECK(p14.VnS().GetLaneSizeInBits() == kSRegSize); in TEST()
    585 VIXL_CHECK(p14.VnS().GetLaneSizeInBytes() == kSRegSizeInBytes); in TEST()
    592 VIXL_CHECK(Helper::GetQualification(p6.VnS()) == kWithLaneSize); in TEST()
    648 VIXL_CHECK(!z8.VnS().Is(s8)); in TEST()
    653 VIXL_CHECK(!z8.VnS().Is(v8.S())); in TEST()
    658 VIXL_CHECK(!z8.VnS().Is(z8.S())); in TEST()
    [all …]
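test-api checks that the explicit constructors and the VnS() accessors name the same view (ZRegister(2, kSRegSize) is z2.VnS(), and likewise for predicates), and that the .s lane view of z8 is a different register from the scalar s8 and the NEON v8.S(). The constructor form is mostly useful when the register code is only known at run time; a small hedged sketch (SLaneView is a made-up helper):

    #include "aarch64/registers-aarch64.h"

    using namespace vixl::aarch64;

    // Build the 32-bit lane view of an arbitrary Z register code, equivalent
    // to zN.VnS() for the matching N, per the equivalence checks above.
    ZRegister SLaneView(unsigned code) {
      return ZRegister(code, kSRegSize);
    }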
D | test-trace-aarch64.cc
    2752 __ str(p13.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2756 __ ldr(p9.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2761 __ str(z1.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2765 __ ldr(z21.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2772 __ st1w(z2.VnS(), p1, SVEMemOperand(x0, x3, LSL, 2)); in GenerateTestSequenceSVE()
    2776 __ ld1w(z22.VnS(), p1.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2781 __ st1b(z3.VnS(), p2, SVEMemOperand(x0, 3, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2783 __ st1h(z0.VnS(), p1, SVEMemOperand(x0, 3, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2787 __ ld1b(z21.VnS(), p1.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    2789 __ ld1h(z23.VnS(), p2.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL)); in GenerateTestSequenceSVE()
    [all …]
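The trace test touches both SVE addressing forms used with VnS(): an immediate offset scaled by the register size (SVE_MUL_VL), for the whole-register str/ldr and the contiguous forms with an immediate, and a scalar index scaled to the element size (LSL #2 for 32-bit elements). A hedged sketch reusing the raw calls from the listing (EmitSveLoadStore is a made-up name; scope and feature assumptions as in the earlier sketches):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl;
    using namespace vixl::aarch64;

    void EmitSveLoadStore(MacroAssembler* masm) {
      ExactAssemblyScope scope(masm, 4 * kInstructionSize);
      // Whole-register store/load: the offset is 11 times the register's own
      // size (a predicate register is VL/8 bytes, a vector register VL bytes).
      masm->str(p13.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL));
      masm->ldr(z21.VnS(), SVEMemOperand(x0, 11, SVE_MUL_VL));
      // Contiguous 32-bit element store: the address is x0 + (x3 << 2).
      masm->st1w(z2.VnS(), p1, SVEMemOperand(x0, x3, LSL, 2));
      // Contiguous 32-bit element load at x0 + 3 * VL; inactive lanes zeroed.
      masm->ld1w(z22.VnS(), p1.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL));
    }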
/external/vixl/src/aarch64/
D | registers-aarch64.h
    614 ZRegister VnS() const { return ZRegister(GetCode(), kSRegSize); } in VnS() function
    647 PRegisterWithLaneSize VnS() const;
    746 inline PRegisterWithLaneSize PRegister::VnS() const { in VnS() function
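registers-aarch64.h carries both accessors: ZRegister::VnS() is defined inline at 614 and simply re-tags the register code with kSRegSize, while PRegister::VnS() is declared at 647 and defined out of line at 746, with its body truncated in the listing. Given the ZRegister overload, the predicate version presumably mirrors it; a hedged guess at its shape, not the actual source:

    // Hedged sketch only: the real body at line 746 is not shown above.
    inline PRegisterWithLaneSize PRegister::VnS() const {
      return PRegisterWithLaneSize(GetCode(), kSRegSize);
    }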