Home
last modified time | relevance | path

Searched refs: V8B (Results 1 – 13 of 13) sorted by relevance

/external/vixl/test/aarch64/
Dtest-trace-aarch64.cc619 __ abs(v31.V8B(), v5.V8B()); in GenerateTestSequenceNEON()
627 __ add(v13.V8B(), v7.V8B(), v18.V8B()); in GenerateTestSequenceNEON()
631 __ addhn(v31.V8B(), v12.V8H(), v22.V8H()); in GenerateTestSequenceNEON()
641 __ addp(v12.V8B(), v26.V8B(), v7.V8B()); in GenerateTestSequenceNEON()
644 __ addv(b12, v20.V8B()); in GenerateTestSequenceNEON()
649 __ and_(v5.V8B(), v1.V8B(), v16.V8B()); in GenerateTestSequenceNEON()
654 __ bic(v12.V8B(), v31.V8B(), v21.V8B()); in GenerateTestSequenceNEON()
657 __ bif(v2.V8B(), v23.V8B(), v27.V8B()); in GenerateTestSequenceNEON()
659 __ bit(v5.V8B(), v5.V8B(), v23.V8B()); in GenerateTestSequenceNEON()
661 __ bsl(v14.V8B(), v7.V8B(), v3.V8B()); in GenerateTestSequenceNEON()
[all …]
Dtest-cpu-features-aarch64.cc756 TEST_NEON(abs_0, abs(v0.V8B(), v1.V8B()))
764 TEST_NEON(addhn_0, addhn(v0.V8B(), v1.V8H(), v2.V8H()))
771 TEST_NEON(addp_1, addp(v0.V8B(), v1.V8B(), v2.V8B()))
778 TEST_NEON(addv_0, addv(b0, v1.V8B()))
783 TEST_NEON(add_0, add(v0.V8B(), v1.V8B(), v2.V8B()))
791 TEST_NEON(and_0, and_(v0.V8B(), v1.V8B(), v2.V8B()))
797 TEST_NEON(bic_4, bic(v0.V8B(), v1.V8B(), v2.V8B()))
799 TEST_NEON(bif_0, bif(v0.V8B(), v1.V8B(), v2.V8B()))
801 TEST_NEON(bit_0, bit(v0.V8B(), v1.V8B(), v2.V8B()))
803 TEST_NEON(bsl_0, bsl(v0.V8B(), v1.V8B(), v2.V8B()))
[all …]
Dtest-disasm-neon-aarch64.cc305 V(V8B(), "8b") \
314 V(V4H(), "4h", V8B(), "8b") \
322 V(V8H(), "8h", V8B(), "8b") \
332 V(V8B(), "8b") \
391 COMPARE_MACRO(Ld1(v0.V8B(), MemOperand(x15, 8, PostIndex)), in TEST()
507 COMPARE_MACRO(St1(v0.V8B(), MemOperand(x15, 8, PostIndex)), in TEST()
660 COMPARE_MACRO(Ld1(v0.V8B(), 0, MemOperand(x15)), "ld1 {v0.b}[0], [x15]"); in TEST()
675 COMPARE_MACRO(Ld1(v0.V8B(), 0, MemOperand(x15, x0, PostIndex)), in TEST()
708 COMPARE_MACRO(Ld2(v0.V8B(), v1.V8B(), 0, MemOperand(x15)), in TEST()
737 COMPARE_MACRO(Ld2(v0.V8B(), v1.V8B(), 0, MemOperand(x15, x0, PostIndex)), in TEST()
[all …]
Dtest-assembler-neon-aarch64.cc312 __ Ld1(v2.V8B(), MemOperand(x17)); in TEST()
314 __ Ld1(v3.V8B(), v4.V8B(), MemOperand(x17)); in TEST()
368 __ Ld1(v2.V8B(), MemOperand(x17, x23, PostIndex)); in TEST()
369 __ Ld1(v3.V8B(), v4.V8B(), MemOperand(x18, 16, PostIndex)); in TEST()
593 __ Ld2(v2.V8B(), v3.V8B(), MemOperand(x17)); in TEST()
595 __ Ld2(v4.V8B(), v5.V8B(), MemOperand(x17)); in TEST()
632 __ Ld2(v2.V8B(), v3.V8B(), MemOperand(x17, x22, PostIndex)); in TEST()
633 __ Ld2(v4.V8B(), v5.V8B(), MemOperand(x18, 16, PostIndex)); in TEST()
935 __ Ld2r(v0.V8B(), v1.V8B(), MemOperand(x17)); in TEST()
983 __ Ld2r(v0.V8B(), v1.V8B(), MemOperand(x17, 2, PostIndex)); in TEST()
[all …]
Dtest-simulator-aarch64.cc1704 VRegister vn_ext = (kDRegSize == vn_bits) ? vn.V8B() : vn.V16B(); in Test1OpAcrossNEON_Helper()
1705 VRegister vntmp_ext = (kDRegSize == vn_bits) ? vntmp.V8B() : vntmp.V16B(); in Test1OpAcrossNEON_Helper()
2671 VRegister vn_ext = (kDRegSize == vn_bits) ? vn.V8B() : vn.V16B(); in TestOpImmOpImmNEON_Helper()
2672 VRegister vntmp_ext = (kDRegSize == vn_bits) ? vntmp.V8B() : vntmp.V16B(); in TestOpImmOpImmNEON_Helper()
/external/v8/src/compiler/backend/arm64/
Dcode-generator-arm64.cc2443 __ Sqxtn(dst.V8B(), src0.V8H()); in AssembleArchInstruction()
2480 __ Sqxtun(dst.V8B(), src0.V8H()); in AssembleArchInstruction()
2642 __ Ldr(i.OutputSimd128Register().V8B(), i.MemoryOperand(0)); in AssembleArchInstruction()
2643 __ Sxtl(i.OutputSimd128Register().V8H(), i.OutputSimd128Register().V8B()); in AssembleArchInstruction()
2647 __ Ldr(i.OutputSimd128Register().V8B(), i.MemoryOperand(0)); in AssembleArchInstruction()
2648 __ Uxtl(i.OutputSimd128Register().V8H(), i.OutputSimd128Register().V8B()); in AssembleArchInstruction()
/external/vixl/src/aarch64/
Doperands-aarch64.h367 VRegister V8B() const { return VRegister(code_, kDRegSize, 8); } in V8B() function
Dassembler-aarch64.cc3937 orr(vd.V8B(), vn.V8B(), vn.V8B()); in mov()
3990 not_(vd.V8B(), vn.V8B()); in mvn()
Dmacro-assembler-aarch64.cc964 movi(vd.Is64Bits() ? vd.V8B() : vd.V16B(), byte1); in Movi16bitHelper()
/external/v8/src/codegen/arm64/
Dregister-arm64.h324 VRegister V8B() const { in V8B() function
Dassembler-arm64.cc3185 orr(vd.V8B(), vn.V8B(), vn.V8B()); in mov()
3226 not_(vd.V8B(), vn.V8B()); in mvn()
Dmacro-assembler-arm64.cc354 movi(vd.Is64Bits() ? vd.V8B() : vd.V16B(), byte1); in Movi16bitHelper()
/external/v8/src/wasm/baseline/arm64/
Dliftoff-assembler-arm64.h1491 Sxtl(dst.fp().V8H(), dst.fp().V8B()); in LoadTransform()
1494 Uxtl(dst.fp().V8H(), dst.fp().V8B()); in LoadTransform()