
Searched refs:V8B (Results 1 – 7 of 7) sorted by relevance

/external/vixl/test/aarch64/
test-trace-aarch64.cc
620 __ abs(v31.V8B(), v5.V8B()); in GenerateTestSequenceNEON()
628 __ add(v13.V8B(), v7.V8B(), v18.V8B()); in GenerateTestSequenceNEON()
632 __ addhn(v31.V8B(), v12.V8H(), v22.V8H()); in GenerateTestSequenceNEON()
642 __ addp(v12.V8B(), v26.V8B(), v7.V8B()); in GenerateTestSequenceNEON()
645 __ addv(b12, v20.V8B()); in GenerateTestSequenceNEON()
650 __ and_(v5.V8B(), v1.V8B(), v16.V8B()); in GenerateTestSequenceNEON()
655 __ bic(v12.V8B(), v31.V8B(), v21.V8B()); in GenerateTestSequenceNEON()
658 __ bif(v2.V8B(), v23.V8B(), v27.V8B()); in GenerateTestSequenceNEON()
660 __ bit(v5.V8B(), v5.V8B(), v23.V8B()); in GenerateTestSequenceNEON()
662 __ bsl(v14.V8B(), v7.V8B(), v3.V8B()); in GenerateTestSequenceNEON()
[all …]
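The hits above come from the NEON trace-test generator: each call emits one instruction operating on the 8 x 8-bit lane view returned by V8B(). A minimal sketch of emitting a few of the same forms through VIXL's MacroAssembler follows; the capitalized macro aliases and the standalone main() are assumptions for illustration, not part of the test, which drives the lowercase mnemonics through its own __ macro.

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

int main() {
  MacroAssembler masm;
  // Each operand is the 8 x 8-bit lane view of the low 64 bits of a V register.
  masm.Abs(v31.V8B(), v5.V8B());             // abs v31.8b, v5.8b
  masm.Add(v13.V8B(), v7.V8B(), v18.V8B());  // add v13.8b, v7.8b, v18.8b
  masm.And(v5.V8B(), v1.V8B(), v16.V8B());   // and v5.8b, v1.8b, v16.8b
  masm.Bsl(v14.V8B(), v7.V8B(), v3.V8B());   // bsl v14.8b, v7.8b, v3.8b
  masm.FinalizeCode();
  return 0;
}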
test-disasm-aarch64.cc
3084 V(V8B(), "8b") \
3093 V(V4H(), "4h", V8B(), "8b") \
3101 V(V8H(), "8h", V8B(), "8b") \
3111 V(V8B(), "8b") \
3170 COMPARE_MACRO(Ld1(v0.V8B(), MemOperand(x15, 8, PostIndex)), in TEST()
3286 COMPARE_MACRO(St1(v0.V8B(), MemOperand(x15, 8, PostIndex)), in TEST()
3434 COMPARE_MACRO(Ld1(v0.V8B(), 0, MemOperand(x15)), "ld1 {v0.b}[0], [x15]"); in TEST()
3449 COMPARE_MACRO(Ld1(v0.V8B(), 0, MemOperand(x15, x0, PostIndex)), in TEST()
3482 COMPARE_MACRO(Ld2(v0.V8B(), v1.V8B(), 0, MemOperand(x15)), in TEST()
3511 COMPARE_MACRO(Ld2(v0.V8B(), v1.V8B(), 0, MemOperand(x15, x0, PostIndex)), in TEST()
[all …]
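COMPARE_MACRO in the disassembler test checks that a macro-assembler call produces the expected disassembly text. A rough sketch of the same Ld1/St1 forms outside the test harness, using only calls visible in the snippets above (the wrapper function name is made up):

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

void EmitLoadsStores(MacroAssembler* masm) {
  // Post-index by 8 bytes: x15 is advanced after the access.
  masm->Ld1(v0.V8B(), MemOperand(x15, 8, PostIndex));  // ld1 {v0.8b}, [x15], #8
  masm->St1(v0.V8B(), MemOperand(x15, 8, PostIndex));  // st1 {v0.8b}, [x15], #8
  // Single-lane form: load lane 0 of v0 from [x15].
  masm->Ld1(v0.V8B(), 0, MemOperand(x15));             // ld1 {v0.b}[0], [x15]
}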
test-assembler-aarch64.cc
3162 __ Ld1(v2.V8B(), MemOperand(x17)); in TEST()
3164 __ Ld1(v3.V8B(), v4.V8B(), MemOperand(x17)); in TEST()
3218 __ Ld1(v2.V8B(), MemOperand(x17, x23, PostIndex)); in TEST()
3219 __ Ld1(v3.V8B(), v4.V8B(), MemOperand(x18, 16, PostIndex)); in TEST()
3443 __ Ld2(v2.V8B(), v3.V8B(), MemOperand(x17)); in TEST()
3445 __ Ld2(v4.V8B(), v5.V8B(), MemOperand(x17)); in TEST()
3482 __ Ld2(v2.V8B(), v3.V8B(), MemOperand(x17, x22, PostIndex)); in TEST()
3483 __ Ld2(v4.V8B(), v5.V8B(), MemOperand(x18, 16, PostIndex)); in TEST()
3785 __ Ld2r(v0.V8B(), v1.V8B(), MemOperand(x17)); in TEST()
3833 __ Ld2r(v0.V8B(), v1.V8B(), MemOperand(x17, 2, PostIndex)); in TEST()
[all …]
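The Ld2/Ld2r tests above load two-register interleaved structures; Ld2r replicates one structure element to every lane of both destinations. A hedged sketch of the addressing modes shown (register choices match the snippets; setup of the base registers is omitted and the function name is invented):

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

void EmitStructureLoads(MacroAssembler* masm) {
  // De-interleaving load: 16 bytes at [x17] split into 8 pairs across v2/v3.
  masm->Ld2(v2.V8B(), v3.V8B(), MemOperand(x17));
  // Same load with a 16-byte post-index writeback of x18.
  masm->Ld2(v4.V8B(), v5.V8B(), MemOperand(x18, 16, PostIndex));
  // Load-and-replicate: one byte pair broadcast to all 8 lanes of v0 and v1,
  // post-indexing the base by 2 bytes.
  masm->Ld2r(v0.V8B(), v1.V8B(), MemOperand(x17, 2, PostIndex));
}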
test-simulator-aarch64.cc
1544 VRegister vn_ext = (kDRegSize == vn_bits) ? vn.V8B() : vn.V16B(); in Test1OpAcrossNEON_Helper()
1545 VRegister vntmp_ext = (kDRegSize == vn_bits) ? vntmp.V8B() : vntmp.V16B(); in Test1OpAcrossNEON_Helper()
2478 VRegister vn_ext = (kDRegSize == vn_bits) ? vn.V8B() : vn.V16B(); in TestOpImmOpImmNEON_Helper()
2479 VRegister vntmp_ext = (kDRegSize == vn_bits) ? vntmp.V8B() : vntmp.V16B(); in TestOpImmOpImmNEON_Helper()
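The simulator helpers pick the vector view that matches the operand width: 64-bit (D) operands get V8B(), 128-bit (Q) operands get V16B(). A tiny sketch of the same selection; kDRegSize and the accessors come from VIXL, while the helper name is hypothetical.

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

// Return the byte-lane view whose total width matches vn_bits.
static VRegister ByteViewFor(const VRegister& vn, unsigned vn_bits) {
  return (vn_bits == kDRegSize) ? vn.V8B() : vn.V16B();
}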
/external/vixl/src/aarch64/
operands-aarch64.h
361 VRegister V8B() const { return VRegister(code_, kDRegSize, 8); } in V8B() function
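This is the accessor behind every hit above: it returns a view of the same architectural register, re-typed as kDRegSize (64) bits wide with 8 lanes. For contrast, a short illustrative sketch of the two byte-lane views of v0 (the function is invented, not VIXL API):

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

void ByteViews() {
  VRegister lo = v0.V8B();     // low 64 bits of v0, 8 x 8-bit lanes  ("v0.8b")
  VRegister full = v0.V16B();  // all 128 bits of v0, 16 x 8-bit lanes ("v0.16b")
  (void)lo;
  (void)full;
}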
assembler-aarch64.cc
2808 orr(vd.V8B(), vn.V8B(), vn.V8B()); in mov()
2858 not_(vd.V8B(), vn.V8B()); in mvn()
macro-assembler-aarch64.cc
952 movi(vd.Is64Bits() ? vd.V8B() : vd.V16B(), byte1); in Movi16bitHelper()
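The last two hits show V8B() used inside VIXL itself: the vector mov alias is emitted as an orr with both sources equal, and the Movi helpers select the 8B or 16B form from the destination width. A hedged sketch of what those aliases look like at the caller level (the wrapper function is made up; the orr expansion follows the assembler-aarch64.cc snippet above):

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

void AliasExamples(MacroAssembler* masm) {
  // mov of a D-sized vector is encoded as orr with both sources equal.
  masm->Mov(v1.V8B(), v2.V8B());  // orr v1.8b, v2.8b, v2.8b
  // movi replicates the immediate into every byte lane of the 8B view.
  masm->Movi(v3.V8B(), 0x55);     // movi v3.8b, #0x55
}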