/external/llvm/test/MC/AArch64/

arm64-advsimd.s
  904:  fcvtzs d0, d0
  1349: fcvtzs d0, d0, #2

neon-diagnostics.s
  7297: fcvtzs d0, s0
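The second arm64-advsimd.s match is the fixed-point form: fcvtzs d0, d0, #2 scales the input by 2^2 before truncating toward zero. A minimal C++ sketch of that arithmetic, using a hypothetical helper name and ignoring the saturation the real instruction applies on overflow:

  #include <cmath>
  #include <cstdint>
  #include <cstdio>

  // Hypothetical model of FCVTZS (fixed-point): scale by 2^fbits, then
  // round toward zero. The hardware instruction also saturates the result,
  // which this sketch does not model.
  int64_t FcvtzsFixedPointModel(double value, int fbits) {
    return static_cast<int64_t>(std::trunc(std::ldexp(value, fbits)));
  }

  int main() {
    // fcvtzs with #2: 2.75 * 4 = 11.0, truncated to 11.
    std::printf("%lld\n", static_cast<long long>(FcvtzsFixedPointModel(2.75, 2)));
    return 0;
  }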
/external/vixl/test/aarch64/

test-trace-aarch64.cc (in GenerateTestSequenceFP())
  497: __ fcvtzs(d15, d0);
  498: __ fcvtzs(d13, d4, 42);
  499: __ fcvtzs(s8, s11);
  500: __ fcvtzs(s31, s6, 25);
  501: __ fcvtzs(w6, d9);
  502: __ fcvtzs(w25, d10, 20);
  503: __ fcvtzs(w9, s1);
  504: __ fcvtzs(w17, s29, 30);
  505: __ fcvtzs(x19, d2);
  506: __ fcvtzs(x22, d14, 1);
  [all …]
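The trace test exercises the same Assembler::fcvtzs overloads defined in assembler-aarch64.cc below: results in FP/SIMD registers with an optional fractional-bits operand, and results in W/X general-purpose registers. A minimal sketch of emitting those forms, assuming the usual vixl headers and the MacroAssembler wrappers for the raw fcvtzs calls listed above (EmitFcvtzsForms is a hypothetical helper, not part of the test):

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  // Hypothetical helper mirroring the forms in GenerateTestSequenceFP().
  void EmitFcvtzsForms(MacroAssembler* masm) {
    masm->Fcvtzs(d15, d0);      // double -> integer, result in a D register.
    masm->Fcvtzs(d13, d4, 42);  // double -> fixed point, 42 fractional bits.
    masm->Fcvtzs(w6, d9);       // double -> 32-bit general-purpose register.
    masm->Fcvtzs(x19, d2);      // double -> 64-bit general-purpose register.
  }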
test-api-movprfx-aarch64.cc (in TEST())
  400:  __ fcvtzs(z25.VnD(), p2.Merging(), z25.VnH());
  403:  __ fcvtzs(z31.VnH(), p7.Merging(), z31.VnH());
  406:  __ fcvtzs(z21.VnD(), p1.Merging(), z21.VnS());
  409:  __ fcvtzs(z5.VnS(), p5.Merging(), z5.VnD());
  916:  __ fcvtzs(z3.VnD(), p2.Merging(), z7.VnH());
  919:  __ fcvtzs(z17.VnD(), p3.Merging(), z14.VnD());
  922:  __ fcvtzs(z2.VnS(), p1.Merging(), z31.VnH());
  925:  __ fcvtzs(z13.VnS(), p2.Merging(), z23.VnD());
  1760: __ fcvtzs(z12.VnD(), p1.Merging(), z18.VnH());
  1763: __ fcvtzs(z3.VnS(), p2.Merging(), z26.VnS());
  [all …]
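The movprfx API test checks prefixing rules, but the calls themselves use the predicated SVE overload defined in assembler-sve-aarch64.cc below: a destination Z register, a merging governing predicate, and a source Z register, where destination and source lane sizes may differ. A minimal sketch, assuming the same vixl headers and that the overload follows the (zd, pg.Merging(), zn) shape shown in the matches (EmitSveFcvtzs is a hypothetical helper):

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  // Hypothetical sketch of the predicated SVE forms matched above. Inactive
  // lanes in zd are left unchanged because the predicate is used in merging
  // form; destination and source element sizes can differ.
  void EmitSveFcvtzs(MacroAssembler* masm) {
    masm->Fcvtzs(z25.VnD(), p2.Merging(), z25.VnH());  // half   -> 64-bit int lanes.
    masm->Fcvtzs(z5.VnS(),  p5.Merging(), z5.VnD());   // double -> 32-bit int lanes.
    masm->Fcvtzs(z3.VnS(),  p2.Merging(), z26.VnS());  // single -> 32-bit int lanes.
  }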
test-assembler-fp-aarch64.cc
  4287: TEST(fcvtzs) {
/external/vixl/src/aarch64/

assembler-aarch64.cc
  3178: void Assembler::fcvtzs(const VRegister& vd, const VRegister& vn, int fbits) {
assembler-sve-aarch64.cc
  1716: void Assembler::fcvtzs(const ZRegister& zd,