/external/llvm-project/llvm/test/MC/AArch64/SVE/ |
D | lasta-diagnostics.s |
     7  lasta w0, p8, z31.b  (label)
    12  lasta w0, p7.b, w0, z31.b  (label)
    17  lasta w0, p7.q, w0, z31.b  (label)
    26  lasta x0, p7, z31.b  (label)
    31  lasta x0, p7, z31.h  (label)
    36  lasta x0, p7, z31.s  (label)
    41  lasta w0, p7, z31.d  (label)
    46  lasta b0, p7, z31.h  (label)
    51  lasta h0, p7, z31.s  (label)
    56  lasta s0, p7, z31.d  (label)
    [all …]
|
D | lasta.s |
    10  lasta w0, p7, z31.b  (label)
    16  lasta w0, p7, z31.h  (label)
    22  lasta w0, p7, z31.s  (label)
    28  lasta x0, p7, z31.d  (label)
    34  lasta b0, p7, z31.b  (label)
    40  lasta h0, p7, z31.h  (label)
    46  lasta s0, p7, z31.s  (label)
    52  lasta d0, p7, z31.d  (define)
|
/external/arm-optimized-routines/string/aarch64/ |
D | memcmp-sve.S |
    39  lasta w0, p1, z0.b  /* extract each byte */
    40  lasta w1, p1, z1.b
|
D | strcmp-sve.S |
    41  lasta w0, p2, z0.b  /* extract each char */
    42  lasta w1, p2, z1.b
|
D | strncmp-sve.S |
    45  lasta w0, p2, z0.b  /* extract each char */
    46  lasta w1, p2, z1.b
|
/external/llvm-project/libc/AOR_v20.02/string/aarch64/ |
D | memcmp-sve.S |
    41  lasta w0, p1, z0.b  /* extract each byte */
    42  lasta w1, p1, z1.b
|
D | strcmp-sve.S |
    44  lasta w0, p2, z0.b  /* extract each char */
    45  lasta w1, p2, z1.b
|
D | strncmp-sve.S |
    47  lasta w0, p2, z0.b  /* extract each char */
    48  lasta w1, p2, z1.b
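
The SVE string routines indexed above (and the AOR_v20.02 copies vendored under llvm-project/libc) all end the same way: build a predicate covering the lanes before the first mismatch, then use LASTA, which extracts the element after the last active lane (or element 0 when no lane is active), to pull out the first differing byte from each operand. Those files are hand-written assembly; the sketch below only illustrates that BRKB + LASTA pattern at the C/C++ level with the ACLE intrinsics from <arm_sve.h>. The function name memcmp_sve_sketch and the loop structure are assumptions for illustration, not code taken from the routines above.

// Minimal sketch of the BRKB + LASTA mismatch-extraction pattern (ACLE SVE).
#include <arm_sve.h>
#include <stddef.h>
#include <stdint.h>

int memcmp_sve_sketch(const uint8_t *s1, const uint8_t *s2, size_t n) {
  for (size_t i = 0; i < n; i += svcntb()) {
    svbool_t pg = svwhilelt_b8_u64(i, n);        // active lanes for this chunk
    svuint8_t a = svld1_u8(pg, s1 + i);
    svuint8_t b = svld1_u8(pg, s2 + i);
    svbool_t diff = svcmpne_u8(pg, a, b);        // lanes where the bytes differ
    if (svptest_any(pg, diff)) {
      // BRKB gives a predicate that is true for the lanes *before* the first
      // mismatch; LASTA then extracts the element *after* the last active
      // lane, i.e. the first differing byte (element 0 if the predicate is
      // all-false because the mismatch is in lane 0).
      svbool_t before = svbrkb_b_z(pg, diff);
      uint8_t x = svlasta_u8(before, a);
      uint8_t y = svlasta_u8(before, b);
      return (int)x - (int)y;
    }
  }
  return 0;
}

This mirrors the "extract each byte/char" comments in the hits above: the real routines compute the break predicate in p1/p2 and issue a pair of lasta instructions, one per source vector.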
|
/external/llvm-project/llvm/test/CodeGen/AArch64/ |
D | sve-intrinsics-perm-select.ll |
    628  ; CHECK: lasta w0, p0, z0.b
    630  %res = call i8 @llvm.aarch64.sve.lasta.nxv16i8(<vscale x 16 x i1> %pg,
    637  ; CHECK: lasta w0, p0, z0.h
    639  %res = call i16 @llvm.aarch64.sve.lasta.nxv8i16(<vscale x 8 x i1> %pg,
    646  ; CHECK: lasta w0, p0, z0.s
    648  %res = call i32 @llvm.aarch64.sve.lasta.nxv4i32(<vscale x 4 x i1> %pg,
    655  ; CHECK: lasta x0, p0, z0.d
    657  %res = call i64 @llvm.aarch64.sve.lasta.nxv2i64(<vscale x 2 x i1> %pg,
    664  ; CHECK: lasta h0, p0, z0.h
    666  %res = call half @llvm.aarch64.sve.lasta.nxv8f16(<vscale x 8 x i1> %pg,
    [all …]
|
/external/vixl/test/aarch64/ |
D | test-disasm-sve-aarch64.cc |
    5843  COMPARE_PREFIX(lasta(w15, p3, z3.VnB()), "lasta w15, p3, z3.b");  (in TEST())
    5844  COMPARE_PREFIX(lasta(w15, p3, z3.VnH()), "lasta w15, p3, z3.h");  (in TEST())
    5845  COMPARE_PREFIX(lasta(w15, p3, z3.VnS()), "lasta w15, p3, z3.s");  (in TEST())
    5846  COMPARE_PREFIX(lasta(x15, p3, z3.VnD()), "lasta x15, p3, z3.d");  (in TEST())
    5847  COMPARE_PREFIX(lasta(b30, p4, z24.VnB()), "lasta b30, p4, z24.b");  (in TEST())
    5848  COMPARE_PREFIX(lasta(h30, p4, z24.VnH()), "lasta h30, p4, z24.h");  (in TEST())
    5849  COMPARE_PREFIX(lasta(s30, p4, z24.VnS()), "lasta s30, p4, z24.s");  (in TEST())
    5850  COMPARE_PREFIX(lasta(d30, p4, z24.VnD()), "lasta d30, p4, z24.d");  (in TEST())
|
/external/swiftshader/third_party/llvm-10.0/configs/common/lib/Target/AArch64/ |
D | AArch64GenAsmMatcher.inc |
    12512  "inch\004incp\004incw\005index\003ins\004insr\003irg\003isb\005lasta\005"
    14754  …{ 1842 /* lasta */, AArch64::LASTA_VPZ_H, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorHReg1…
    14755  …{ 1842 /* lasta */, AArch64::LASTA_VPZ_S, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorSReg1…
    14756  …{ 1842 /* lasta */, AArch64::LASTA_VPZ_D, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorDReg1…
    14757  …{ 1842 /* lasta */, AArch64::LASTA_VPZ_B, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorBReg1…
    14758  …{ 1842 /* lasta */, AArch64::LASTA_RPZ_H, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorHReg1…
    14759  …{ 1842 /* lasta */, AArch64::LASTA_RPZ_S, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorSReg1…
    14760  …{ 1842 /* lasta */, AArch64::LASTA_RPZ_B, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorBReg1…
    14761  …{ 1842 /* lasta */, AArch64::LASTA_RPZ_D, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorDReg1…
    22127  …{ 1842 /* lasta */, AArch64::LASTA_VPZ_H, Convert__Reg1_0__SVEPredicate3bAnyReg1_1__SVEVectorHReg1…
    [all …]
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64SVEInstrInfo.td |
    336  defm LASTA_RPZ : sve_int_perm_last_r<0, "lasta", AArch64lasta>;
    338  defm LASTA_VPZ : sve_int_perm_last_v<0, "lasta", AArch64lasta>;
|
/external/vixl/src/aarch64/ |
D | assembler-aarch64.h |
    4514  void lasta(const Register& rd, const PRegister& pg, const ZRegister& zn);
    4517  void lasta(const VRegister& vd, const PRegister& pg, const ZRegister& zn);
|
D | assembler-sve-aarch64.cc |
    5708  void Assembler::lasta(const Register& rd,  (in vixl::aarch64::Assembler::lasta())
    5720  void Assembler::lasta(const VRegister& vd,  (in vixl::aarch64::Assembler::lasta())
|
D | macro-assembler-aarch64.h |
    4784  lasta(rd, pg, zn);  (in Lasta())
    4789  lasta(vd, pg, zn);  (in Lasta())
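
The three VIXL entries above cover the raw assembler declarations (assembler-aarch64.h), their encodings (assembler-sve-aarch64.cc), and the MacroAssembler wrappers (macro-assembler-aarch64.h). Below is a hedged sketch of emitting both LASTA forms through that interface; the register choices mirror the disassembly test, while the CPUFeatures setup and the EmitLastaExamples helper are assumptions, not code from these files.

// Sketch: emit both LASTA destination forms via VIXL's MacroAssembler.
#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

void EmitLastaExamples(MacroAssembler* masm) {
  // SVE instructions are only accepted once the SVE feature is enabled
  // on the assembler's CPUFeatures (assumed setup, adjust to your harness).
  masm->GetCPUFeatures()->Combine(CPUFeatures::kSVE);

  // General-register destination form: "lasta w15, p3, z3.b"
  masm->Lasta(w15, p3, z3.VnB());

  // SIMD/FP-register destination form: "lasta d30, p4, z24.d"
  masm->Lasta(d30, p4, z24.VnD());
}

The two overloads correspond to the general-register (LASTA_RPZ) and SIMD&FP-register (LASTA_VPZ) destination encodings seen in the AArch64SVEInstrInfo.td entries elsewhere in this listing.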
|
/external/llvm-project/llvm/lib/Target/AArch64/ |
D | AArch64SVEInstrInfo.td |
    671  defm LASTA_RPZ : sve_int_perm_last_r<0, "lasta", AArch64lasta>;
    673  defm LASTA_VPZ : sve_int_perm_last_v<0, "lasta", AArch64lasta>;
|
/external/e2fsprogs/po/ |
D | eo.po |
    1124  "En @S: tempo de lasta surmeto (%t,\n"  (EN: "In @S: time of last mount (%t,")
    1134  "En @S: tempo de lasta skribo (%t,\n"  (EN: "In @S: time of last write (%t,")
    1193  "En @S la tempo de lasta surmeto estas en la estonteco.\n"  (EN: "In @S the time of last mount is in the future.")
    1207  "En @S la tempo de lasta skribo estas en la estonteco.\n"  (EN: "In @S the time of last write is in the future.")
    (These hits match the Esperanto word "lasta", meaning "last", in e2fsprogs message translations, not the SVE instruction.)
|
/external/swiftshader/third_party/llvm-10.0/configs/common/include/llvm/IR/ |
D | IntrinsicImpl.inc |
      660  "llvm.aarch64.sve.lasta",
    10793  1, // llvm.aarch64.sve.lasta
|