/external/llvm-project/llvm/test/MC/AArch64/SVE/ |
D | ld1sb-diagnostics.s |
      6  ld1sb z23.b, p0/z, [x13, #1, MUL VL]
     11  ld1sb z29.b, p0/z, [x3, #1, MUL VL]
     20  ld1sb z21.h, p4/z, [x17, #-9, MUL VL]
     25  ld1sb z10.h, p5/z, [x16, #8, MUL VL]
     30  ld1sb z30.s, p6/z, [x25, #-9, MUL VL]
     35  ld1sb z29.s, p5/z, [x15, #8, MUL VL]
     40  ld1sb z28.d, p2/z, [x28, #-9, MUL VL]
     45  ld1sb z27.d, p1/z, [x26, #8, MUL VL]
     54  ld1sb z9.h, p8/z, [x25, #1, MUL VL]
     59  ld1sb z12.s, p8/z, [x13, #1, MUL VL]
     [all …]
|
D | ld1sb.s |
     10  ld1sb z0.h, p0/z, [x0]
     16  ld1sb z0.s, p0/z, [x0]
     22  ld1sb z0.d, p0/z, [x0]
     28  ld1sb { z0.h }, p0/z, [x0]
     34  ld1sb { z0.s }, p0/z, [x0]
     40  ld1sb { z0.d }, p0/z, [x0]
     46  ld1sb { z31.h }, p7/z, [sp, #-1, mul vl]
     52  ld1sb { z21.h }, p5/z, [x10, #5, mul vl]
     58  ld1sb { z31.s }, p7/z, [sp, #-1, mul vl]
     64  ld1sb { z21.s }, p5/z, [x10, #5, mul vl]
     [all …]
|
/external/llvm-project/llvm/test/CodeGen/AArch64/ |
D | sve-masked-ldst-sext.ll |
     13  ; CHECK: ld1sb { [[IN:z[0-9]+]].d }, [[PG:p[0-9]+]]/z, [x0]
     40  ; CHECK: ld1sb { [[IN:z[0-9]+]].s }, [[PG:p[0-9]+]]/z, [x0]
     58  ; CHECK: ld1sb { [[IN:z[0-9]+]].h }, [[PG:p[0-9]+]]/z, [x0]
|
D | sve-masked-ldst-zext.ll |
     13  ; CHECK-NOT: ld1sb
     43  ; CHECK-NOT: ld1sb
     63  ; CHECK-NOT: ld1sb
|
D | sve-masked-gather-legalize.ll |
     10  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, z0.d]
     29  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, z0.d]
     85  ; CHECK-NEXT: ld1sb { z1.d }, p2/z, [x8, z1.d]
     86  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x8, z0.d]
|
D | sve-intrinsics-ld1.ll |
     31  ; CHECK: ld1sb { z0.h }, p0/z, [x0]
     49  ; CHECK: ld1sb { z0.s }, p0/z, [x0]
     67  ; CHECK: ld1sb { z0.d }, p0/z, [x0]
|
D | sve-intrinsics-gather-loads-vector-base-imm-offset.ll |
    119  ; CHECK: ld1sb { z0.s }, p0/z, [z0.s, #16]
    130  ; CHECK: ld1sb { z0.d }, p0/z, [z0.d, #16]
    289  ; e.g. ld1sb { z0.s }, p0/z, [x8, z0.s, uxtw]
    296  ; CHECK-NEXT: ld1sb { z0.s }, p0/z, [x8, z0.s, uxtw]
    308  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x8, z0.d]
|
D | spillfill-sve.ll |
     32  ; CHECK-DAG: ld1sb { z{{[01]}}.h }, p0/z, [sp]
     33  ; CHECK-DAG: ld1sb { z{{[01]}}.h }, p0/z, [sp, #1, mul vl]
     57  ; CHECK-DAG: ld1sb { z{{[01]}}.s }, p0/z, [sp, #3, mul vl]
     58  ; CHECK-DAG: ld1sb { z{{[01]}}.s }, p0/z, [sp, #2, mul vl]
     82  ; CHECK-DAG: ld1sb { z{{[01]}}.d }, p0/z, [sp, #7, mul vl]
     83  ; CHECK-DAG: ld1sb { z{{[01]}}.d }, p0/z, [sp, #6, mul vl]
|
D | sve-intrinsics-ld1-addressing-mode-reg-reg.ll |
     32  ; CHECK: ld1sb { z0.h }, p0/z, [x0, x1]
     52  ; CHECK: ld1sb { z0.s }, p0/z, [x0, x1]
     72  ; CHECK: ld1sb { z0.d }, p0/z, [x0, x1]
|
D | sve-intrinsics-gather-loads-32bit-unscaled-offsets.ll |
    216  ; CHECK: ld1sb { z0.s }, p0/z, [x0, z0.s, uxtw]
    227  ; CHECK: ld1sb { z0.s }, p0/z, [x0, z0.s, sxtw]
    238  ; CHECK: ld1sb { z0.d }, p0/z, [x0, z0.d, uxtw]
    249  ; CHECK: ld1sb { z0.d }, p0/z, [x0, z0.d, sxtw]
|
D | sve-pred-contiguous-ldst-addressing-mode-reg-reg.ll |
     11  ; CHECK-NEXT: ld1sb { z[[DATA:[0-9]+]].d }, p0/z, [x0, x1]
    153  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, x1]
    270  ; CHECK-NEXT: ld1sb { z[[DATA:[0-9]+]].s }, p0/z, [x0, x1]
    376  ; CHECK-NEXT: ld1sb { z0.s }, p0/z, [x0, x1]
    450  ; CHECK-NEXT: ld1sb { z[[DATA:[0-9]+]].h }, p0/z, [x0, x1]
    538  ; CHECK-NEXT: ld1sb { z0.h }, p0/z, [x0, x1]
|
D | sve-pred-contiguous-ldst-addressing-mode-reg-imm.ll |
     38  ; CHECK-NEXT: ld1sb { z[[DATA:[0-9]+]].d }, p0/z, [x0, #-8, mul vl]
    181  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, #-3, mul vl]
    290  ; CHECK-NEXT: ld1sb { z[[DATA:[0-9]+]].s }, p0/z, [x0, #-1, mul vl]
    395  ; CHECK-NEXT: ld1sb { z0.s }, p0/z, [x0, #-3, mul vl]
    465  ; CHECK-NEXT: ld1sb { z[[DATA:[0-9]+]].h }, p0/z, [x0, #6, mul vl]
    552  ; CHECK-NEXT: ld1sb { z0.h }, p0/z, [x0, #-3, mul vl]
|
D | sve-intrinsics-gather-loads-vector-base-scalar-offset.ll |
    118  ; CHECK: ld1sb { z0.s }, p0/z, [x0, z0.s, uxtw]
    129  ; CHECK: ld1sb { z0.d }, p0/z, [x0, z0.d]
|
D | sve-intrinsics-ld1-addressing-mode-reg-imm.ll |
     47  ; CHECK: ld1sb { z0.s }, p0/z, [x0, #7, mul vl]
    110  ; CHECK: ld1sb { z0.h }, p0/z, [x0, #7, mul vl]
    169  ; CHECK: ld1sb { z0.d }, p0/z, [x0, #7, mul vl]
|
D | sve-intrinsics-gather-loads-64bit-unscaled-offset.ll | 72 ; CHECK: ld1sb { z0.d }, p0/z, [x0, z0.d]
|
D | sve-masked-gather-32b-signed-unscaled.ll |
     91  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, z0.d, sxtw]
    186  ; CHECK-NEXT: ld1sb { z0.s }, p0/z, [x0, z0.s, sxtw]
|
D | sve-masked-gather-32b-unsigned-unscaled.ll |
     98  ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, z0.d, uxtw]
    201  ; CHECK-NEXT: ld1sb { z0.s }, p0/z, [x0, z0.s, uxtw]
|
D | sve-masked-gather-64b-unscaled.ll | 87 ; CHECK-NEXT: ld1sb { z0.d }, p0/z, [x0, z0.d]
|
/external/swiftshader/third_party/llvm-10.0/configs/common/lib/Target/AArch64/ |
D | AArch64GenAsmMatcher.inc |
  12515  "ld1rw\005ld1sb\005ld1sh\005ld1sw\004ld1w\003ld2\004ld2b\004ld2d\004ld2h"
  15232  …{ 1951 /* ld1sb */, AArch64::LD1SB_H_IMM, Convert__SVEVectorHReg1_0__SVEPredicate3bAnyReg1_1__Reg1…
  15233  …{ 1951 /* ld1sb */, AArch64::LD1SB_S_IMM, Convert__SVEVectorSReg1_0__SVEPredicate3bAnyReg1_1__Reg1…
  15234  …{ 1951 /* ld1sb */, AArch64::GLD1SB_S_IMM_REAL, Convert__SVEVectorSReg1_0__SVEPredicate3bAnyReg1_1…
  15235  …{ 1951 /* ld1sb */, AArch64::LD1SB_D_IMM, Convert__SVEVectorDReg1_0__SVEPredicate3bAnyReg1_1__Reg1…
  15236  …{ 1951 /* ld1sb */, AArch64::GLD1SB_D_IMM_REAL, Convert__SVEVectorDReg1_0__SVEPredicate3bAnyReg1_1…
  15237  …{ 1951 /* ld1sb */, AArch64::LD1SB_H_IMM, Convert__SVEVectorList1161_0__SVEPredicate3bAnyReg1_1__R…
  15238  …{ 1951 /* ld1sb */, AArch64::LD1SB_S_IMM, Convert__SVEVectorList1321_0__SVEPredicate3bAnyReg1_1__R…
  15239  …{ 1951 /* ld1sb */, AArch64::GLD1SB_S_IMM_REAL, Convert__SVEVectorList1321_0__SVEPredicate3bAnyReg…
  15240  …{ 1951 /* ld1sb */, AArch64::LD1SB_D_IMM, Convert__SVEVectorList1641_0__SVEPredicate3bAnyReg1_1__R…
  [all …]
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64SVEInstrInfo.td |
    354  defm LD1SB_D_IMM : sve_mem_cld_si<0b1100, "ld1sb", Z_d, ZPR64>;
    355  defm LD1SB_S_IMM : sve_mem_cld_si<0b1101, "ld1sb", Z_s, ZPR32>;
    356  defm LD1SB_H_IMM : sve_mem_cld_si<0b1110, "ld1sb", Z_h, ZPR16>;
    400  defm LD1SB_D : sve_mem_cld_ss<0b1100, "ld1sb", Z_d, ZPR64, GPR64NoXZRshifted8>;
    401  defm LD1SB_S : sve_mem_cld_ss<0b1101, "ld1sb", Z_s, ZPR32, GPR64NoXZRshifted8>;
    402  defm LD1SB_H : sve_mem_cld_ss<0b1110, "ld1sb", Z_h, ZPR16, GPR64NoXZRshifted8>;
    471  …defm GLD1SB_S : sve_mem_32b_gld_vs_32_unscaled<0b0000, "ld1sb", AArch64ld1s_gather_sxtw, AAr…
    493  …defm GLD1SB_S : sve_mem_32b_gld_vi_32_ptrs<0b0000, "ld1sb", imm0_31, AArch64ld1s_gather_imm, …
    506  …defm GLD1SB_D : sve_mem_64b_gld_vi_64_ptrs<0b0000, "ld1sb", imm0_31, AArch64ld1s_gather_imm, …
    523  defm GLD1SB_D : sve_mem_64b_gld_vs2_64_unscaled<0b0000, "ld1sb", AArch64ld1s_gather, nxv2i8>;
    [all …]
|
/external/llvm-project/llvm/lib/Target/AArch64/ |
D | AArch64SVEInstrInfo.td |
    694  defm LD1SB_D_IMM : sve_mem_cld_si<0b1100, "ld1sb", Z_d, ZPR64>;
    695  defm LD1SB_S_IMM : sve_mem_cld_si<0b1101, "ld1sb", Z_s, ZPR32>;
    696  defm LD1SB_H_IMM : sve_mem_cld_si<0b1110, "ld1sb", Z_h, ZPR16>;
    740  defm LD1SB_D : sve_mem_cld_ss<0b1100, "ld1sb", Z_d, ZPR64, GPR64NoXZRshifted8>;
    741  defm LD1SB_S : sve_mem_cld_ss<0b1101, "ld1sb", Z_s, ZPR32, GPR64NoXZRshifted8>;
    742  defm LD1SB_H : sve_mem_cld_ss<0b1110, "ld1sb", Z_h, ZPR16, GPR64NoXZRshifted8>;
    811  …defm GLD1SB_S : sve_mem_32b_gld_vs_32_unscaled<0b0000, "ld1sb", AArch64ld1s_gather_sxtw_z, A…
    833  …defm GLD1SB_S : sve_mem_32b_gld_vi_32_ptrs<0b0000, "ld1sb", imm0_31, AArch64ld1s_gather_imm_z,…
    846  …defm GLD1SB_D : sve_mem_64b_gld_vi_64_ptrs<0b0000, "ld1sb", imm0_31, AArch64ld1s_gather_imm_z,…
    863  …defm GLD1SB_D : sve_mem_64b_gld_vs2_64_unscaled<0b0000, "ld1sb", AArch64ld1s_gather_z, nxv2i…
    [all …]
|
/external/vixl/test/aarch64/ |
D | test-disasm-sve-aarch64.cc |
   3386  COMPARE_PREFIX(ld1sb(z12.VnS(),  in TEST()
   3390  COMPARE_PREFIX(ld1sb(z22.VnS(),  in TEST()
   3525  COMPARE_PREFIX(ld1sb(z16.VnD(), p7.Zeroing(), SVEMemOperand(z31.VnD())),  in TEST()
   3638  COMPARE_PREFIX(ld1sb(z11.VnD(), p3.Zeroing(), SVEMemOperand(x24, z21.VnD())),  in TEST()
   3680  COMPARE_PREFIX(ld1sb(z4.VnD(),  in TEST()
   4906  COMPARE_PREFIX(ld1sb(z15.VnH(),  in TEST()
   4910  COMPARE_PREFIX(ld1sb(z19.VnS(),  in TEST()
   4918  COMPARE_PREFIX(ld1sb(z5.VnH(), p1.Zeroing(), SVEMemOperand(x15, x1, LSL, 0)),  in TEST()
   4920  COMPARE_PREFIX(ld1sb(z9.VnS(), p2.Zeroing(), SVEMemOperand(x29, x3, LSL, 0)),  in TEST()
   4922  COMPARE_PREFIX(ld1sb(z31.VnD(), p7.Zeroing(), SVEMemOperand(x9, x9, LSL, 0)),  in TEST()
|
D | test-assembler-sve-aarch64.cc |
   9088  Ld1Macro ld1sb = &MacroAssembler::Ld1sb;  in TEST_SVE() local
   9089  ldff1_unscaled_offset_helper(kBRegSize, kHRegSize, ldff1sb, ld1sb);  in TEST_SVE()
   9090  ldff1_unscaled_offset_helper(kBRegSize, kSRegSize, ldff1sb, ld1sb);  in TEST_SVE()
   9091  ldff1_unscaled_offset_helper(kBRegSize, kDRegSize, ldff1sb, ld1sb);  in TEST_SVE()
   9188  Ld1Macro ld1sb = &MacroAssembler::Ld1sb;  in sve_ldff1_scalar_plus_vector_32_unscaled_offset() local
   9189  ldff1_32_unscaled_offset_helper(kBRegSize, ldff1sb, ld1sb, UXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
   9190  ldff1_32_unscaled_offset_helper(kBRegSize, ldff1sb, ld1sb, SXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
   9273  Ld1Macro ld1sb = &MacroAssembler::Ld1sb;  in sve_ldff1_scalar_plus_vector_32_unpacked_unscaled_offset() local
   9274  ldff1_32_unpacked_unscaled_offset_helper(kBRegSize, ldff1sb, ld1sb, UXTW);  in sve_ldff1_scalar_plus_vector_32_unpacked_unscaled_offset()
   9275  ldff1_32_unpacked_unscaled_offset_helper(kBRegSize, ldff1sb, ld1sb, SXTW);  in sve_ldff1_scalar_plus_vector_32_unpacked_unscaled_offset()
   [all …]
|
D | test-trace-aarch64.cc |
   2792  __ ld1sb(z21.VnH(), p1.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL));  in GenerateTestSequenceSVE() local
   2793  __ ld1sb(z22.VnS(), p1.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL));  in GenerateTestSequenceSVE() local
   2794  __ ld1sb(z23.VnD(), p2.Zeroing(), SVEMemOperand(x0, x2));  in GenerateTestSequenceSVE() local
|
/external/vixl/src/aarch64/ |
D | macro-assembler-sve-aarch64.cc | 1330 static_cast<SVELoad1Fn>(&Assembler::ld1sb)); in Ld1sb()
|
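The vixl matches above all funnel through the MacroAssembler::Ld1sb front end, with an SVEMemOperand selecting the addressing mode. As a minimal sketch, not taken from any of the files listed here (the include path, register choices, and function name are assumptions for illustration), the operand forms that appear in these matches could be emitted like so:

  // Hypothetical helper; assumes vixl's src/ directory is on the include path.
  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void EmitLd1sbExamples(MacroAssembler* masm) {
    // Contiguous load, scalar base plus vector-length-scaled immediate
    // (cf. test-trace-aarch64.cc:2792 above).
    masm->Ld1sb(z21.VnH(), p1.Zeroing(), SVEMemOperand(x0, 3, SVE_MUL_VL));
    // Contiguous load, scalar base plus scalar index
    // (cf. test-trace-aarch64.cc:2794).
    masm->Ld1sb(z23.VnD(), p2.Zeroing(), SVEMemOperand(x0, x2));
    // Gather load, scalar base plus a vector of 64-bit offsets
    // (cf. test-disasm-sve-aarch64.cc:3638).
    masm->Ld1sb(z11.VnD(), p3.Zeroing(), SVEMemOperand(x24, z21.VnD()));
    // Gather load, vector base with an implicit zero offset
    // (cf. test-disasm-sve-aarch64.cc:3525).
    masm->Ld1sb(z16.VnD(), p7.Zeroing(), SVEMemOperand(z31.VnD()));
  }

These are the same scalar-plus-immediate, scalar-plus-scalar, and gather addressing modes that the LLVM TableGen entries above (LD1SB_*_IMM, LD1SB_*, GLD1SB_*) model on the compiler side.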