/external/vixl/src/aarch64/ |
D | operands-aarch64.cc |
    230  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);  in ToExtendedRegister()
    269  VIXL_ASSERT((extend == UXTW) || (extend == SXTW) || (extend == SXTX));  in MemOperand()
    330  VIXL_ASSERT((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));  in MemOperand()
|
D | operands-aarch64.h | 694 if (extend == UXTW) return SVE_UXTW;
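
The ToExtendedRegister() hit above captures the identity the rest of these results lean on: a logical shift left of a W register is the same operation as UXTW with the same amount. As a minimal sketch (not taken from these files, and assuming VIXL's usual aarch64/macro-assembler-aarch64.h header), this is how the two UXTW operand kinds asserted on above are built and used:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Both operand kinds asserted on in operands-aarch64.cc above:
    // an extended-register Operand and a register-offset MemOperand.
    void EmitUxtwExamples(MacroAssembler* masm) {
      // x0 = x1 + (ZeroExtend(w2) << 2), i.e. "add x0, x1, w2, uxtw #2".
      masm->Add(x0, x1, Operand(w2, UXTW, 2));
      // w3 = mem[x4 + (ZeroExtend(w5) << 2)], i.e. "ldr w3, [x4, w5, uxtw #2]".
      masm->Ldr(w3, MemOperand(x4, w5, UXTW, 2));
    }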
|
/external/llvm/lib/Target/AArch64/MCTargetDesc/ |
D | AArch64AddressingModes.h |
    42   UXTW,  enumerator
    62   case AArch64_AM::UXTW: return "uxtw";  in getShiftExtendName()
    129  case 2: return AArch64_AM::UXTW;  in getExtendType()
    156  case AArch64_AM::UXTW: return 2; break;  in getExtendEncoding()
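
These helpers round-trip between the mnemonic "uxtw", the enumerator, and the hardware option field, in which UXTW is option 2 (the getExtendType/getExtendEncoding cases above show both directions). The semantics being encoded are simple enough to model directly; this self-contained illustration (not LLVM code) shows what a uxtw #n operand contributes to a sum or address:

    #include <cassert>
    #include <cstdint>

    // Model of the UXTW extended-register operand: zero-extend the low
    // 32 bits of the register, then shift left by the immediate amount
    // (0-4 in the add/sub extended-register form).
    uint64_t ApplyUXTW(uint64_t reg, unsigned amount) {
      assert(amount <= 4 && "extended-register shift amount is 0-4");
      return static_cast<uint64_t>(static_cast<uint32_t>(reg)) << amount;
    }

    int main() {
      // High 32 bits of the source register are ignored; no sign extension.
      assert(ApplyUXTW(0xdeadbeefffffffffULL, 0) == 0xffffffffULL);
      assert(ApplyUXTW(0x0000000080000000ULL, 2) == 0x200000000ULL);
      return 0;
    }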
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/MCTargetDesc/ |
D | AArch64AddressingModes.h |
    42   UXTW,  enumerator
    62   case AArch64_AM::UXTW: return "uxtw";  in getShiftExtendName()
    129  case 2: return AArch64_AM::UXTW;  in getExtendType()
    156  case AArch64_AM::UXTW: return 2; break;  in getExtendEncoding()
|
D | AArch64InstPrinter.cpp |
    1000  if (ExtType == AArch64_AM::UXTW || ExtType == AArch64_AM::UXTX) {  in printArithExtend()
    1006  ExtType == AArch64_AM::UXTW) ) {  in printArithExtend()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64RegisterInfo.td |
    1057  // UXTW(8|16|32|64)
    1058  def ZPR#RegWidth#AsmOpndExtUXTW8Only : ZPRExtendAsmOperand<"UXTW", RegWidth, 8, 0b1>;
    1059  def ZPR#RegWidth#AsmOpndExtUXTW8 : ZPRExtendAsmOperand<"UXTW", RegWidth, 8>;
    1060  def ZPR#RegWidth#AsmOpndExtUXTW16 : ZPRExtendAsmOperand<"UXTW", RegWidth, 16>;
    1061  def ZPR#RegWidth#AsmOpndExtUXTW32 : ZPRExtendAsmOperand<"UXTW", RegWidth, 32>;
    1062  def ZPR#RegWidth#AsmOpndExtUXTW64 : ZPRExtendAsmOperand<"UXTW", RegWidth, 64>;
    1064  …def ZPR#RegWidth#ExtUXTW8Only : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 8, "On…
    1065  def ZPR#RegWidth#ExtUXTW8 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 8>;
    1066  def ZPR#RegWidth#ExtUXTW16 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 16>;
    1067  def ZPR#RegWidth#ExtUXTW32 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 32>;
    [all …]
|
D | AArch64SchedPredicates.td |
    20  def CheckExtUXTW : CheckImmOperand_s<3, "AArch64_AM::UXTW">;
    36  def CheckMemExtUXTW : CheckImmOperand_s<3, "AArch64_AM::UXTW">;
|
D | AArch64FastISel.cpp |
    744   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    768   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    828   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    865   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    887   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    1070  Addr.getExtendType() == AArch64_AM::UXTW )  in simplifyAddress()
    1081  if (Addr.getExtendType() == AArch64_AM::UXTW)  in simplifyAddress()
    1857  if (Addr.getExtendType() == AArch64_AM::UXTW ||  in emitLoad()
    2148  if (Addr.getExtendType() == AArch64_AM::UXTW ||  in emitStore()
|
D | AArch64InstructionSelector.cpp |
    4743  return AArch64_AM::UXTW;  in getExtendTypeForInst()
    4766  return AArch64_AM::UXTW;  in getExtendTypeForInst()
    4835  if (Ext == AArch64_AM::UXTW && MRI.getType(ExtReg).getSizeInBits() == 32) {  in selectArithExtendedRegister()
|
D | AArch64ISelDAGToDAG.cpp |
    513  return AArch64_AM::UXTW;  in getExtendTypeForNode()
    531  return AArch64_AM::UXTW;  in getExtendTypeForNode()
    711  if (Ext == AArch64_AM::UXTW &&  in SelectArithExtendedRegister()
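
FastISel, the GlobalISel selector, and the DAG selector all perform the same classification here: a zero-extend from 32 bits (or an equivalent 0xffffffff mask) is recorded as AArch64_AM::UXTW so it can be folded into an extended-register operand or an addressing mode instead of being emitted as a separate instruction. A schematic, hypothetical version of that classification (not the actual getExtendTypeForNode logic):

    // Pick the extend kind from the source width and signedness of the
    // extension being folded, the way the getExtendTypeForNode()/
    // getExtendTypeForInst() helpers above do.
    enum class ExtendKind { Invalid, UXTB, UXTH, UXTW, SXTB, SXTH, SXTW };

    ExtendKind ClassifyExtend(unsigned from_bits, bool is_signed) {
      switch (from_bits) {
        case 8:  return is_signed ? ExtendKind::SXTB : ExtendKind::UXTB;
        case 16: return is_signed ? ExtendKind::SXTH : ExtendKind::UXTH;
        case 32: return is_signed ? ExtendKind::SXTW : ExtendKind::UXTW;
        default: return ExtendKind::Invalid;
      }
    }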
|
/external/vixl/test/aarch64/ |
D | test-disasm-aarch64.cc |
    339   COMPARE(adds(x9, x10, Operand(x11, UXTW, 3)), "adds x9, x10, w11, uxtw #3");  in TEST()
    351   COMPARE(add(wsp, wsp, Operand(w4, UXTW, 2)), "add wsp, wsp, w4, lsl #2");  in TEST()
    365   COMPARE(subs(x9, x10, Operand(x11, UXTW, 3)), "subs x9, x10, w11, uxtw #3");  in TEST()
    377   COMPARE(sub(wsp, wsp, Operand(w4, UXTW, 2)), "sub wsp, wsp, w4, lsl #2");  in TEST()
    1043  COMPARE(ldr(w0, MemOperand(x1, w2, UXTW)), "ldr w0, [x1, w2, uxtw]");  in TEST()
    1044  COMPARE(ldr(w3, MemOperand(x4, w5, UXTW, 2)), "ldr w3, [x4, w5, uxtw #2]");  in TEST()
    1053  COMPARE(ldr(x0, MemOperand(x1, w2, UXTW)), "ldr x0, [x1, w2, uxtw]");  in TEST()
    1054  COMPARE(ldr(x3, MemOperand(x4, w5, UXTW, 3)), "ldr x3, [x4, w5, uxtw #3]");  in TEST()
    1064  COMPARE(str(w0, MemOperand(x1, w2, UXTW)), "str w0, [x1, w2, uxtw]");  in TEST()
    1065  COMPARE(str(w3, MemOperand(x4, w5, UXTW, 2)), "str w3, [x4, w5, uxtw #2]");  in TEST()
    [all …]
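
The expectations at lines 351 and 377 show the aliasing rule the printArithExtend() hits listed earlier implement: when the stack pointer is the destination or first source, an extend that matches the operation width encodes a plain shift, so it is disassembled as "lsl". A condensed, hypothetical form of that rule (not the VIXL or LLVM printer code):

    #include <string>

    enum class Ext { UXTW, UXTX, SXTW };

    // Preferred-disassembly rule exercised by the COMPARE lines above:
    // SP involved + width-matching unsigned extend => print "lsl".
    std::string ExtendMnemonic(Ext ext, bool is_64bit_op, bool involves_sp) {
      bool width_matches = (is_64bit_op && ext == Ext::UXTX) ||
                           (!is_64bit_op && ext == Ext::UXTW);
      if (involves_sp && width_matches) return "lsl";
      switch (ext) {
        case Ext::UXTW: return "uxtw";
        case Ext::UXTX: return "uxtx";
        case Ext::SXTW: return "sxtw";
      }
      return "";
    }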
|
D | test-api-aarch64.cc |
    1012  VIXL_CHECK(!Operand(w15, UXTW).IsPlainRegister());  in TEST()
    1025  VIXL_CHECK(!MemOperand(x5, wzr, UXTW).IsPlainRegister());  in TEST()
    1026  VIXL_CHECK(!MemOperand(x6, wzr, UXTW, 3).IsPlainRegister());  in TEST()
    1044  VIXL_CHECK(MemOperand(x5, wzr, UXTW).IsEquivalentToPlainRegister());  in TEST()
    1045  VIXL_CHECK(MemOperand(x6, wzr, UXTW, 3).IsEquivalentToPlainRegister());  in TEST()
    1067  VIXL_CHECK(!SVEMemOperand(x2, z3.VnS(), UXTW).IsPlainScalar());  in TEST()
    1098  VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScalarPlusVector());  in TEST()
    1118  VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScatterGather());  in TEST()
|
D | test-assembler-sve-aarch64.cc |
    8878  (static_cast<int>(mod) == UXTW));  in Ldff1Helper()
    8883  offs_is_unsigned = (static_cast<int>(mod) == UXTW) ? true : false;  in Ldff1Helper()
    9086  ldff1_32_scaled_offset_helper(kHRegSize, ldff1h, ld1h, UXTW);  in sve_ldff1_scalar_plus_vector_32_scaled_offset()
    9091  ldff1_32_scaled_offset_helper(kSRegSize, ldff1w, ld1w, UXTW);  in sve_ldff1_scalar_plus_vector_32_scaled_offset()
    9096  ldff1_32_scaled_offset_helper(kHRegSize, ldff1sh, ld1sh, UXTW);  in sve_ldff1_scalar_plus_vector_32_scaled_offset()
    9115  ldff1_32_unscaled_offset_helper(kBRegSize, ldff1b, ld1b, UXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
    9120  ldff1_32_unscaled_offset_helper(kHRegSize, ldff1h, ld1h, UXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
    9125  ldff1_32_unscaled_offset_helper(kSRegSize, ldff1w, ld1w, UXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
    9130  ldff1_32_unscaled_offset_helper(kBRegSize, ldff1sb, ld1sb, UXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
    9135  ldff1_32_unscaled_offset_helper(kHRegSize, ldff1sh, ld1sh, UXTW);  in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
    [all …]
|
D | test-disasm-neon-aarch64.cc |
    206  COMPARE(ldr(b1, MemOperand(x2, w3, UXTW)), "ldr b1, [x2, w3, uxtw]");  in TEST()
    211  COMPARE(ldr(b31, MemOperand(sp, wzr, UXTW)), "ldr b31, [sp, wzr, uxtw]");  in TEST()
    214  COMPARE(ldr(h1, MemOperand(x2, w3, UXTW)), "ldr h1, [x2, w3, uxtw]");  in TEST()
    216  COMPARE(ldr(h3, MemOperand(x4, w5, UXTW, 1)), "ldr h3, [x4, w5, uxtw #1]");  in TEST()
    223  COMPARE(ldr(s1, MemOperand(x2, w3, UXTW)), "ldr s1, [x2, w3, uxtw]");  in TEST()
    225  COMPARE(ldr(s3, MemOperand(x4, w5, UXTW, 2)), "ldr s3, [x4, w5, uxtw #2]");  in TEST()
    232  COMPARE(ldr(d1, MemOperand(x2, w3, UXTW)), "ldr d1, [x2, w3, uxtw]");  in TEST()
    234  COMPARE(ldr(d3, MemOperand(x4, w5, UXTW, 3)), "ldr d3, [x4, w5, uxtw #3]");  in TEST()
    241  COMPARE(ldr(q1, MemOperand(x2, w3, UXTW)), "ldr q1, [x2, w3, uxtw]");  in TEST()
    243  COMPARE(ldr(q3, MemOperand(x4, w5, UXTW, 4)), "ldr q3, [x4, w5, uxtw #4]");  in TEST()
    [all …]
|
D | test-disasm-sve-aarch64.cc |
    137   COMPARE(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW)),  in TEST()
    139   COMPARE(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 1)),  in TEST()
    141   COMPARE(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 2)),  in TEST()
    143   COMPARE(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 3)),  in TEST()
    3409  COMPARE(ld1b(z9.VnS(), p5.Zeroing(), SVEMemOperand(sp, z1.VnS(), UXTW)),  in TEST()
    3413  COMPARE(ld1w(z22.VnS(), p6.Zeroing(), SVEMemOperand(sp, z5.VnS(), UXTW)),  in TEST()
    3415  COMPARE(ld1sb(z12.VnS(), p7.Zeroing(), SVEMemOperand(x17, z23.VnS(), UXTW)),  in TEST()
    3419  COMPARE(ld1sh(z11.VnS(), p2.Zeroing(), SVEMemOperand(x18, z10.VnS(), UXTW)),  in TEST()
    3423  COMPARE(ld1h(z9.VnS(), p3.Zeroing(), SVEMemOperand(sp, z4.VnS(), UXTW, 1)),  in TEST()
    3432  COMPARE(ldff1b(z18.VnS(), p6.Zeroing(), SVEMemOperand(x27, z24.VnS(), UXTW)),  in TEST()
    [all …]
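
In these SVE forms, UXTW selects "scalar plus vector" addressing: each 32-bit offset element of the Z register is zero-extended to 64 bits and, for the #1/#2/#3 variants, scaled before being added to the scalar base. A scalar model of that per-element address computation (an illustration, not simulator code):

    #include <cstdint>
    #include <vector>

    // Per-element addresses for a gather such as
    //   ld1w {z.s}, p/z, [x_base, z_offsets.s, uxtw #2]
    // Each 32-bit offset is zero-extended, shifted by the scale, and
    // added to the scalar base register.
    std::vector<uint64_t> GatherAddresses(uint64_t base,
                                          const std::vector<uint32_t>& offsets,
                                          unsigned scale) {
      std::vector<uint64_t> addresses;
      addresses.reserve(offsets.size());
      for (uint32_t off : offsets) {
        addresses.push_back(base + (static_cast<uint64_t>(off) << scale));
      }
      return addresses;
    }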
|
D | test-simulator-aarch64.cc |
    291   __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, n_index_shift));  in Test1Op_Helper()
    437   __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift));  in Test2Op_Helper()
    441   __ Ldr(fm, MemOperand(inputs_base, index_m, UXTW, index_shift));  in Test2Op_Helper()
    581   __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift));  in Test3Op_Helper()
    585   __ Ldr(fm, MemOperand(inputs_base, index_m, UXTW, index_shift));  in Test3Op_Helper()
    589   __ Ldr(fa, MemOperand(inputs_base, index_a, UXTW, index_shift));  in Test3Op_Helper()
    730   __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift));  in TestCmp_Helper()
    734   __ Ldr(fm, MemOperand(inputs_base, index_m, UXTW, index_shift));  in TestCmp_Helper()
    869   __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift));  in TestCmpZero_Helper()
    1012  __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, n_index_shift));  in TestFPToFixed_Helper()
    [all …]
|
D | test-assembler-aarch64.cc |
    192   __ Mvn(x14, Operand(w2, UXTW, 4));  in TEST()
    366   __ Mov(x27, Operand(w13, UXTW, 4));  in TEST()
    427   __ Mov(x29, Operand(x12, UXTW, 1));  in TEST()
    499   __ Orr(w8, w0, Operand(w1, UXTW, 2));  in TEST()
    593   __ Orn(w8, w0, Operand(w1, UXTW, 2));  in TEST()
    660   __ And(w8, w0, Operand(w1, UXTW, 2));  in TEST()
    806   __ Bic(w8, w0, Operand(w1, UXTW, 2));  in TEST()
    938   __ Eor(w8, w0, Operand(w1, UXTW, 2));  in TEST()
    1005  __ Eon(w8, w0, Operand(w1, UXTW, 2));  in TEST()
    4208  __ Prfm(op, MemOperand(x0, input, UXTW));  in TEST()
    [all …]
|
D | test-cpu-features-aarch64.cc |
    177  TEST_NONE(add_0, add(w0, w1, Operand(w2, UXTW, 3)))
    304  TEST_NONE(ldrh_3, ldrh(w0, MemOperand(x1, w2, UXTW, 1)))
    313  TEST_NONE(ldrsb_7, ldrsb(w0, MemOperand(x1, w2, UXTW, 0)))
    324  TEST_NONE(ldrsh_6, ldrsh(w0, MemOperand(x1, w2, UXTW, 0)))
    344  TEST_NONE(ldr_10, ldr(x0, MemOperand(x1, w2, UXTW, 3)))
    460  TEST_NONE(strh_3, strh(w0, MemOperand(x1, w2, UXTW, 0)))
    470  TEST_NONE(str_8, str(x0, MemOperand(x1, w2, UXTW, 0)))
    674  TEST_FP(ldr_13, ldr(h0, MemOperand(x1, w2, UXTW, 1)))
    708  TEST_FP(str_11, str(h0, MemOperand(x1, w2, UXTW, 1)))
    710  TEST_FP(str_13, str(s0, MemOperand(x1, w2, UXTW, 0)))
    [all …]
|
/external/vixl/benchmarks/aarch64/ |
D | bench-utils.cc | 244 __ Peek(PickR(size), Operand(claim.W(), UXTW)); in GenerateOperandSequence()
|
/external/llvm/lib/Target/AArch64/Utils/ |
D | AArch64BaseInfo.h | 361 UXTW, enumerator
|
/external/llvm/lib/Target/AArch64/ |
D | AArch64FastISel.cpp |
    670   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    694   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    754   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    791   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    813   Addr.setExtendType(AArch64_AM::UXTW);  in computeAddress()
    991   Addr.getExtendType() == AArch64_AM::UXTW )  in simplifyAddress()
    1002  if (Addr.getExtendType() == AArch64_AM::UXTW)  in simplifyAddress()
    1770  if (Addr.getExtendType() == AArch64_AM::UXTW ||  in emitLoad()
    2037  if (Addr.getExtendType() == AArch64_AM::UXTW ||  in emitStore()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/Utils/ |
D | AArch64BaseInfo.h | 459 UXTW, enumerator
|
/external/llvm/lib/Target/AArch64/InstPrinter/ |
D | AArch64InstPrinter.cpp |
    1104  if (ExtType == AArch64_AM::UXTW || ExtType == AArch64_AM::UXTX) {  in printArithExtend()
    1110  ExtType == AArch64_AM::UXTW) ) {  in printArithExtend()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/AsmParser/ |
D | AArch64AsmParser.cpp |
    1099  if (!MatchShift && (ShiftExtendTy == AArch64_AM::UXTW ||  in isSVEDataVectorRegWithShiftExtend()
    1253  ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW ||  in isExtend()
    1266  ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW;  in isExtend64()
    1291  return (ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW) &&  in isMemWExtend()
    1737  if (ET == AArch64_AM::LSL) ET = AArch64_AM::UXTW;  in addExtendOperands()
    2752  .Case("uxtw", AArch64_AM::UXTW)  in tryParseOptionalShiftExtend()
|
/external/llvm/lib/Target/AArch64/AsmParser/ |
D | AArch64AsmParser.cpp |
    990   ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW ||  in isExtend()
    1025  return (ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW) &&  in isMemWExtend()
    1561  if (ET == AArch64_AM::LSL) ET = AArch64_AM::UXTW;  in addExtendOperands()
    2391  .Case("uxtw", AArch64_AM::UXTW)  in tryParseOptionalShiftExtend()
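
Both parser copies show the same two steps: tryParseOptionalShiftExtend() maps the literal "uxtw" to AArch64_AM::UXTW, and addExtendOperands() canonicalizes a bare LSL to UXTW, since in a 32-bit extended-register operand "lsl" is just the width-matching unsigned extend. A hypothetical distillation of that path (not the LLVM parser code):

    #include <string>

    enum class Ext { Invalid, LSL, UXTW, SXTW };

    // Map the mnemonic to an extend kind (tryParseOptionalShiftExtend),
    // then canonicalize a bare LSL to UXTW for 32-bit operands
    // (the addExtendOperands rewrite shown above).
    Ext ParseExtend32(const std::string& mnemonic) {
      Ext et = Ext::Invalid;
      if (mnemonic == "lsl") et = Ext::LSL;
      else if (mnemonic == "uxtw") et = Ext::UXTW;
      else if (mnemonic == "sxtw") et = Ext::SXTW;
      if (et == Ext::LSL) et = Ext::UXTW;  // mirrors line 1561 above
      return et;
    }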
|