
Searched refs:UXTW (Results 1 – 25 of 53) sorted by relevance


/external/vixl/src/aarch64/
operands-aarch64.cc
230 return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_); in ToExtendedRegister()
269 VIXL_ASSERT((extend == UXTW) || (extend == SXTW) || (extend == SXTX)); in MemOperand()
330 VIXL_ASSERT((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX)); in MemOperand()
operands-aarch64.h
673 if (extend == UXTW) return SVE_UXTW;
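
Taken together, these hits show the two vixl contexts where UXTW appears: data-processing operands (Operand) and register-offset addressing (MemOperand). A minimal usage sketch, assuming the standard vixl MacroAssembler API; the register choices are arbitrary:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // UXTW zero-extends a W register to 64 bits before it is shifted and
    // combined with the base value or base address.
    void EmitUxtwExamples(MacroAssembler* masm) {
      // x9 = x10 + (ZeroExtend64(w11) << 3)
      masm->Add(x9, x10, Operand(w11, UXTW, 3));
      // w0 = *(x1 + (ZeroExtend64(w2) << 2))
      masm->Ldr(w0, MemOperand(x1, w2, UXTW, 2));
    }
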
/external/llvm/lib/Target/AArch64/MCTargetDesc/
AArch64AddressingModes.h
42 UXTW, enumerator
62 case AArch64_AM::UXTW: return "uxtw"; in getShiftExtendName()
129 case 2: return AArch64_AM::UXTW; in getExtendType()
156 case AArch64_AM::UXTW: return 2; break; in getExtendEncoding()
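
These hits map one enumerator through its name and encoding: UXTW prints as "uxtw" and encodes as 2. A standalone sketch of the full table (the UXTW case is confirmed by the hits above; the remaining values follow the architectural 3-bit option field, and the names here are local to the sketch, not the LLVM API):

    #include <cstdint>

    // AArch64 "option" field for extended-register operands: bit 2 selects
    // sign extension, bits 1:0 select the source width. UXTW is 0b010.
    enum class Extend : uint8_t {
      UXTB = 0, UXTH = 1, UXTW = 2, UXTX = 3,
      SXTB = 4, SXTH = 5, SXTW = 6, SXTX = 7,
    };

    inline const char* GetExtendName(Extend e) {
      static const char* const kNames[] = {"uxtb", "uxth", "uxtw", "uxtx",
                                           "sxtb", "sxth", "sxtw", "sxtx"};
      return kNames[static_cast<uint8_t>(e)];
    }
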
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/MCTargetDesc/
AArch64AddressingModes.h
42 UXTW, enumerator
62 case AArch64_AM::UXTW: return "uxtw"; in getShiftExtendName()
129 case 2: return AArch64_AM::UXTW; in getExtendType()
156 case AArch64_AM::UXTW: return 2; break; in getExtendEncoding()
/external/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/
AArch64AddressingModes.h
42 UXTW, enumerator
62 case AArch64_AM::UXTW: return "uxtw"; in getShiftExtendName()
129 case 2: return AArch64_AM::UXTW; in getExtendType()
156 case AArch64_AM::UXTW: return 2; break; in getExtendEncoding()
/external/llvm-project/llvm/lib/Target/AArch64/
AArch64RegisterInfo.td
1078 // UXTW(8|16|32|64)
1079 def ZPR#RegWidth#AsmOpndExtUXTW8Only : ZPRExtendAsmOperand<"UXTW", RegWidth, 8, 0b1>;
1080 def ZPR#RegWidth#AsmOpndExtUXTW8 : ZPRExtendAsmOperand<"UXTW", RegWidth, 8>;
1081 def ZPR#RegWidth#AsmOpndExtUXTW16 : ZPRExtendAsmOperand<"UXTW", RegWidth, 16>;
1082 def ZPR#RegWidth#AsmOpndExtUXTW32 : ZPRExtendAsmOperand<"UXTW", RegWidth, 32>;
1083 def ZPR#RegWidth#AsmOpndExtUXTW64 : ZPRExtendAsmOperand<"UXTW", RegWidth, 64>;
1085 …def ZPR#RegWidth#ExtUXTW8Only : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 8, "On…
1086 def ZPR#RegWidth#ExtUXTW8 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 8>;
1087 def ZPR#RegWidth#ExtUXTW16 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 16>;
1088 def ZPR#RegWidth#ExtUXTW32 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 32>;
[all …]
AArch64SchedPredicates.td
20 def CheckExtUXTW : CheckImmOperand_s<3, "AArch64_AM::UXTW">;
36 def CheckMemExtUXTW : CheckImmOperand_s<3, "AArch64_AM::UXTW">;
AArch64FastISel.cpp
742 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
766 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
826 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
863 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
885 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
1068 Addr.getExtendType() == AArch64_AM::UXTW ) in simplifyAddress()
1079 if (Addr.getExtendType() == AArch64_AM::UXTW) in simplifyAddress()
1855 if (Addr.getExtendType() == AArch64_AM::UXTW || in emitLoad()
2146 if (Addr.getExtendType() == AArch64_AM::UXTW || in emitStore()
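
computeAddress() selects UXTW when an address index is a 32-bit value zero-extended to 64 bits, letting the extension fold into the memory access itself. An illustrative source pattern (an assumption for this sketch, not taken from these files):

    #include <cstdint>

    // With an unsigned 32-bit index, the required zero-extension folds into
    // the addressing mode, typically compiling to:
    //   ldr w0, [x0, w1, uxtw #2]
    int32_t LoadElement(const int32_t* base, uint32_t idx) {
      return base[idx];
    }
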
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/
AArch64RegisterInfo.td
1057 // UXTW(8|16|32|64)
1058 def ZPR#RegWidth#AsmOpndExtUXTW8Only : ZPRExtendAsmOperand<"UXTW", RegWidth, 8, 0b1>;
1059 def ZPR#RegWidth#AsmOpndExtUXTW8 : ZPRExtendAsmOperand<"UXTW", RegWidth, 8>;
1060 def ZPR#RegWidth#AsmOpndExtUXTW16 : ZPRExtendAsmOperand<"UXTW", RegWidth, 16>;
1061 def ZPR#RegWidth#AsmOpndExtUXTW32 : ZPRExtendAsmOperand<"UXTW", RegWidth, 32>;
1062 def ZPR#RegWidth#AsmOpndExtUXTW64 : ZPRExtendAsmOperand<"UXTW", RegWidth, 64>;
1064 …def ZPR#RegWidth#ExtUXTW8Only : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 8, "On…
1065 def ZPR#RegWidth#ExtUXTW8 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 8>;
1066 def ZPR#RegWidth#ExtUXTW16 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 16>;
1067 def ZPR#RegWidth#ExtUXTW32 : ZPRExtendRegisterOperand<0b0, 0b0, "UXTW", RegWidth, 32>;
[all …]
AArch64SchedPredicates.td
20 def CheckExtUXTW : CheckImmOperand_s<3, "AArch64_AM::UXTW">;
36 def CheckMemExtUXTW : CheckImmOperand_s<3, "AArch64_AM::UXTW">;
AArch64FastISel.cpp
744 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
768 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
828 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
865 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
887 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
1070 Addr.getExtendType() == AArch64_AM::UXTW ) in simplifyAddress()
1081 if (Addr.getExtendType() == AArch64_AM::UXTW) in simplifyAddress()
1857 if (Addr.getExtendType() == AArch64_AM::UXTW || in emitLoad()
2148 if (Addr.getExtendType() == AArch64_AM::UXTW || in emitStore()
/external/vixl/test/aarch64/
test-disasm-aarch64.cc
330 COMPARE(adds(x9, x10, Operand(x11, UXTW, 3)), "adds x9, x10, w11, uxtw #3"); in TEST()
342 COMPARE(add(wsp, wsp, Operand(w4, UXTW, 2)), "add wsp, wsp, w4, lsl #2"); in TEST()
356 COMPARE(subs(x9, x10, Operand(x11, UXTW, 3)), "subs x9, x10, w11, uxtw #3"); in TEST()
368 COMPARE(sub(wsp, wsp, Operand(w4, UXTW, 2)), "sub wsp, wsp, w4, lsl #2"); in TEST()
1021 COMPARE(ldr(w0, MemOperand(x1, w2, UXTW)), "ldr w0, [x1, w2, uxtw]"); in TEST()
1022 COMPARE(ldr(w3, MemOperand(x4, w5, UXTW, 2)), "ldr w3, [x4, w5, uxtw #2]"); in TEST()
1031 COMPARE(ldr(x0, MemOperand(x1, w2, UXTW)), "ldr x0, [x1, w2, uxtw]"); in TEST()
1032 COMPARE(ldr(x3, MemOperand(x4, w5, UXTW, 3)), "ldr x3, [x4, w5, uxtw #3]"); in TEST()
1042 COMPARE(str(w0, MemOperand(x1, w2, UXTW)), "str w0, [x1, w2, uxtw]"); in TEST()
1043 COMPARE(str(w3, MemOperand(x4, w5, UXTW, 2)), "str w3, [x4, w5, uxtw #2]"); in TEST()
[all …]
test-disasm-sve-aarch64.cc
85 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW)), in TEST()
87 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 1)), in TEST()
89 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 2)), in TEST()
91 COMPARE_PREFIX(adr(z30.VnD(), SVEMemOperand(z14.VnD(), z16.VnD(), UXTW, 3)), in TEST()
3376 SVEMemOperand(sp, z1.VnS(), UXTW)), in TEST()
3384 SVEMemOperand(sp, z5.VnS(), UXTW)), in TEST()
3388 SVEMemOperand(x17, z23.VnS(), UXTW)), in TEST()
3396 SVEMemOperand(x18, z10.VnS(), UXTW)), in TEST()
3402 SVEMemOperand(sp, z4.VnS(), UXTW, 1)), in TEST()
3417 SVEMemOperand(x27, z24.VnS(), UXTW)), in TEST()
[all …]
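
In these scalar-plus-vector forms, UXTW means each 32-bit vector element is zero-extended before being used as a per-lane offset from the scalar base. A hedged gather-load sketch, assuming vixl's SVE MacroAssembler API with arbitrary register choices:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // For each active S lane i: z0.S[i] = *(x17 + ZeroExtend64(z23.S[i]))
    void EmitGather(MacroAssembler* masm) {
      masm->Ld1w(z0.VnS(), p0.Zeroing(), SVEMemOperand(x17, z23.VnS(), UXTW));
    }
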
test-api-aarch64.cc
1012 VIXL_CHECK(!Operand(w15, UXTW).IsPlainRegister()); in TEST()
1025 VIXL_CHECK(!MemOperand(x5, wzr, UXTW).IsPlainRegister()); in TEST()
1026 VIXL_CHECK(!MemOperand(x6, wzr, UXTW, 3).IsPlainRegister()); in TEST()
1044 VIXL_CHECK(MemOperand(x5, wzr, UXTW).IsEquivalentToPlainRegister()); in TEST()
1045 VIXL_CHECK(MemOperand(x6, wzr, UXTW, 3).IsEquivalentToPlainRegister()); in TEST()
1067 VIXL_CHECK(!SVEMemOperand(x2, z3.VnS(), UXTW).IsPlainScalar()); in TEST()
1098 VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScalarPlusVector()); in TEST()
1118 VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScatterGather()); in TEST()
test-assembler-sve-aarch64.cc
8937 (static_cast<int>(mod) == UXTW)); in Ldff1Helper()
8942 offs_is_unsigned = (static_cast<int>(mod) == UXTW) ? true : false; in Ldff1Helper()
9145 ldff1_32_scaled_offset_helper(kHRegSize, ldff1h, ld1h, UXTW); in sve_ldff1_scalar_plus_vector_32_scaled_offset()
9150 ldff1_32_scaled_offset_helper(kSRegSize, ldff1w, ld1w, UXTW); in sve_ldff1_scalar_plus_vector_32_scaled_offset()
9155 ldff1_32_scaled_offset_helper(kHRegSize, ldff1sh, ld1sh, UXTW); in sve_ldff1_scalar_plus_vector_32_scaled_offset()
9174 ldff1_32_unscaled_offset_helper(kBRegSize, ldff1b, ld1b, UXTW); in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
9179 ldff1_32_unscaled_offset_helper(kHRegSize, ldff1h, ld1h, UXTW); in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
9184 ldff1_32_unscaled_offset_helper(kSRegSize, ldff1w, ld1w, UXTW); in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
9189 ldff1_32_unscaled_offset_helper(kBRegSize, ldff1sb, ld1sb, UXTW); in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
9194 ldff1_32_unscaled_offset_helper(kHRegSize, ldff1sh, ld1sh, UXTW); in sve_ldff1_scalar_plus_vector_32_unscaled_offset()
[all …]
test-disasm-neon-aarch64.cc
206 COMPARE(ldr(b1, MemOperand(x2, w3, UXTW)), "ldr b1, [x2, w3, uxtw]"); in TEST()
211 COMPARE(ldr(b31, MemOperand(sp, wzr, UXTW)), "ldr b31, [sp, wzr, uxtw]"); in TEST()
214 COMPARE(ldr(h1, MemOperand(x2, w3, UXTW)), "ldr h1, [x2, w3, uxtw]"); in TEST()
216 COMPARE(ldr(h3, MemOperand(x4, w5, UXTW, 1)), "ldr h3, [x4, w5, uxtw #1]"); in TEST()
223 COMPARE(ldr(s1, MemOperand(x2, w3, UXTW)), "ldr s1, [x2, w3, uxtw]"); in TEST()
225 COMPARE(ldr(s3, MemOperand(x4, w5, UXTW, 2)), "ldr s3, [x4, w5, uxtw #2]"); in TEST()
232 COMPARE(ldr(d1, MemOperand(x2, w3, UXTW)), "ldr d1, [x2, w3, uxtw]"); in TEST()
234 COMPARE(ldr(d3, MemOperand(x4, w5, UXTW, 3)), "ldr d3, [x4, w5, uxtw #3]"); in TEST()
241 COMPARE(ldr(q1, MemOperand(x2, w3, UXTW)), "ldr q1, [x2, w3, uxtw]"); in TEST()
243 COMPARE(ldr(q3, MemOperand(x4, w5, UXTW, 4)), "ldr q3, [x4, w5, uxtw #4]"); in TEST()
[all …]
test-simulator-aarch64.cc
291 __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, n_index_shift)); in Test1Op_Helper()
437 __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift)); in Test2Op_Helper()
441 __ Ldr(fm, MemOperand(inputs_base, index_m, UXTW, index_shift)); in Test2Op_Helper()
581 __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift)); in Test3Op_Helper()
585 __ Ldr(fm, MemOperand(inputs_base, index_m, UXTW, index_shift)); in Test3Op_Helper()
589 __ Ldr(fa, MemOperand(inputs_base, index_a, UXTW, index_shift)); in Test3Op_Helper()
730 __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift)); in TestCmp_Helper()
734 __ Ldr(fm, MemOperand(inputs_base, index_m, UXTW, index_shift)); in TestCmp_Helper()
869 __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, index_shift)); in TestCmpZero_Helper()
1012 __ Ldr(fn, MemOperand(inputs_base, index_n, UXTW, n_index_shift)); in TestFPToFixed_Helper()
[all …]
test-assembler-aarch64.cc
192 __ Mvn(x14, Operand(w2, UXTW, 4)); in TEST()
366 __ Mov(x27, Operand(w13, UXTW, 4)); in TEST()
427 __ Mov(x29, Operand(x12, UXTW, 1)); in TEST()
499 __ Orr(w8, w0, Operand(w1, UXTW, 2)); in TEST()
593 __ Orn(w8, w0, Operand(w1, UXTW, 2)); in TEST()
660 __ And(w8, w0, Operand(w1, UXTW, 2)); in TEST()
806 __ Bic(w8, w0, Operand(w1, UXTW, 2)); in TEST()
938 __ Eor(w8, w0, Operand(w1, UXTW, 2)); in TEST()
1005 __ Eon(w8, w0, Operand(w1, UXTW, 2)); in TEST()
4208 __ Prfm(op, MemOperand(x0, input, UXTW)); in TEST()
[all …]
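
In all of these, the UXTW operand is resolved before the operation itself: the W source is zero-extended, shifted, and only then combined. For example, the Mvn at line 192 inverts the extended, shifted value; a plain-C++ model of that operand (illustrative, not the vixl implementation):

    #include <cstdint>

    // Models Operand(w2, UXTW, 4) feeding Mvn: zero-extend the 32-bit
    // source, shift left by 4, then take the bitwise NOT.
    uint64_t MvnUxtw4(uint32_t w2) {
      return ~(static_cast<uint64_t>(w2) << 4);
    }
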
/external/llvm/lib/Target/AArch64/Utils/
AArch64BaseInfo.h
361 UXTW, enumerator
/external/vixl/benchmarks/aarch64/
bench-utils.cc
244 __ Peek(PickR(size), Operand(claim.W(), UXTW)); in GenerateOperandSequence()
/external/llvm/lib/Target/AArch64/
AArch64FastISel.cpp
670 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
694 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
754 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
791 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
813 Addr.setExtendType(AArch64_AM::UXTW); in computeAddress()
991 Addr.getExtendType() == AArch64_AM::UXTW ) in simplifyAddress()
1002 if (Addr.getExtendType() == AArch64_AM::UXTW) in simplifyAddress()
1770 if (Addr.getExtendType() == AArch64_AM::UXTW || in emitLoad()
2037 if (Addr.getExtendType() == AArch64_AM::UXTW || in emitStore()
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/Utils/
AArch64BaseInfo.h
459 UXTW, enumerator
/external/llvm-project/llvm/lib/Target/AArch64/Utils/
AArch64BaseInfo.h
459 UXTW, enumerator
/external/llvm-project/llvm/test/CodeGen/AArch64/
sve-intrinsics-conversion.ll
138 ; UXTW
/external/llvm/lib/Target/AArch64/InstPrinter/
AArch64InstPrinter.cpp
1104 if (ExtType == AArch64_AM::UXTW || ExtType == AArch64_AM::UXTX) { in printArithExtend()
1110 ExtType == AArch64_AM::UXTW) ) { in printArithExtend()
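
printArithExtend is the printer-side of the alias exercised by the wsp cases in the vixl disassembly tests above: a UXTW or UXTX arithmetic extend that involves the stack pointer is printed as its preferred "lsl" form. A simplified standalone model of that rule (local names and a collapsed SP check, not the LLVM signatures):

    #include <string>

    enum class Ext { UXTW, UXTX, SXTW };  // subset, for illustration

    std::string PrintArithExtend(Ext e, bool involves_sp, unsigned shift) {
      // With SP/WSP as destination or first source, UXTW (32-bit ops) and
      // UXTX (64-bit ops) alias to LSL; a zero shift is omitted entirely.
      if (involves_sp && (e == Ext::UXTW || e == Ext::UXTX)) {
        return shift ? "lsl #" + std::to_string(shift) : std::string();
      }
      std::string name = (e == Ext::UXTW) ? "uxtw"
                       : (e == Ext::UXTX) ? "uxtx" : "sxtw";
      if (shift != 0) name += " #" + std::to_string(shift);
      return name;
    }
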
