Lines Matching refs:v8i32

340 def : Pat<(v4i32 (extract_subvector (v8i32 VR256:$src), (iPTR 0))),
341 (v4i32 (EXTRACT_SUBREG (v8i32 VR256:$src), sub_xmm))>;
363 (INSERT_SUBREG (v8i32 (IMPLICIT_DEF)), VR128:$src, sub_xmm)>;
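
File lines 340-363 above are the subvector patterns: extracting or inserting the low 128 bits of a 256-bit integer vector costs nothing, so the patterns select plain subregister operations instead of real instructions. As an illustrative sketch in the same style (not a verbatim quote of the context the listing elides around line 363):

// Illustrative sketch: a zero-index insert of a 128-bit value into an
// undefined 256-bit value becomes an INSERT_SUBREG on the low xmm
// subregister, so no instruction is emitted.
def : Pat<(v8i32 (insert_subvector undef, (v4i32 VR128:$src), (iPTR 0))),
          (INSERT_SUBREG (v8i32 (IMPLICIT_DEF)), VR128:$src, sub_xmm)>;
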
420 def : Pat<(v4i64 (bitconvert (v8i32 VR256:$src))), (v4i64 VR256:$src)>;
425 def : Pat<(v8i32 (bitconvert (v4i64 VR256:$src))), (v8i32 VR256:$src)>;
426 def : Pat<(v8i32 (bitconvert (v16i16 VR256:$src))), (v8i32 VR256:$src)>;
427 def : Pat<(v8i32 (bitconvert (v32i8 VR256:$src))), (v8i32 VR256:$src)>;
428 def : Pat<(v8i32 (bitconvert (v4f64 VR256:$src))), (v8i32 VR256:$src)>;
429 def : Pat<(v8i32 (bitconvert (v8f32 VR256:$src))), (v8i32 VR256:$src)>;
431 def : Pat<(v16i16 (bitconvert (v8i32 VR256:$src))), (v16i16 VR256:$src)>;
436 def : Pat<(v32i8 (bitconvert (v8i32 VR256:$src))), (v32i8 VR256:$src)>;
441 def : Pat<(v8f32 (bitconvert (v8i32 VR256:$src))), (v8f32 VR256:$src)>;
446 def : Pat<(v4f64 (bitconvert (v8i32 VR256:$src))), (v4f64 VR256:$src)>;
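
File lines 420-446 are the no-op bitcast patterns: a bitconvert between any two 256-bit vector types stays in VR256, so the output dag simply re-types the same register. The file carries one such pattern per ordered type pair; this cross-reference shows only the pairs involving v8i32. The same idiom for a pair that does not appear here would look like this (illustrative, not quoted from the file):

// Illustrative: the identical no-op bitcast idiom for a 256-bit type pair
// without v8i32, hence absent from this v8i32 listing.
def : Pat<(v32i8 (bitconvert (v16i16 VR256:$src))), (v32i8 VR256:$src)>;
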
487 [(set VR256:$dst, (v8i32 immAllZerosV))]>;
498 [(set VR256:$dst, (v8i32 immAllOnesV))]>;
633 def : Pat<(v8i32 (X86Movss VR256:$src1, VR256:$src2)),
635 (VMOVSSrr (EXTRACT_SUBREG (v8i32 VR256:$src1), sub_xmm),
636 (EXTRACT_SUBREG (v8i32 VR256:$src2), sub_xmm)),
983 def : Pat<(alignedstore256 (v8i32 VR256:$src), addr:$dst),
987 def : Pat<(store (v8i32 VR256:$src), addr:$dst),
1002 (v8i32 VR256:$src), (iPTR 0))), addr:$dst),
1021 (v8i32 VR256:$src), (iPTR 0))), addr:$dst),
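
File lines 983-1021 select 256-bit integer stores: alignedstore256 picks the aligned move, a plain store the unaligned one, and lines 1002/1021 store only the low 128-bit half through an extract_subvector at index 0. The listing truncates the output side of each pattern; a hedged sketch of the unaligned case, assuming the standard AVX VMOVDQUYmr store definition:

// Sketch only: unaligned 256-bit integer store selected onto the AVX
// unaligned move; the real right-hand sides are elided by the listing.
def : Pat<(store (v8i32 VR256:$src), addr:$dst),
          (VMOVDQUYmr addr:$dst, VR256:$src)>;
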
2049 def : Pat<(v8f32 (sint_to_fp (v8i32 VR256:$src))),
2054 def : Pat<(v8i32 (fp_to_sint (v8f32 VR256:$src))),
2056 def : Pat<(v8i32 (fp_to_sint (loadv8f32 addr:$src))),
2591 def : Pat<(v8i32 (X86Shufp VR256:$src1, VR256:$src2, (i8 imm:$imm))),
2593 def : Pat<(v8i32 (X86Shufp VR256:$src1,
2687 def : Pat<(v8i32 (X86Unpckl VR256:$src1, (bc_v8i32 (loadv4i64 addr:$src2)))),
2689 def : Pat<(v8i32 (X86Unpckl VR256:$src1, VR256:$src2)),
2691 def : Pat<(v8i32 (X86Unpckh VR256:$src1, (bc_v8i32 (loadv4i64 addr:$src2)))),
2693 def : Pat<(v8i32 (X86Unpckh VR256:$src1, VR256:$src2)),
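
File lines 2591-2693 map the v8i32 forms of the target shuffle nodes (X86Shufp, X86Unpckl, X86Unpckh) onto instructions; on AVX1 the 256-bit integer shuffles are routinely handled through the corresponding packed-single instructions, since 256-bit integer shuffles only arrive with AVX2. A hedged sketch of one such mapping, assuming the standard VUNPCKLPSYrr definition (the listing elides the output dags):

// Sketch only: register-register v8i32 unpack-low selected onto the 256-bit
// packed-single unpack instruction.
def : Pat<(v8i32 (X86Unpckl VR256:$src1, VR256:$src2)),
          (VUNPCKLPSYrr VR256:$src1, VR256:$src2)>;
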
3616 def : Pat<(alignednontemporalstore (v8i32 VR256:$src), addr:$dst),
3963 defm PADDD : PDI_binop_all<0xFE, "paddd", add, v4i32, v8i32,
3985 defm PSUBD : PDI_binop_all<0xFA, "psubd", sub, v4i32, v8i32,
4031 defm VPMULUDQY : PDI_binop_rm2<0xF4, "vpmuludq", X86pmuludq, v4i64, v8i32,
4095 VR256, v8i32, v4i32, loadv2i64,
4102 VR256, v8i32, v4i32, loadv2i64,
4109 VR256, v8i32, v4i32, loadv2i64,
4197 defm PCMPEQD : PDI_binop_all<0x76, "pcmpeqd", X86pcmpeq, v4i32, v8i32,
4203 defm PCMPGTD : PDI_binop_all<0x66, "pcmpgtd", X86pcmpgt, v4i32, v8i32,
4269 defm PSHUFD : sse2_pshuffle<"pshufd", v4i32, v8i32, X86PShufd, NoVLX>, PD;
4395 defm VPACKSSDW : sse2_pack_y<0x6B, "vpackssdw", v16i16, v8i32, X86Packss>,
4400 defm VPACKUSDW : sse4_pack_y<0x2B, "vpackusdw", v16i16, v8i32, X86Packus>,
4491 defm VPUNPCKLDQ : sse2_unpack_y<0x62, "vpunpckldq", v8i32, X86Unpckl>,
4495 defm VPUNPCKHDQ : sse2_unpack_y<0x6A, "vpunpckhdq", v8i32, X86Unpckh>,
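
File lines 3963-4495 are defm instantiations: each line expands a multiclass, and for PDI_binop_all the paired v4i32/v8i32 arguments name the 128-bit and 256-bit element types so a single defm line covers both vector widths. A toy multiclass of the same shape, purely illustrative and not the real PDI_binop_all (the instruction names in the last line are assumed from the standard SSE/AVX definitions):

// Toy example of the width-parameterised multiclass idiom used above.
multiclass binop_patterns<SDNode OpNode, ValueType VT128, ValueType VT256,
                          Instruction Inst128, Instruction Inst256> {
  def : Pat<(VT128 (OpNode VR128:$a, VR128:$b)), (Inst128 VR128:$a, VR128:$b)>;
  def : Pat<(VT256 (OpNode VR256:$a, VR256:$b)), (Inst256 VR256:$a, VR256:$b)>;
}
// Assumed instruction names; one defm line covers both vector widths.
defm : binop_patterns<add, v4i32, v8i32, VPADDDrr, VPADDDYrr>;
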
4740 def : Pat<(v8i32 (X86Vinsert (v8i32 immAllZerosV), GR32:$src2, (iPTR 0))),
4743 def : Pat<(v4i64 (X86Vinsert (bc_v4i64 (v8i32 immAllZerosV)), GR64:$src2, (iPTR 0))),
4746 def : Pat<(v8i32 (X86Vinsert undef, GR32:$src2, (iPTR 0))),
4855 def : Pat<(v8i32 (X86vzmovl (insert_subvector undef,
4860 def : Pat<(v8i32 (X86vzmovl (insert_subvector undef,
5075 def : Pat<(v8i32 (X86Movshdup VR256:$src)),
5077 def : Pat<(v8i32 (X86Movshdup (bc_v8i32 (loadv4i64 addr:$src)))),
5079 def : Pat<(v8i32 (X86Movsldup VR256:$src)),
5081 def : Pat<(v8i32 (X86Movsldup (bc_v8i32 (loadv4i64 addr:$src)))),
5394 def v32i1sextv32i8 : PatLeaf<(v32i8 (X86pcmpgt (bc_v32i8 (v8i32 immAllZerosV)),
5397 def v8i1sextv8i32 : PatLeaf<(v8i32 (X86vsrai VR256:$src, (i8 31)))>;
5427 defm VPABSD : SS3I_unop_rm_y<0x1E, "vpabsd", v8i32, X86Abs>, VEX, VEX_L;
5441 (bc_v4i64 (add (v8i32 VR256:$src), (v8i1sextv8i32)))),
5602 defm VPHADDDY : SS3I_binop_rm<0x02, "vphaddd", X86hadd, v8i32, VR256,
5608 defm VPHSUBDY : SS3I_binop_rm<0x06, "vphsubd", X86hsub, v8i32, VR256,
5717 def : Pat<(v8i32 (X86PAlignr VR256:$src1, VR256:$src2, (i8 imm:$imm))),
5841 def : Pat<(v8i32 (ExtOp (v16i8 VR128:$src))),
5846 def : Pat<(v8i32 (ExtOp (v8i16 VR128:$src))),
5861 def : Pat<(v8i32 (!cast<PatFrag>(ExtTy#"extloadvi8") addr:$src)),
5866 def : Pat<(v8i32 (!cast<PatFrag>(ExtTy#"extloadvi16") addr:$src)),
5887 def : Pat<(v8i32 (ExtOp (bc_v16i8 (v2i64 (scalar_to_vector (loadi64 addr:$src)))))),
5889 def : Pat<(v8i32 (ExtOp (v16i8 (vzmovl_v2i64 addr:$src)))),
5891 def : Pat<(v8i32 (ExtOp (v16i8 (vzload_v2i64 addr:$src)))),
5893 def : Pat<(v8i32 (ExtOp (bc_v16i8 (loadv2i64 addr:$src)))),
5905 def : Pat<(v8i32 (ExtOp (bc_v8i16 (loadv2i64 addr:$src)))),
5907 def : Pat<(v8i32 (ExtOp (v8i16 (vzmovl_v2i64 addr:$src)))),
5909 def : Pat<(v8i32 (ExtOp (v8i16 (vzload_v2i64 addr:$src)))),
5911 def : Pat<(v8i32 (ExtOp (bc_v8i16 (loadv2i64 addr:$src)))),
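
File lines 5841-5911 sit inside a multiclass parameterised over the extension operator: ExtOp stands for the sign- or zero-extending node, and ExtTy is spliced into PatFrag names with !cast (lines 5861/5866), so one body matches register sources, extending loads, and the vzmovl/vzload forms that all produce a v8i32. A toy class in the same spirit, with names that are illustrative rather than taken from the file (the instruction in the def line is assumed from the standard AVX2 definitions):

// Toy example of parameterising a pattern over the extension operator.
class PmovxPat<SDPatternOperator ExtOp, ValueType DstVT, ValueType SrcVT,
               Instruction Inst>
  : Pat<(DstVT (ExtOp (SrcVT VR128:$src))), (Inst VR128:$src)>;

// Assumed instruction name: zero-extend eight i16 lanes to eight i32 lanes.
def : PmovxPat<zext, v8i32, v8i16, VPMOVZXWDYrr>;
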
6800 defm VPMINSDY : SS48I_binop_rm<0x39, "vpminsd", smin, v8i32, VR256,
6803 defm VPMINUDY : SS48I_binop_rm<0x3B, "vpminud", umin, v8i32, VR256,
6806 defm VPMAXSDY : SS48I_binop_rm<0x3D, "vpmaxsd", smax, v8i32, VR256,
6809 defm VPMAXUDY : SS48I_binop_rm<0x3F, "vpmaxud", umax, v8i32, VR256,
6812 defm VPMULDQY : SS48I_binop_rm2<0x28, "vpmuldq", X86pmuldq, v4i64, v8i32,
6862 defm VPMULLDY : SS48I_binop_rm<0x40, "vpmulld", mul, v8i32, VR256,
7080 def : Pat<(v8i32 (vselect (v8i32 VR256:$mask), (v8i32 VR256:$src1),
7081 (v8i32 VR256:$src2))),
7083 def : Pat<(v8f32 (vselect (v8i32 VR256:$mask), (v8f32 VR256:$src1),
7141 def : Pat<(v8i32 (X86vzmovl (v8i32 VR256:$src))),
7142 (VBLENDPSYrri (v8i32 (AVX_SET0)), VR256:$src, (i8 1))>;
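
File lines 7080-7142 lower selects and zero-extending moves on v8i32 to blends: a vselect whose mask is a v8i32 becomes a variable blend, and X86vzmovl (keep lane 0, zero the rest) becomes an immediate blend against an all-zero register, already fully visible at lines 7141-7142. For the vselect case the listing cuts off the output dag; a hedged sketch of what it plausibly selects, assuming the standard VBLENDVPSYrr definition, whose mask is the third source operand:

// Sketch only: vselect(mask, src1, src2) as a variable blend; set mask bits
// take src1, so src1 rides in the blend's second source slot.
def : Pat<(v8i32 (vselect (v8i32 VR256:$mask), (v8i32 VR256:$src1),
                          (v8i32 VR256:$src2))),
          (VBLENDVPSYrr VR256:$src2, VR256:$src1, VR256:$mask)>;
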
7878 def : Pat<(vinsert128_insert:$ins (v8i32 VR256:$src1), (v4i32 VR128:$src2),
7895 def : Pat<(vinsert128_insert:$ins (v8i32 VR256:$src1),
7955 (v8i32 VR256:$src1),
7970 def : Pat<(store (v4i32 (vextract128_extract:$ext (v8i32 VR256:$src1),
8061 loadv4i64, v8f32, v8i32>, VEX_L;
8071 def : Pat<(v8f32 (X86VPermilpv VR256:$src1, (v8i32 VR256:$src2))),
8080 def : Pat<(v8i32 (X86VPermilpi VR256:$src1, (i8 imm:$imm))),
8084 def : Pat<(v8i32 (X86VPermilpi (bc_v8i32 (loadv4i64 addr:$src1)),
8133 def : Pat<(v8i32 (X86VPerm2x128 VR256:$src1, VR256:$src2, (i8 imm:$imm))),
8142 def : Pat<(v8i32 (X86VPerm2x128 VR256:$src1,
8273 defm VPBLENDDY : AVX2_binop_rmi<0x02, "vpblendd", X86Blendi, v8i32,
8318 v4i32, v8i32, NoVLX>;
8381 def : Pat<(v8i32 (X86VBroadcast GR32:$src)),
8396 def : Pat<(v8i32 (X86VBroadcast (loadi32 addr:$src))),
8426 def : Pat<(v8i32 (X86VBroadcast GR32:$src)),
8427 (VINSERTF128rr (INSERT_SUBREG (v8i32 (IMPLICIT_DEF)),
8464 defm VPERMD : avx2_perm<0x36, "vpermd", loadv4i64, v8i32, WriteShuffle256>;
8513 def : Pat<(v8i32 (X86VPerm2x128 VR256:$src1, VR256:$src2, (i8 imm:$imm))),
8526 def : Pat<(v8i32 (X86VPerm2x128 VR256:$src1, (bc_v8i32 (loadv4i64 addr:$src2)),
8552 def : Pat<(vinsert128_insert:$ins (v8i32 VR256:$src1), (v4i32 VR128:$src2),
8569 def : Pat<(vinsert128_insert:$ins (v8i32 VR256:$src1),
8606 (v8i32 VR256:$src1),
8621 def : Pat<(store (v4i32 (vextract128_extract:$ext (v8i32 VR256:$src1),
8691 defm : maskmov_lowering<"VMASKMOVPSY", VR256, v8f32, v8i32, "VBLENDVPSY", v8i32>;
8692 defm : maskmov_lowering<"VMASKMOVPDY", VR256, v4f64, v4i64, "VBLENDVPDY", v8i32>;
8696 defm : maskmov_lowering<"VMASKMOVPSY", VR256, v8i32, v8i32, "VBLENDVPSY", v8i32>;
8697 defm : maskmov_lowering<"VMASKMOVPDY", VR256, v4i64, v4i64, "VBLENDVPDY", v8i32>;
8702 defm : maskmov_lowering<"VPMASKMOVDY", VR256, v8i32, v8i32, "VBLENDVPSY", v8i32>;
8703 defm : maskmov_lowering<"VPMASKMOVQY", VR256, v4i64, v4i64, "VBLENDVPDY", v8i32>;
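
File lines 8691-8703 instantiate maskmov_lowering for the 256-bit masked loads and stores; the instructions are passed as strings ("VMASKMOVPSY", "VBLENDVPSY", ...) and the mask and blend-control types are integer vectors, which is why v8i32 shows up even on the f32 and f64 element lines. A toy multiclass illustrating the string-plus-!cast technique those arguments imply (not the real maskmov_lowering body; a plain load is used here only as a stand-in node):

// Toy example: the instruction is named by a string, suffixed with the
// operand form, and resolved with !cast<Instruction>, so one multiclass body
// serves several register classes and element types.
multiclass load_by_name<string InstrStr, ValueType VT> {
  def : Pat<(VT (load addr:$ptr)),
            (!cast<Instruction>(InstrStr#"rm") addr:$ptr)>;
}
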
8741 defm VPSLLVD : avx2_var_shift<0x47, "vpsllvd", shl, v4i32, v8i32>;
8743 defm VPSRLVD : avx2_var_shift<0x45, "vpsrlvd", srl, v4i32, v8i32>;
8745 defm VPSRAVD : avx2_var_shift<0x46, "vpsravd", sra, v4i32, v8i32>;
8747 defm VPSRAVD_Int : avx2_var_shift<0x46, "vpsravd", X86vsrav, v4i32, v8i32>;
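
File lines 8741-8747 close the listing with the AVX2 variable shifts: each defm pairs the 128-bit (v4i32) and 256-bit (v8i32) element types, and the _Int variant at line 8747 uses the target-specific X86vsrav node rather than the generic sra. In the file the selection patterns live inside the instruction definitions the multiclass produces; written as a standalone pattern purely for illustration, the 256-bit arithmetic case would read roughly as follows (VPSRAVDYrr is assumed from the standard AVX2 definitions):

// Illustration only: per-element arithmetic right shift with a vector
// shift-amount operand, selected onto the AVX2 variable-shift instruction.
def : Pat<(v8i32 (sra VR256:$src, (v8i32 VR256:$amt))),
          (VPSRAVDYrr VR256:$src, VR256:$amt)>;
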