Lines Matching refs:v8i32

1060 setOperationAction(ISD::ROTL, MVT::v8i32, Custom); in X86TargetLowering()
1067 addRegisterClass(MVT::v8i32, &X86::VR256RegClass); in X86TargetLowering()
1106 setOperationAction(ISD::FP_TO_SINT, MVT::v8i32, Legal); in X86TargetLowering()
1109 setOperationAction(ISD::SINT_TO_FP, MVT::v8i32, Legal); in X86TargetLowering()
1129 setOperationAction(ISD::SETCC, MVT::v8i32, Custom); in X86TargetLowering()
1137 setOperationAction(ISD::SIGN_EXTEND, MVT::v8i32, Custom); in X86TargetLowering()
1140 setOperationAction(ISD::ZERO_EXTEND, MVT::v8i32, Custom); in X86TargetLowering()
1143 setOperationAction(ISD::ANY_EXTEND, MVT::v8i32, Custom); in X86TargetLowering()
1151 setOperationAction(ISD::CTPOP, MVT::v8i32, Custom); in X86TargetLowering()
1156 setOperationAction(ISD::CTTZ, MVT::v8i32, Custom); in X86TargetLowering()
1160 setOperationAction(ISD::CTTZ_ZERO_UNDEF, MVT::v8i32, Custom); in X86TargetLowering()
1174 setOperationAction(ISD::ADD, MVT::v8i32, Legal); in X86TargetLowering()
1179 setOperationAction(ISD::SUB, MVT::v8i32, Legal); in X86TargetLowering()
1184 setOperationAction(ISD::MUL, MVT::v8i32, Legal); in X86TargetLowering()
1188 setOperationAction(ISD::UMUL_LOHI, MVT::v8i32, Custom); in X86TargetLowering()
1189 setOperationAction(ISD::SMUL_LOHI, MVT::v8i32, Custom); in X86TargetLowering()
1195 setOperationAction(ISD::SMAX, MVT::v8i32, Legal); in X86TargetLowering()
1198 setOperationAction(ISD::UMAX, MVT::v8i32, Legal); in X86TargetLowering()
1201 setOperationAction(ISD::SMIN, MVT::v8i32, Legal); in X86TargetLowering()
1204 setOperationAction(ISD::UMIN, MVT::v8i32, Legal); in X86TargetLowering()
1208 setOperationAction(ISD::UINT_TO_FP, MVT::v8i32, Custom); in X86TargetLowering()
1212 setLoadExtAction(ISD::SEXTLOAD, MVT::v8i32, MVT::v8i8, Legal); in X86TargetLowering()
1214 setLoadExtAction(ISD::SEXTLOAD, MVT::v8i32, MVT::v8i16, Legal); in X86TargetLowering()
1219 setLoadExtAction(ISD::ZEXTLOAD, MVT::v8i32, MVT::v8i8, Legal); in X86TargetLowering()
1221 setLoadExtAction(ISD::ZEXTLOAD, MVT::v8i32, MVT::v8i16, Legal); in X86TargetLowering()
1226 setOperationAction(ISD::ADD, MVT::v8i32, Custom); in X86TargetLowering()
1231 setOperationAction(ISD::SUB, MVT::v8i32, Custom); in X86TargetLowering()
1236 setOperationAction(ISD::MUL, MVT::v8i32, Custom); in X86TargetLowering()
1242 setOperationAction(ISD::SMAX, MVT::v8i32, Custom); in X86TargetLowering()
1245 setOperationAction(ISD::UMAX, MVT::v8i32, Custom); in X86TargetLowering()
1248 setOperationAction(ISD::SMIN, MVT::v8i32, Custom); in X86TargetLowering()
1251 setOperationAction(ISD::UMIN, MVT::v8i32, Custom); in X86TargetLowering()
1257 setOperationAction(ISD::SRL, MVT::v8i32, Custom); in X86TargetLowering()
1260 setOperationAction(ISD::SHL, MVT::v8i32, Custom); in X86TargetLowering()
1263 setOperationAction(ISD::SRA, MVT::v8i32, Custom); in X86TargetLowering()
1294 for (auto VT : { MVT::v32i8, MVT::v16i16, MVT::v8i32 }) { in X86TargetLowering()
1331 setLoadExtAction(ISD::ZEXTLOAD, MVT::v8i64, MVT::v8i32, Legal); in X86TargetLowering()
1332 setLoadExtAction(ISD::SEXTLOAD, MVT::v8i64, MVT::v8i32, Legal); in X86TargetLowering()
1369 setOperationAction(ISD::FP_TO_UINT, MVT::v8i32, Legal); in X86TargetLowering()
1377 setOperationAction(ISD::UINT_TO_FP, MVT::v8i32, Legal); in X86TargetLowering()
1386 setTruncStoreAction(MVT::v8i64, MVT::v8i32, Legal); in X86TargetLowering()
1393 setTruncStoreAction(MVT::v8i32, MVT::v8i8, Legal); in X86TargetLowering()
1394 setTruncStoreAction(MVT::v8i32, MVT::v8i16, Legal); in X86TargetLowering()
1402 setOperationAction(ISD::MLOAD, MVT::v8i32, Custom); in X86TargetLowering()
1404 setOperationAction(ISD::MSTORE, MVT::v8i32, Custom); in X86TargetLowering()
1409 setOperationAction(ISD::TRUNCATE, MVT::v8i32, Custom); in X86TargetLowering()
1432 setOperationAction(ISD::SINT_TO_FP, MVT::v8i32, Legal); in X86TargetLowering()
1433 setOperationAction(ISD::UINT_TO_FP, MVT::v8i32, Legal); in X86TargetLowering()
1434 setOperationAction(ISD::FP_TO_SINT, MVT::v8i32, Legal); in X86TargetLowering()
1435 setOperationAction(ISD::FP_TO_UINT, MVT::v8i32, Legal); in X86TargetLowering()
1545 setOperationAction(ISD::CTLZ, MVT::v8i32, Legal); in X86TargetLowering()
1549 setOperationAction(ISD::CTLZ_ZERO_UNDEF, MVT::v8i32, Legal); in X86TargetLowering()
1554 setOperationAction(ISD::CTTZ_ZERO_UNDEF, MVT::v8i32, Custom); in X86TargetLowering()
1559 setOperationAction(ISD::CTLZ, MVT::v8i32, Custom); in X86TargetLowering()
1563 setOperationAction(ISD::CTLZ_ZERO_UNDEF, MVT::v8i32, Custom); in X86TargetLowering()
1719 setOperationAction(ISD::AND, MVT::v8i32, Legal); in X86TargetLowering()
1720 setOperationAction(ISD::OR, MVT::v8i32, Legal); in X86TargetLowering()
1721 setOperationAction(ISD::XOR, MVT::v8i32, Legal); in X86TargetLowering()
1982 return MVT::v8i32; in getOptimalMemOpType()
2109 case MVT::v32i8: case MVT::v8i32: case MVT::v4i64: case MVT::v8f32: in findRepresentativeClass()
4404 Vec = DAG.getNode(ISD::BUILD_VECTOR, dl, MVT::v8i32, Ops); in getZeroVector()
4552 MVT CastVT = Subtarget.hasAVX2() ? MVT::v8i32 : MVT::v8f32; in Insert128BitVector()
4679 Vec = DAG.getNode(ISD::BUILD_VECTOR, dl, MVT::v8i32, Ops); in getOnesVector()
4682 Vec = Concat128BitVectors(Vec, Vec, MVT::v8i32, 8, DAG, dl); in getOnesVector()
6197 } else if (VT == MVT::v8i32 || VT == MVT::v16i16) { in LowerToHorizontalOp()
6240 if ((VT == MVT::v8f32 || VT == MVT::v4f64 || VT == MVT::v8i32 || in LowerToHorizontalOp()
6286 if (VT == MVT::v4i32 || VT == MVT::v8i32 || VT == MVT::v16i32) in LowerBUILD_VECTOR()
6296 if (VT == MVT::v4i32 || (VT == MVT::v8i32 && Subtarget->hasInt256())) in LowerBUILD_VECTOR()
6393 Item = DAG.getNode(ISD::SCALAR_TO_VECTOR, dl, MVT::v8i32, Item); in LowerBUILD_VECTOR()
6399 SDValue ZeroVec = getZeroVector(MVT::v8i32, Subtarget, DAG, dl); in LowerBUILD_VECTOR()
7104 case MVT::v8i32: in lowerVectorShuffleAsBlend()
7115 MVT BlendVT = VT.getSizeInBits() > 128 ? MVT::v8i32 : MVT::v4i32; in lowerVectorShuffleAsBlend()
10529 DAG.getNode(X86ISD::PSHUFD, DL, MVT::v8i32, in lowerV4I64VectorShuffle()
10530 DAG.getBitcast(MVT::v8i32, V1), in lowerV4I64VectorShuffle()
10630 DAG.getNode(ISD::BUILD_VECTOR, DL, MVT::v8i32, VPermMask)); in lowerV8F32VectorShuffle()
10635 DAG.getNode(ISD::BUILD_VECTOR, DL, MVT::v8i32, VPermMask), V1); in lowerV8F32VectorShuffle()
10666 assert(V1.getSimpleValueType() == MVT::v8i32 && "Bad operand type!"); in lowerV8I32VectorShuffle()
10667 assert(V2.getSimpleValueType() == MVT::v8i32 && "Bad operand type!"); in lowerV8I32VectorShuffle()
10676 if (SDValue ZExt = lowerVectorShuffleAsZeroOrAnyExtend(DL, MVT::v8i32, V1, V2, in lowerV8I32VectorShuffle()
10680 if (SDValue Blend = lowerVectorShuffleAsBlend(DL, MVT::v8i32, V1, V2, Mask, in lowerV8I32VectorShuffle()
10685 if (SDValue Broadcast = lowerVectorShuffleAsBroadcast(DL, MVT::v8i32, V1, in lowerV8I32VectorShuffle()
10693 if (is128BitLaneRepeatedShuffleMask(MVT::v8i32, Mask, RepeatedMask)) { in lowerV8I32VectorShuffle()
10696 return DAG.getNode(X86ISD::PSHUFD, DL, MVT::v8i32, V1, in lowerV8I32VectorShuffle()
10701 lowerVectorShuffleWithUNPCK(DL, MVT::v8i32, Mask, V1, V2, DAG)) in lowerV8I32VectorShuffle()
10707 lowerVectorShuffleAsShift(DL, MVT::v8i32, V1, V2, Mask, DAG)) in lowerV8I32VectorShuffle()
10711 DL, MVT::v8i32, V1, V2, Mask, Subtarget, DAG)) in lowerV8I32VectorShuffle()
10722 X86ISD::VPERMV, DL, MVT::v8i32, in lowerV8I32VectorShuffle()
10723 DAG.getNode(ISD::BUILD_VECTOR, DL, MVT::v8i32, VPermMask), V1); in lowerV8I32VectorShuffle()
10729 DL, MVT::v8i32, V1, V2, Mask, Subtarget, DAG)) in lowerV8I32VectorShuffle()
10733 return lowerVectorShuffleAsDecomposedShuffleBlend(DL, MVT::v8i32, V1, V2, in lowerV8I32VectorShuffle()
10954 case MVT::v8i32: in lower256BitVectorShuffle()
12767 assert((VecIntVT == MVT::v4i32 || VecIntVT == MVT::v8i32) && in lowerUINT_TO_FP_vXi32()
12865 case MVT::v8i32: in lowerUINT_TO_FP_vec()
13185 ((VT != MVT::v8i32) || (InVT != MVT::v8i16)) && in LowerAVXExtend()
13324 In = DAG.getBitcast(MVT::v8i32, In); in LowerTRUNCATE()
13325 In = DAG.getVectorShuffle(MVT::v8i32, DL, In, DAG.getUNDEF(MVT::v8i32), in LowerTRUNCATE()
13341 if ((VT == MVT::v8i16) && (InVT == MVT::v8i32)) { in LowerTRUNCATE()
15205 (VT != MVT::v8i32 || InVT != MVT::v8i16) && in LowerSIGN_EXTEND()
18111 (VT == MVT::v4i64) ? MVT::v8i32 : MVT::v16i32; in LowerMUL()
18191 (VT == MVT::v8i32 && Subtarget->hasInt256())); in LowerMUL_LOHI()
18226 if (VT == MVT::v8i32) { in LowerMUL_LOHI()
18892 MVT ExtVT = MVT::v8i32; in LowerShift()
18902 MVT ExtVT = MVT::v8i32; in LowerShift()
19852 if (IndexVT == MVT::v8i32) in LowerMSCATTER()
25071 (VT != MVT::v4i64 && VT != MVT::v8i32 && VT != MVT::v16i16))) in performShiftToAllZeros()
27575 (Subtarget->hasInt256() && (VT == MVT::v16i16 || VT == MVT::v8i32))) && in PerformAddCombine()
27608 (Subtarget->hasInt256() && (VT == MVT::v16i16 || VT == MVT::v8i32))) && in PerformSubCombine()
28390 case MVT::v8i32: in getRegForInlineAsmConstraint()
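
Nearly every hit above is one of LLVM's TargetLowering legalization hooks, called from the X86TargetLowering() constructor to tell the SelectionDAG legalizer which v8i32 operations map directly onto machine instructions (Legal) and which need target-specific lowering code (Custom). As a hedged illustration of that pattern only (not the actual X86 code; MyTargetLowering, MySubtarget, hasWideInt() and MyTarget::VR256RegClass are invented names for the sketch, while addRegisterClass, setOperationAction, setLoadExtAction and setTruncStoreAction are the real LLVM hooks), a minimal constructor fragment looks roughly like this:

// Illustrative fragment, not compilable on its own: it assumes the usual
// TableGen-generated register class and subtarget headers for "MyTarget".
MyTargetLowering::MyTargetLowering(const TargetMachine &TM,
                                   const MySubtarget &STI)
    : TargetLowering(TM) {
  if (STI.hasWideInt()) {
    // v8i32 lives in a 256-bit vector register class.
    addRegisterClass(MVT::v8i32, &MyTarget::VR256RegClass);

    // Plain arithmetic maps directly onto machine instructions.
    setOperationAction(ISD::ADD, MVT::v8i32, Legal);
    setOperationAction(ISD::SUB, MVT::v8i32, Legal);
    setOperationAction(ISD::MUL, MVT::v8i32, Legal);

    // Shifts and population count need target-specific expansion, so the
    // legalizer will hand these nodes to LowerOperation().
    setOperationAction(ISD::SHL,   MVT::v8i32, Custom);
    setOperationAction(ISD::SRL,   MVT::v8i32, Custom);
    setOperationAction(ISD::SRA,   MVT::v8i32, Custom);
    setOperationAction(ISD::CTPOP, MVT::v8i32, Custom);

    // Extending loads and truncating stores the hardware supports directly.
    setLoadExtAction(ISD::SEXTLOAD, MVT::v8i32, MVT::v8i16, Legal);
    setLoadExtAction(ISD::ZEXTLOAD, MVT::v8i32, MVT::v8i16, Legal);
    setTruncStoreAction(MVT::v8i32, MVT::v8i16, Legal);
  }
}

Operations registered as Custom are later routed through the target's LowerOperation() override, which is how helpers such as lowerV8I32VectorShuffle() and LowerTRUNCATE() in the listing above end up being invoked.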