Lines Matching refs:v32i32
203 if (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 || LocVT == MVT::v64i16 || in CC_Hexagon_VarArg()
348 (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 || LocVT == MVT::v64i16 || in CC_HexagonVector()
371 (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 || LocVT == MVT::v64i16 || in CC_HexagonVector()
419 LocVT == MVT::v32i32 || LocVT == MVT::v16i64 || in RetCC_Hexagon()
421 LocVT = MVT::v32i32; in RetCC_Hexagon()
422 ValVT = MVT::v32i32; in RetCC_Hexagon()
439 if (LocVT == MVT::v16i32 || LocVT == MVT::v32i32 || LocVT == MVT::v64i32) { in RetCC_Hexagon()
490 } else if (LocVT == MVT::v32i32) { in RetCC_HexagonVector()
545 ty == MVT::v16i64 || ty == MVT::v32i32 || ty == MVT::v64i16 || in IsHvxVectorType()
898 (UseHVX && UseHVXDbl) && (VT == MVT::v32i32 || VT == MVT::v16i64 || in getIndexedAddressParts()
1125 ((RegVT == MVT::v16i64 || RegVT == MVT::v32i32 || in LowerFormalArguments()
1133 } else if ((RegVT == MVT::v16i64 || RegVT == MVT::v32i32 || in LowerFormalArguments()
1761 addRegisterClass(MVT::v32i32, &Hexagon::VecDblRegsRegClass); in HexagonTargetLowering()
1767 addRegisterClass(MVT::v32i32, &Hexagon::VectorRegs128BRegClass); in HexagonTargetLowering()
2001 setOperationAction(ISD::CONCAT_VECTORS, MVT::v32i32, Custom); in HexagonTargetLowering()
2072 for (MVT VT : {MVT::v128i8, MVT::v64i16, MVT::v32i32, MVT::v16i64}) { in HexagonTargetLowering()
2553 MVT OpTy = Subtarget.useHVXSglOps() ? MVT::v16i32 : MVT::v32i32; in LowerCONCAT_VECTORS()
2554 MVT ReTy = Subtarget.useHVXSglOps() ? MVT::v32i32 : MVT::v64i32; in LowerCONCAT_VECTORS()
2888 case MVT::v32i32: in getRegForInlineAsmConstraint()
3035 case MVT::v32i32: in allowsMisalignedMemoryAccesses()
3063 case MVT::v32i32: in findRepresentativeClass()
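
Read together, these hits trace one pattern: in HVX double-vector (128-byte) mode, the backend treats MVT::v32i32 as the canonical 2048-bit vector type. The calling-convention hooks coerce its sibling types (v16i64, v64i16, v128i8) to it, the constructor registers it with the double-vector register classes (lines 1761 and 1767), and CONCAT_VECTORS is custom-lowered for it (lines 2001 and 2553-2554). The following is a minimal standalone sketch of the coercion step as it appears in the RetCC_Hexagon hits at lines 419-422; the MVT enum and the canonicalizeHvx2048 helper here are illustrative stand-ins, not LLVM's actual MVT class or CCValAssign machinery.

// Sketch only: mirrors the shape of the RetCC_Hexagon hits at lines
// 419-422 above, not the verbatim LLVM source. The set of 2048-bit
// HVX vector types and the coercion to v32i32 come from the listing;
// the enum and helper names are hypothetical.
#include <cassert>

enum class MVT { v16i64, v32i32, v64i16, v128i8, Other };

// Any 2048-bit HVX vector type is rewritten to the canonical v32i32
// before a register or stack slot is assigned.
static void canonicalizeHvx2048(MVT &LocVT, MVT &ValVT) {
  if (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 ||
      LocVT == MVT::v64i16 || LocVT == MVT::v128i8) {
    LocVT = MVT::v32i32;  // matches line 421: LocVT = MVT::v32i32;
    ValVT = MVT::v32i32;  // matches line 422: ValVT = MVT::v32i32;
  }
}

int main() {
  MVT Loc = MVT::v64i16, Val = MVT::v64i16;
  canonicalizeHvx2048(Loc, Val);
  assert(Loc == MVT::v32i32 && Val == MVT::v32i32);
  return 0;
}

The same type check recurs in CC_Hexagon_VarArg (line 203), CC_HexagonVector (lines 348 and 371), and IsHvxVectorType (line 545), which is why v32i32 appears as the canonical LocVT throughout the listing.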