Searched refs:MaskWidth (Results 1 – 6 of 6) sorted by relevance
2088 int &MaskWidth) { in isBitfieldPositioningOp() argument
2123 MaskWidth = countTrailingOnes(NonZeroBits >> ShiftAmount); in isBitfieldPositioningOp()
2771 unsigned MaskWidth = CVal.logBase2(); in ProcessUMulZExtIdiom() local
2772 if (MaskWidth == MulWidth) in ProcessUMulZExtIdiom()
1989 int &MaskWidth) { in isBitfieldPositioningOp() argument
2024 MaskWidth = countTrailingOnes(NonZeroBits >> ShiftAmount); in isBitfieldPositioningOp()
3944 unsigned MaskWidth = CVal.logBase2(); in processUMulZExtIdiom() local
3945 if (MaskWidth == MulWidth) in processUMulZExtIdiom()
6356 int MaskWidth = Mask.size(); in resolveTargetShuffleInputsAndMask() local
6359 int lo = UsedInputs.size() * MaskWidth; in resolveTargetShuffleInputsAndMask()
6360 int hi = lo + MaskWidth; in resolveTargetShuffleInputsAndMask()
6375 M -= MaskWidth; in resolveTargetShuffleInputsAndMask()
30238 unsigned MaskWidth = std::max<unsigned>(OpMask.size(), RootMask.size()); in combineX86ShufflesRecursively() local
30244 assert(isPowerOf2_32(MaskWidth) && "Non-power-of-2 shuffle mask sizes"); in combineX86ShufflesRecursively()
30250 SmallVector<int, 64> Mask(MaskWidth, SM_SentinelUndef); in combineX86ShufflesRecursively()
30256 for (unsigned i = 0; i < MaskWidth; ++i) { in combineX86ShufflesRecursively()
30271 if ((RootMaskedIdx < (SrcOpIndex * MaskWidth)) || in combineX86ShufflesRecursively()
30272 (((SrcOpIndex + 1) * MaskWidth) <= RootMaskedIdx)) { in combineX86ShufflesRecursively()
[all …]
25365 int MaskWidth = std::max<int>(OpMask.size(), RootMask.size()); in combineX86ShufflesRecursively() local
25373 Mask.reserve(MaskWidth); in combineX86ShufflesRecursively()
25379 for (int i = 0; i < MaskWidth; ++i) { in combineX86ShufflesRecursively()