/external/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Analysis/Utils/ |
D | Local.h |
    49  if (Constant *OpC = dyn_cast<Constant>(Op)) {  variable
    50  if (OpC->isZeroValue())
    55  uint64_t OpValue = OpC->getUniqueInteger().getZExtValue();
    65  if (IntIdxTy->isVectorTy() && !OpC->getType()->isVectorTy())
    66  OpC = ConstantVector::getSplat(IntIdxTy->getVectorNumElements(), OpC);
    69  Constant *OC = ConstantExpr::getIntegerCast(OpC, IntIdxTy, true /*SExt*/);
|
/external/llvm-project/llvm/include/llvm/Analysis/Utils/ |
D | Local.h |
    50  if (Constant *OpC = dyn_cast<Constant>(Op)) {  variable
    51  if (OpC->isZeroValue())
    56  uint64_t OpValue = OpC->getUniqueInteger().getZExtValue();
    64  if (IntIdxTy->isVectorTy() && !OpC->getType()->isVectorTy())
    65  OpC = ConstantVector::getSplat(
    66  cast<VectorType>(IntIdxTy)->getElementCount(), OpC);
    70  ConstantExpr::getIntegerCast(OpC, IntIdxTy, true /*SExt*/);
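Note: both Analysis/Utils/Local.h variants above (and the older Transforms/Utils one further down) come from the EmitGEPOffset() helper, which folds a constant GEP index into the pointer-index type before adding it to the running offset. A minimal sketch of that constant path, written against the llvm-project revision shown in the second block (ConstantExpr::getIntegerCast still exists there; canonicalizeGEPIndex is a hypothetical name, and IntIdxTy/Op stand in for values from the surrounding routine):

#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
using namespace llvm;

// Sketch: canonicalize one constant GEP index to the pointer-index type.
// Returns nullptr for a zero index, since it contributes nothing.
static Constant *canonicalizeGEPIndex(Constant *OpC, Type *IntIdxTy) {
  if (OpC->isZeroValue())
    return nullptr;
  // A scalar index used with a vector-of-pointers GEP is splatted so the
  // offset arithmetic stays elementwise.
  if (IntIdxTy->isVectorTy() && !OpC->getType()->isVectorTy())
    OpC = ConstantVector::getSplat(
        cast<VectorType>(IntIdxTy)->getElementCount(), OpC);
  // GEP indices are signed, so sign-extend (or truncate) to the index width.
  return ConstantExpr::getIntegerCast(OpC, IntIdxTy, /*isSigned=*/true);
}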
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/IR/ |
D | Operator.cpp |
    42  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());  in accumulateConstantOffset() local
    43  if (!OpC)  in accumulateConstantOffset()
    45  if (OpC->isZero())  in accumulateConstantOffset()
    50  unsigned ElementIdx = OpC->getZExtValue();  in accumulateConstantOffset()
    57  APInt Index = OpC->getValue().sextOrTrunc(Offset.getBitWidth());  in accumulateConstantOffset()
|
/external/llvm/lib/IR/ |
D | Operator.cpp |
    29  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());  in accumulateConstantOffset() local
    30  if (!OpC)  in accumulateConstantOffset()
    32  if (OpC->isZero())  in accumulateConstantOffset()
    37  unsigned ElementIdx = OpC->getZExtValue();  in accumulateConstantOffset()
    44  APInt Index = OpC->getValue().sextOrTrunc(Offset.getBitWidth());  in accumulateConstantOffset()
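Note: both Operator.cpp blocks above are GEPOperator::accumulateConstantOffset(), which turns an all-constant GEP into a byte offset (the InlineCost.cpp hits further down do the same walk, falling back to a simplified-value lookup when an index is not a literal constant). A self-contained sketch of the same loop, assuming fixed-width (non-scalable) indexed types; foldConstantGEPOffset is a hypothetical name:

#include "llvm/ADT/APInt.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/Operator.h"
using namespace llvm;

// Sketch: fold every constant index of a GEP into a byte offset.
// Returns false as soon as a non-constant index is seen.
static bool foldConstantGEPOffset(const GEPOperator &GEP, const DataLayout &DL,
                                  APInt &Offset) {
  for (gep_type_iterator GTI = gep_type_begin(GEP), E = gep_type_end(GEP);
       GTI != E; ++GTI) {
    auto *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      return false;                 // variable index: caller must give up
    if (OpC->isZero())
      continue;                     // contributes nothing to the offset

    if (StructType *STy = GTI.getStructTypeOrNull()) {
      // Struct field: the index selects a field, so add its layout offset.
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(Offset.getBitWidth(), SL->getElementOffset(ElementIdx));
      continue;
    }

    // Sequential type: scale the (signed) index by the element alloc size.
    APInt Index = OpC->getValue().sextOrTrunc(Offset.getBitWidth());
    Offset += Index * APInt(Offset.getBitWidth(),
                            DL.getTypeAllocSize(GTI.getIndexedType()));
  }
  return true;
}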
|
/external/llvm/lib/Target/AMDGPU/ |
D | AMDGPUAnnotateKernelFeatures.cpp |
    102  const auto *OpC = dyn_cast<Constant>(U);  in visitConstantExprsRecursively() local
    103  if (!OpC)  in visitConstantExprsRecursively()
    106  if (!ConstantExprVisited.insert(OpC).second)  in visitConstantExprsRecursively()
    109  Stack.push_back(OpC);  in visitConstantExprsRecursively()
    128  const auto *OpC = dyn_cast<Constant>(U);  in hasAddrSpaceCast() local
    129  if (!OpC)  in hasAddrSpaceCast()
    132  if (visitConstantExprsRecursively(OpC, ConstantExprVisited))  in hasAddrSpaceCast()
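Note: the AMDGPUAnnotateKernelFeatures hits are a worklist walk over a constant-expression tree: every constant operand is pushed onto an explicit stack, and a visited set keeps shared subexpressions from being processed twice. A stripped-down sketch of the same shape, specialized here to answering "does this constant contain an addrspacecast?" (the pass itself checks more than that; constantUsesAddrSpaceCast is a hypothetical name):

#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Instruction.h"
using namespace llvm;

// Sketch: iterative DFS over a constant's operands, deduplicated with a
// visited set so DAG-shaped constant expressions stay linear to traverse.
static bool constantUsesAddrSpaceCast(const Constant *EntryC) {
  SmallPtrSet<const Constant *, 8> Visited;
  SmallVector<const Constant *, 16> Stack;
  Visited.insert(EntryC);
  Stack.push_back(EntryC);

  while (!Stack.empty()) {
    const Constant *C = Stack.pop_back_val();

    if (const auto *CE = dyn_cast<ConstantExpr>(C))
      if (CE->getOpcode() == Instruction::AddrSpaceCast)
        return true;

    // Queue every constant operand exactly once.
    for (const Use &U : C->operands()) {
      const auto *OpC = dyn_cast<Constant>(U);
      if (!OpC)
        continue;
      if (!Visited.insert(OpC).second)
        continue;
      Stack.push_back(OpC);
    }
  }
  return false;
}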
|
/external/llvm-project/llvm/lib/Transforms/IPO/ |
D | Annotation2Metadata.cpp |
    47  auto *OpC = dyn_cast<ConstantStruct>(&Op);  in convertAnnotation2Metadata() local
    48  if (!OpC || OpC->getNumOperands() != 4)  in convertAnnotation2Metadata()
    50  auto *StrGEP = dyn_cast<ConstantExpr>(OpC->getOperand(1));  in convertAnnotation2Metadata()
    60  auto *Bitcast = dyn_cast<ConstantExpr>(OpC->getOperand(0));  in convertAnnotation2Metadata()
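Note: Annotation2Metadata.cpp walks @llvm.global.annotations, whose initializer is an array of { i8* target, i8* string, i8* file, i32 line } structs. A hedged sketch of the same walk; forEachGlobalAnnotation is a hypothetical helper, and it strips pointer casts instead of matching the exact GEP/bitcast shape the pass expects:

#include "llvm/ADT/STLExtras.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Module.h"
using namespace llvm;

// Sketch: hand each (annotated value, annotation string) pair in
// @llvm.global.annotations to a callback.
static void forEachGlobalAnnotation(
    Module &M, function_ref<void(Constant *Target, StringRef Ann)> Fn) {
  GlobalVariable *GA = M.getGlobalVariable("llvm.global.annotations");
  if (!GA || !GA->hasInitializer())
    return;
  auto *Init = dyn_cast<ConstantArray>(GA->getInitializer());
  if (!Init)
    return;

  for (Value *Op : Init->operands()) {
    auto *OpC = dyn_cast<ConstantStruct>(Op);
    if (!OpC || OpC->getNumOperands() < 4)
      continue;
    // Operand 0 is (a cast of) the annotated value; operand 1 points at the
    // annotation string stored in a private global.
    auto *StrGV =
        dyn_cast<GlobalVariable>(OpC->getOperand(1)->stripPointerCasts());
    if (!StrGV || !StrGV->hasDefinitiveInitializer())
      continue;
    auto *Str = dyn_cast<ConstantDataArray>(StrGV->getInitializer());
    if (!Str || !Str->isCString())
      continue;
    Fn(cast<Constant>(OpC->getOperand(0)), Str->getAsCString());
  }
}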
|
/external/llvm-project/llvm/lib/Target/PowerPC/ |
D | PPCRegisterInfo.cpp |
    93  static unsigned offsetMinAlignForOpcode(unsigned OpC);
    1084  static unsigned offsetMinAlignForOpcode(unsigned OpC) {  in offsetMinAlignForOpcode() argument
    1085  switch (OpC) {  in offsetMinAlignForOpcode()
    1114  unsigned OpC = MI.getOpcode();  in offsetMinAlign() local
    1115  return offsetMinAlignForOpcode(OpC);  in offsetMinAlign()
    1161  unsigned OpC = MI.getOpcode();  in eliminateFrameIndex() local
    1163  if ((OpC == PPC::DYNAREAOFFSET || OpC == PPC::DYNAREAOFFSET8)) {  in eliminateFrameIndex()
    1170  (OpC == PPC::DYNALLOC || OpC == PPC::DYNALLOC8)) {  in eliminateFrameIndex()
    1176  (OpC == PPC::PREPARE_PROBED_ALLOCA_64 ||  in eliminateFrameIndex()
    1177  OpC == PPC::PREPARE_PROBED_ALLOCA_32 ||  in eliminateFrameIndex()
    [all …]
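Note: offsetMinAlignForOpcode() encodes a PowerPC encoding constraint: DS-form memory ops store their displacement shifted right by 2 (so the offset must be a multiple of 4) and DQ-form ops store it shifted right by 4 (multiple of 16). A toy sketch of the dispatch, with hypothetical enumerators standing in for the real PPC::LD, PPC::STD, PPC::LXV, PPC::STXV opcodes:

// Hypothetical stand-ins for the real opcodes; only the shape of the
// dispatch is meant to be faithful.
enum FakePPCOpcode { FAKE_LWZ, FAKE_LD, FAKE_STD, FAKE_LXV, FAKE_STXV };

// Sketch: minimum alignment an immediate offset must have so the chosen
// opcode can encode it directly.
static unsigned offsetMinAlignSketch(unsigned OpC) {
  switch (OpC) {
  default:
    return 1;   // D-form: full 16-bit signed displacement, any value encodes
  case FAKE_LD:
  case FAKE_STD:
    return 4;   // DS-form: displacement is stored shifted right by 2
  case FAKE_LXV:
  case FAKE_STXV:
    return 16;  // DQ-form: displacement is stored shifted right by 4
  }
}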
|
D | PPCInstrInfo.cpp |
    464  MachineOperand &OpC = Root.getOperand(0);  in reassociateFMA() local
    465  Register RegC = OpC.getReg();  in reassociateFMA()
    1727  unsigned OpC = MI.getOpcode();  in PredicateInstruction() local
    1728  if (OpC == PPC::BLR || OpC == PPC::BLR8) {  in PredicateInstruction()
    1751  } else if (OpC == PPC::B) {  in PredicateInstruction()
    1788  } else if (OpC == PPC::BCTR || OpC == PPC::BCTR8 || OpC == PPC::BCTRL ||  in PredicateInstruction()
    1789  OpC == PPC::BCTRL8) {  in PredicateInstruction()
    1793  bool setLR = OpC == PPC::BCTRL || OpC == PPC::BCTRL8;  in PredicateInstruction()
    1928  int OpC = CmpInstr.getOpcode();  in optimizeCompareInstr() local
    1933  if (OpC == PPC::FCMPUS || OpC == PPC::FCMPUD)  in optimizeCompareInstr()
    [all …]
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/ |
D | PPCRegisterInfo.cpp |
    78  static unsigned offsetMinAlignForOpcode(unsigned OpC);
    963  static unsigned offsetMinAlignForOpcode(unsigned OpC) {  in offsetMinAlignForOpcode() argument
    964  switch (OpC) {  in offsetMinAlignForOpcode()
    993  unsigned OpC = MI.getOpcode();  in offsetMinAlign() local
    994  return offsetMinAlignForOpcode(OpC);  in offsetMinAlign()
    1040  unsigned OpC = MI.getOpcode();  in eliminateFrameIndex() local
    1042  if ((OpC == PPC::DYNAREAOFFSET || OpC == PPC::DYNAREAOFFSET8)) {  in eliminateFrameIndex()
    1049  (OpC == PPC::DYNALLOC || OpC == PPC::DYNALLOC8)) {  in eliminateFrameIndex()
    1055  if (OpC == PPC::SPILL_CR) {  in eliminateFrameIndex()
    1058  } else if (OpC == PPC::RESTORE_CR) {  in eliminateFrameIndex()
    [all …]
|
D | PPCInstrInfo.cpp |
    1439  unsigned OpC = MI.getOpcode();  in PredicateInstruction() local
    1440  if (OpC == PPC::BLR || OpC == PPC::BLR8) {  in PredicateInstruction()
    1459  } else if (OpC == PPC::B) {  in PredicateInstruction()
    1492  } else if (OpC == PPC::BCTR || OpC == PPC::BCTR8 || OpC == PPC::BCTRL ||  in PredicateInstruction()
    1493  OpC == PPC::BCTRL8) {  in PredicateInstruction()
    1497  bool setLR = OpC == PPC::BCTRL || OpC == PPC::BCTRL8;  in PredicateInstruction()
    1626  int OpC = CmpInstr.getOpcode();  in optimizeCompareInstr() local
    1631  if (OpC == PPC::FCMPUS || OpC == PPC::FCMPUD)  in optimizeCompareInstr()
    1644  bool is32BitSignedCompare = OpC == PPC::CMPWI || OpC == PPC::CMPW;  in optimizeCompareInstr()
    1645  bool is32BitUnsignedCompare = OpC == PPC::CMPLWI || OpC == PPC::CMPLW;  in optimizeCompareInstr()
    [all …]
|
/external/llvm/lib/Target/PowerPC/ |
D | PPCRegisterInfo.cpp |
    747  unsigned OpC = MI.getOpcode();  in usesIXAddr() local
    749  switch (OpC) {  in usesIXAddr()
    803  unsigned OpC = MI.getOpcode();  in eliminateFrameIndex() local
    805  if ((OpC == PPC::DYNAREAOFFSET || OpC == PPC::DYNAREAOFFSET8)) {  in eliminateFrameIndex()
    812  (OpC == PPC::DYNALLOC || OpC == PPC::DYNALLOC8)) {  in eliminateFrameIndex()
    818  if (OpC == PPC::SPILL_CR) {  in eliminateFrameIndex()
    821  } else if (OpC == PPC::RESTORE_CR) {  in eliminateFrameIndex()
    824  } else if (OpC == PPC::SPILL_CRBIT) {  in eliminateFrameIndex()
    827  } else if (OpC == PPC::RESTORE_CRBIT) {  in eliminateFrameIndex()
    830  } else if (OpC == PPC::SPILL_VRSAVE) {  in eliminateFrameIndex()
    [all …]
|
D | PPCInstrInfo.cpp |
    1328  unsigned OpC = MI.getOpcode();  in PredicateInstruction() local
    1329  if (OpC == PPC::BLR || OpC == PPC::BLR8) {  in PredicateInstruction()
    1350  } else if (OpC == PPC::B) {  in PredicateInstruction()
    1383  } else if (OpC == PPC::BCTR || OpC == PPC::BCTR8 ||  in PredicateInstruction()
    1384  OpC == PPC::BCTRL || OpC == PPC::BCTRL8) {  in PredicateInstruction()
    1388  bool setLR = OpC == PPC::BCTRL || OpC == PPC::BCTRL8;  in PredicateInstruction()
    1484  unsigned OpC = MI.getOpcode();  in isPredicable() local
    1485  switch (OpC) {  in isPredicable()
    1533  int OpC = CmpInstr.getOpcode();  in optimizeCompareInstr() local
    1538  if (OpC == PPC::FCMPUS || OpC == PPC::FCMPUD)  in optimizeCompareInstr()
    [all …]
|
/external/llvm/include/llvm/Transforms/Utils/ |
D | Local.h |
    207  if (Constant *OpC = dyn_cast<Constant>(Op)) {  variable
    208  if (OpC->isZeroValue())
    213  if (OpC->getType()->isVectorTy())
    214  OpC = OpC->getSplatValue();
    216  uint64_t OpValue = cast<ConstantInt>(OpC)->getZExtValue();
    226  Constant *OC = ConstantExpr::getIntegerCast(OpC, IntPtrTy, true /*SExt*/);
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/ |
D | AMDGPUAnnotateKernelFeatures.cpp |
    126  const auto *OpC = dyn_cast<Constant>(U);  in visitConstantExprsRecursively() local
    127  if (!OpC)  in visitConstantExprsRecursively()
    130  if (!ConstantExprVisited.insert(OpC).second)  in visitConstantExprsRecursively()
    133  Stack.push_back(OpC);  in visitConstantExprsRecursively()
    315  const auto *OpC = dyn_cast<Constant>(U);  in addFeatureAttributes() local
    316  if (!OpC)  in addFeatureAttributes()
    319  if (visitConstantExprsRecursively(OpC, ConstantExprVisited)) {  in addFeatureAttributes()
|
/external/llvm-project/llvm/lib/Target/AMDGPU/ |
D | AMDGPUAnnotateKernelFeatures.cpp |
    139  const auto *OpC = dyn_cast<Constant>(U);  in visitConstantExprsRecursively() local
    140  if (!OpC)  in visitConstantExprsRecursively()
    143  if (!ConstantExprVisited.insert(OpC).second)  in visitConstantExprsRecursively()
    146  Stack.push_back(OpC);  in visitConstantExprsRecursively()
    338  const auto *OpC = dyn_cast<Constant>(U);  in addFeatureAttributes() local
    339  if (!OpC)  in addFeatureAttributes()
    342  if (visitConstantExprsRecursively(OpC, ConstantExprVisited, IsFunc,  in addFeatureAttributes()
|
/external/llvm-project/llvm/lib/IR/ |
D | Operator.cpp |
    42  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());  in getMaxPreservedAlignment() local
    46  Offset = SL->getElementOffset(OpC->getZExtValue());  in getMaxPreservedAlignment()
    52  if (OpC)  in getMaxPreservedAlignment()
    53  ElemCount = OpC->getZExtValue();  in getMaxPreservedAlignment()
|
/external/llvm/include/llvm/IR/ |
D | Operator.h |
    145  static bool isPossiblyExactOpcode(unsigned OpC) {  in isPossiblyExactOpcode() argument
    146  return OpC == Instruction::SDiv ||  in isPossiblyExactOpcode()
    147  OpC == Instruction::UDiv ||  in isPossiblyExactOpcode()
    148  OpC == Instruction::AShr ||  in isPossiblyExactOpcode()
    149  OpC == Instruction::LShr;  in isPossiblyExactOpcode()
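Note: isPossiblyExactOpcode() (identical in all three Operator.h copies below) is the predicate behind PossiblyExactOperator::classof(): only sdiv, udiv, ashr, and lshr can carry the `exact` flag. Typical consumption goes through a dyn_cast rather than calling the predicate directly; a small sketch (hasExactFlag is a hypothetical name):

#include "llvm/IR/Instruction.h"
#include "llvm/IR/Operator.h"
using namespace llvm;

// Sketch: read the exact flag on an instruction, if its opcode can have one.
static bool hasExactFlag(const Instruction &I) {
  // The dyn_cast succeeds exactly when isPossiblyExactOpcode(I.getOpcode()).
  if (const auto *PEO = dyn_cast<PossiblyExactOperator>(&I))
    return PEO->isExact();
  return false;
}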
|
/external/llvm-project/mlir/test/mlir-tblgen/ |
D | op-result.td |
    32  def OpC : NS_Op<"three_normal_result_op", []> {
    36  // CHECK-LABEL: OpC definitions
    37  // CHECK: void OpC::build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::…
    42  // CHECK: void OpC::build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::m…
|
/external/llvm-project/llvm/include/llvm/IR/ |
D | Operator.h |
    140  static bool isPossiblyExactOpcode(unsigned OpC) {  in isPossiblyExactOpcode() argument
    141  return OpC == Instruction::SDiv ||  in isPossiblyExactOpcode()
    142  OpC == Instruction::UDiv ||  in isPossiblyExactOpcode()
    143  OpC == Instruction::AShr ||  in isPossiblyExactOpcode()
    144  OpC == Instruction::LShr;  in isPossiblyExactOpcode()
|
/external/swiftshader/third_party/llvm-10.0/llvm/include/llvm/IR/ |
D | Operator.h |
    140  static bool isPossiblyExactOpcode(unsigned OpC) {  in isPossiblyExactOpcode() argument
    141  return OpC == Instruction::SDiv ||  in isPossiblyExactOpcode()
    142  OpC == Instruction::UDiv ||  in isPossiblyExactOpcode()
    143  OpC == Instruction::AShr ||  in isPossiblyExactOpcode()
    144  OpC == Instruction::LShr;  in isPossiblyExactOpcode()
|
/external/llvm/lib/Transforms/Utils/ |
D | SimplifyLibCalls.cpp |
    1020  auto *OpC = dyn_cast<CallInst>(Op1);  in optimizePow() local
    1021  if (OpC && OpC->hasUnsafeAlgebra() && CI->hasUnsafeAlgebra()) {  in optimizePow()
    1023  Function *OpCCallee = OpC->getCalledFunction();  in optimizePow()
    1028  Value *FMul = B.CreateFMul(OpC->getArgOperand(0), Op2, "mul");  in optimizePow()
    1124  if (SIToFPInst *OpC = dyn_cast<SIToFPInst>(Op)) {  in optimizeExp2() local
    1125  if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() <= 32)  in optimizeExp2()
    1126  LdExpArg = B.CreateSExt(OpC->getOperand(0), B.getInt32Ty());  in optimizeExp2()
    1127  } else if (UIToFPInst *OpC = dyn_cast<UIToFPInst>(Op)) {  in optimizeExp2() local
    1128  if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() < 32)  in optimizeExp2()
    1129  LdExpArg = B.CreateZExt(OpC->getOperand(0), B.getInt32Ty());  in optimizeExp2()
    [all …]
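Note: the optimizeExp2() hits rewrite exp2(sitofp/uitofp i) as ldexp(1.0, i). The width checks differ on purpose: a signed source of up to 32 bits sign-extends into i32 losslessly, while an unsigned source must be strictly narrower than 32 bits so the zero-extended value stays non-negative when ldexp reads it as a signed i32. A condensed sketch of just that argument selection (exp2ArgAsI32 is a hypothetical helper name):

#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

// Sketch: if exp2's operand is an integer converted to FP, return that
// integer as an i32 suitable for ldexp; otherwise return nullptr.
static Value *exp2ArgAsI32(Value *Op, IRBuilder<> &B) {
  if (auto *OpC = dyn_cast<SIToFPInst>(Op)) {
    // Signed sources up to 32 bits sign-extend losslessly into i32.
    if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() <= 32)
      return B.CreateSExt(OpC->getOperand(0), B.getInt32Ty());
  } else if (auto *OpC = dyn_cast<UIToFPInst>(Op)) {
    // Unsigned sources need a spare bit so the zext result is still
    // non-negative when interpreted as a signed i32.
    if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() < 32)
      return B.CreateZExt(OpC->getOperand(0), B.getInt32Ty());
  }
  return nullptr; // not a pattern ldexp can absorb
}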
|
/external/llvm/lib/Analysis/ |
D | InlineCost.cpp |
    313  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());  in accumulateGEPOffset() local
    314  if (!OpC)  in accumulateGEPOffset()
    316  OpC = dyn_cast<ConstantInt>(SimpleOp);  in accumulateGEPOffset()
    317  if (!OpC)  in accumulateGEPOffset()
    319  if (OpC->isZero())  in accumulateGEPOffset()
    324  unsigned ElementIdx = OpC->getZExtValue();  in accumulateGEPOffset()
    331  Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;  in accumulateGEPOffset()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/ |
D | ConstantFolding.cpp |
    1277  unsigned OpC =  in ConstantFoldCompareInstOperands() local
    1279  return ConstantFoldBinaryOpOperands(OpC, LHS, RHS, DL);  in ConstantFoldCompareInstOperands()
    2504  if (ConstantFP *OpC = dyn_cast<ConstantFP>(Call->getArgOperand(0))) {  in isMathLibCallNoop() local
    2505  const APFloat &Op = OpC->getValueAPF();  in isMathLibCallNoop()
    2522  if (OpC->getType()->isDoubleTy())  in isMathLibCallNoop()
    2525  if (OpC->getType()->isFloatTy())  in isMathLibCallNoop()
    2534  if (OpC->getType()->isDoubleTy())  in isMathLibCallNoop()
    2537  if (OpC->getType()->isFloatTy())  in isMathLibCallNoop()
    2555  Type *Ty = OpC->getType();  in isMathLibCallNoop()
    2557  double OpV = getValueAsDouble(OpC);  in isMathLibCallNoop()
    [all …]
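Note: most of the ConstantFolding.cpp hits are in isMathLibCallNoop(), which asks whether a libm call with a known constant argument can be deleted when its result is unused, i.e. whether evaluating it could set errno or raise an FP exception; the double/float checks pick the right range test per type. A sketch of one such per-value check, roughly the sqrt case (sqrtOfConstantIsNoop is a hypothetical name; the real function dispatches on the LibFunc first):

#include "llvm/ADT/APFloat.h"
#include "llvm/IR/Constants.h"
using namespace llvm;

// Sketch: sqrt of a constant is side-effect free unless the argument is a
// negative non-zero value, which would set errno / raise "invalid".
static bool sqrtOfConstantIsNoop(const ConstantFP *OpC) {
  const APFloat &Op = OpC->getValueAPF();
  return Op.isNaN() || Op.isZero() || !Op.isNegative();
}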
|
D | InlineCost.cpp |
    680  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());  in accumulateGEPOffset() local
    681  if (!OpC)  in accumulateGEPOffset()
    683  OpC = dyn_cast<ConstantInt>(SimpleOp);  in accumulateGEPOffset()
    684  if (!OpC)  in accumulateGEPOffset()
    686  if (OpC->isZero())  in accumulateGEPOffset()
    691  unsigned ElementIdx = OpC->getZExtValue();  in accumulateGEPOffset()
    698  Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;  in accumulateGEPOffset()
|
/external/llvm/lib/Transforms/InstCombine/ |
D | InstCombineSimplifyDemanded.cpp |
    34  ConstantInt *OpC = dyn_cast<ConstantInt>(I->getOperand(OpNo));  in ShrinkDemandedConstant() local
    35  if (!OpC) return false;  in ShrinkDemandedConstant()
    38  Demanded = Demanded.zextOrTrunc(OpC->getValue().getBitWidth());  in ShrinkDemandedConstant()
    39  if ((~Demanded & OpC->getValue()) == 0)  in ShrinkDemandedConstant()
    43  Demanded &= OpC->getValue();  in ShrinkDemandedConstant()
    44  I->setOperand(OpNo, ConstantInt::get(OpC->getType(), Demanded));  in ShrinkDemandedConstant()
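Note: ShrinkDemandedConstant() rewrites a constant operand so it carries no bits outside the demanded mask, letting later folds see the smaller value. A self-contained sketch of the same logic, assuming the caller only passes operations where clearing undemanded constant bits preserves the result (InstCombine uses it for bitwise ops and friends):

#include "llvm/ADT/APInt.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Instruction.h"
using namespace llvm;

// Sketch: mask away undemanded bits of a ConstantInt operand in place.
// Returns true if the operand was actually changed.
static bool shrinkDemandedConstant(Instruction *I, unsigned OpNo,
                                   const APInt &DemandedMask) {
  auto *OpC = dyn_cast<ConstantInt>(I->getOperand(OpNo));
  if (!OpC)
    return false;

  // Nothing to do if every set bit of the constant is demanded.
  APInt Demanded = DemandedMask.zextOrTrunc(OpC->getValue().getBitWidth());
  if ((~Demanded & OpC->getValue()) == 0)
    return false;

  // Drop the undemanded bits and install the shrunken constant.
  Demanded &= OpC->getValue();
  I->setOperand(OpNo, ConstantInt::get(OpC->getType(), Demanded));
  return true;
}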
|