/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/ |
D | X86AvoidStoreForwardingBlocks.cpp |
    101   void breakBlockedCopies(MachineInstr *LoadInst, MachineInstr *StoreInst,
    104   void buildCopies(int Size, MachineInstr *LoadInst, int64_t LdDispImm,
    108   void buildCopy(MachineInstr *LoadInst, unsigned NLoadOpcode, int64_t LoadDisp,
    338   findPotentialBlockers(MachineInstr *LoadInst) {  in findPotentialBlockers() argument
    342   for (auto PBInst = std::next(MachineBasicBlock::reverse_iterator(LoadInst)),  in findPotentialBlockers()
    343   E = LoadInst->getParent()->rend();  in findPotentialBlockers()
    360   MachineBasicBlock *MBB = LoadInst->getParent();  in findPotentialBlockers()
    384   void X86AvoidSFBPass::buildCopy(MachineInstr *LoadInst, unsigned NLoadOpcode,  in buildCopy() argument
    389   MachineOperand &LoadBase = getBaseOperand(LoadInst);  in buildCopy()
    391   MachineBasicBlock *MBB = LoadInst->getParent();  in buildCopy()
    [all …]
|
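(Note: in this X86 backend file, `LoadInst` is merely the name of a `MachineInstr *` parameter; it is unrelated to the IR class `llvm::LoadInst` that the remaining hits below refer to.)
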
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/ARM/ |
D | ARMParallelDSP.cpp |
    56    using MemInstList = SmallVectorImpl<LoadInst*>;
    68    SmallVector<LoadInst*, 2> VecLd;    // Container for loads to widen.
    74    return isa<LoadInst>(LHS) && isa<LoadInst>(RHS);  in HasTwoLoadInputs()
    77    LoadInst *getBaseLoad() const {  in getBaseLoad()
    198   LoadInst *NewLd = nullptr;
    199   SmallVector<LoadInst*, 4> Loads;
    202   WidenedLoad(SmallVectorImpl<LoadInst*> &Lds, LoadInst *Wide)  in WidenedLoad()
    207   LoadInst *getLoad() {  in getLoad()
    219   std::map<LoadInst*, LoadInst*> LoadPairs;
    220   SmallPtrSet<LoadInst*, 4> OffsetLoads;
    [all …]
|
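The `isa<LoadInst>(LHS) && isa<LoadInst>(RHS)` hit above is the shape test this pass applies before pairing loads for widening. A minimal sketch of that test against the LLVM 10 API — the helper name `getTwoLoadInputs` is invented for illustration:

    #include <utility>
    #include "llvm/IR/InstrTypes.h"
    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Casting.h"

    using namespace llvm;

    // Return the two loads feeding a binary op, or {nullptr, nullptr}
    // if either operand is not a load.
    static std::pair<LoadInst *, LoadInst *> getTwoLoadInputs(BinaryOperator *BO) {
      auto *L0 = dyn_cast<LoadInst>(BO->getOperand(0));
      auto *L1 = dyn_cast<LoadInst>(BO->getOperand(1));
      if (L0 && L1)
        return {L0, L1};
      return {nullptr, nullptr};
    }
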
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/IPO/ |
D | ArgumentPromotion.cpp |
    130   std::map<std::pair<Argument *, IndicesVector>, LoadInst *> OriginalLoads;  in doPromotion()
    171   if (LoadInst *L = dyn_cast<LoadInst>(UI))  in doPromotion()
    187   LoadInst *OrigLoad;  in doPromotion()
    188   if (LoadInst *L = dyn_cast<LoadInst>(UI))  in doPromotion()
    192   OrigLoad = cast<LoadInst>(UI->user_back());  in doPromotion()
    282   LoadInst *OrigLoad =  in doPromotion()
    306   LoadInst *newLoad =  in doPromotion()
    430   if (LoadInst *LI = dyn_cast<LoadInst>(I->user_back())) {  in doPromotion()
    469   LoadInst *L = cast<LoadInst>(GEP->user_back());  in doPromotion()
    629   if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {  in isSafeToPromoteArgument()
    [all …]
|
D | GlobalOpt.cpp |
    166   if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) ||  in IsSafeComputationToRemove()
    292   if (LoadInst *LI = dyn_cast<LoadInst>(U)) {  in CleanupConstantGlobalUsers()
    408   if (isa<LoadInst>(I)) return true;  in isSafeSROAElementUse()
    636   if (isa<LoadInst>(U)) {  in AllUsesOfValueWillTrapIfNull()
    678   if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {  in AllUsesOfLoadedValueWillTrapIfNull()
    700   if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in OptimizeAwayTrappingUsesOfValue()
    775   if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) {  in OptimizeAwayTrappingUsesOfLoads()
    913   LoadInst *LI = cast<LoadInst>(GV->user_back());  in OptimizeGlobalAddressOfMalloc()
    924   Value *LV = new LoadInst(InitBool->getValueType(), InitBool,  in OptimizeGlobalAddressOfMalloc()
    984   if (isa<LoadInst>(Inst) || isa<CmpInst>(Inst)) {  in ValueIsOnlyUsedLocallyOrStoredToOneGlobal()
    [all …]
|
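Both IPO files lean on the same user-walk idiom visible in the hits: iterate a value's users and dispatch with `dyn_cast<LoadInst>` / `dyn_cast<StoreInst>`. A hedged sketch of the idiom (illustrative only, not the passes' actual logic):

    #include "llvm/IR/GlobalVariable.h"
    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Casting.h"

    using namespace llvm;

    // Count how many users of GV read it vs. write it.
    static void classifyGlobalUsers(GlobalVariable *GV, unsigned &Loads,
                                    unsigned &Stores) {
      Loads = Stores = 0;
      for (User *U : GV->users()) {
        if (isa<LoadInst>(U))
          ++Loads;
        else if (auto *SI = dyn_cast<StoreInst>(U))
          // Only count stores *to* the global, not stores of its address.
          if (SI->getPointerOperand() == GV)
            ++Stores;
      }
    }
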
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Transforms/Utils/ |
D | Local.h |
    51    class LoadInst;  variable
    295   LoadInst *LI, DIBuilder &Builder);
    441   void copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source);
    475   void copyNonnullMetadata(const LoadInst &OldLI, MDNode *N, LoadInst &NewLI);
    481   void copyRangeMetadata(const DataLayout &DL, const LoadInst &OldLI, MDNode *N,
    482   LoadInst &NewLI);
|
D | VNCoercion.h |
    28    class LoadInst;  variable
    62    int analyzeLoadFromClobberingLoad(Type *LoadTy, Value *LoadPtr, LoadInst *DepLI,
    87    Value *getLoadValueForLoad(LoadInst *SrcVal, unsigned Offset, Type *LoadTy,
|
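The `class LoadInst;` hits in both headers are ordinary forward declarations: each header only mentions `LoadInst` by pointer or reference, so it can avoid including llvm/IR/Instructions.h. The pattern in miniature (file and function names invented):

    // MyUtils.h — forward-declaration pattern, sketched.
    namespace llvm {

    class LoadInst; // full definition not needed for pointers/references
    class Value;

    // Callers that *use* the LoadInst must include llvm/IR/Instructions.h;
    // this header does not have to.
    Value *getForwardedValue(LoadInst *LI);

    } // namespace llvm
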
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Utils/ |
D | SSAUpdater.cpp |
    342   if (const LoadInst *LI = dyn_cast<LoadInst>(Insts[0]))  in LoadAndStorePromoter()
    364   SmallVector<LoadInst *, 32> LiveInLoads;  in run()
    383   LiveInLoads.push_back(cast<LoadInst>(User));  in run()
    402   LiveInLoads.push_back(cast<LoadInst>(I));  in run()
    414   if (LoadInst *L = dyn_cast<LoadInst>(&I)) {  in run()
    448   for (LoadInst *ALoad : LiveInLoads) {  in run()
    481   replaceLoadWithValue(cast<LoadInst>(User), NewVal);  in run()
|
D | PromoteMemoryToRegister.cpp |
    71    if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {  in isAllocaPromotable()
    143   LoadInst *LI = cast<LoadInst>(User);  in AnalyzeAlloca()
    192   return (isa<LoadInst>(I) && isa<AllocaInst>(I->getOperand(0))) ||  in isInterestingInstruction()
    304   static void addAssumeNonNull(AssumptionCache *AC, LoadInst *LI) {  in addAssumeNonNull()
    322   if (isa<LoadInst>(I) || isa<StoreInst>(I))  in removeLifetimeIntrinsicUsers()
    362   LoadInst *LI = cast<LoadInst>(UserInst);  in rewriteSingleStoreAlloca()
    469   LoadInst *LI = dyn_cast<LoadInst>(*UI++);  in promoteSingleBlockAlloca()
    807   if (LoadInst *LI = dyn_cast<LoadInst>(I))  in ComputeLiveInBlocks()
    935   if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in RenamePass()
|
D | GlobalStatus.cpp |
    88    if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {  in analyzeGlobalAux()
    123   } else if (isa<LoadInst>(StoredVal) &&  in analyzeGlobalAux()
    124   cast<LoadInst>(StoredVal)->getOperand(0) == GV) {  in analyzeGlobalAux()
|
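`isAllocaPromotable` (the line-71 hit in PromoteMemoryToRegister.cpp) is at heart a user scan: an alloca can be rewritten into SSA values only if every use is a simple load or store of the slot. A reduced sketch under the LLVM 10 API — the real check also admits certain bitcasts, GEPs, and lifetime intrinsics:

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Casting.h"

    using namespace llvm;

    // Simplified promotability test: only simple (non-volatile, non-atomic)
    // loads and stores that use AI as the *address* are allowed.
    static bool isTriviallyPromotable(const AllocaInst *AI) {
      for (const User *U : AI->users()) {
        if (const auto *LI = dyn_cast<LoadInst>(U)) {
          if (!LI->isSimple())
            return false;
        } else if (const auto *SI = dyn_cast<StoreInst>(U)) {
          // Storing the alloca's *address* somewhere escapes it.
          if (SI->getValueOperand() == AI || !SI->isSimple())
            return false;
        } else {
          return false;
        }
      }
      return true;
    }
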
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/ |
D | AMDGPUAnnotateUniformValues.cpp |
    57    void visitLoadInst(LoadInst &I);
    58    bool isClobberedInFunction(LoadInst * Load);
    86    bool AMDGPUAnnotateUniformValues::isClobberedInFunction(LoadInst * Load) {  in isClobberedInFunction()
    123   void AMDGPUAnnotateUniformValues::visitLoadInst(LoadInst &I) {  in visitLoadInst()
    127   auto isGlobalLoad = [&](LoadInst &Load)->bool {  in visitLoadInst()
|
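The `visitLoadInst(LoadInst &I)` hits are the InstVisitor hook: defining a method with that exact name is all it takes for `visit()` to route every load to it. A toy visitor, assuming LLVM 10 headers:

    #include "llvm/IR/InstVisitor.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Counts the loads in a function via the same hook the AMDGPU pass uses.
    struct LoadCounter : public InstVisitor<LoadCounter> {
      unsigned NumLoads = 0;
      void visitLoadInst(LoadInst &I) { ++NumLoads; }
    };

    // Usage: LoadCounter LC; LC.visit(F);   // F is a Function &
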
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Scalar/ |
D | LowerAtomic.cpp |
    30    LoadInst *Orig = Builder.CreateLoad(Val->getType(), Ptr);  in LowerAtomicCmpXchgInst()
    48    LoadInst *Orig = Builder.CreateLoad(Val->getType(), Ptr);  in LowerAtomicRMWInst()
    108   static bool LowerLoadInst(LoadInst *LI) {  in LowerLoadInst()
    128   else if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {  in runOnBasicBlock()
|
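The `Builder.CreateLoad(Val->getType(), Ptr)` hits use the LLVM 10 IRBuilder form in which the result type is passed explicitly rather than derived from the pointer. A hedged sketch of the cmpxchg lowering shape those lines come from (abbreviated; the real pass also rewrites the instruction's `{old, success}` result before erasing it):

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Single-threaded cmpxchg lowering: load the old value, compare,
    // select, store back.
    static void lowerCmpXchg(AtomicCmpXchgInst *CXI) {
      IRBuilder<> Builder(CXI);
      Value *Ptr = CXI->getPointerOperand();
      Value *Cmp = CXI->getCompareOperand();
      Value *Val = CXI->getNewValOperand();

      // LLVM 10 CreateLoad takes the result type explicitly.
      LoadInst *Orig = Builder.CreateLoad(Val->getType(), Ptr);
      Value *Equal = Builder.CreateICmpEQ(Orig, Cmp);
      Value *Res = Builder.CreateSelect(Equal, Val, Orig);
      Builder.CreateStore(Res, Ptr);
    }
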
D | LoopLoadElimination.cpp |
    90    LoadInst *Load;
    93    StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store)  in StoreToLoadForwardingCandidate()
    157   static bool isLoadConditional(LoadInst *Load, Loop *L) {  in isLoadConditional()
    195   if (isa<LoadInst>(Source))  in findStoreToLoadDependences()
    197   if (isa<LoadInst>(Destination))  in findStoreToLoadDependences()
    213   auto *Load = dyn_cast<LoadInst>(Destination);  in findStoreToLoadDependences()
    263   DenseMap<LoadInst *, const StoreToLoadForwardingCandidate *>;  in removeDependencesFromMultipleStores()
    344   LoadInst *LastLoad =  in findPointersWrittenOnForwardingPath()
    437   Value *Initial = new LoadInst(  in propagateStoredValueToLoadUsers()
|
D | SROA.cpp |
    780   void visitLoadInst(LoadInst &LI) {  in visitLoadInst()
    962   if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in hasUnsafePHIOrSelectUse()
    1145  if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {  in findCommonType()
    1208  LoadInst *LI = dyn_cast<LoadInst>(U);  in isSafePHIToSpeculate()
    1266  LoadInst *SomeLoad = cast<LoadInst>(PN.user_back());  in speculatePHINodeLoads()
    1280  LoadInst *LI = cast<LoadInst>(PN.user_back());  in speculatePHINodeLoads()
    1303  LoadInst *Load = PredBuilder.CreateLoad(  in speculatePHINodeLoads()
    1337  LoadInst *LI = dyn_cast<LoadInst>(U);  in isSafeSelectToSpeculate()
    1363  LoadInst *LI = cast<LoadInst>(SI.user_back());  in speculateSelectInstLoads()
    1367  LoadInst *TL = IRB.CreateLoad(LI->getType(), TV,  in speculateSelectInstLoads()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/NVPTX/ |
D | NVPTXLowerAggrCopies.cpp |
    60    SmallVector<LoadInst *, 4> AggrLoads;  in runOnFunction()
    72    if (LoadInst *LI = dyn_cast<LoadInst>(II)) {  in runOnFunction()
    105   for (LoadInst *LI : AggrLoads) {  in runOnFunction()
|
D | NVPTXLowerArgs.cpp |
    173   LoadInst *LI =  in handleByValParam()
    174   new LoadInst(StructType, ArgInParam, Arg->getName(), FirstInst);  in handleByValParam()
    213   if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {  in runOnKernelFunction()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/InstCombine/ |
D | InstCombineLoadStoreAlloca.cpp |
    73    if (auto *LI = dyn_cast<LoadInst>(I)) {  in isOnlyCopiedFromConstantGlobal()
    273   if (isa<LoadInst>(Inst)) {  in findLoadAndReplace()
    298   if (auto *LT = dyn_cast<LoadInst>(I)) {  in replace()
    301   auto *NewI = new LoadInst(I->getType(), V);  in replace()
    452   LoadInst *InstCombiner::combineLoadToNewType(LoadInst &LI, Type *NewTy,  in combineLoadToNewType()
    472   LoadInst *NewLoad = Builder.CreateAlignedLoad(  in combineLoadToNewType()
    574   static Instruction *combineLoadToOperationType(InstCombiner &IC, LoadInst &LI) {  in combineLoadToOperationType()
    610   LoadInst *NewLoad = IC.combineLoadToNewType(  in combineLoadToOperationType()
    633   LoadInst *NewLoad = IC.combineLoadToNewType(LI, CI->getDestTy());  in combineLoadToOperationType()
    644   static Instruction *unpackLoadToAggregate(InstCombiner &IC, LoadInst &LI) {  in unpackLoadToAggregate()
    [all …]
|
D | InstCombinePHI.cpp |
    110   if (LoadInst *LoadI = dyn_cast<LoadInst>(U)) {  in FoldIntegerTypedPHI()
    165   auto *LoadI = dyn_cast<LoadInst>(Arg);  in FoldIntegerTypedPHI()
    254   LoadInst *LoadI = dyn_cast<LoadInst>(IncomingVal);  in FoldIntegerTypedPHI()
    505   static bool isSafeAndProfitableToSinkLoad(LoadInst *L) {  in isSafeAndProfitableToSinkLoad()
    517   if (isa<LoadInst>(U)) continue;  in isSafeAndProfitableToSinkLoad()
    544   LoadInst *FirstLI = cast<LoadInst>(PN.getIncomingValue(0));  in FoldPHIArgLoadIntoPHI()
    575   LoadInst *LI = dyn_cast<LoadInst>(PN.getIncomingValue(i));  in FoldPHIArgLoadIntoPHI()
    610   LoadInst *NewLI =  in FoldPHIArgLoadIntoPHI()
    611   new LoadInst(FirstLI->getType(), NewPN, "", isVolatile, LoadAlignment);  in FoldPHIArgLoadIntoPHI()
    631   LoadInst *LI = cast<LoadInst>(PN.getIncomingValue(i));  in FoldPHIArgLoadIntoPHI()
    [all …]
|
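The hit at lines 610–611 constructs a load directly with `new LoadInst(...)` rather than through IRBuilder; in LLVM 10 this overload takes the result type, pointer, name, volatility, and a `MaybeAlign`. A minimal sketch of that constructor form (the helper name is invented):

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Alignment.h"

    using namespace llvm;

    // Materialize a load of Ty from Ptr in front of InsertPt, mirroring
    // the constructor form used by FoldPHIArgLoadIntoPHI.
    static LoadInst *makeLoadBefore(Type *Ty, Value *Ptr, bool IsVolatile,
                                    MaybeAlign Align, Instruction *InsertPt) {
      return new LoadInst(Ty, Ptr, /*NameStr=*/"", IsVolatile, Align, InsertPt);
    }
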
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/ |
D | InterleavedLoadCombinePass.cpp |
    94    LoadInst *findFirstLoad(const std::set<LoadInst *> &LIs);
    651   LoadInst *LI;
    653   ElementInfo(Polynomial Offset = Polynomial(), LoadInst *LI = nullptr)  in ElementInfo()
    664   std::set<LoadInst *> LIs;
    716   LoadInst *LI = dyn_cast<LoadInst>(V);  in compute()
    868   static bool computeFromLI(LoadInst *LI, VectorInfo &Result,  in computeFromLI()
    1101  LoadInst *
    1102  InterleavedLoadCombineImpl::findFirstLoad(const std::set<LoadInst *> &LIs) {  in findFirstLoad()
    1113  return cast<LoadInst>(FLI);  in findFirstLoad()
    1123  LoadInst *InsertionPoint = InterleavedLoad.front().EI[0].LI;  in combine()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Transforms/Scalar/ |
D | GVN.h |
    47    class LoadInst;  variable
    252   bool processLoad(LoadInst *L);
    253   bool processNonLocalLoad(LoadInst *L);
    259   bool AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
    265   void AnalyzeLoadAvailability(LoadInst *LI, LoadDepVect &Deps,
    269   bool PerformLoadPRE(LoadInst *LI, AvailValInBlkVect &ValuesPerBlock,
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Instrumentation/ |
D | ThreadSanitizer.cpp |
    350   } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {  in addrPointsToConstantData()
    384   LoadInst *Load = cast<LoadInst>(I);  in chooseInstructionsToInstrument()
    400   : cast<LoadInst>(I)->getPointerOperand();  in chooseInstructionsToInstrument()
    416   if (LoadInst *LI = dyn_cast<LoadInst>(I))  in isAtomic()
    459   else if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))  in sanitizeFunction()
    525   : cast<LoadInst>(I)->getPointerOperand();  in instrumentLoadOrStore()
    562   : cast<LoadInst>(I)->getAlignment();  in instrumentLoadOrStore()
    631   if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in instrumentAtomic()
|
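The repeated ternary in the hits (`... : cast<LoadInst>(I)->getPointerOperand()`) is how the instrumenter extracts the accessed address from either a load or a store. Factored into a small helper for illustration (the helper is not part of the pass):

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Casting.h"

    using namespace llvm;

    // Address operand of a load or store; nullptr for anything else.
    static Value *getAccessedAddress(Instruction *I) {
      if (auto *LI = dyn_cast<LoadInst>(I))
        return LI->getPointerOperand();
      if (auto *SI = dyn_cast<StoreInst>(I))
        return SI->getPointerOperand();
      return nullptr;
    }
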
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Vectorize/ |
D | LoadStoreVectorizer.cpp |
    132   unsigned getAlignment(LoadInst *LI) const {  in getAlignment()
    305   if (LoadInst *L = dyn_cast<LoadInst>(I))  in getPointerAddressSpace()
    607   bool IsLoadChain = isa<LoadInst>(Chain[0]);  in getVectorizablePrefix()
    611   assert(isa<LoadInst>(I) &&  in getVectorizablePrefix()
    620   if (isa<LoadInst>(I) || isa<StoreInst>(I)) {  in getVectorizablePrefix()
    661   auto *MemLoad = dyn_cast<LoadInst>(MemInstr);  in getVectorizablePrefix()
    662   auto *ChainLoad = dyn_cast<LoadInst>(ChainInstr);  in getVectorizablePrefix()
    668   auto IsInvariantLoad = [](const LoadInst *LI) -> bool {  in getVectorizablePrefix()
    750   if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {  in collectInstructions()
    928   if (isa<LoadInst>(*Operands.begin()))  in vectorizeInstructions()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Analysis/ |
D | ObjCARCAnalysisUtils.h |
    188   if (const LoadInst *LI = dyn_cast<LoadInst>(Op))  in IsPotentialRetainableObjPtr()
    221   if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {  in IsObjCIdentifiedObject()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/ |
D | Loads.cpp |
    197   bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,  in isDereferenceableAndAlignedInLoop()
    305   if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {  in isSafeToLoadUnconditionally()
    361   Value *llvm::FindAvailableLoadedValue(LoadInst *Load,  in FindAvailableLoadedValue()
    409   if (LoadInst *LI = dyn_cast<LoadInst>(Inst))  in FindAvailablePtrLoadStore()
|
D | MemoryDependenceAnalysis.cpp |
    119   if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {  in GetLocation()
    243   const LoadInst *LI) {  in getLoadLoadClobberFullWidthSize()
    319   if (auto *LI = dyn_cast<LoadInst>(Inst))  in isVolatile()
    334   if (auto *LI = dyn_cast<LoadInst>(QueryInst)) {  in getPointerDependencyFrom()
    357   MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI,  in getInvariantGroupPointerDependency()
    420   if ((isa<LoadInst>(U) || isa<StoreInst>(U)) &&  in getInvariantGroupPointerDependency()
    484   LoadInst *LI = dyn_cast<LoadInst>(QueryInst);  in getSimplePointerDependencyFrom()
    503   if (auto *LI = dyn_cast<LoadInst>(I))  in getSimplePointerDependencyFrom()
    513   return !isa<LoadInst>(I) && !isa<StoreInst>(I) && I->mayReadOrWriteMemory();  in getSimplePointerDependencyFrom()
    550   if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {  in getSimplePointerDependencyFrom()
    [all …]
|
D | GlobalsModRef.cpp |
    355   if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in AnalyzeUsesOfPointer()
    419   if (LoadInst *LI = dyn_cast<LoadInst>(U)) {  in AnalyzeIndirectGlobalMemory()
    661   if (auto *LI = dyn_cast<LoadInst>(Input)) {  in isNonEscapingGlobalNoAliasWithLoad()
    772   if (auto *LI = dyn_cast<LoadInst>(Input)) {  in isNonEscapingGlobalNoAlias()
    864   if (const LoadInst *LI = dyn_cast<LoadInst>(UV1))  in alias()
    868   if (const LoadInst *LI = dyn_cast<LoadInst>(UV2))  in alias()
|
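`GetLocation` in MemoryDependenceAnalysis.cpp (the line-119 hit) is where a `LoadInst` becomes a `MemoryLocation` for alias queries; `MemoryLocation::get(const LoadInst *)` exposes the same conversion directly. A small sketch of an alias check between two loads, assuming an `AAResults &AA` is in scope:

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Do two loads possibly touch the same memory?
    static bool loadsMayAlias(AAResults &AA, const LoadInst *A,
                              const LoadInst *B) {
      return AA.alias(MemoryLocation::get(A), MemoryLocation::get(B)) != NoAlias;
    }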