
Searched refs:LoadInst (Results 1 – 25 of 171) sorted by relevance


/external/llvm/lib/Transforms/Scalar/
MergedLoadStoreMotion.cpp
140 LoadInst* LI);
141 LoadInst *canHoistFromBlock(BasicBlock *BB, LoadInst *LI);
145 bool hoistLoad(BasicBlock *BB, LoadInst *HoistCand, LoadInst *ElseInst);
187 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) in INITIALIZE_PASS_DEPENDENCY()
245 LoadInst* LI) { in isLoadHoistBarrierInRange()
257 LoadInst *MergedLoadStoreMotion::canHoistFromBlock(BasicBlock *BB1, in canHoistFromBlock()
258 LoadInst *Load0) { in canHoistFromBlock()
265 if (!isa<LoadInst>(Inst) || Inst->isUsedOutsideOfBlock(BB1)) in canHoistFromBlock()
268 LoadInst *Load1 = dyn_cast<LoadInst>(Inst); in canHoistFromBlock()
333 bool MergedLoadStoreMotion::hoistLoad(BasicBlock *BB, LoadInst *L0, in hoistLoad()
[all …]
LoadCombine.cpp
46 LoadPOPPair(LoadInst *L, PointerOffsetPair P, unsigned O) in LoadPOPPair()
48 LoadInst *Load;
76 PointerOffsetPair getPointerOffsetPair(LoadInst &);
89 PointerOffsetPair LoadCombine::getPointerOffsetPair(LoadInst &LI) { in getPointerOffsetPair()
132 LoadInst *BaseLoad = nullptr; in aggregateLoads()
203 LoadInst *NewLoad = new LoadInst( in combineLoads()
246 LoadInst *LI = dyn_cast<LoadInst>(&I); in runOnBasicBlock()
LowerAtomic.cpp
30 LoadInst *Orig = Builder.CreateLoad(Ptr); in LowerAtomicCmpXchgInst()
48 LoadInst *Orig = Builder.CreateLoad(Ptr); in LowerAtomicRMWInst()
102 static bool LowerLoadInst(LoadInst *LI) { in LowerLoadInst()
130 else if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) { in runOnBasicBlock()
ScalarReplAggregates.cpp
185 void RewriteLoadUserOfWholeAlloca(LoadInst *LI, AllocaInst *AI,
480 if (LoadInst *LI = dyn_cast<LoadInst>(UI)) { in CanConvertToScalar()
635 if (LoadInst *LI = dyn_cast<LoadInst>(User)) { in ConvertUsesToScalar()
718 LoadInst *SrcVal = Builder.CreateLoad(SrcPtr, "srcval"); in ConvertUsesToScalar()
725 LoadInst *SrcVal = Builder.CreateLoad(NewAI, "srcval"); in ConvertUsesToScalar()
1089 if (LoadInst *LI = dyn_cast<LoadInst>(I)) in isInstInList()
1100 else if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) in updateDebugInfo()
1116 } else if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) { in updateDebugInfo()
1148 LoadInst *LI = dyn_cast<LoadInst>(U); in isSafeSelectToSpeculate()
1190 LoadInst *LI = dyn_cast<LoadInst>(U); in isSafePHIToSpeculate()
[all …]
LoopLoadElimination.cpp
56 LoadInst *Load;
59 StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store) in StoreToLoadForwardingCandidate()
144 if (isa<LoadInst>(Source)) in findStoreToLoadDependences()
146 if (isa<LoadInst>(Destination)) in findStoreToLoadDependences()
162 auto *Load = dyn_cast<LoadInst>(Destination); in findStoreToLoadDependences()
206 typedef DenseMap<LoadInst *, const StoreToLoadForwardingCandidate *> in removeDependencesFromMultipleStores()
286 LoadInst *LastLoad = in findPointersWrittenOnForwardingPath()
380 new LoadInst(InitialPtr, "load_initial", PH->getTerminator()); in propagateStoredValueToLoadUsers()
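
Several of the hits in this directory create the load directly with LoadInst's (pointer, name, insertion point) constructor, as LoopLoadElimination.cpp does at line 380. The following is a minimal sketch of that pattern against the LLVM 3.x-era API shown above; emitReload is a hypothetical helper name, and Ptr and InsertBefore are assumed to live in the same function.

#include "llvm/ADT/Twine.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Re-load the value behind Ptr immediately before InsertBefore, mirroring the
// "new LoadInst(Ptr, Name, InsertBefore)" form used by the passes above
// (pre-opaque-pointer API: the loaded type is taken from Ptr's pointee type).
static LoadInst *emitReload(Value *Ptr, Instruction *InsertBefore) {
  return new LoadInst(Ptr, Ptr->getName() + ".reload", InsertBefore);
}
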
/external/llvm/lib/Transforms/IPO/
ArgumentPromotion.cpp
192 } else if (!isa<LoadInst>(V)) { in canPaddingBeAccessed()
474 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in isSafeToPromoteArgument()
502 SmallVector<LoadInst*, 16> Loads; in isSafeToPromoteArgument()
507 if (LoadInst *LI = dyn_cast<LoadInst>(UR)) { in isSafeToPromoteArgument()
534 if (LoadInst *LI = dyn_cast<LoadInst>(GEPU)) { in isSafeToPromoteArgument()
581 LoadInst *Load = Loads[i]; in isSafeToPromoteArgument()
632 std::map<std::pair<Argument*, IndicesVector>, LoadInst*> OriginalLoads; in DoPromotion()
677 if (LoadInst *L = dyn_cast<LoadInst>(UI)) in DoPromotion()
693 LoadInst *OrigLoad; in DoPromotion()
694 if (LoadInst *L = dyn_cast<LoadInst>(UI)) in DoPromotion()
[all …]
GlobalOpt.cpp
165 if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) || in IsSafeComputationToRemove()
290 if (LoadInst *LI = dyn_cast<LoadInst>(U)) { in CleanupConstantGlobalUsers()
371 if (isa<LoadInst>(I)) return true; in isSafeSROAElementUse()
623 if (isa<LoadInst>(U)) { in AllUsesOfValueWillTrapIfNull()
665 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) { in AllUsesOfLoadedValueWillTrapIfNull()
683 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in OptimizeAwayTrappingUsesOfValue()
758 if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) { in OptimizeAwayTrappingUsesOfLoads()
897 LoadInst *LI = cast<LoadInst>(GV->user_back()); in OptimizeGlobalAddressOfMalloc()
908 Value *LV = new LoadInst(InitBool, InitBool->getName()+".val", false, 0, in OptimizeGlobalAddressOfMalloc()
967 if (isa<LoadInst>(Inst) || isa<CmpInst>(Inst)) { in ValueIsOnlyUsedLocallyOrStoredToOneGlobal()
[all …]
/external/clang/lib/CodeGen/
CGBuilder.h
78 llvm::LoadInst *CreateLoad(Address Addr, const llvm::Twine &Name = "") {
83 llvm::LoadInst *CreateLoad(Address Addr, const char *Name) { in CreateLoad()
90 llvm::LoadInst *CreateLoad(Address Addr, bool IsVolatile,
99 llvm::LoadInst *CreateAlignedLoad(llvm::Value *Addr, CharUnits Align,
103 llvm::LoadInst *CreateAlignedLoad(llvm::Value *Addr, CharUnits Align, in CreateAlignedLoad()
107 llvm::LoadInst *CreateAlignedLoad(llvm::Type *Ty, llvm::Value *Addr,
113 llvm::LoadInst *CreateAlignedLoad(llvm::Value *Addr, CharUnits Align,
135 llvm::LoadInst *CreateDefaultAlignedLoad(llvm::Value *Addr,
139 llvm::LoadInst *CreateDefaultAlignedLoad(llvm::Value *Addr, in CreateDefaultAlignedLoad()
143 llvm::LoadInst *CreateDefaultAlignedLoad(llvm::Value *Addr, bool IsVolatile,
[all …]
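
The CGBuilder.h overloads listed above are thin wrappers that end up asking llvm::IRBuilder for a LoadInst with an explicit alignment. A rough sketch of the underlying call is below, assuming the caller already knows the alignment in bytes (clang derives it from its Address/CharUnits bookkeeping); emitAlignedLoad is an illustrative name, not part of the file.

#include "llvm/ADT/Twine.h"
#include "llvm/IR/IRBuilder.h"

using namespace llvm;

// Emit an aligned load through IRBuilder.  CreateAlignedLoad returns the
// underlying LoadInst, so volatility, ordering, or metadata can still be
// adjusted on the result before it is used.
static LoadInst *emitAlignedLoad(IRBuilder<> &B, Value *Addr,
                                 unsigned AlignInBytes) {
  return B.CreateAlignedLoad(Addr, AlignInBytes, Addr->getName() + ".val");
}
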
/external/llvm/lib/Transforms/Utils/
SSAUpdater.cpp
330 if (const LoadInst *LI = dyn_cast<LoadInst>(Insts[0])) in LoadAndStorePromoter()
355 SmallVector<LoadInst*, 32> LiveInLoads; in run()
374 LiveInLoads.push_back(cast<LoadInst>(User)); in run()
393 LiveInLoads.push_back(cast<LoadInst>(I)); in run()
405 if (LoadInst *L = dyn_cast<LoadInst>(&I)) { in run()
439 for (LoadInst *ALoad : LiveInLoads) { in run()
472 replaceLoadWithValue(cast<LoadInst>(User), NewVal); in run()
PromoteMemoryToRegister.cpp
58 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) { in isAllocaPromotable()
134 LoadInst *LI = cast<LoadInst>(User); in AnalyzeAlloca()
189 return (isa<LoadInst>(I) && isa<AllocaInst>(I->getOperand(0))) || in isInterestingInstruction()
311 if (isa<LoadInst>(I) || isa<StoreInst>(I)) in removeLifetimeIntrinsicUsers()
350 if (!isa<LoadInst>(UserInst)) { in rewriteSingleStoreAlloca()
354 LoadInst *LI = cast<LoadInst>(UserInst); in rewriteSingleStoreAlloca()
459 LoadInst *LI = dyn_cast<LoadInst>(*UI++); in promoteSingleBlockAlloca()
809 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in ComputeLiveInBlocks()
929 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in RenamePass()
GlobalStatus.cpp
76 if (const LoadInst *LI = dyn_cast<LoadInst>(I)) { in analyzeGlobalAux()
111 } else if (isa<LoadInst>(StoredVal) && in analyzeGlobalAux()
112 cast<LoadInst>(StoredVal)->getOperand(0) == GV) { in analyzeGlobalAux()
DemoteRegToStack.cpp
74 V = new LoadInst(Slot, I.getName()+".reload", VolatileLoads, in DemoteRegToStack()
82 Value *V = new LoadInst(Slot, I.getName()+".reload", VolatileLoads, U); in DemoteRegToStack()
140 Value *V = new LoadInst(Slot, P->getName() + ".reload", &*InsertPt); in DemotePHIToStack()
/external/llvm/lib/Target/NVPTX/
NVPTXLowerKernelArgs.cpp
161 LoadInst *LI = new LoadInst(ArgInParam, Arg->getName(), FirstInst); in handleByValParam()
204 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in runOnFunction()
/external/llvm/lib/CodeGen/
AtomicExpandPass.cpp
53 LoadInst *convertAtomicLoadToIntegerType(LoadInst *LI);
54 bool tryExpandAtomicLoad(LoadInst *LI);
55 bool expandAtomicLoadToLL(LoadInst *LI);
56 bool expandAtomicLoadToCmpXchg(LoadInst *LI);
95 auto LI = dyn_cast<LoadInst>(I); in runOnFunction()
211 LoadInst *AtomicExpand::convertAtomicLoadToIntegerType(LoadInst *LI) { in convertAtomicLoadToIntegerType()
235 bool AtomicExpand::tryExpandAtomicLoad(LoadInst *LI) { in tryExpandAtomicLoad()
251 bool AtomicExpand::expandAtomicLoadToLL(LoadInst *LI) { in expandAtomicLoadToLL()
267 bool AtomicExpand::expandAtomicLoadToCmpXchg(LoadInst *LI) { in expandAtomicLoadToCmpXchg()
660 LoadInst *InitLoaded = Builder.CreateLoad(Addr); in expandAtomicRMWToCmpXchg()
InterleavedAccessPass.cpp
83 bool lowerInterleavedLoad(LoadInst *LI,
179 LoadInst *LI, SmallVector<Instruction *, 32> &DeadInsts) { in lowerInterleavedLoad()
275 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) in runOnFunction()
/external/llvm/lib/Transforms/InstCombine/
InstCombineLoadStoreAlloca.cpp
69 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in isOnlyCopiedFromConstantGlobal()
318 static LoadInst *combineLoadToNewType(InstCombiner &IC, LoadInst &LI, Type *NewTy, in combineLoadToNewType()
325 LoadInst *NewLoad = IC.Builder->CreateAlignedLoad( in combineLoadToNewType()
456 static Instruction *combineLoadToOperationType(InstCombiner &IC, LoadInst &LI) { in combineLoadToOperationType()
479 LoadInst *NewLoad = combineLoadToNewType( in combineLoadToOperationType()
502 LoadInst *NewLoad = combineLoadToNewType(IC, LI, CI->getDestTy()); in combineLoadToOperationType()
514 static Instruction *unpackLoadToAggregate(InstCombiner &IC, LoadInst &LI) { in unpackLoadToAggregate()
530 LoadInst *NewLoad = combineLoadToNewType(IC, LI, ST->getTypeAtIndex(0U), in unpackLoadToAggregate()
569 LoadInst *NewLoad = combineLoadToNewType(IC, LI, AT->getElementType(), in unpackLoadToAggregate()
756 Instruction *InstCombiner::visitLoadInst(LoadInst &LI) { in visitLoadInst()
[all …]
InstCombinePHI.cpp
248 static bool isSafeAndProfitableToSinkLoad(LoadInst *L) { in isSafeAndProfitableToSinkLoad()
260 if (isa<LoadInst>(U)) continue; in isSafeAndProfitableToSinkLoad()
287 LoadInst *FirstLI = cast<LoadInst>(PN.getIncomingValue(0)); in FoldPHIArgLoadIntoPHI()
318 LoadInst *LI = dyn_cast<LoadInst>(PN.getIncomingValue(i)); in FoldPHIArgLoadIntoPHI()
353 LoadInst *NewLI = new LoadInst(NewPN, "", isVolatile, LoadAlignment); in FoldPHIArgLoadIntoPHI()
372 LoadInst *LI = cast<LoadInst>(PN.getIncomingValue(i)); in FoldPHIArgLoadIntoPHI()
394 cast<LoadInst>(IncValue)->setVolatile(false); in FoldPHIArgLoadIntoPHI()
488 if (isa<LoadInst>(FirstInst)) in FoldPHIArgOpIntoPHI()
/external/llvm/lib/Transforms/Instrumentation/
ThreadSanitizer.cpp
257 } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) { in addrPointsToConstantData()
290 LoadInst *Load = cast<LoadInst>(I); in chooseInstructionsToInstrument()
304 : cast<LoadInst>(I)->getPointerOperand(); in chooseInstructionsToInstrument()
319 if (LoadInst *LI = dyn_cast<LoadInst>(I)) in isAtomic()
353 else if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst)) in runOnFunction()
412 : cast<LoadInst>(I)->getPointerOperand(); in instrumentLoadOrStore()
442 : cast<LoadInst>(I)->getAlignment(); in instrumentLoadOrStore()
509 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in instrumentAtomic()
/external/llvm/include/llvm/Analysis/
ObjCARCAnalysisUtils.h
175 if (const LoadInst *LI = dyn_cast<LoadInst>(Op)) in IsPotentialRetainableObjPtr()
208 if (const LoadInst *LI = dyn_cast<LoadInst>(V)) { in IsObjCIdentifiedObject()
MemoryLocation.h
25 class LoadInst; variable
65 static MemoryLocation get(const LoadInst *LI);
71 if (auto *I = dyn_cast<LoadInst>(Inst)) in get()
/external/llvm/lib/Analysis/
Loads.cpp
139 if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) { in isSafeToLoadUnconditionally()
230 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) in FindAvailableLoadedValue()
Delinearization.cpp
74 if (LoadInst *Load = dyn_cast<LoadInst>(&Inst)) in getPointerOperand()
89 if (!isa<StoreInst>(Inst) && !isa<LoadInst>(Inst) && in print()
MemoryDependenceAnalysis.cpp
132 if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) { in GetLocation()
272 const LoadInst *LI) { in isLoadLoadClobberIfExtendedToFullWidth()
293 const LoadInst *LI) { in getLoadLoadClobberFullWidthSize()
363 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) in isVolatile()
384 if (auto *LI = dyn_cast<LoadInst>(QueryInst)) { in getPointerDependencyFrom()
396 MemoryDependenceAnalysis::getInvariantGroupPointerDependency(LoadInst *LI, in getInvariantGroupPointerDependency()
440 if ((isa<LoadInst>(U) || isa<StoreInst>(U)) && U->getParent() == BB && in getInvariantGroupPointerDependency()
490 LoadInst *LI = dyn_cast<LoadInst>(QueryInst); in getSimplePointerDependencyFrom()
536 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) { in getSimplePointerDependencyFrom()
559 if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst)) { in getSimplePointerDependencyFrom()
[all …]
/external/llvm/lib/IR/
Instruction.cpp
282 if (const LoadInst *LI = dyn_cast<LoadInst>(I1)) in haveSameSpecialState()
283 return LI->isVolatile() == cast<LoadInst>(I2)->isVolatile() && in haveSameSpecialState()
284 (LI->getAlignment() == cast<LoadInst>(I2)->getAlignment() || in haveSameSpecialState()
286 LI->getOrdering() == cast<LoadInst>(I2)->getOrdering() && in haveSameSpecialState()
287 LI->getSynchScope() == cast<LoadInst>(I2)->getSynchScope(); in haveSameSpecialState()
452 return !cast<LoadInst>(this)->isUnordered(); in mayWriteToMemory()
465 return cast<LoadInst>(this)->getOrdering() != NotAtomic; in isAtomic()
/external/llvm/lib/Target/AMDGPU/
AMDGPUAnnotateUniformValues.cpp
46 void visitLoadInst(LoadInst &I);
60 void AMDGPUAnnotateUniformValues::visitLoadInst(LoadInst &I) { in visitLoadInst()
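
Taken together, most of these results use one idiom: walk a function's instructions and test each with dyn_cast<LoadInst> before touching load-specific state (pointer operand, alignment, volatility, atomic ordering). A small sketch of that visitor pattern follows; forEachLoad and countVolatileLoads are illustrative names, not taken from any of the files above.

#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Support/Casting.h"

using namespace llvm;

// Hand every LoadInst in F to a caller-supplied callback.  The
// dyn_cast<LoadInst> check is the same test that recurs throughout the hits;
// non-load instructions are skipped.
template <typename Callback>
static void forEachLoad(Function &F, Callback Visit) {
  for (Instruction &I : instructions(F))
    if (auto *LI = dyn_cast<LoadInst>(&I))
      Visit(*LI);
}

// Example use: count the volatile loads in a function.
static unsigned countVolatileLoads(Function &F) {
  unsigned N = 0;
  forEachLoad(F, [&N](LoadInst &LI) {
    if (LI.isVolatile())
      ++N;
  });
  return N;
}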
