/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Scalar/ |
D | MergedLoadStoreMotion.cpp |
    118 StoreInst *canSinkFromBlock(BasicBlock *BB, StoreInst *SI);
    119 PHINode *getPHIOperand(BasicBlock *BB, StoreInst *S0, StoreInst *S1);
    122 bool canSinkStoresAndGEPs(StoreInst *S0, StoreInst *S1) const;
    123 void sinkStoresAndGEPs(BasicBlock *BB, StoreInst *SinkCand,
    124 StoreInst *ElseInst);
    187 StoreInst *MergedLoadStoreMotion::canSinkFromBlock(BasicBlock *BB1, in canSinkFromBlock()
    188 StoreInst *Store0) { in canSinkFromBlock()
    192 auto *Store1 = dyn_cast<StoreInst>(&Inst); in canSinkFromBlock()
    210 PHINode *MergedLoadStoreMotion::getPHIOperand(BasicBlock *BB, StoreInst *S0, in getPHIOperand()
    211 StoreInst *S1) { in getPHIOperand()
    [all …]
|
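The MergedLoadStoreMotion hits above are about sinking a pair of stores from the two arms of an if/else diamond into their common successor; getPHIOperand() supplies the merged value for the single sunk store. A minimal sketch of that value-merging step, assuming both stores target the same address and LLVM 10 headers; the helper name is illustrative, not the pass's actual interface.

#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Produce the value the single sunk store should write: the common value if
// both arms store the same thing, otherwise a PHI in the successor selecting
// between the two incoming stored values.
static Value *mergeStoredValues(BasicBlock *Succ, StoreInst *S0, StoreInst *S1) {
  Value *V0 = S0->getValueOperand();
  Value *V1 = S1->getValueOperand();
  if (V0 == V1)
    return V0;
  PHINode *Phi = PHINode::Create(V0->getType(), /*NumReservedValues=*/2,
                                 "storemerge", &Succ->front());
  Phi->addIncoming(V0, S0->getParent());
  Phi->addIncoming(V1, S1->getParent());
  return Phi;
}
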
D | MemCpyOptimizer.cpp |
    115 if (!isa<StoreInst>(SI)) in isProfitableToUseMemset()
    167 if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) in addInst()
    173 void addStore(int64_t OffsetFromFirst, StoreInst *SI) { in addStore()
    318 if (!isa<StoreInst>(BI) && !isa<MemSetInst>(BI)) { in INITIALIZE_PASS_DEPENDENCY()
    327 if (StoreInst *NextStore = dyn_cast<StoreInst>(BI)) { in INITIALIZE_PASS_DEPENDENCY()
    416 static Align findStoreAlignment(const DataLayout &DL, const StoreInst *SI) { in findStoreAlignment()
    426 static Align findCommonAlignment(const DataLayout &DL, const StoreInst *SI, in findCommonAlignment()
    437 static bool moveUp(AliasAnalysis &AA, StoreInst *SI, Instruction *P, in moveUp()
    495 } else if (isa<LoadInst>(C) || isa<StoreInst>(C) || isa<VAArgInst>(C)) { in moveUp()
    527 bool MemCpyOptPass::processStore(StoreInst *SI, BasicBlock::iterator &BBI) { in processStore()
    [all …]
|
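findStoreAlignment() and findCommonAlignment() above answer one question for MemCpyOpt: what alignment may the memset/memcpy it synthesizes assume? A sketch of the store half under the LLVM 10 API, assuming the usual rule that a store with no explicit alignment gets the ABI alignment of the stored type; the helper name is illustrative.

#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Support/Alignment.h"

using namespace llvm;

// Alignment the store is known to provide: its explicit alignment when it has
// one, otherwise the ABI alignment of the stored value's type.
static Align storeAlignment(const DataLayout &DL, const StoreInst *SI) {
  if (MaybeAlign A = MaybeAlign(SI->getAlignment()))
    return *A;
  return Align(DL.getABITypeAlignment(SI->getValueOperand()->getType()));
}
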
D | LoopIdiomRecognize.cpp |
    139 using StoreList = SmallVector<StoreInst *, 8>;
    168 LegalStoreKind isLegalStore(StoreInst *SI);
    170 bool processLoopStores(SmallVectorImpl<StoreInst *> &SL, const SCEV *BECount,
    180 bool processLoopStoreOfLoopLoad(StoreInst *SI, const SCEV *BECount);
    412 LoopIdiomRecognize::isLegalStore(StoreInst *SI) { in isLegalStore()
    522 StoreInst *SI = dyn_cast<StoreInst>(&I); in collectStores()
    603 bool LoopIdiomRecognize::processLoopStores(SmallVectorImpl<StoreInst *> &SL, in processLoopStores()
    606 SetVector<StoreInst *> Heads, Tails; in processLoopStores()
    607 SmallDenseMap<StoreInst *, StoreInst *> ConsecutiveChain; in processLoopStores()
    698 for (SetVector<StoreInst *>::iterator it = Heads.begin(), e = Heads.end(); in processLoopStores()
    [all …]
|
D | LowerAtomic.cpp |
    113 static bool LowerStoreInst(StoreInst *SI) { in LowerStoreInst()
    131 } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) { in runOnBasicBlock()
|
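LowerAtomic's handling of stores is the simplest entry in this list: on targets where atomicity has no teeth, an atomic store is demoted to a plain one. A sketch of that step, which is essentially what LowerStoreInst() above does.

#include "llvm/IR/Instructions.h"
#include "llvm/Support/AtomicOrdering.h"

using namespace llvm;

// Demote an atomic store to an ordinary one by clearing its ordering.
// Returns true to signal that the IR changed.
static bool lowerAtomicStore(StoreInst *SI) {
  SI->setAtomic(AtomicOrdering::NotAtomic);
  return true;
}
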
D | DeadStoreElimination.cpp |
    163 if (isa<StoreInst>(I)) in hasAnalyzableMemoryWrite()
    204 if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) in getLocForWrite()
    249 if (StoreInst *SI = dyn_cast<StoreInst>(I)) in isRemovable()
    285 if (isa<StoreInst>(I)) in isShortenableAtTheEnd()
    1049 StoreInst *SI = dyn_cast<StoreInst>(Inst); in eliminateNoopStore()
    1235 auto *Earlier = dyn_cast<StoreInst>(DepWrite); in eliminateDeadStores()
    1236 auto *Later = dyn_cast<StoreInst>(Inst); in eliminateDeadStores()
    1279 auto *SI = new StoreInst( in eliminateDeadStores()
|
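The DeadStoreElimination hits fall into two groups: deciding which writes the pass may reason about or delete (isRemovable, getLocForWrite) and matching a later overwrite against an earlier store. A minimal sketch of the per-store queries using the stock MemoryLocation API; the helper names are illustrative.

#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Only an ordinary (non-volatile, non-atomic) store may be deleted once it is
// proven dead; volatile and ordered stores have to stay.
static bool isRemovableStore(const StoreInst *SI) {
  return SI->isUnordered();
}

// The memory a store writes: its pointer operand plus the size of the stored
// type. Later writes are tested against this location for complete overwrite.
static MemoryLocation writtenLocation(const StoreInst *SI) {
  return MemoryLocation::get(SI);
}
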
D | SROA.cpp |
    795 void visitStoreInst(StoreInst &SI) { in visitStoreInst()
    967 if (StoreInst *SI = dyn_cast<StoreInst>(I)) { in hasUnsafePHIOrSelectUse()
    1147 } else if (StoreInst *SI = dyn_cast<StoreInst>(U->getUser())) { in findCommonType()
    1691 } else if (auto *SI = dyn_cast<StoreInst>(I)) { in getAdjustedAlignment()
    1856 } else if (StoreInst *SI = dyn_cast<StoreInst>(U->getUser())) { in isVectorPromotionViableForSlice()
    1911 else if (auto *SI = dyn_cast<StoreInst>(S.getUse()->getUser())) in isVectorPromotionViable()
    2039 } else if (StoreInst *SI = dyn_cast<StoreInst>(U->getUser())) { in isIntegerWideningViableForSlice()
    2598 bool rewriteVectorizedStoreInst(Value *V, StoreInst &SI, Value *OldOp, in rewriteVectorizedStoreInst()
    2617 StoreInst *Store = IRB.CreateAlignedStore(V, &NewAI, NewAI.getAlignment()); in rewriteVectorizedStoreInst()
    2626 bool rewriteIntegerStore(Value *V, StoreInst &SI, AAMDNodes AATags) { in rewriteIntegerStore()
    [all …]
|
D | LoopLoadElimination.cpp |
    91 StoreInst *Store;
    93 StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store) in StoreToLoadForwardingCandidate()
    210 auto *Store = dyn_cast<StoreInst>(Source); in findStoreToLoadDependences()
    351 StoreInst *FirstStore = in findPointersWrittenOnForwardingPath()
    366 if (auto *S = dyn_cast<StoreInst>(I)) in findPointersWrittenOnForwardingPath()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/ |
D | X86AvoidStoreForwardingBlocks.cpp |
    101 void breakBlockedCopies(MachineInstr *LoadInst, MachineInstr *StoreInst,
    105 MachineInstr *StoreInst, int64_t StDispImm,
    109 MachineInstr *StoreInst, unsigned NStoreOpcode,
    385 int64_t LoadDisp, MachineInstr *StoreInst, in buildCopy() argument
    390 MachineOperand &StoreBase = getBaseOperand(StoreInst); in buildCopy()
    393 MachineMemOperand *SMMO = *StoreInst->memoperands_begin(); in buildCopy()
    412 MachineInstr *StInst = StoreInst; in buildCopy()
    414 std::prev(MachineBasicBlock::instr_iterator(StoreInst)), in buildCopy()
    430 MachineOperand &StoreSrcVReg = StoreInst->getOperand(X86::AddrNumOperands); in buildCopy()
    437 int64_t LdDispImm, MachineInstr *StoreInst, in buildCopies() argument
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Utils/ |
D | PromoteMemoryToRegister.cpp |
    76 } else if (const StoreInst *SI = dyn_cast<StoreInst>(U)) { in isAllocaPromotable()
    112 StoreInst *OnlyStore;
    138 if (StoreInst *SI = dyn_cast<StoreInst>(User)) { in AnalyzeAlloca()
    193 (isa<StoreInst>(I) && isa<AllocaInst>(I->getOperand(1))); in isInterestingInstruction()
    322 if (isa<LoadInst>(I) || isa<StoreInst>(I)) in removeLifetimeIntrinsicUsers()
    350 StoreInst *OnlyStore = Info.OnlyStore; in rewriteSingleStoreAlloca()
    455 using StoresByIndexTy = SmallVector<std::pair<unsigned, StoreInst *>, 64>; in promoteSingleBlockAlloca()
    459 if (StoreInst *SI = dyn_cast<StoreInst>(U)) in promoteSingleBlockAlloca()
    478 std::make_pair(LoadIdx, static_cast<StoreInst *>(nullptr)), in promoteSingleBlockAlloca()
    511 StoreInst *SI = cast<StoreInst>(AI->user_back()); in promoteSingleBlockAlloca()
    [all …]
|
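isAllocaPromotable() above is the gate for mem2reg; on the store side it rejects any store that captures the alloca's address or is volatile. A sketch of those conditions, assuming LLVM 10 accessors; the real check also covers loads, bitcasts, GEPs and lifetime intrinsics.

#include "llvm/IR/Instructions.h"

using namespace llvm;

// Store-side conditions for promoting an alloca to SSA registers: the alloca
// may only ever be the pointer stored *through*; storing its address, or
// storing through it volatilely, blocks promotion.
static bool storeAllowsPromotion(const StoreInst *SI, const AllocaInst *AI) {
  if (SI->getValueOperand() == AI) // the alloca's address would escape to memory
    return false;
  if (SI->isVolatile())            // volatile accesses cannot be rewritten away
    return false;
  return true;
}
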
D | SSAUpdater.cpp |
    345 SomeVal = cast<StoreInst>(Insts[0])->getOperand(0); in LoadAndStorePromoter()
    378 if (StoreInst *SI = dyn_cast<StoreInst>(User)) { in run()
    391 if (isa<StoreInst>(I)) { in run()
    430 if (StoreInst *SI = dyn_cast<StoreInst>(&I)) { in run()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/InstCombine/ |
D | InstCombineLoadStoreAlloca.cpp |
    482 static StoreInst *combineStoreToNewValue(InstCombiner &IC, StoreInst &SI, Value *V) { in combineStoreToNewValue()
    491 StoreInst *NewStore = IC.Builder.CreateAlignedStore( in combineStoreToNewValue()
    606 auto *SI = dyn_cast<StoreInst>(U); in combineLoadToOperationType()
    614 auto *SI = cast<StoreInst>(*UI++); in combineLoadToOperationType()
    934 static bool canSimplifyNullStoreOrGEP(StoreInst &SI) { in canSimplifyNullStoreOrGEP()
    1014 StoreInst *SI = new StoreInst(UndefValue::get(LI.getType()), in visitLoadInst()
    1147 static bool combineStoreToValueType(InstCombiner &IC, StoreInst &SI) { in combineStoreToValueType()
    1179 static bool unpackStoreToAggregate(InstCombiner &IC, StoreInst &SI) { in unpackStoreToAggregate()
    1324 StoreInst &SI) { in removeBitcastsFromLoadStoreOnMinMax()
    1351 auto *SI = dyn_cast<StoreInst>(U); in removeBitcastsFromLoadStoreOnMinMax()
    [all …]
|
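combineStoreToNewValue() above is InstCombine's workhorse for retyping a store: it re-emits the store through a bitcast pointer while carrying over the original store's attributes. A simplified sketch under the LLVM 10 IRBuilder API; the metadata copying that InstCombine also performs is omitted, and the function name is illustrative.

#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Re-emit the store SI so that it stores V, which may have a different type,
// keeping the original alignment, volatility, ordering and sync scope.
static StoreInst *storeNewValue(IRBuilder<> &B, StoreInst &SI, Value *V) {
  B.SetInsertPoint(&SI);
  Value *Ptr = B.CreateBitCast(
      SI.getPointerOperand(),
      V->getType()->getPointerTo(SI.getPointerAddressSpace()));
  StoreInst *NewStore =
      B.CreateAlignedStore(V, Ptr, SI.getAlignment(), SI.isVolatile());
  NewStore->setOrdering(SI.getOrdering());
  NewStore->setSyncScopeID(SI.getSyncScopeID());
  return NewStore;
}
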
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Instrumentation/ |
D | ThreadSanitizer.cpp |
    378 if (StoreInst *Store = dyn_cast<StoreInst>(I)) { in chooseInstructionsToInstrument()
    398 Value *Addr = isa<StoreInst>(*I) in chooseInstructionsToInstrument()
    399 ? cast<StoreInst>(I)->getPointerOperand() in chooseInstructionsToInstrument()
    418 if (StoreInst *SI = dyn_cast<StoreInst>(I)) in isAtomic()
    459 else if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst)) in sanitizeFunction()
    522 bool IsWrite = isa<StoreInst>(*I); in instrumentLoadOrStore()
    524 ? cast<StoreInst>(I)->getPointerOperand() in instrumentLoadOrStore()
    538 Value *StoredValue = cast<StoreInst>(I)->getValueOperand(); in instrumentLoadOrStore()
    561 ? cast<StoreInst>(I)->getAlignment() in instrumentLoadOrStore()
    646 } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) { in instrumentAtomic()
|
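instrumentLoadOrStore() above needs three facts before it can call into the TSan runtime: whether the access writes, which address it touches, and what value and alignment are involved. A sketch of that extraction only (runtime-call emission omitted); the MemAccess struct is an illustrative container, not part of the pass.

#include "llvm/IR/Instructions.h"

using namespace llvm;

// The facts an instrumentation pass needs about a memory access before it can
// emit a runtime call.
struct MemAccess {
  bool IsWrite;
  Value *Addr;
  Value *StoredValue; // null for loads
  unsigned Alignment;
};

// I must be either a LoadInst or a StoreInst.
static MemAccess describeAccess(Instruction *I) {
  MemAccess A;
  A.IsWrite = isa<StoreInst>(I);
  if (auto *SI = dyn_cast<StoreInst>(I)) {
    A.Addr = SI->getPointerOperand();
    A.StoredValue = SI->getValueOperand();
    A.Alignment = SI->getAlignment();
  } else {
    auto *LI = cast<LoadInst>(I);
    A.Addr = LI->getPointerOperand();
    A.StoredValue = nullptr;
    A.Alignment = LI->getAlignment();
  }
  return A;
}
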
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/ |
D | GCRootLowering.cpp |
    130 if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) || in CouldBecomeSafePoint()
    153 if (StoreInst *SI = dyn_cast<StoreInst>(IP)) in InsertRootInitializers()
    163 StoreInst *SI = new StoreInst( in InsertRootInitializers()
    208 Value *St = new StoreInst(CI->getArgOperand(0), in DoLowering()
|
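InsertRootInitializers() above uses the raw StoreInst constructor to write null into each GC root's stack slot at function entry, so the collector never sees an uninitialized root. A sketch of that initialization, assuming the root slot is an alloca of pointer type; the helper name is illustrative.

#include "llvm/IR/Constants.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Store a null pointer into the root's stack slot right after the alloca. The
// alloca is never a block terminator, so getNextNode() is non-null here.
static StoreInst *initializeRootSlot(AllocaInst *Root) {
  auto *PtrTy = cast<PointerType>(Root->getAllocatedType());
  return new StoreInst(ConstantPointerNull::get(PtrTy), Root,
                       /*InsertBefore=*/Root->getNextNode());
}
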
D | InterleavedAccessPass.cpp |
    112 bool lowerInterleavedStore(StoreInst *SI,
    419 StoreInst *SI, SmallVector<Instruction *, 32> &DeadInsts) { in lowerInterleavedStore()
    465 if (StoreInst *SI = dyn_cast<StoreInst>(&I)) in runOnFunction()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/ |
D | AMDGPURewriteOutArguments.cpp |
    141 StoreInst *SI = dyn_cast<StoreInst>(U.getUser()); in checkArgumentUses()
    179 U.getOperandNo() != StoreInst::getPointerOperandIndex()) in checkArgumentUses()
    288 SmallVector<std::pair<ReturnInst *, StoreInst *>, 4> ReplaceableStores; in runOnFunction()
    308 StoreInst *SI = nullptr; in runOnFunction()
    310 SI = dyn_cast<StoreInst>(Q.getInst()); in runOnFunction()
    324 for (std::pair<ReturnInst *, StoreInst *> Store : ReplaceableStores) { in runOnFunction()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Vectorize/ |
D | LoadStoreVectorizer.cpp |
    140 unsigned getAlignment(StoreInst *SI) const { in getAlignment()
    307 if (StoreInst *S = dyn_cast<StoreInst>(I)) in getPointerAddressSpace()
    614 assert(isa<StoreInst>(I) && in getVectorizablePrefix()
    620 if (isa<LoadInst>(I) || isa<StoreInst>(I)) { in getVectorizablePrefix()
    676 if (isa<StoreInst>(MemInstr) && ChainLoad && in getVectorizablePrefix()
    681 if (MemLoad && isa<StoreInst>(ChainInstr) && in getVectorizablePrefix()
    797 } else if (StoreInst *SI = dyn_cast<StoreInst>(&I)) { in collectInstructions()
    942 StoreInst *S0 = cast<StoreInst>(Chain[0]); in vectorizeStoreChain()
    947 StoreTy = cast<StoreInst>(I)->getValueOperand()->getType(); in vectorizeStoreChain()
    1051 StoreInst *Store = cast<StoreInst>(Chain[I]); in vectorizeStoreChain()
    [all …]
|
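Before the LoadStoreVectorizer can glue stores into one vector store, its chain-building code above has to agree on what is being stored and where. A deliberately reduced sketch of the kind of compatibility test involved, assuming only matching value types and address spaces are required; the real pass additionally checks contiguity, alignment and aliasing.

#include "llvm/IR/Instructions.h"

using namespace llvm;

// Two stores can only be part of the same vectorization chain if they store
// the same scalar type into the same address space.
static bool compatibleChainStores(const StoreInst *A, const StoreInst *B) {
  return A->getValueOperand()->getType() == B->getValueOperand()->getType() &&
         A->getPointerAddressSpace() == B->getPointerAddressSpace();
}
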
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/IPO/ |
D | GlobalOpt.cpp |
    211 if (StoreInst *SI = dyn_cast<StoreInst>(U)) { in CleanupPointerRootUsers()
    299 } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) { in CleanupConstantGlobalUsers()
    411 if (StoreInst *SI = dyn_cast<StoreInst>(I)) in isSafeSROAElementUse()
    638 } else if (const StoreInst *SI = dyn_cast<StoreInst>(U)) { in AllUsesOfValueWillTrapIfNull()
    682 } else if (isa<StoreInst>(U)) { in AllUsesOfLoadedValueWillTrapIfNull()
    703 } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) { in OptimizeAwayTrappingUsesOfValue()
    784 } else if (isa<StoreInst>(GlobalUser)) { in OptimizeAwayTrappingUsesOfLoads()
    905 if (StoreInst *SI = dyn_cast<StoreInst>(GV->user_back())) { in OptimizeGlobalAddressOfMalloc()
    907 new StoreInst(ConstantInt::getTrue(GV->getContext()), InitBool, false, in OptimizeGlobalAddressOfMalloc()
    956 cast<StoreInst>(InitBool->user_back())->eraseFromParent(); in OptimizeGlobalAddressOfMalloc()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/NVPTX/ |
D | NVPTXLowerAggrCopies.cpp |
    79 if (StoreInst *SI = dyn_cast<StoreInst>(LI->user_back())) { in runOnFunction()
    106 auto *SI = cast<StoreInst>(*LI->user_begin()); in runOnFunction()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Transforms/Vectorize/ |
D | SLPVectorizer.h |
    45 class StoreInst; variable
    61 using StoreList = SmallVector<StoreInst *, 8>;
    144 bool vectorizeStores(ArrayRef<StoreInst *> Stores, slpvectorizer::BoUpSLP &R);
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/IR/ |
D | Instruction.cpp |
    395 if (const StoreInst *SI = dyn_cast<StoreInst>(I1)) in haveSameSpecialState()
    396 return SI->isVolatile() == cast<StoreInst>(I2)->isVolatile() && in haveSameSpecialState()
    397 (SI->getAlignment() == cast<StoreInst>(I2)->getAlignment() || in haveSameSpecialState()
    399 SI->getOrdering() == cast<StoreInst>(I2)->getOrdering() && in haveSameSpecialState()
    400 SI->getSyncScopeID() == cast<StoreInst>(I2)->getSyncScopeID(); in haveSameSpecialState()
    530 return !cast<StoreInst>(this)->isUnordered(); in mayReadFromMemory()
    565 return cast<StoreInst>(this)->getOrdering() != AtomicOrdering::NotAtomic; in isAtomic()
|
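haveSameSpecialState() above defines when two stores count as identical for CSE-style queries: beyond equal operands, their volatility, alignment, atomic ordering and sync scope must agree. A condensed sketch of the store case; the real function can also be told to ignore alignment, which is dropped here.

#include "llvm/IR/Instructions.h"

using namespace llvm;

// Stores with equal operands are still only interchangeable when their
// side-effect-relevant attributes agree as well.
static bool storesHaveSameSpecialState(const StoreInst *A, const StoreInst *B) {
  return A->isVolatile() == B->isVolatile() &&
         A->getAlignment() == B->getAlignment() &&
         A->getOrdering() == B->getOrdering() &&
         A->getSyncScopeID() == B->getSyncScopeID();
}
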
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Transforms/Utils/ |
D | VNCoercion.h |
    27 class StoreInst; variable
    55 StoreInst *DepSI, const DataLayout &DL);
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/ObjCARC/ |
D | ObjCARCContract.cpp |
    208 static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load, in findSafeStoreForStoreStrongContraction()
    212 StoreInst *Store = nullptr; in findSafeStoreForStoreStrongContraction()
    267 Store = dyn_cast<StoreInst>(Inst); in findSafeStoreForStoreStrongContraction()
    295 findRetainForStoreStrongContraction(Value *New, StoreInst *Store, in findRetainForStoreStrongContraction()
    387 StoreInst *Store = in tryToContractReleaseIntoStoreStrong()
    513 new StoreInst(Null, CI->getArgOperand(0), CI); in tryToPeepholeInstruction()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Transforms/Scalar/ |
D | MemCpyOptimizer.h |
    35 class StoreInst; variable
    60 bool processStore(StoreInst *SI, BasicBlock::iterator &BBI);
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Analysis/ |
D | MemoryLocation.h |
    26 class StoreInst; variable
    200 static MemoryLocation get(const StoreInst *SI);
    212 return get(cast<StoreInst>(Inst)); in getOrNone()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/ |
D | InstructionPrecedenceTracking.cpp |
    147 if (isa<StoreInst>(Insn)) { in isSpecialInstruction()
    148 assert(cast<StoreInst>(Insn)->isVolatile() && in isSpecialInstruction()
|