/external/swiftshader/third_party/LLVM/lib/Transforms/Scalar/
  LowerAtomic.cpp
    in LowerAtomicCmpXchgInst():
        23  static bool LowerAtomicCmpXchgInst(AtomicCmpXchgInst *CXI) {
        24    IRBuilder<> Builder(CXI->getParent(), CXI);
        25    Value *Ptr = CXI->getPointerOperand();
        26    Value *Cmp = CXI->getCompareOperand();
        27    Value *Val = CXI->getNewValOperand();
        34    CXI->replaceAllUsesWith(Orig);
        35    CXI->eraseFromParent();
    in runOnBasicBlock():
       120    else if (AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(Inst))
       121      Changed |= LowerAtomicCmpXchgInst(CXI);
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Transforms/Scalar/
  LowerAtomic.cpp
    in LowerAtomicCmpXchgInst():
        24  static bool LowerAtomicCmpXchgInst(AtomicCmpXchgInst *CXI) {
        25    IRBuilder<> Builder(CXI);
        26    Value *Ptr = CXI->getPointerOperand();
        27    Value *Cmp = CXI->getCompareOperand();
        28    Value *Val = CXI->getNewValOperand();
        35    Res = Builder.CreateInsertValue(UndefValue::get(CXI->getType()), Orig, 0);
        38    CXI->replaceAllUsesWith(Res);
        39    CXI->eraseFromParent();
    in runOnBasicBlock():
       118    else if (AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(Inst))
       119      Changed |= LowerAtomicCmpXchgInst(CXI);
/external/llvm/lib/Transforms/Scalar/
  LowerAtomic.cpp
    in LowerAtomicCmpXchgInst():
        25  static bool LowerAtomicCmpXchgInst(AtomicCmpXchgInst *CXI) {
        26    IRBuilder<> Builder(CXI);
        27    Value *Ptr = CXI->getPointerOperand();
        28    Value *Cmp = CXI->getCompareOperand();
        29    Value *Val = CXI->getNewValOperand();
        36    Res = Builder.CreateInsertValue(UndefValue::get(CXI->getType()), Orig, 0);
        39    CXI->replaceAllUsesWith(Res);
        40    CXI->eraseFromParent();
    in runOnBasicBlock():
       119    else if (AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(Inst))
       120      Changed |= LowerAtomicCmpXchgInst(CXI);
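All three LowerAtomic.cpp hits expand a cmpxchg into ordinary, non-atomic IR; the middle of the routine is elided in this listing. Below is a minimal sketch of the whole transformation, written against the llvm-7.0 style API shown above; the helper name lowerCmpXchgSketch is hypothetical and the body is a reconstruction for illustration, not a copy of the elided lines.

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Hypothetical sketch: strip atomicity from a cmpxchg by rewriting it as a
    // plain load / compare / select / store sequence.
    static bool lowerCmpXchgSketch(AtomicCmpXchgInst *CXI) {
      IRBuilder<> Builder(CXI);
      Value *Ptr = CXI->getPointerOperand();
      Value *Cmp = CXI->getCompareOperand();
      Value *Val = CXI->getNewValOperand();

      // Read the current value and store the new one only when it matches.
      LoadInst *Orig = Builder.CreateLoad(Ptr);
      Value *Equal = Builder.CreateICmpEQ(Orig, Cmp);
      Value *Res = Builder.CreateSelect(Equal, Val, Orig);
      Builder.CreateStore(Res, Ptr);

      // In the newer trees cmpxchg produces a {value, i1 success} pair, so the
      // replacement is packed into that struct; the oldest tree above simply
      // replaces all uses with Orig instead.
      Value *Pair = Builder.CreateInsertValue(UndefValue::get(CXI->getType()), Orig, 0);
      Pair = Builder.CreateInsertValue(Pair, Equal, 1);

      CXI->replaceAllUsesWith(Pair);
      CXI->eraseFromParent();
      return true;
    }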
/external/llvm/lib/Analysis/
  MemoryLocation.cpp
    in get():
        47  MemoryLocation MemoryLocation::get(const AtomicCmpXchgInst *CXI) {
        49    CXI->getAAMetadata(AATags);
        50    const auto &DL = CXI->getModule()->getDataLayout();
        53        CXI->getPointerOperand(),
        54        DL.getTypeStoreSize(CXI->getCompareOperand()->getType()), AATags);
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Analysis/
  MemoryLocation.cpp
    in get():
        47  MemoryLocation MemoryLocation::get(const AtomicCmpXchgInst *CXI) {
        49    CXI->getAAMetadata(AATags);
        50    const auto &DL = CXI->getModule()->getDataLayout();
        53        CXI->getPointerOperand(),
        54        DL.getTypeStoreSize(CXI->getCompareOperand()->getType()), AATags);
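Both MemoryLocation.cpp hits define the memory a cmpxchg is considered to touch: its pointer operand, sized by the store size of the compare operand's type, with the instruction's AA metadata attached. A hedged usage sketch follows; the helper mayAliasLoad and the AAResults-based query are illustrative assumptions, not code from these files.

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Hypothetical helper: ask alias analysis whether a cmpxchg and a load may
    // touch overlapping memory, using the MemoryLocation::get overloads above.
    static bool mayAliasLoad(AAResults &AA, const AtomicCmpXchgInst *CXI,
                             const LoadInst *LI) {
      MemoryLocation CmpXchgLoc = MemoryLocation::get(CXI); // Ptr + size of compare type
      MemoryLocation LoadLoc = MemoryLocation::get(LI);
      return !AA.isNoAlias(CmpXchgLoc, LoadLoc);
    }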
/external/swiftshader/third_party/LLVM/lib/VMCore/
  Instruction.cpp
    in isIdenticalToWhenDefined():
       223    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(this))
       224      return CXI->isVolatile() == cast<AtomicCmpXchgInst>(I)->isVolatile() &&
       225             CXI->getOrdering() == cast<AtomicCmpXchgInst>(I)->getOrdering() &&
       226             CXI->getSynchScope() == cast<AtomicCmpXchgInst>(I)->getSynchScope();
    in isSameOperationAs():
       279    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(this))
       280      return CXI->isVolatile() == cast<AtomicCmpXchgInst>(I)->isVolatile() &&
       281             CXI->getOrdering() == cast<AtomicCmpXchgInst>(I)->getOrdering() &&
       282             CXI->getSynchScope() == cast<AtomicCmpXchgInst>(I)->getSynchScope();
  Verifier.cpp
       292    void visitAtomicCmpXchgInst(AtomicCmpXchgInst &CXI);
    in visitAtomicCmpXchgInst():
      1361  void Verifier::visitAtomicCmpXchgInst(AtomicCmpXchgInst &CXI) {
      1362    Assert1(CXI.getOrdering() != NotAtomic,
      1363            "cmpxchg instructions must be atomic.", &CXI);
      1364    Assert1(CXI.getOrdering() != Unordered,
      1365            "cmpxchg instructions cannot be unordered.", &CXI);
      1366    PointerType *PTy = dyn_cast<PointerType>(CXI.getOperand(0)->getType());
      1367    Assert1(PTy, "First cmpxchg operand must be a pointer.", &CXI);
      1369    Assert2(ElTy == CXI.getOperand(1)->getType(),
      1371            &CXI, ElTy);
      [all …]
  AsmWriter.cpp
    in printInstruction():
      1954    } else if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(&I)) {
      1955      writeAtomic(CXI->getOrdering(), CXI->getSynchScope());
/external/llvm/lib/IR/
  Instruction.cpp
    in haveSameSpecialState():
       380    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(I1))
       381      return CXI->isVolatile() == cast<AtomicCmpXchgInst>(I2)->isVolatile() &&
       382             CXI->isWeak() == cast<AtomicCmpXchgInst>(I2)->isWeak() &&
       383             CXI->getSuccessOrdering() ==
       385             CXI->getFailureOrdering() ==
       387             CXI->getSynchScope() == cast<AtomicCmpXchgInst>(I2)->getSynchScope();
  Verifier.cpp
       423    void visitAtomicCmpXchgInst(AtomicCmpXchgInst &CXI);
    in visitAtomicCmpXchgInst():
      3087  void Verifier::visitAtomicCmpXchgInst(AtomicCmpXchgInst &CXI) {
      3090    Assert(CXI.getSuccessOrdering() != AtomicOrdering::NotAtomic,
      3091           "cmpxchg instructions must be atomic.", &CXI);
      3092    Assert(CXI.getFailureOrdering() != AtomicOrdering::NotAtomic,
      3093           "cmpxchg instructions must be atomic.", &CXI);
      3094    Assert(CXI.getSuccessOrdering() != AtomicOrdering::Unordered,
      3095           "cmpxchg instructions cannot be unordered.", &CXI);
      3096    Assert(CXI.getFailureOrdering() != AtomicOrdering::Unordered,
      3097           "cmpxchg instructions cannot be unordered.", &CXI);
      [all …]
  AsmWriter.cpp
    in printInstruction():
      3177    } else if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(&I)) {
      3178      writeAtomicCmpXchg(CXI->getSuccessOrdering(), CXI->getFailureOrdering(),
      3179                         CXI->getSynchScope());
/external/swiftshader/third_party/LLVM/lib/Analysis/
  AliasAnalysis.cpp
    in getLocation():
       241  AliasAnalysis::getLocation(const AtomicCmpXchgInst *CXI) {
       242    return Location(CXI->getPointerOperand(),
       243                    getTypeStoreSize(CXI->getCompareOperand()->getType()),
       244                    CXI->getMetadata(LLVMContext::MD_tbaa));
/external/swiftshader/third_party/llvm-7.0/llvm/lib/IR/
  Instruction.cpp
    in haveSameSpecialState():
       413    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(I1))
       414      return CXI->isVolatile() == cast<AtomicCmpXchgInst>(I2)->isVolatile() &&
       415             CXI->isWeak() == cast<AtomicCmpXchgInst>(I2)->isWeak() &&
       416             CXI->getSuccessOrdering() ==
       418             CXI->getFailureOrdering() ==
       420             CXI->getSyncScopeID() ==
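This comparison (together with the similar hits in the older Instruction.cpp, FunctionComparator.cpp, and MergeFunctions.cpp) decides when two cmpxchg instructions carry the same special state. Spelled out as a standalone predicate using the llvm-7.0 accessors; sameCmpXchgState is a hypothetical helper, not a function from these files.

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Hypothetical helper mirroring the checks above: every flag, both
    // orderings, and the synchronization scope must agree.
    static bool sameCmpXchgState(const AtomicCmpXchgInst *A,
                                 const AtomicCmpXchgInst *B) {
      return A->isVolatile() == B->isVolatile() &&
             A->isWeak() == B->isWeak() &&
             A->getSuccessOrdering() == B->getSuccessOrdering() &&
             A->getFailureOrdering() == B->getFailureOrdering() &&
             A->getSyncScopeID() == B->getSyncScopeID();
    }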
  Verifier.cpp
       472    void visitAtomicCmpXchgInst(AtomicCmpXchgInst &CXI);
    in visitAtomicCmpXchgInst():
      3306  void Verifier::visitAtomicCmpXchgInst(AtomicCmpXchgInst &CXI) {
      3309    Assert(CXI.getSuccessOrdering() != AtomicOrdering::NotAtomic,
      3310           "cmpxchg instructions must be atomic.", &CXI);
      3311    Assert(CXI.getFailureOrdering() != AtomicOrdering::NotAtomic,
      3312           "cmpxchg instructions must be atomic.", &CXI);
      3313    Assert(CXI.getSuccessOrdering() != AtomicOrdering::Unordered,
      3314           "cmpxchg instructions cannot be unordered.", &CXI);
      3315    Assert(CXI.getFailureOrdering() != AtomicOrdering::Unordered,
      3316           "cmpxchg instructions cannot be unordered.", &CXI);
      [all …]
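Both verifier versions require the success and failure orderings of a cmpxchg to be atomic and not unordered; the lines elided in this listing add further checks on the failure ordering and the operand types. A sketch of emitting a cmpxchg that satisfies the quoted checks, assuming the llvm-7.0 IRBuilder API; emitCompareExchange is an illustrative name, not from these files.

    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    // Hypothetical emitter: a sequentially consistent compare-exchange whose
    // orderings pass the verifier rules quoted above.
    static Value *emitCompareExchange(IRBuilder<> &Builder, Value *Ptr,
                                      Value *Expected, Value *Desired) {
      AtomicCmpXchgInst *CXI = Builder.CreateAtomicCmpXchg(
          Ptr, Expected, Desired,
          /*SuccessOrdering=*/AtomicOrdering::SequentiallyConsistent,
          /*FailureOrdering=*/AtomicOrdering::Monotonic, // atomic, not unordered
          SyncScope::System);
      // cmpxchg yields an {old value, i1 success} pair; hand back the success bit.
      return Builder.CreateExtractValue(CXI, 1);
    }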
  AsmWriter.cpp
    in printInstruction():
      3853    } else if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(&I)) {
      3854      writeAtomicCmpXchg(CXI->getContext(), CXI->getSuccessOrdering(),
      3855                         CXI->getFailureOrdering(), CXI->getSyncScopeID());
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Transforms/Utils/
  FunctionComparator.cpp
    in cmpOperations():
       618    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(L)) {
       619      if (int Res = cmpNumbers(CXI->isVolatile(),
       622      if (int Res = cmpNumbers(CXI->isWeak(),
       626          cmpOrderings(CXI->getSuccessOrdering(),
       630          cmpOrderings(CXI->getFailureOrdering(),
       633      return cmpNumbers(CXI->getSyncScopeID(),
  InlineFunction.cpp
    in AddAliasScopeMetadata():
       984      else if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(I))
       985        PtrArgs.push_back(CXI->getPointerOperand());
/external/swiftshader/third_party/LLVM/lib/Transforms/IPO/
  MergeFunctions.cpp
    in isEquivalentOperation():
       331    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(I1))
       332      return CXI->isVolatile() == cast<AtomicCmpXchgInst>(I2)->isVolatile() &&
       333             CXI->getOrdering() == cast<AtomicCmpXchgInst>(I2)->getOrdering() &&
       334             CXI->getSynchScope() == cast<AtomicCmpXchgInst>(I2)->getSynchScope();
/external/llvm/include/llvm/Analysis/
  MemoryLocation.h
        68    static MemoryLocation get(const AtomicCmpXchgInst *CXI);
/external/swiftshader/third_party/llvm-7.0/llvm/include/llvm/Analysis/
  MemoryLocation.h
        77    static MemoryLocation get(const AtomicCmpXchgInst *CXI);
/external/llvm/lib/Transforms/IPO/
  MergeFunctions.cpp
    in cmpOperations():
      1022    if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(L)) {
      1023      if (int Res = cmpNumbers(CXI->isVolatile(),
      1026      if (int Res = cmpNumbers(CXI->isWeak(),
      1030          cmpOrderings(CXI->getSuccessOrdering(),
      1034          cmpOrderings(CXI->getFailureOrdering(),
      1037      return cmpNumbers(CXI->getSynchScope(),
/external/swiftshader/third_party/LLVM/include/llvm/Analysis/
  AliasAnalysis.h
       139    Location getLocation(const AtomicCmpXchgInst *CXI);
/external/llvm/lib/Transforms/Utils/
  InlineFunction.cpp
    in AddAliasScopeMetadata():
       904      else if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(I))
       905        PtrArgs.push_back(CXI->getPointerOperand());
/external/swiftshader/third_party/LLVM/lib/AsmParser/
  LLParser.cpp
    in ParseCmpXchg():
      3822    AtomicCmpXchgInst *CXI =
      3824    CXI->setVolatile(isVolatile);
      3825    Inst = CXI;
/external/swiftshader/third_party/LLVM/lib/Transforms/Utils/
  SimplifyCFG.cpp
    in SimplifyUnreachable():
      2309    } else if (AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(BBI)) {
      2310      if (CXI->isVolatile())