/external/llvm/lib/Analysis/ |
D | MemoryLocation.cpp |
    67   MemoryLocation MemoryLocation::getForSource(const MemTransferInst *MTI) {   in getForSource() argument
    69   if (ConstantInt *C = dyn_cast<ConstantInt>(MTI->getLength()))   in getForSource()
    75   MTI->getAAMetadata(AATags);   in getForSource()
    77   return MemoryLocation(MTI->getRawSource(), Size, AATags);   in getForSource()
    80   MemoryLocation MemoryLocation::getForDest(const MemIntrinsic *MTI) {   in getForDest() argument
    82   if (ConstantInt *C = dyn_cast<ConstantInt>(MTI->getLength()))   in getForDest()
    88   MTI->getAAMetadata(AATags);   in getForDest()
    90   return MemoryLocation(MTI->getRawDest(), Size, AATags);   in getForDest()
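The MemoryLocation.cpp entry above shows the helpers that package a memory-transfer intrinsic's source or destination into a MemoryLocation. As a rough, self-contained illustration of how client code typically uses them (this sketch is not taken from any of the files listed here; the function name transferTouchesLoad and the AAResults parameter are assumptions made for the example):

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: does a memcpy/memmove intrinsic read or write the same memory
    // as a given load?  AA is whatever alias-analysis result the caller holds.
    static bool transferTouchesLoad(AAResults &AA, const MemTransferInst *MTI,
                                    const LoadInst *LI) {
      MemoryLocation SrcLoc = MemoryLocation::getForSource(MTI); // bytes read
      MemoryLocation DstLoc = MemoryLocation::getForDest(MTI);   // bytes written
      MemoryLocation LoadLoc = MemoryLocation::get(LI);
      return AA.alias(SrcLoc, LoadLoc) != NoAlias ||
             AA.alias(DstLoc, LoadLoc) != NoAlias;
    }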
|
D | LazyValueInfo.cpp |
    724  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI))   in InstructionDereferencesPointer() local
    725  if (MTI->getSourceAddressSpace() == 0)   in InstructionDereferencesPointer()
    726  if (GetUnderlyingObject(MTI->getRawSource(),   in InstructionDereferencesPointer()
    727  MTI->getModule()->getDataLayout()) == Ptr)   in InstructionDereferencesPointer()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/ |
D | MemoryLocation.cpp |
    86   MemoryLocation MemoryLocation::getForSource(const MemTransferInst *MTI) {   in getForSource() argument
    87   return getForSource(cast<AnyMemTransferInst>(MTI));   in getForSource()
    90   MemoryLocation MemoryLocation::getForSource(const AtomicMemTransferInst *MTI) {   in getForSource() argument
    91   return getForSource(cast<AnyMemTransferInst>(MTI));   in getForSource()
    94   MemoryLocation MemoryLocation::getForSource(const AnyMemTransferInst *MTI) {   in getForSource() argument
    96   if (ConstantInt *C = dyn_cast<ConstantInt>(MTI->getLength()))   in getForSource()
    102  MTI->getAAMetadata(AATags);   in getForSource()
    104  return MemoryLocation(MTI->getRawSource(), Size, AATags);   in getForSource()
|
D | AliasSetTracker.cpp |
    429  void AliasSetTracker::add(AnyMemTransferInst *MTI) {   in add() argument
    430  addPointer(MemoryLocation::getForDest(MTI), AliasSet::ModAccess);   in add()
    431  addPointer(MemoryLocation::getForSource(MTI), AliasSet::RefAccess);   in add()
    471  if (AnyMemTransferInst *MTI = dyn_cast<AnyMemTransferInst>(I))   in add() local
    472  return add(MTI);   in add()
|
D | StackSafetyAnalysis.cpp |
    266  if (auto MTI = dyn_cast<MemTransferInst>(MI)) {   in getMemIntrinsicAccessRange() local
    267  if (MTI->getRawSource() != U && MTI->getRawDest() != U)   in getMemIntrinsicAccessRange()
|
D | LazyValueInfo.cpp |
    693  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI))   in InstructionDereferencesPointer() local
    694  if (MTI->getSourceAddressSpace() == 0)   in InstructionDereferencesPointer()
    695  if (GetUnderlyingObject(MTI->getRawSource(),   in InstructionDereferencesPointer()
    696  MTI->getModule()->getDataLayout()) == Ptr)   in InstructionDereferencesPointer()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Scalar/ |
D | InferAddressSpaces.cpp |
    373  if (auto *MTI = dyn_cast<MemTransferInst>(MI))   in collectFlatAddressExpressions() local
    374  PushPtrOperand(MTI->getRawSource());   in collectFlatAddressExpressions()
    798  } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {   in handleMemIntrinsicPtrUse() local
    799  Value *Src = MTI->getRawSource();   in handleMemIntrinsicPtrUse()
    800  Value *Dest = MTI->getRawDest();   in handleMemIntrinsicPtrUse()
    809  if (isa<MemCpyInst>(MTI)) {   in handleMemIntrinsicPtrUse()
    810  MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);   in handleMemIntrinsicPtrUse()
    811  B.CreateMemCpy(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),   in handleMemIntrinsicPtrUse()
    812  MTI->getLength(),   in handleMemIntrinsicPtrUse()
    816  assert(isa<MemMoveInst>(MTI));   in handleMemIntrinsicPtrUse()
    [all …]
|
D | AlignmentFromAssumptions.cpp |
    348  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {   in processAssumption() local
    350  MTI->getSource(), SE);   in processAssumption()
    354  if (NewSrcAlignment > MTI->getSourceAlignment()) {   in processAssumption()
    355  MTI->setSourceAlignment(NewSrcAlignment);   in processAssumption()
|
D | DeadStoreElimination.cpp |
    240  if (auto *MTI = dyn_cast<AnyMemTransferInst>(Inst))   in getLocForRead() local
    241  return MemoryLocation::getForSource(MTI);   in getLocForRead()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Utils/ |
D | GlobalStatus.cpp |
    155  } else if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(I)) {   in analyzeGlobalAux() local
    156  if (MTI->isVolatile())   in analyzeGlobalAux()
    158  if (MTI->getArgOperand(0) == V)   in analyzeGlobalAux()
    160  if (MTI->getArgOperand(1) == V)   in analyzeGlobalAux()
|
D | VNCoercion.cpp |
    306  MemTransferInst *MTI = cast<MemTransferInst>(MI);   in analyzeLoadFromClobberingMemInst() local
    308  Constant *Src = dyn_cast<Constant>(MTI->getSource());   in analyzeLoadFromClobberingMemInst()
    501  MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);   in getMemInstValueForLoadHelper() local
    502  Constant *Src = cast<Constant>(MTI->getSource());   in getMemInstValueForLoadHelper()
|
/external/llvm/lib/Transforms/Utils/ |
D | GlobalStatus.cpp |
    145  } else if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(I)) {   in analyzeGlobalAux() local
    146  if (MTI->isVolatile())   in analyzeGlobalAux()
    148  if (MTI->getArgOperand(0) == V)   in analyzeGlobalAux()
    150  if (MTI->getArgOperand(1) == V)   in analyzeGlobalAux()
|
/external/llvm/lib/Transforms/Scalar/ |
D | AlignmentFromAssumptions.cpp |
    342  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {   in processAssumption() local
    344  MTI->getSource(), SE);   in processAssumption()
    347  NewDestAlignments.find(MTI);   in processAssumption()
    352  NewSrcAlignments.find(MTI);   in processAssumption()
    377  NewDestAlignments.insert(std::make_pair(MTI, NewDestAlignment));   in processAssumption()
    378  NewSrcAlignments.insert(std::make_pair(MTI, NewSrcAlignment));   in processAssumption()
|
D | DeadStoreElimination.cpp |
    185  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(Inst))   in getLocForRead() local
    186  return MemoryLocation::getForSource(MTI);   in getLocForRead()
    798  } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(BBI)) {   in handleEndBlock() local
    799  LoadedLoc = MemoryLocation::getForSource(MTI);   in handleEndBlock()
|
D | GVN.cpp |
    951   MemTransferInst *MTI = cast<MemTransferInst>(MI);   in AnalyzeLoadFromClobberingMemInst() local
    953   Constant *Src = dyn_cast<Constant>(MTI->getSource());   in AnalyzeLoadFromClobberingMemInst()
    1123  MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);   in GetMemInstValueForLoad() local
    1124  Constant *Src = cast<Constant>(MTI->getSource());   in GetMemInstValueForLoad()
|
/external/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Analysis/ |
D | MemoryLocation.h |
    225  static MemoryLocation getForSource(const MemTransferInst *MTI);
    226  static MemoryLocation getForSource(const AtomicMemTransferInst *MTI);
    227  static MemoryLocation getForSource(const AnyMemTransferInst *MTI);
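The three overloads above let callers pass either the plain memcpy/memmove intrinsics or their element-wise atomic variants; AnyMemTransferInst is the common base class in LLVM 10. A minimal sketch of dispatching through it (not taken from the listed files; coversTransferSource is a made-up name for illustration):

    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: AnyMemTransferInst matches both llvm.memcpy/llvm.memmove and the
    // *.element.unordered.atomic forms, so one path handles all of them.
    static bool coversTransferSource(const Instruction *I) {
      if (const auto *MTI = dyn_cast<AnyMemTransferInst>(I)) {
        MemoryLocation Src = MemoryLocation::getForSource(MTI);
        return Src.Size.hasValue(); // precise only when the length is a ConstantInt
      }
      return false;
    }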
|
D | AliasSetTracker.h | 382 void add(AnyMemTransferInst *MTI);
|
/external/llvm/include/llvm/Analysis/ |
D | MemoryLocation.h | 85 static MemoryLocation getForSource(const MemTransferInst *MTI);
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/ARM/ |
D | ARMFastISel.cpp |
    2521  const MemTransferInst &MTI = cast<MemTransferInst>(I);   in SelectIntrinsicCall() local
    2523  if (MTI.isVolatile())   in SelectIntrinsicCall()
    2529  if (isa<ConstantInt>(MTI.getLength()) && isMemCpy) {   in SelectIntrinsicCall()
    2532  uint64_t Len = cast<ConstantInt>(MTI.getLength())->getZExtValue();   in SelectIntrinsicCall()
    2535  if (!ARMComputeAddress(MTI.getRawDest(), Dest) ||   in SelectIntrinsicCall()
    2536  !ARMComputeAddress(MTI.getRawSource(), Src))   in SelectIntrinsicCall()
    2538  unsigned Alignment = MinAlign(MTI.getDestAlignment(),   in SelectIntrinsicCall()
    2539  MTI.getSourceAlignment());   in SelectIntrinsicCall()
    2545  if (!MTI.getLength()->getType()->isIntegerTy(32))   in SelectIntrinsicCall()
    2548  if (MTI.getSourceAddressSpace() > 255 || MTI.getDestAddressSpace() > 255)   in SelectIntrinsicCall()
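The FastISel entries (ARM here, AArch64 further down) share the same overall shape: a non-volatile memcpy with a small constant length may be expanded inline, otherwise the code falls back to a library call, which in turn requires a pointer-width length type and ordinary address spaces. A compressed, purely illustrative restatement of that decision follows; the 32-byte threshold, the enum, and the function name are placeholders, not the actual SelectIntrinsicCall logic:

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    enum class MemCpyLowering { InlineCopy, LibCall, FallBackToDAG };

    // Sketch of the selection decision made by the FastISel code listed above.
    static MemCpyLowering classifyMemTransfer(const MemTransferInst &MTI,
                                              bool IsMemCpy) {
      if (MTI.isVolatile())
        return MemCpyLowering::FallBackToDAG; // volatile copies are not handled

      if (IsMemCpy)
        if (const auto *Len = dyn_cast<ConstantInt>(MTI.getLength()))
          if (Len->getZExtValue() <= 32)      // placeholder small-copy threshold
            return MemCpyLowering::InlineCopy;

      // The libcall path wants an i32 length (i64 on AArch64) and small
      // address-space numbers.
      if (!MTI.getLength()->getType()->isIntegerTy(32))
        return MemCpyLowering::FallBackToDAG;
      if (MTI.getSourceAddressSpace() > 255 || MTI.getDestAddressSpace() > 255)
        return MemCpyLowering::FallBackToDAG;
      return MemCpyLowering::LibCall;
    }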
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/ |
D | SafeStack.cpp |
    263  if (auto MTI = dyn_cast<MemTransferInst>(MI)) {   in IsMemIntrinsicSafe() local
    264  if (MTI->getRawSource() != U && MTI->getRawDest() != U)   in IsMemIntrinsicSafe()
|
/external/llvm/lib/Target/ARM/ |
D | ARMFastISel.cpp |
    2507  const MemTransferInst &MTI = cast<MemTransferInst>(I);   in SelectIntrinsicCall() local
    2509  if (MTI.isVolatile())   in SelectIntrinsicCall()
    2515  if (isa<ConstantInt>(MTI.getLength()) && isMemCpy) {   in SelectIntrinsicCall()
    2518  uint64_t Len = cast<ConstantInt>(MTI.getLength())->getZExtValue();   in SelectIntrinsicCall()
    2521  if (!ARMComputeAddress(MTI.getRawDest(), Dest) ||   in SelectIntrinsicCall()
    2522  !ARMComputeAddress(MTI.getRawSource(), Src))   in SelectIntrinsicCall()
    2524  unsigned Alignment = MTI.getAlignment();   in SelectIntrinsicCall()
    2530  if (!MTI.getLength()->getType()->isIntegerTy(32))   in SelectIntrinsicCall()
    2533  if (MTI.getSourceAddressSpace() > 255 || MTI.getDestAddressSpace() > 255)   in SelectIntrinsicCall()
|
/external/cpu_features/scripts/ |
D | run_integration.sh | 148 …/components/toolchain/${DATE}/Codescape.GNU.Tools.Package.${DATE}.for.MIPS.MTI.Linux.CentOS-6.x86_…
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Instrumentation/ |
D | DataFlowSanitizer.cpp |
    1504  auto *MTI = cast<MemTransferInst>(   in visitMemTransferInst() local
    1508  MTI->setDestAlignment(I.getDestAlignment() * (DFSF.DFS.ShadowWidth / 8));   in visitMemTransferInst()
    1509  MTI->setSourceAlignment(I.getSourceAlignment() * (DFSF.DFS.ShadowWidth / 8));   in visitMemTransferInst()
    1511  MTI->setDestAlignment(DFSF.DFS.ShadowWidth / 8);   in visitMemTransferInst()
    1512  MTI->setSourceAlignment(DFSF.DFS.ShadowWidth / 8);   in visitMemTransferInst()
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64FastISel.cpp |
    3524  const auto *MTI = cast<MemTransferInst>(II);   in fastLowerIntrinsicCall() local
    3526  if (MTI->isVolatile())   in fastLowerIntrinsicCall()
    3532  if (isa<ConstantInt>(MTI->getLength()) && IsMemCpy) {   in fastLowerIntrinsicCall()
    3535  uint64_t Len = cast<ConstantInt>(MTI->getLength())->getZExtValue();   in fastLowerIntrinsicCall()
    3536  unsigned Alignment = MinAlign(MTI->getDestAlignment(),   in fastLowerIntrinsicCall()
    3537  MTI->getSourceAlignment());   in fastLowerIntrinsicCall()
    3540  if (!computeAddress(MTI->getRawDest(), Dest) ||   in fastLowerIntrinsicCall()
    3541  !computeAddress(MTI->getRawSource(), Src))   in fastLowerIntrinsicCall()
    3548  if (!MTI->getLength()->getType()->isIntegerTy(64))   in fastLowerIntrinsicCall()
    3551  if (MTI->getSourceAddressSpace() > 255 || MTI->getDestAddressSpace() > 255)   in fastLowerIntrinsicCall()
|
/external/llvm/lib/Target/AArch64/ |
D | AArch64FastISel.cpp |
    3363  const auto *MTI = cast<MemTransferInst>(II);   in fastLowerIntrinsicCall() local
    3365  if (MTI->isVolatile())   in fastLowerIntrinsicCall()
    3371  if (isa<ConstantInt>(MTI->getLength()) && IsMemCpy) {   in fastLowerIntrinsicCall()
    3374  uint64_t Len = cast<ConstantInt>(MTI->getLength())->getZExtValue();   in fastLowerIntrinsicCall()
    3375  unsigned Alignment = MTI->getAlignment();   in fastLowerIntrinsicCall()
    3378  if (!computeAddress(MTI->getRawDest(), Dest) ||   in fastLowerIntrinsicCall()
    3379  !computeAddress(MTI->getRawSource(), Src))   in fastLowerIntrinsicCall()
    3386  if (!MTI->getLength()->getType()->isIntegerTy(64))   in fastLowerIntrinsicCall()
    3389  if (MTI->getSourceAddressSpace() > 255 || MTI->getDestAddressSpace() > 255)   in fastLowerIntrinsicCall()
|