Searched refs:MPI (Results 1 – 25 of 43) sorted by relevance

/external/eigen/cmake/
FindPastix.cmake
20 # - MPI
27 # - MPI: to activate detection of the parallel MPI version (default)
28 # it looks for Threads, HWLOC, BLAS, MPI and ScaLAPACK libraries
29 # - SEQ: to activate detection of the sequential version (exclude MPI version)
31 # it looks for MPI version of StarPU (default behaviour)
32 # if SEQ and STARPU are given, it looks for a StarPU without MPI
96 # means we look for the sequential version of PaStiX (without MPI)
100 if (${component} STREQUAL "MPI")
101 # means we look for the MPI version of PaStiX (default)
220 # PASTIX may depend on MPI
[all …]
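
The comments above document FindPastix.cmake's COMPONENTS interface (MPI, SEQ, STARPU). As a minimal sketch of how a caller might request the sequential, MPI-free PaStiX through that documented interface (the result-variable names below are assumptions, not copied from the module):

    # Hypothetical caller: ask Eigen's FindPastix.cmake for the
    # sequential (MPI-free) PaStiX via the SEQ component.
    find_package(PASTIX COMPONENTS SEQ)
    if (PASTIX_FOUND)
      # Variable names are illustrative; check the module for the exact ones.
      include_directories(${PASTIX_INCLUDE_DIRS})
      target_link_libraries(myapp ${PASTIX_LIBRARIES})
    endif()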
FindPTSCOTCH.cmake
20 # - MPI
90 # PTSCOTCH depends on MPI, try to find it
93 find_package(MPI REQUIRED)
95 find_package(MPI)
285 # MPI
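
Lines 90–95 show the usual CMake pattern for a hard dependency inside a find module: propagate the caller's REQUIRED flag to the nested find_package call. A minimal sketch of that pattern, relying only on CMake's standard <Pkg>_FIND_REQUIRED convention:

    # Sketch: forward the REQUIRED flag when resolving PTSCOTCH's MPI dependency.
    if (PTSCOTCH_FIND_REQUIRED)
      find_package(MPI REQUIRED)   # fail hard if MPI is missing
    else()
      find_package(MPI)            # optional: callers can cope without it
    endif()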
/external/clang/lib/StaticAnalyzer/Checkers/
CMakeLists.txt
44 MPI-Checker/MPIBugReporter.cpp
45 MPI-Checker/MPIChecker.cpp
46 MPI-Checker/MPIFunctionClassifier.cpp
Android.bp
9 subdirs = ["MPI-Checker"]
/external/valgrind/docs/internals/
mpi2entries.txt
1 Canned summary of MPI-1.1/MPI-2 entry points, as derived from mpi.h
2 from Open MPI svn rev 9191 (somewhere between Open MPI versions 1.0.1
3_9_BUGSTATUS.txt
153 === MPI ================================================================
3_7_BUGSTATUS.txt
44 287862 MPI_IN_PLACE not supported for MPI collect
3_10_BUGSTATUS.txt
296 === MPI ================================================================
3_12_BUGSTATUS.txt
385 === MPI ================================================================
3_11_BUGSTATUS.txt
203 === MPI ================================================================
3_2_BUGSTATUS.txt
141 and makes a valid MPI program crash.
/external/eigen/bench/spbench/
CMakeLists.txt
42 # check that the PASTIX found is a version without MPI
49 … " Because Eigen tests require a version without MPI, we disable the Pastix backend.")
/external/llvm/lib/Transforms/Utils/
MemorySSA.cpp
1166 for (auto MPI = upward_defs_begin(PHIPair), MPE = upward_defs_end(); in UpwardsDFSWalk() local
1167 MPI != MPE; ++MPI) { in UpwardsDFSWalk()
1170 DT->dominates(CurrAccess->getBlock(), MPI.getPhiArgBlock()); in UpwardsDFSWalk()
1173 UpwardsDFSWalk(MPI->first, MPI->second, Q, Backedge); in UpwardsDFSWalk()
/external/llvm/lib/Target/WebAssembly/
WebAssemblyRegStackify.cpp
142 const MachinePointerInfo &MPI = MMO->getPointerInfo(); in Query() local
143 if (MPI.V.is<const PseudoSourceValue *>()) { in Query()
144 auto PSV = MPI.V.get<const PseudoSourceValue *>(); in Query()
/external/eigen/test/
CMakeLists.txt
84 # check that the PASTIX found is a version without MPI
91 … " Because Eigen tests require a version without MPI, we disable the Pastix backend.")
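
Both this entry and the spbench one above guard Eigen's PaStiX tests against an MPI-linked PaStiX build. A hedged sketch of what such a guard can look like (the condition variables and the EIGEN_TEST_PASTIX toggle here are illustrative, not lifted from Eigen's CMakeLists.txt):

    # Illustrative guard: skip the PaStiX backend when the detected
    # library was built against MPI (all names hypothetical).
    if (PASTIX_FOUND AND PASTIX_USES_MPI)
      message(WARNING "Because Eigen tests require a version without MPI, "
                      "we disable the Pastix backend.")
      set(EIGEN_TEST_PASTIX OFF)
    endif()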
/external/clang/include/clang/StaticAnalyzer/Checkers/
Checkers.td
75 def MPI : Package<"mpi">, InPackage<OptIn>;
582 let ParentPackage = MPI in {
583 def MPIChecker : Checker<"MPI-Checker">,
584 HelpText<"Checks MPI code">,
/external/llvm/lib/Target/PowerPC/
PPCISelLowering.cpp
4452 MachinePointerInfo MPI(CS ? CS->getCalledValue() : nullptr); in PrepareCall() local
4453 SDValue LoadFuncPtr = DAG.getLoad(MVT::i64, dl, LDChain, Callee, MPI, in PrepareCall()
4460 MPI.getWithOffset(16), false, false, in PrepareCall()
4466 MPI.getWithOffset(8), false, false, in PrepareCall()
6408 MachinePointerInfo MPI = in LowerFP_TO_INTForReuse() local
6416 MF.getMachineMemOperand(MPI, MachineMemOperand::MOStore, 4, 4); in LowerFP_TO_INTForReuse()
6422 MPI, false, false, 0); in LowerFP_TO_INTForReuse()
6429 MPI = MPI.getWithOffset(Subtarget.isLittleEndian() ? 0 : 4); in LowerFP_TO_INTForReuse()
6434 RLI.MPI = MPI; in LowerFP_TO_INTForReuse()
6480 return DAG.getLoad(Op.getValueType(), dl, RLI.Chain, RLI.Ptr, RLI.MPI, false, in LowerFP_TO_INT()
[all …]
PPCISelLowering.h
751 MachinePointerInfo MPI; member
/external/valgrind/
NEWS.old
480 Lackey has been improved, and MPI support has been added. In detail:
548 - MPI support: partial support for debugging distributed applications
549 using the MPI library specification has been added. Valgrind is
550 aware of the memory state changes caused by a subset of the MPI
581 again, and was required for MPI support.
1026 use valgrind for debugging MPI-based programs. The relevant
configure.ac
3904 # MPI checks
3906 # Do we have a useable MPI setup on the primary and/or secondary targets?
NEWS
1771 n-i-bz Fixes for more MPI false positives
2206 troublesome pieces of code. The MPI wrapper library (libmpiwrap.c)
3149 * For people who use Valgrind with MPI programs, the installed
/external/llvm/lib/CodeGen/AsmPrinter/
CodeViewDebug.cpp
1224 MemberPointerInfo MPI( in lowerTypeMemberPointer() local
1226 PointerRecord PR(PointeeTI, PK, PM, PO, SizeInBytes, MPI); in lowerTypeMemberPointer()
/external/boringssl/src/crypto/fipsmodule/bn/
bn_test.cc
899 TEST_F(BNTest, MPI) { in TEST_F() argument
/external/clang/include/clang/Basic/
AttrDocs.td
1315 * MPI library implementations, where these attributes enable checking that
1317 * for HDF5 library there is a similar use case to MPI;
/external/llvm/lib/CodeGen/SelectionDAG/
DAGCombiner.cpp
12273 MachinePointerInfo MPI; in ReplaceExtractVectorEltOfLoadWithNarrowedLoad() local
12279 MPI = OriginalLoad->getPointerInfo().getWithOffset(PtrOff); in ReplaceExtractVectorEltOfLoadWithNarrowedLoad()
12285 MPI = OriginalLoad->getPointerInfo(); in ReplaceExtractVectorEltOfLoadWithNarrowedLoad()
12305 ExtType, SDLoc(EVE), ResultVT, OriginalLoad->getChain(), NewPtr, MPI, in ReplaceExtractVectorEltOfLoadWithNarrowedLoad()
12311 VecEltVT, SDLoc(EVE), OriginalLoad->getChain(), NewPtr, MPI, in ReplaceExtractVectorEltOfLoadWithNarrowedLoad()
