
Searched refs:MPI (Results 1 – 25 of 57) sorted by relevance


/external/tensorflow/tensorflow/contrib/mpi/
README.md
  1: ## How to compile and use MPI-enabled TensorFlow
  3: 1. Follow the regular TF compilation instructions. During configure step, if you want MPI support, …
  5: ```Do you wish to build TensorFlow with MPI support [y/N]```
  7: 2. To turn on the MPI connection, add the protocol "grpc+mpi" in the server definition:
  13: …age of the high performance networking primitives that are offered via the MPI API. This enables T…
  26: This environment variable allows you to disable the MPI path before launch (e.g. for performance or…
  31: MPI library can directly access the pointer to the data. For CPU backed buffers this is no problem…
  37: MPI libraries. This seems to be related to memory allocations/routines that register the memory fo…
  43: … or the MPI library will print an error and exit. The error is "Attempt to free memory that is sti…
  48: MPI functions take over the tensor exchanges. On the sending side the tensors are placed in the st…
[all …]
BUILD
  2: # MPI based communication interfaces and implementations for TensorFlow.
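The README hits above describe enabling the MPI path by selecting the "grpc+mpi" protocol in the server definition. A minimal sketch of what that looks like, assuming a TensorFlow 1.x build configured with MPI support; the cluster hosts, job name, and task index below are illustrative placeholders:

```python
import tensorflow as tf  # TF 1.x, built with the contrib MPI path enabled

# Hypothetical two-node cluster; replace the hosts with real addresses.
cluster = tf.train.ClusterSpec({"worker": ["node0:2222", "node1:2222"]})

# Choosing "grpc+mpi" (instead of the default "grpc") is what routes tensor
# exchanges over the MPI networking primitives mentioned in the README.
server = tf.train.Server(cluster,
                         job_name="worker",
                         task_index=0,
                         protocol="grpc+mpi")
server.join()
```

Each task would typically be launched under mpirun so the MPI runtime can connect the processes.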
/external/tensorflow/tensorflow/contrib/mpi_collectives/
README.md
  1: # MPI TensorFlow integration
  3: Tensorflow MPI integration allows communicating between different TensorFlow
  4: processes using MPI. This enables training across multiple nodes and GPUs
BUILD
  1: # Ops that communicate with other processes via MPI.
mpi_message.proto
  48: // SHUTDOWN if all MPI processes should shut down.
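The mpi_collectives hits describe MPI-based communication between TensorFlow processes for multi-node, multi-GPU training. As a rough illustration of the underlying pattern (rank/size discovery plus an allreduce that averages per-process results), here is a stand-in sketch using mpi4py rather than the contrib ops themselves:

```python
from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank, size = comm.Get_rank(), comm.Get_size()

# Pretend per-process gradient; with the contrib ops this would be a TF tensor.
local_grad = np.full(4, float(rank))

# Sum across all ranks, then divide so every process sees the same average.
summed = np.empty_like(local_grad)
comm.Allreduce(local_grad, summed, op=MPI.SUM)
averaged = summed / size
print(f"rank {rank}/{size}: {averaged}")
```

Run with e.g. `mpirun -np 2 python allreduce_demo.py`. The SHUTDOWN message noted in mpi_message.proto presumably plays the role of telling such worker processes when to exit.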
/external/eigen/cmake/
FindPastix.cmake
  20: # - MPI
  27: # - MPI: to activate detection of the parallel MPI version (default)
  28: # it looks for Threads, HWLOC, BLAS, MPI and ScaLAPACK libraries
  29: # - SEQ: to activate detection of the sequential version (exclude MPI version)
  31: # it looks for MPI version of StarPU (default behaviour)
  32: # if SEQ and STARPU are given, it looks for a StarPU without MPI
  96: # means we look for the sequential version of PaStiX (without MPI)
  100: if (${component} STREQUAL "MPI")
  101: # means we look for the MPI version of PaStiX (default)
  220: # PASTIX may depend on MPI
[all …]
FindPTSCOTCH.cmake
  20: # - MPI
  90: # PTSCOTCH depends on MPI, try to find it
  93: find_package(MPI REQUIRED)
  95: find_package(MPI)
  285: # MPI
/external/clang/lib/StaticAnalyzer/Checkers/
CMakeLists.txt
  44: MPI-Checker/MPIBugReporter.cpp
  45: MPI-Checker/MPIChecker.cpp
  46: MPI-Checker/MPIFunctionClassifier.cpp
Android.bp
  9: subdirs = ["MPI-Checker"]
/external/eigen/bench/spbench/
CMakeLists.txt
  42: # check that the PASTIX found is a version without MPI
  49: … " Because Eigen tests require a version without MPI, we disable the Pastix backend.")
/external/tensorflow/tensorflow/tools/docker/
Dockerfile.mkl-horovod
  74: # Install Open MPI
  98: # Install OpenSSH for MPI to communicate between containers
Dockerfile.devel-mkl-horovod
  131: # Install Open MPI
  155: # Install OpenSSH for MPI to communicate between containers
/external/tensorflow/tensorflow/tools/ci_build/
Dockerfile.cpu.mpi
  22: # Set up MPI
/external/llvm/lib/Transforms/Utils/
MemorySSA.cpp
  1166: for (auto MPI = upward_defs_begin(PHIPair), MPE = upward_defs_end();    [in UpwardsDFSWalk(), local]
  1167: MPI != MPE; ++MPI) {    [in UpwardsDFSWalk()]
  1170: DT->dominates(CurrAccess->getBlock(), MPI.getPhiArgBlock());    [in UpwardsDFSWalk()]
  1173: UpwardsDFSWalk(MPI->first, MPI->second, Q, Backedge);    [in UpwardsDFSWalk()]
/external/llvm/lib/Target/WebAssembly/
WebAssemblyRegStackify.cpp
  142: const MachinePointerInfo &MPI = MMO->getPointerInfo();    [in Query(), local]
  143: if (MPI.V.is<const PseudoSourceValue *>()) {    [in Query()]
  144: auto PSV = MPI.V.get<const PseudoSourceValue *>();    [in Query()]
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/WebAssembly/
WebAssemblyRegStackify.cpp
  178: const MachinePointerInfo &MPI = MMO->getPointerInfo();    [in Query(), local]
  179: if (MPI.V.is<const PseudoSourceValue *>()) {    [in Query()]
  180: auto PSV = MPI.V.get<const PseudoSourceValue *>();    [in Query()]
/external/eigen/test/
CMakeLists.txt
  84: # check that the PASTIX found is a version without MPI
  91: … " Because Eigen tests require a version without MPI, we disable the Pastix backend.")
/external/clang/include/clang/StaticAnalyzer/Checkers/
Checkers.td
  75: def MPI : Package<"mpi">, InPackage<OptIn>;
  582: let ParentPackage = MPI in {
  583: def MPIChecker : Checker<"MPI-Checker">,
  584: HelpText<"Checks MPI code">,
/external/swiftshader/third_party/llvm-7.0/llvm/lib/ObjectYAML/
CodeViewYAMLTypes.cpp
  386: void MappingTraits<MemberPointerInfo>::mapping(IO &IO, MemberPointerInfo &MPI) {    [in mapping(), argument]
  387: IO.mapRequired("ContainingType", MPI.ContainingType);    [in mapping()]
  388: IO.mapRequired("Representation", MPI.Representation);    [in mapping()]
/external/catch2/contrib/
ParseAndAddCatchTests.cmake
  43: # a test should be run. For instance to use test MPI, one can write #
/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/
MachineFunction.cpp
  398: MachinePointerInfo MPI = MMO->getValue() ?    [in getMachineMemOperand(), local]
  403: MachineMemOperand(MPI, MMO->getFlags(), MMO->getSize(),    [in getMachineMemOperand()]
/external/swiftshader/third_party/llvm-7.0/llvm/include/llvm/DebugInfo/CodeView/
TypeRecord.h
  291: PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)    [in PointerRecord(), argument]
  293: Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}    [in PointerRecord()]
/external/catch2/docs/
cmake-integration.md
  177: instance to run some tests using `MPI` and other sequentially, one can write
/external/llvm/lib/Target/PowerPC/
PPCISelLowering.cpp
  4452: MachinePointerInfo MPI(CS ? CS->getCalledValue() : nullptr);    [in PrepareCall(), local]
  4453: SDValue LoadFuncPtr = DAG.getLoad(MVT::i64, dl, LDChain, Callee, MPI,    [in PrepareCall()]
  4460: MPI.getWithOffset(16), false, false,    [in PrepareCall()]
  4466: MPI.getWithOffset(8), false, false,    [in PrepareCall()]
  6408: MachinePointerInfo MPI =    [in LowerFP_TO_INTForReuse(), local]
  6416: MF.getMachineMemOperand(MPI, MachineMemOperand::MOStore, 4, 4);    [in LowerFP_TO_INTForReuse()]
  6422: MPI, false, false, 0);    [in LowerFP_TO_INTForReuse()]
  6429: MPI = MPI.getWithOffset(Subtarget.isLittleEndian() ? 0 : 4);    [in LowerFP_TO_INTForReuse()]
  6434: RLI.MPI = MPI;    [in LowerFP_TO_INTForReuse()]
  6480: return DAG.getLoad(Op.getValueType(), dl, RLI.Chain, RLI.Ptr, RLI.MPI, false,    [in LowerFP_TO_INT()]
[all …]
PPCISelLowering.h
  751: MachinePointerInfo MPI;    [member]
