/external/tensorflow/tensorflow/contrib/mpi/ |
D | README.md | 1 ## How to compile and use MPI-enabled TensorFlow 3 1. Follow the regular TF compilation instructions. During configure step, if you want MPI support, … 5 ```Do you wish to build TensorFlow with MPI support [y/N]``` 7 2. To turn on the MPI connection, add the protocol "grpc+mpi" in the server definition: 13 …age of the high performance networking primitives that are offered via the MPI API. This enables T… 26 This environment variable allows you to disable the MPI path before launch (e.g. for performance or… 31 …MPI library can directly access the pointer to the data. For CPU backed buffers this is no problem… 37 …MPI libraries. This seems to be related to memory allocations/routines that register the memory fo… 43 … or the MPI library will print an error and exit. The error is "Attempt to free memory that is sti… 48 …MPI functions take over the tensor exchanges. On the sending side the tensors are placed in the st… [all …]
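The README excerpt above mentions selecting the "grpc+mpi" protocol in the server definition to turn on the MPI transport. Below is a minimal sketch of what that looks like with the TF 1.x `tf.train.Server` API; the host names, ports, and job/task layout are illustrative placeholders, not taken from the README.

```python
# Sketch only: enabling the contrib/mpi path by passing protocol="grpc+mpi"
# when creating a TF 1.x in-process server. Hosts and jobs below are
# hypothetical; a real deployment would use its own cluster definition.
import tensorflow as tf

cluster = tf.train.ClusterSpec({
    "worker": ["node0:2222", "node1:2222"],  # hypothetical worker hosts
    "ps": ["node0:2223"],                    # hypothetical parameter server
})

server = tf.train.Server(
    cluster,
    job_name="worker",
    task_index=0,
    protocol="grpc+mpi",  # default is "grpc"; "grpc+mpi" routes tensor exchange over MPI
)

server.join()
```

This only has an effect when TensorFlow was built with MPI support enabled during the configure step quoted above.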
|
D | BUILD | 2 # MPI based communication interfaces and implementations for TensorFlow.
|
/external/tensorflow/tensorflow/contrib/mpi_collectives/ |
D | README.md | 1 # MPI TensorFlow integration 3 Tensorflow MPI integration allows communicating between different TensorFlow 4 processes using MPI. This enables training across multiple nodes and GPUs
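The mpi_collectives README above describes communication between TensorFlow processes over MPI for multi-node, multi-GPU training. The sketch below shows how an allreduce across ranks might look; the `rank()`, `size()`, `allreduce()`, and `Session` names are assumptions about this module's API (they mirror its Horovod descendant), and the program would be launched under `mpirun`, one process per node or GPU.

```python
# Sketch only: averaging a tensor across TensorFlow processes with the
# contrib/mpi_collectives ops. The rank()/size()/allreduce()/Session names
# are assumed, not confirmed from the excerpt above; allreduce is assumed
# to sum the tensor across all MPI ranks.
import tensorflow as tf
import tensorflow.contrib.mpi_collectives as mpi

# Each rank contributes its own value; allreduce combines them across ranks.
local_value = tf.fill([3], tf.cast(mpi.rank(), tf.float32))
summed = mpi.allreduce(local_value)
averaged = summed / tf.cast(mpi.size(), tf.float32)

with mpi.Session() as sess:  # assumed helper that initializes MPI for this process
    print("rank %d sees average %s"
          % (sess.run(mpi.rank()), sess.run(averaged)))
```

A launch such as `mpirun -np 2 python train.py` would start one such process per rank on the participating hosts.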
|
D | BUILD | 1 # Ops that communicate with other processes via MPI.
|
D | mpi_message.proto | 48 // SHUTDOWN if all MPI processes should shut down.
|
/external/eigen/cmake/ |
D | FindPastix.cmake | 20 # - MPI 27 # - MPI: to activate detection of the parallel MPI version (default) 28 # it looks for Threads, HWLOC, BLAS, MPI and ScaLAPACK libraries 29 # - SEQ: to activate detection of the sequential version (exclude MPI version) 31 # it looks for MPI version of StarPU (default behaviour) 32 # if SEQ and STARPU are given, it looks for a StarPU without MPI 96 # means we look for the sequential version of PaStiX (without MPI) 100 if (${component} STREQUAL "MPI") 101 # means we look for the MPI version of PaStiX (default) 220 # PASTIX may depend on MPI [all …]
|
D | FindPTSCOTCH.cmake | 20 # - MPI 90 # PTSCOTCH depends on MPI, try to find it 93 find_package(MPI REQUIRED) 95 find_package(MPI) 285 # MPI
|
/external/clang/lib/StaticAnalyzer/Checkers/ |
D | CMakeLists.txt | 44 MPI-Checker/MPIBugReporter.cpp 45 MPI-Checker/MPIChecker.cpp 46 MPI-Checker/MPIFunctionClassifier.cpp
|
D | Android.bp | 9 subdirs = ["MPI-Checker"]
|
/external/eigen/bench/spbench/ |
D | CMakeLists.txt | 42 # check that the PASTIX found is a version without MPI 49 … " Because Eigen tests require a version without MPI, we disable the Pastix backend.")
|
/external/tensorflow/tensorflow/tools/docker/ |
D | Dockerfile.mkl-horovod | 74 # Install Open MPI 98 # Install OpenSSH for MPI to communicate between containers
|
D | Dockerfile.devel-mkl-horovod | 131 # Install Open MPI 155 # Install OpenSSH for MPI to communicate between containers
|
/external/tensorflow/tensorflow/tools/ci_build/ |
D | Dockerfile.cpu.mpi | 22 # Set up MPI
|
/external/llvm/lib/Transforms/Utils/ |
D | MemorySSA.cpp | 1166 for (auto MPI = upward_defs_begin(PHIPair), MPE = upward_defs_end(); in UpwardsDFSWalk() local 1167 MPI != MPE; ++MPI) { in UpwardsDFSWalk() 1170 DT->dominates(CurrAccess->getBlock(), MPI.getPhiArgBlock()); in UpwardsDFSWalk() 1173 UpwardsDFSWalk(MPI->first, MPI->second, Q, Backedge); in UpwardsDFSWalk()
|
/external/llvm/lib/Target/WebAssembly/ |
D | WebAssemblyRegStackify.cpp | 142 const MachinePointerInfo &MPI = MMO->getPointerInfo(); in Query() local 143 if (MPI.V.is<const PseudoSourceValue *>()) { in Query() 144 auto PSV = MPI.V.get<const PseudoSourceValue *>(); in Query()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/WebAssembly/ |
D | WebAssemblyRegStackify.cpp | 178 const MachinePointerInfo &MPI = MMO->getPointerInfo(); in Query() local 179 if (MPI.V.is<const PseudoSourceValue *>()) { in Query() 180 auto PSV = MPI.V.get<const PseudoSourceValue *>(); in Query()
|
/external/eigen/test/ |
D | CMakeLists.txt | 84 # check that the PASTIX found is a version without MPI 91 … " Because Eigen tests require a version without MPI, we disable the Pastix backend.")
|
/external/clang/include/clang/StaticAnalyzer/Checkers/ |
D | Checkers.td | 75 def MPI : Package<"mpi">, InPackage<OptIn>; 582 let ParentPackage = MPI in { 583 def MPIChecker : Checker<"MPI-Checker">, 584 HelpText<"Checks MPI code">,
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/ObjectYAML/ |
D | CodeViewYAMLTypes.cpp | 386 void MappingTraits<MemberPointerInfo>::mapping(IO &IO, MemberPointerInfo &MPI) { in mapping() argument 387 IO.mapRequired("ContainingType", MPI.ContainingType); in mapping() 388 IO.mapRequired("Representation", MPI.Representation); in mapping()
|
/external/catch2/contrib/ |
D | ParseAndAddCatchTests.cmake | 43 # a test should be run. For instance to use test MPI, one can write #
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/ |
D | MachineFunction.cpp | 398 MachinePointerInfo MPI = MMO->getValue() ? in getMachineMemOperand() local 403 MachineMemOperand(MPI, MMO->getFlags(), MMO->getSize(), in getMachineMemOperand()
|
/external/swiftshader/third_party/llvm-7.0/llvm/include/llvm/DebugInfo/CodeView/ |
D | TypeRecord.h | 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI) in PointerRecord() argument 293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {} in PointerRecord()
|
/external/catch2/docs/ |
D | cmake-integration.md | 177 instance to run some tests using `MPI` and other sequentially, one can write
|
/external/llvm/lib/Target/PowerPC/ |
D | PPCISelLowering.cpp | 4452 MachinePointerInfo MPI(CS ? CS->getCalledValue() : nullptr); in PrepareCall() local 4453 SDValue LoadFuncPtr = DAG.getLoad(MVT::i64, dl, LDChain, Callee, MPI, in PrepareCall() 4460 MPI.getWithOffset(16), false, false, in PrepareCall() 4466 MPI.getWithOffset(8), false, false, in PrepareCall() 6408 MachinePointerInfo MPI = in LowerFP_TO_INTForReuse() local 6416 MF.getMachineMemOperand(MPI, MachineMemOperand::MOStore, 4, 4); in LowerFP_TO_INTForReuse() 6422 MPI, false, false, 0); in LowerFP_TO_INTForReuse() 6429 MPI = MPI.getWithOffset(Subtarget.isLittleEndian() ? 0 : 4); in LowerFP_TO_INTForReuse() 6434 RLI.MPI = MPI; in LowerFP_TO_INTForReuse() 6480 return DAG.getLoad(Op.getValueType(), dl, RLI.Chain, RLI.Ptr, RLI.MPI, false, in LowerFP_TO_INT() [all …]
|
D | PPCISelLowering.h | 751 MachinePointerInfo MPI; member
|