/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

// The "client library" instantiates a local (in-process) XLA service for
// use by this process, and connects to it with a singleton XLA local
// client. ClientLibrary::GetOrCreateLocalClient will spawn a local service,
// and return a client that's connected to it and ready to run XLA
// computations.
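//
// A minimal usage sketch (illustrative only, not part of this header's
// contract); it assumes computations are built with xla::XlaBuilder from
// xla_builder.h:
//
//   xla::LocalClient* client = xla::ClientLibrary::LocalClientOrDie();
//   xla::XlaBuilder builder("add_scalars");
//   auto x = xla::ConstantR0<float>(&builder, 1.0f);
//   auto y = xla::ConstantR0<float>(&builder, 2.0f);
//   xla::Add(x, y);
//   // Compile and execute the built computation through `client`.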
#ifndef TENSORFLOW_COMPILER_XLA_CLIENT_CLIENT_LIBRARY_H_
#define TENSORFLOW_COMPILER_XLA_CLIENT_CLIENT_LIBRARY_H_

#include <functional>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <vector>

#include "absl/types/optional.h"
#include "tensorflow/compiler/xla/client/compile_only_client.h"
#include "tensorflow/compiler/xla/client/local_client.h"
#include "tensorflow/compiler/xla/service/compile_only_service.h"
#include "tensorflow/compiler/xla/service/local_service.h"
#include "tensorflow/compiler/xla/statusor.h"
#include "tensorflow/compiler/xla/types.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/platform/mutex.h"
#include "tensorflow/core/platform/stream_executor_no_cuda.h"
#include "tensorflow/core/platform/thread_annotations.h"
#include "tensorflow/stream_executor/device_memory_allocator.h"

namespace xla {

// Options to configure the local client when it is created.
class LocalClientOptions {
 public:
  LocalClientOptions(
      se::Platform* platform = nullptr, int number_of_replicas = 1,
      int intra_op_parallelism_threads = -1,
      const absl::optional<std::set<int>>& allowed_devices = absl::nullopt);

  // Sets the platform backing the service, or nullptr for the default
  // platform.
  LocalClientOptions& set_platform(se::Platform* platform);
  se::Platform* platform() const;

  // Sets the number of replicas to use when compiling replicated programs.
  LocalClientOptions& set_number_of_replicas(int number_of_replicas);
  int number_of_replicas() const;

  // Sets the thread pool size for parallel execution of an individual
  // operator.
  LocalClientOptions& set_intra_op_parallelism_threads(int num_threads);
  int intra_op_parallelism_threads() const;

  // Sets the set of device IDs for which stream executors will be constructed
  // on the platform.
  LocalClientOptions& set_allowed_devices(
      const absl::optional<std::set<int>>& allowed_devices);
  const absl::optional<std::set<int>>& allowed_devices() const;

 private:
  se::Platform* platform_;
  int number_of_replicas_;
  int intra_op_parallelism_threads_;
  absl::optional<std::set<int>> allowed_devices_;
};

class ClientLibrary {
 public:
  // Singleton constructor-or-accessor -- returns a client for the application
  // to issue XLA commands on. Arguments:
  //
  //   platform       : The platform the underlying XLA service should target.
  //                    If null, the default platform is used.
  //   allowed_devices: Set of device IDs for which stream executors will be
  //                    created on the given platform.
  static StatusOr<LocalClient*> GetOrCreateLocalClient(
      se::Platform* platform = nullptr,
      const absl::optional<std::set<int>>& allowed_devices = absl::nullopt);
  static StatusOr<LocalClient*> GetOrCreateLocalClient(
      const LocalClientOptions& options);

  // Convenience "or-die" wrapper around the above which returns the existing
  // client or creates one with the default platform and allocator.
  static LocalClient* LocalClientOrDie();

  // Returns the service wrapped by the local client for the given platform.
  // Only used in unit tests to access user computations from the client.
  static LocalService* GetXlaService(se::Platform* platform);

  // Singleton constructor-or-accessor for compile-only clients. Arguments:
  //
  //   platform : The platform the underlying XLA service should target. If
  //              null, the default platform is used.
  static StatusOr<CompileOnlyClient*> GetOrCreateCompileOnlyClient(
      se::Platform* platform = nullptr);

  // Clears the local-instance and compile-only-instance caches. Client
  // pointers returned by previous GetOrCreateLocalClient() or
  // GetOrCreateCompileOnlyClient() invocations are no longer valid.
  static void DestroyLocalInstances();

 private:
  // Returns the singleton instance of ClientLibrary.
  static ClientLibrary& Singleton();

  ClientLibrary();
  ~ClientLibrary();

  struct LocalInstance {
    // Service that is wrapped by the singleton client object.
    std::unique_ptr<LocalService> service;
    // Singleton client object.
    std::unique_ptr<LocalClient> client;
  };

  struct CompileOnlyInstance {
    // Service that is wrapped by the singleton client object.
    std::unique_ptr<CompileOnlyService> service;
    // Singleton client object.
    std::unique_ptr<CompileOnlyClient> client;
  };

  tensorflow::mutex service_mutex_;  // Guards the singleton creation state.
  std::unordered_map<se::Platform::Id, std::unique_ptr<LocalInstance>>
      local_instances_ TF_GUARDED_BY(service_mutex_);

  std::unordered_map<se::Platform::Id, std::unique_ptr<CompileOnlyInstance>>
      compile_only_instances_ TF_GUARDED_BY(service_mutex_);

  TF_DISALLOW_COPY_AND_ASSIGN(ClientLibrary);
};

}  // namespace xla

#endif  // TENSORFLOW_COMPILER_XLA_CLIENT_CLIENT_LIBRARY_H_