/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_SERVER_LIB_H_
#define TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_SERVER_LIB_H_

#include <memory>

#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/protobuf/tensorflow_server.pb.h"

namespace tensorflow {

class DeviceMgr;

// This library supports a registration/factory-based mechanism for
// creating TensorFlow server objects. Each server implementation must
// have an accompanying implementation of ServerFactory, and create a
// static "registrar" object that calls `ServerFactory::Register()`
// with an instance of the factory class. See "rpc/grpc_server_lib.cc"
// for an example.
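//
// For illustration only, a registrar for a hypothetical "my_rpc" protocol
// might look like the following sketch (the names `MyServerFactory`,
// `MyServerRegistrar`, and "MY_RPC_SERVER" are made up for this example;
// see grpc_server_lib.cc for the real pattern):
//
//   class MyServerFactory : public ServerFactory {
//    public:
//     bool AcceptsOptions(const ServerDef& server_def) override {
//       return server_def.protocol() == "my_rpc";
//     }
//     Status NewServer(const ServerDef& server_def, const Options& options,
//                      std::unique_ptr<ServerInterface>* out_server) override {
//       // ... construct the concrete server and assign it to *out_server ...
//     }
//   };
//
//   class MyServerRegistrar {
//    public:
//     MyServerRegistrar() {
//       ServerFactory::Register("MY_RPC_SERVER", new MyServerFactory());
//     }
//   };
//   static MyServerRegistrar registrar;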

// Represents a single TensorFlow server that exports Master and Worker
// services.
class ServerInterface {
 public:
  ServerInterface() {}
  virtual ~ServerInterface() {}

  // Starts the server running asynchronously. Returns OK on success, otherwise
  // returns an error.
  virtual Status Start() = 0;

  // Stops the server asynchronously. Returns OK on success, otherwise returns
  // an error.
  //
  // After calling `Stop()`, the caller may call `Join()` to block until the
  // server has stopped.
  virtual Status Stop() = 0;

  // Blocks until the server has stopped. Returns OK on success, otherwise
  // returns an error.
  virtual Status Join() = 0;

  // Returns a target string that can be used to connect to this server using
  // `tensorflow::NewSession()`.
  virtual const string target() const = 0;

 private:
  TF_DISALLOW_COPY_AND_ASSIGN(ServerInterface);
};

class ServerFactory {
 public:
  struct Options {
    // Local DeviceMgr to use.
    const tensorflow::DeviceMgr* local_device_mgr;
  };

  // Creates a new server based on the given `server_def`, and stores
  // it in `*out_server`. Returns OK on success, otherwise returns an
  // error.
  virtual Status NewServer(const ServerDef& server_def, const Options& options,
                           std::unique_ptr<ServerInterface>* out_server) = 0;

  // Returns true if and only if this factory can create a server
  // based on the given `server_def`.
  virtual bool AcceptsOptions(const ServerDef& server_def) = 0;

  virtual ~ServerFactory() {}

  // For each `ServerFactory` subclass, an instance of that class must
  // be registered by calling this method.
  //
  // The `server_type` must be unique to the server factory.
  static void Register(const string& server_type, ServerFactory* factory);

  // Looks up a factory that can create a server based on the given
  // `server_def`, and stores it in `*out_factory`. Returns OK on
  // success, otherwise returns an error.
  static Status GetFactory(const ServerDef& server_def,
                           ServerFactory** out_factory);
};

// Creates a server based on the given `server_def`, and stores it in
// `*out_server`. Returns OK on success, otherwise returns an error.
Status NewServer(const ServerDef& server_def,
                 std::unique_ptr<ServerInterface>* out_server);

// Like `NewServer()`, but additionally accepts `ServerFactory::Options`
// (e.g. the local DeviceMgr to use).
Status NewServerWithOptions(const ServerDef& server_def,
                            const ServerFactory::Options& options,
                            std::unique_ptr<ServerInterface>* out_server);
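
// A minimal usage sketch (illustrative only; assumes `server_def` has been
// populated elsewhere and handles errors with TF_CHECK_OK for brevity):
//
//   ServerDef server_def;
//   // ... populate `server_def` with cluster/job/task information ...
//   std::unique_ptr<ServerInterface> server;
//   TF_CHECK_OK(NewServer(server_def, &server));
//   TF_CHECK_OK(server->Start());
//   // Clients may now connect to `server->target()` via
//   // `tensorflow::NewSession()`.
//   TF_CHECK_OK(server->Stop());
//   TF_CHECK_OK(server->Join());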

}  // namespace tensorflow

#endif  // TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_SERVER_LIB_H_