/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_CORE_GRAPPLER_OPTIMIZERS_GRAPH_OPTIMIZER_H_
#define TENSORFLOW_CORE_GRAPPLER_OPTIMIZERS_GRAPH_OPTIMIZER_H_

#include <string>

#include "tensorflow/core/framework/graph.pb.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/env.h"

24 namespace tensorflow {
25 namespace grappler {
26 
27 class Cluster;
28 struct GrapplerItem;
29 
30 // An abstract interface for an algorithm for generating a candidate
31 // optimization of a GrapplerItem for running on a cluster.
32 class GraphOptimizer {
33  public:
GraphOptimizer()34   GraphOptimizer() : deadline_usec_(0) {}
~GraphOptimizer()35   virtual ~GraphOptimizer() {}
36 
37   virtual string name() const = 0;
38 
39   // Returns true if the optimizer requires a valid function library to perform
40   // graph optimization. If false, optimized GrapplerItem will have a stub
41   // instead of real function library (all function signatures and attributes
42   // will be valid, but function body will be empty). Most of the optimizers
43   // that do not instantiate functions should return true.
44   virtual bool UsesFunctionLibrary() const = 0;
45 
46   // Routine called to allow an algorithm to propose a rewritten graph
47   // for the graph, feeds and fetches in "item" to run more efficiently
48   // on "cluster". If the returned status is Status::OK() then
49   // *optimized_graph contains the rewritten graph.
50   // Returns an error status if it failed to generate a solution.
51   //
52   // A return value of error::Aborted() can be used signal early termination of
53   // the optimizer, e.g. if the optimization turned out to be a no-op. In this
54   // case the content of *optimized_graph is undefined.
55   virtual Status Optimize(Cluster* cluster, const GrapplerItem& item,
56                           GraphDef* optimized_graph) = 0;
57 
58   // Method invoked by the framework so that it can provide feedback
59   // on how well the "optimized_graph" (produced as *optimized_graph from a
60   // call to Optimize) performed.  Lower "result" scores are better.
61   virtual void Feedback(Cluster* cluster, const GrapplerItem& item,
62                         const GraphDef& optimized_graph, double result) = 0;
63 
64   // Set deadline in microseconds since epoch. A value of zero means no
65   // deadline.
set_deadline_usec(uint64 deadline_usec)66   void set_deadline_usec(uint64 deadline_usec) {
67     deadline_usec_ = deadline_usec;
68   }
deadline_usec()69   uint64 deadline_usec() const { return deadline_usec_; }
DeadlineExceeded()70   bool DeadlineExceeded() const {
71     return deadline_usec_ > 0 && Env::Default()->NowMicros() > deadline_usec_;
72   }
73 
74  private:
75   uint64 deadline_usec_;
76 };
77 
78 #define GRAPPLER_RETURN_IF_DEADLINE_EXCEEDED()                              \
79   do {                                                                      \
80     if (this->DeadlineExceeded()) {                                         \
81       return errors::DeadlineExceeded(this->name(), " exceeded deadline."); \
82     }                                                                       \
83   } while (0)
84 
85 }  // end namespace grappler
86 }  // end namespace tensorflow
87 
88 #endif  // TENSORFLOW_CORE_GRAPPLER_OPTIMIZERS_GRAPH_OPTIMIZER_H_
89