/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_COMPILER_XLA_SERVICE_INTERPRETER_EXECUTABLE_BASE_H_
#define TENSORFLOW_COMPILER_XLA_SERVICE_INTERPRETER_EXECUTABLE_BASE_H_

#include <memory>
#include <optional>

#include "tensorflow/compiler/xla/literal.h"
#include "tensorflow/compiler/xla/service/dynamic_dimension_inference.h"
#include "tensorflow/compiler/xla/service/executable.h"
#include "tensorflow/compiler/xla/service/hlo_execution_profile.h"
#include "tensorflow/compiler/xla/service/hlo_module.h"
#include "tensorflow/compiler/xla/service/service_executable_run_options.h"
#include "tensorflow/compiler/xla/shape.h"
#include "tensorflow/compiler/xla/statusor.h"
#include "tensorflow/compiler/xla/xla.pb.h"

namespace xla {
namespace interpreter {

// Responsible for running an HLO graph through the HloEvaluator and for
// output buffer allocation. Refer to interpreter/README.md for more details.
class InterpreterExecutableBase : public Executable {
 public:
  explicit InterpreterExecutableBase(std::unique_ptr<HloModule> hlo_module);

  // Runs the wrapped HLO module on the given stream: converts the input
  // buffers to literals, delegates evaluation to Evaluate(), and copies the
  // result into newly allocated output buffers.
  StatusOr<ExecutionOutput> ExecuteAsyncOnStream(
      const ServiceExecutableRunOptions* run_options,
      std::vector<ExecutionInput> arguments,
      HloExecutionProfile* hlo_execution_profile) override;

 protected:
  // Evaluates `computation` on the argument literals and returns the result
  // literal. Concrete subclasses provide the implementation.
  virtual StatusOr<Literal> Evaluate(
      const ServiceExecutableRunOptions* run_options,
      const HloComputation& computation,
      absl::Span<const Literal> arg_literals) = 0;

 private:
  // Allocates device memory for the output of shape `shape`, reusing input
  // buffers where `alias_config` permits.
  StatusOr<ExecutionOutput> AllocateOutputMemoryWithInputReuse(
      const Shape& shape, const HloInputOutputAliasConfig& alias_config,
      se::DeviceMemoryAllocator* allocator,
      std::vector<ExecutionInput>* arguments, stream_executor::Stream* stream);

  InterpreterExecutableBase(const InterpreterExecutableBase&) = delete;
  InterpreterExecutableBase& operator=(const InterpreterExecutableBase&) =
      delete;
};
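
// The sketch below is illustrative only and not part of this header's API:
// it shows how a concrete backend might subclass InterpreterExecutableBase,
// assuming evaluation is done with HloEvaluator (which would require
// including "tensorflow/compiler/xla/service/hlo_evaluator.h"). The class
// name ExampleInterpreterExecutable is hypothetical.
//
//   class ExampleInterpreterExecutable : public InterpreterExecutableBase {
//    public:
//     explicit ExampleInterpreterExecutable(std::unique_ptr<HloModule> module)
//         : InterpreterExecutableBase(std::move(module)) {}
//
//    protected:
//     StatusOr<Literal> Evaluate(
//         const ServiceExecutableRunOptions* run_options,
//         const HloComputation& computation,
//         absl::Span<const Literal> arg_literals) override {
//       // Evaluate the computation directly on host literals.
//       HloEvaluator evaluator;
//       return evaluator.Evaluate(computation, arg_literals);
//     }
//   };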

}  // namespace interpreter
}  // namespace xla

#endif  // TENSORFLOW_COMPILER_XLA_SERVICE_INTERPRETER_EXECUTABLE_BASE_H_