/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <cstdio>

#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/optional_debug_tools.h"

// This is a minimal example that reads a model from disk and performs
// inference. No input data is loaded here; providing it is up to you as
// the user.
//
// NOTE: Do not add any dependencies to this that cannot be built with
// the minimal makefile. This example must remain trivial to build with
// the minimal build tool.
//
// Usage: minimal <tflite model>

// Checks the condition `x`; on failure, reports the file and line of the
// failed check and exits the program.
#define TFLITE_MINIMAL_CHECK(x)                              \
  if (!(x)) {                                                \
    fprintf(stderr, "Error at %s:%d\n", __FILE__, __LINE__); \
    exit(1);                                                 \
  }

int main(int argc, char* argv[]) {
  if (argc != 2) {
    fprintf(stderr, "minimal <tflite model>\n");
    return 1;
  }
  const char* filename = argv[1];

  // Load model
  std::unique_ptr<tflite::FlatBufferModel> model =
      tflite::FlatBufferModel::BuildFromFile(filename);
  TFLITE_MINIMAL_CHECK(model != nullptr);

  // Build the interpreter with the InterpreterBuilder.
  // Note: all Interpreters should be built with the InterpreterBuilder,
  // which allocates memory for the Interpreter and performs various setup
  // tasks so that the Interpreter can read the provided model.
  tflite::ops::builtin::BuiltinOpResolver resolver;
  tflite::InterpreterBuilder builder(*model, resolver);
  std::unique_ptr<tflite::Interpreter> interpreter;
  builder(&interpreter);
  TFLITE_MINIMAL_CHECK(interpreter != nullptr);

  // Allocate tensor buffers.
  TFLITE_MINIMAL_CHECK(interpreter->AllocateTensors() == kTfLiteOk);
  printf("=== Pre-invoke Interpreter State ===\n");
  tflite::PrintInterpreterState(interpreter.get());

  // Fill input buffers
  // TODO(user): Insert code to fill input tensors.
  // Note: The buffer of the input tensor with index `i` of type T can
  // be accessed with `T* input = interpreter->typed_input_tensor<T>(i);`
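  //
  // A minimal sketch of filling an input, assuming the model's first input
  // (index 0) is a float32 tensor; the element type, the index, and the
  // all-zeros fill are illustrative assumptions, not requirements:
  //
  //   float* input = interpreter->typed_input_tensor<float>(0);
  //   const TfLiteTensor* input_tensor = interpreter->input_tensor(0);
  //   int num_elements = 1;
  //   for (int d = 0; d < input_tensor->dims->size; ++d) {
  //     num_elements *= input_tensor->dims->data[d];
  //   }
  //   for (int j = 0; j < num_elements; ++j) input[j] = 0.0f;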

  // Run inference
  TFLITE_MINIMAL_CHECK(interpreter->Invoke() == kTfLiteOk);
  printf("\n\n=== Post-invoke Interpreter State ===\n");
  tflite::PrintInterpreterState(interpreter.get());

  // Read output buffers
  // TODO(user): Insert code to read data out of the output tensors.
  // Note: The buffer of the output tensor with index `i` of type T can
  // be accessed with `T* output = interpreter->typed_output_tensor<T>(i);`
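  //
  // A minimal sketch of reading an output, assuming the model's first
  // output (index 0) is a float32 tensor; as with the input sketch above,
  // the type and index are assumptions about your particular model:
  //
  //   float* output = interpreter->typed_output_tensor<float>(0);
  //   const TfLiteTensor* output_tensor = interpreter->output_tensor(0);
  //   int out_elements = 1;
  //   for (int d = 0; d < output_tensor->dims->size; ++d) {
  //     out_elements *= output_tensor->dims->data[d];
  //   }
  //   for (int j = 0; j < out_elements; ++j) printf("%f\n", output[j]);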

  return 0;
}