/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h"

using llvm::cl::opt;

// TODO(jpienaar): Revise the command line option parsing here.
// NOLINTNEXTLINE
opt<std::string> input_file_name(llvm::cl::Positional,
                                 llvm::cl::desc("<input file>"),
                                 llvm::cl::init("-"));

// NOLINTNEXTLINE
opt<bool> import_saved_model_object_graph(
    "savedmodel-objectgraph-to-mlir",
    llvm::cl::desc("Import a saved model to its MLIR representation"),
    llvm::cl::value_desc("dir"));

// NOLINTNEXTLINE
opt<bool> import_saved_model_signature_defs(
    "savedmodel-signaturedefs-to-mlir",
    llvm::cl::desc("Import a saved model V1 to its MLIR representation"),
    llvm::cl::value_desc("dir"));

// NOLINTNEXTLINE
opt<std::string> saved_model_tags(
    "tf-savedmodel-tags",
    llvm::cl::desc("Tags used to indicate which MetaGraphDef to import, "
                   "separated by ','"),
    llvm::cl::init("serve"));

// NOLINTNEXTLINE
opt<std::string> saved_model_exported_names(
    "tf-savedmodel-exported-names",
    llvm::cl::desc("Names to export from SavedModel, separated by ','. Empty "
                   "(the default) means export all."),
    llvm::cl::init(""));

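// Illustrative invocation importing a SavedModel (the tool name, paths, and
// the exported name below are placeholders, not defined in this file; the
// flags are the ones declared above plus the `-o` output flag below):
//   tf_tfl_translate -savedmodel-signaturedefs-to-mlir \
//     -tf-savedmodel-tags=serve -tf-savedmodel-exported-names=main \
//     /path/to/saved_model_dir -o /tmp/output
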
// NOLINTNEXTLINE
opt<std::string> output_file_name("o", llvm::cl::desc("<output file>"),
                                  llvm::cl::value_desc("filename"),
                                  llvm::cl::init("-"));
// NOLINTNEXTLINE
opt<bool> use_splatted_constant(
    "use-splatted-constant",
    llvm::cl::desc(
        "Replace constants with randomly generated splatted tensors"),
    llvm::cl::init(false), llvm::cl::Hidden);
// NOLINTNEXTLINE
opt<bool> input_mlir(
    "input-mlir",
    llvm::cl::desc("Take input TensorFlow model in textual MLIR instead of "
                   "GraphDef format"),
    llvm::cl::init(false), llvm::cl::Hidden);
// NOLINTNEXTLINE
opt<bool> output_mlir(
    "output-mlir",
    llvm::cl::desc(
        "Output MLIR rather than FlatBuffer for the generated TFLite model"),
    llvm::cl::init(false));
// NOLINTNEXTLINE
opt<bool> allow_all_select_tf_ops(
    "allow-all-select-tf-ops",
    llvm::cl::desc("Allow automatic pass through of TF ops (outside the flex "
                   "allowlist) as select TensorFlow ops"),
    llvm::cl::init(false));

// The following approach allows injecting opdefs, in addition to those already
// part of the global TF registry, to be linked in prior to importing the
// graph. The primary goal is to support custom ops. This is not intended to be
// a general solution for custom ops going forward, but mainly to support older
// models like mobilenet_ssd. More appropriate mechanisms, such as op hints or
// using functions to represent composable ops
// (https://github.com/tensorflow/community/pull/113), should be encouraged
// going forward.
// NOLINTNEXTLINE
llvm::cl::list<std::string> custom_opdefs(
    "tf-custom-opdefs", llvm::cl::desc("List of custom opdefs when importing "
                                       "graphdef"));

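// An illustrative value for -tf-custom-opdefs (the op name and argument types
// below are made up for the example; the value is a textual OpDef):
//   -tf-custom-opdefs="name: 'MyCustomOp' input_arg { name: 'x' type: DT_FLOAT }
//                      output_arg { name: 'y' type: DT_FLOAT }"
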
// A pair of Quantize and Dequantize ops can optionally be emitted before and
// after the quantized model, as adaptors that let the quantized model receive
// and produce floating point data. Set this to `false` if the model inputs
// are of integer types.
// NOLINTNEXTLINE
opt<bool> emit_quant_adaptor_ops(
    "emit-quant-adaptor-ops",
    llvm::cl::desc(
        "Emit Quantize/Dequantize before and after the generated TFLite model"),
    llvm::cl::init(false));

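// Conceptual dataflow when the adaptor ops are emitted (a sketch based on the
// description above, not on literally emitted graph structure):
//   float input -> Quantize -> quantized TFLite model -> Dequantize -> float output
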
// The path to a quantization stats file to specify value ranges for some of
// the tensors with known names.
// NOLINTNEXTLINE
opt<std::string> quant_stats_file_name("quant-stats",
                                       llvm::cl::desc("<stats file>"),
                                       llvm::cl::value_desc("filename"),
                                       llvm::cl::init(""));

// NOLINTNEXTLINE
opt<bool> convert_tf_while_to_tfl_while(
    "convert_tf_while_to_tfl_while",
    llvm::cl::desc("Whether to legalize TF While to TFL While."),
    llvm::cl::init(true));

// A list of comma-separated TF operators that are created by the user.
// This must be used with `-emit-select-tf-ops=true`.
// NOLINTNEXTLINE
opt<std::string> select_user_tf_ops(
    "select-user-tf-ops",
    llvm::cl::desc(
        "<list of custom tf ops created by the user (comma separated)>"),
    llvm::cl::init(""));

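// Illustrative usage (the op names below are placeholders;
// `-emit-select-tf-ops` is the flag referenced in the comment above, declared
// elsewhere in the converter):
//   -emit-select-tf-ops=true -select-user-tf-ops="MyCustomOp1,MyCustomOp2"
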
// NOLINTNEXTLINE
opt<bool> unfold_batchmatmul(
    "unfold_batchmatmul",
    llvm::cl::desc(
        "Whether to unfold TF BatchMatMul to a set of TFL FullyConnected ops."),
    llvm::cl::init(true));

// NOLINTNEXTLINE
opt<bool> unfold_large_splat_constant(
    "unfold-large-splat-constant",
    llvm::cl::desc("Whether to unfold large splat constant tensors to reduce "
                   "the generated model size."),
    llvm::cl::init(false));

// NOLINTNEXTLINE
opt<bool> guarantee_all_funcs_one_use(
    "guarantee-all-funcs-one-use",
    llvm::cl::desc(
        "Whether to clone functions to ensure each function has a single use."),
    llvm::cl::init(false));