
Searched refs:serialized_model (Results 1 – 13 of 13) sorted by relevance

/external/tensorflow/tensorflow/compiler/mlir/lite/
flatbuffer_to_string.cc
37 std::string* serialized_model) { in ReadAndVerify() argument
39 *serialized_model = std::string{std::istreambuf_iterator<char>(std::cin), in ReadAndVerify()
47 *serialized_model = std::string{std::istreambuf_iterator<char>(t), in ReadAndVerify()
52 reinterpret_cast<const uint8_t*>(serialized_model->c_str()), in ReadAndVerify()
53 serialized_model->length()); in ReadAndVerify()
118 void ToString(const std::string& serialized_model) { in ToString() argument
120 IterateFlatBuffer(reinterpret_cast<const uint8_t*>(serialized_model.c_str()), in ToString()
138 std::string serialized_model; in main() local
139 if (tflite::ReadAndVerify(argv[1], &serialized_model)) return 1; in main()
140 tflite::ToString(serialized_model); in main()
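The ReadAndVerify() hits above read the serialized model from stdin or a file into a std::string and then run a flatbuffers verifier over the raw bytes before anything is printed. A minimal standalone sketch of that pattern; the function name, includes, and the use of tflite::VerifyModelBuffer() are assumptions based on the snippet, not copied from the file:

#include <cstdint>
#include <fstream>
#include <iostream>
#include <iterator>
#include <string>

#include "flatbuffers/flatbuffers.h"
#include "tensorflow/lite/schema/schema_generated.h"  // tflite::VerifyModelBuffer

// Reads a serialized TFLite model ("-" means stdin) into *serialized_model and
// verifies that the bytes form a structurally valid flatbuffer.
// Returns 0 on success, 1 on failure, mirroring how main() uses the result.
int ReadAndVerifyModel(const std::string& path, std::string* serialized_model) {
  if (path == "-") {
    *serialized_model = std::string{std::istreambuf_iterator<char>(std::cin),
                                    std::istreambuf_iterator<char>()};
  } else {
    std::ifstream t(path, std::ios::binary);
    if (!t) return 1;
    *serialized_model = std::string{std::istreambuf_iterator<char>(t),
                                    std::istreambuf_iterator<char>()};
  }
  flatbuffers::Verifier verifier(
      reinterpret_cast<const uint8_t*>(serialized_model->c_str()),
      serialized_model->length());
  return tflite::VerifyModelBuffer(verifier) ? 0 : 1;
}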
/external/libtextclassifier/native/annotator/grammar/
grammar-annotator_test.cc
65 flatbuffers::DetachedBuffer serialized_model = PackModel(grammar_model); in TEST_F() local
66 GrammarAnnotator annotator(CreateGrammarAnnotator(serialized_model)); in TEST_F()
108 flatbuffers::DetachedBuffer serialized_model = PackModel(grammar_model); in TEST_F() local
109 GrammarAnnotator annotator(CreateGrammarAnnotator(serialized_model)); in TEST_F()
151 flatbuffers::DetachedBuffer serialized_model = PackModel(grammar_model); in TEST_F() local
152 GrammarAnnotator annotator(CreateGrammarAnnotator(serialized_model)); in TEST_F()
183 flatbuffers::DetachedBuffer serialized_model = PackModel(grammar_model); in TEST_F() local
184 GrammarAnnotator annotator(CreateGrammarAnnotator(serialized_model)); in TEST_F()
239 flatbuffers::DetachedBuffer serialized_model = PackModel(grammar_model); in TEST_F() local
240 GrammarAnnotator annotator(CreateGrammarAnnotator(serialized_model)); in TEST_F()
[all …]
test-utils.cc
24 const ::flatbuffers::DetachedBuffer& serialized_model) { in CreateGrammarAnnotator()
27 flatbuffers::GetRoot<GrammarModel>(serialized_model.data()), in CreateGrammarAnnotator()
test-utils.h
63 const ::flatbuffers::DetachedBuffer& serialized_model);
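The tests above pack an in-memory grammar model into a flatbuffers::DetachedBuffer, and the CreateGrammarAnnotator() helper in test-utils.cc reads the root table back out of the detached bytes. A generic sketch of that pack/unpack round trip; MyModel/MyModelT and the generated header name are placeholders standing in for the real GrammarModel schema, not the actual types:

#include "flatbuffers/flatbuffers.h"
#include "my_model_generated.h"  // hypothetical FlatBuffers-generated header

// Serializes the object-API form of the table into a detached byte buffer,
// the role PackModel() plays in the tests.
flatbuffers::DetachedBuffer PackModel(const MyModelT& model) {
  flatbuffers::FlatBufferBuilder builder;
  builder.Finish(MyModel::Pack(builder, &model));
  return builder.Release();  // the DetachedBuffer now owns the bytes
}

// Reads the root table back from the serialized bytes, as
// CreateGrammarAnnotator() does with GetRoot<GrammarModel>().
const MyModel* GetModel(const flatbuffers::DetachedBuffer& serialized_model) {
  return flatbuffers::GetRoot<MyModel>(serialized_model.data());
}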
/external/tensorflow/tensorflow/compiler/mlir/lite/sparsity/
sparsify_model.cc
49 std::string serialized_model( in SparsifyModel()
53 OwningModuleRef module = tflite::FlatBufferToMlir(serialized_model, &context, in SparsifyModel()
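SparsifyModel() (like QuantizeModel() in the hit further down) copies the flatbuffer bytes into a std::string before handing them to tflite::FlatBufferToMlir, which takes a string-like view of the serialized model rather than a builder. A sketch of just that copy, assuming the bytes come from a FlatBufferBuilder; the MLIR import call itself is omitted:

#include <string>

#include "flatbuffers/flatbuffers.h"

// Copies the finished flatbuffer into an owning std::string, the form the
// MLIR importer (and other string-based consumers) expects.
std::string SerializeToString(const flatbuffers::FlatBufferBuilder& builder) {
  return std::string(reinterpret_cast<const char*>(builder.GetBufferPointer()),
                     builder.GetSize());
}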
/external/tensorflow/tensorflow/lite/delegates/gpu/cl/
api.h
86 std::vector<uint8_t>* serialized_model) = 0;
92 const absl::Span<const uint8_t> serialized_model,
inference_context.cc
140 Environment* env, std::vector<uint8_t>* serialized_model) { in InitFromGraph() argument
184 if (serialized_model) { in InitFromGraph()
192 serialized_model->resize(builder.GetSize()); in InitFromGraph()
193 std::memcpy(serialized_model->data(), builder.GetBufferPointer(), in InitFromGraph()
204 const absl::Span<const uint8_t> serialized_model, Environment* env) { in RestoreDeserialized() argument
205 flatbuffers::Verifier verifier(serialized_model.data(), in RestoreDeserialized()
206 serialized_model.size()); in RestoreDeserialized()
210 auto decoded_fb = data::GetInferenceContext(serialized_model.data()); in RestoreDeserialized()
231 Environment* env, std::vector<uint8_t>* serialized_model) { in InitFromGraphWithTransforms() argument
233 RETURN_IF_ERROR(InitFromGraph(create_info, *graph, env, serialized_model)); in InitFromGraphWithTransforms()
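InitFromGraph() and RestoreDeserialized() above are the two halves of the CL delegate's serialization round trip: copy the FlatBufferBuilder contents into the caller-provided std::vector<uint8_t>, and later verify those bytes before decoding them. A sketch of just the byte handling; the generated data::VerifyInferenceContextBuffer / data::GetInferenceContext helpers and their header path are assumed from the snippet, and the actual context rebuilding is left out:

#include <cstdint>
#include <cstring>
#include <vector>

#include "absl/status/status.h"
#include "absl/types/span.h"
#include "flatbuffers/flatbuffers.h"
#include "tensorflow/lite/delegates/gpu/cl/serialization_generated.h"  // path assumed

namespace tflite {
namespace gpu {
namespace cl {

// Copies the finished flatbuffer into the optional output vector,
// mirroring the resize + memcpy in InitFromGraph().
void ExportSerializedModel(const flatbuffers::FlatBufferBuilder& builder,
                           std::vector<uint8_t>* serialized_model) {
  if (!serialized_model) return;  // caller did not ask for serialization
  serialized_model->resize(builder.GetSize());
  std::memcpy(serialized_model->data(), builder.GetBufferPointer(),
              builder.GetSize());
}

// Verifies and decodes the serialized bytes, as RestoreDeserialized() does
// before rebuilding the inference context from the decoded table.
absl::Status CheckSerializedModel(absl::Span<const uint8_t> serialized_model) {
  flatbuffers::Verifier verifier(serialized_model.data(),
                                 serialized_model.size());
  if (!data::VerifyInferenceContextBuffer(verifier)) {
    return absl::DataLossError("Serialized model is corrupted.");
  }
  const auto* decoded_fb = data::GetInferenceContext(serialized_model.data());
  (void)decoded_fb;  // the real code walks decoded_fb to restore kernels/tensors
  return absl::OkStatus();
}

}  // namespace cl
}  // namespace gpu
}  // namespace tflite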
inference_context.h
71 std::vector<uint8_t>* serialized_model = nullptr);
78 Environment* env, std::vector<uint8_t>* serialized_model = nullptr);
102 const absl::Span<const uint8_t> serialized_model, Environment* env);
api.cc
687 const absl::Span<const uint8_t> serialized_model, in Initialize() argument
692 context_->RestoreDeserialized(serialized_model, environment_)); in Initialize()
903 std::vector<uint8_t>* serialized_model) final { in BuildSerializedModel() argument
930 serialized_model)); in BuildSerializedModel()
960 const absl::Span<const uint8_t> serialized_model, in NewInferenceBuilder() argument
973 RETURN_IF_ERROR(builder_impl->Initialize(options_, serialized_model, in NewInferenceBuilder()
/external/tensorflow/tensorflow/lite/delegates/gpu/cl/testing/
internal_api_samples.cc
94 const std::vector<uint8_t>& serialized_model);
294 std::vector<uint8_t> serialized_model; in RunModelSampleWithInternalAPISerializedKernels() local
296 &serialized_model)); in RunModelSampleWithInternalAPISerializedKernels()
298 RETURN_IF_ERROR(inf_env->NewInferenceBuilder(serialized_model, &builder, in RunModelSampleWithInternalAPISerializedKernels()
343 cpu_inference.get(), kernel_cache, serialized_model)); in RunModelSampleWithInternalAPISerializedKernels()
350 const std::vector<uint8_t>& serialized_model) { in RunModelSampleWithInternalAPISerialized() argument
375 RETURN_IF_ERROR(inf_env->NewInferenceBuilder(serialized_model, &builder, in RunModelSampleWithInternalAPISerialized()
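The sample exercises the full round trip: build the device-specific serialized model once, then hand the bytes back to an InferenceEnvironment to construct a builder without re-running graph conversion or kernel compilation. A compressed sketch of that flow; the BuildSerializedModel/NewInferenceBuilder signatures are inferred from the hits above and may differ from the real header, and RETURN_IF_ERROR comes from the delegate's status helpers:

#include <cstdint>
#include <memory>
#include <utility>
#include <vector>

#include "tensorflow/lite/delegates/gpu/cl/api.h"
#include "tensorflow/lite/delegates/gpu/common/status.h"  // RETURN_IF_ERROR

namespace tflite {
namespace gpu {
namespace cl {

// First run: compile the graph and keep the serialized bytes.
// Later runs: rebuild an inference builder directly from those bytes.
absl::Status BuildThenRestore(InferenceEnvironment* inf_env,
                              const InferenceOptions& options,
                              GraphFloat32 graph) {
  std::vector<uint8_t> serialized_model;
  RETURN_IF_ERROR(inf_env->BuildSerializedModel(options, std::move(graph),
                                                &serialized_model));

  std::unique_ptr<InferenceBuilder> builder;
  RETURN_IF_ERROR(inf_env->NewInferenceBuilder(serialized_model, &builder));
  return absl::OkStatus();
}

}  // namespace cl
}  // namespace gpu
}  // namespace tflite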
/external/tensorflow/tensorflow/compiler/mlir/lite/quantization/lite/
quantize_model.cc
68 std::string serialized_model( in QuantizeModel()
72 OwningModuleRef module = tflite::FlatBufferToMlir(serialized_model, &context, in QuantizeModel()
/external/tensorflow/tensorflow/lite/delegates/gpu/gl/
api.h
79 const std::vector<uint8_t>& serialized_model,
api.cc
405 const std::vector<uint8_t>& serialized_model, in ReadSerializedModel() argument
415 absl::MakeConstSpan(serialized_model), compiled_model_impl.get())); in ReadSerializedModel()