/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_LITE_DELEGATES_GPU_CL_SERIALIZATION_H_
#define TENSORFLOW_LITE_DELEGATES_GPU_CL_SERIALIZATION_H_

#include <cstdint>
#include <vector>

#include "absl/types/span.h"
#include "tensorflow/lite/delegates/gpu/cl/cl_context.h"
#include "tensorflow/lite/delegates/gpu/cl/cl_device.h"
#include "tensorflow/lite/delegates/gpu/cl/inference_context.h"
#include "tensorflow/lite/delegates/gpu/cl/program_cache.h"
#include "tensorflow/lite/delegates/gpu/cl/serialization_generated.h"
#include "tensorflow/lite/delegates/gpu/common/status.h"

namespace tflite {
namespace gpu {
namespace cl {

class InferenceContext;

// Serializes the inference context, its compiled program cache, and the
// model's input/output tensor reference ids into a FlatBuffer table rooted at
// data::InferenceContext, using the provided builder.
flatbuffers::Offset<data::InferenceContext> Encode(
    const CLDevice& device, const InferenceContext& inference,
    const ProgramCache& program_cache, const std::vector<int64_t>& in_refs,
    std::vector<int64_t>& out_refs, flatbuffers::FlatBufferBuilder* builder);

// Restores an InferenceContext from a previously encoded
// data::InferenceContext for the given device and CL context, filling the
// given program cache along the way.
absl::Status Decode(const CLContext& context, const CLDevice& device,
                    ProgramCache* program_cache,
                    const data::InferenceContext* fb_inference,
                    InferenceContext* inference);

// Reads only the input/output tensor reference ids from a serialized model,
// without rebuilding the full inference context.
absl::Status GetInOutRefs(const absl::Span<const uint8_t> serialized_model,
                          std::vector<int64_t>* in_refs,
                          std::vector<int64_t>* out_refs);

}  // namespace cl
}  // namespace gpu
}  // namespace tflite

#endif  // TENSORFLOW_LITE_DELEGATES_GPU_CL_SERIALIZATION_H_
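
// A minimal usage sketch (illustrative only, not part of this header):
// assuming a built InferenceContext `inference`, a CLDevice `device`, a
// CLContext `context`, a ProgramCache `cache`, and caller-supplied I/O tensor
// reference ids, a round trip through Encode / GetInOutRefs / Decode might
// look roughly like the following. All local names here are placeholders.
//
//   flatbuffers::FlatBufferBuilder builder;
//   std::vector<int64_t> in_refs = {...}, out_refs = {...};  // model I/O tensor ids
//   auto encoded = Encode(device, inference, cache, in_refs, out_refs, &builder);
//   builder.Finish(encoded);
//   absl::Span<const uint8_t> blob(builder.GetBufferPointer(), builder.GetSize());
//
//   // Later: recover the I/O refs and rebuild the inference context.
//   std::vector<int64_t> restored_in, restored_out;
//   if (!GetInOutRefs(blob, &restored_in, &restored_out).ok()) { /* handle error */ }
//   const auto* fb = flatbuffers::GetRoot<data::InferenceContext>(blob.data());
//   InferenceContext restored;
//   if (!Decode(context, device, &cache, fb, &restored).ok()) { /* handle error */ }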