/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_CACHE_FBB_H_
#define MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_CACHE_FBB_H_

/// This header contains serialize and deserialize functions for a tensor row using
/// Google FlatBuffers.

#include <memory>
#include <utility>
#include <vector>
#include "minddata/dataset/engine/cache/de_tensor_generated.h"
#include "minddata/dataset/core/tensor_row.h"
#include "minddata/dataset/util/slice.h"
#include "minddata/dataset/util/status.h"

namespace mindspore {
namespace dataset {
/// \brief Serialize the header of a TensorRow. Used by CacheRowRequest.
/// \param row TensorRow to serialize
/// \param fbb [in/out] FlatBufferBuilder that will contain the serialized data
/// \return Status object
Status SerializeTensorRowHeader(const TensorRow &row, std::shared_ptr<flatbuffers::FlatBufferBuilder> *fbb);

/// \brief Deserialize a flat buffer back into one Tensor. Used by BatchFetchRequest.
/// \param col_ts Serialized Tensor metadata
/// \param data Tensor data wrapped in a ReadableSlice
/// \param out [out] Restored Tensor
/// \return Status object
Status RestoreOneTensor(const TensorMetaMsg *col_ts, const ReadableSlice &data, std::shared_ptr<Tensor> *out);
}  // namespace dataset
}  // namespace mindspore
#endif  // MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_CACHE_FBB_H_
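
// Illustrative usage sketch (not part of this header; `row`, `meta_msg`, and
// `payload` are assumed to be supplied by the caller, e.g. by the
// CacheRowRequest / BatchFetchRequest code paths):
//
//   // Serialize the TensorRow header into a FlatBufferBuilder.
//   std::shared_ptr<flatbuffers::FlatBufferBuilder> fbb;
//   RETURN_IF_NOT_OK(SerializeTensorRowHeader(row, &fbb));
//   // fbb->GetBufferPointer() / fbb->GetSize() now describe the serialized header.
//
//   // Restore a single Tensor from its metadata and raw data slice.
//   std::shared_ptr<Tensor> restored;
//   RETURN_IF_NOT_OK(RestoreOneTensor(meta_msg, payload, &restored));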