/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef NEURAL_NETWORK_RUNTIME_NN_TENSOR_H
#define NEURAL_NETWORK_RUNTIME_NN_TENSOR_H

#include <memory>
#include <string>
#include <vector>

#include "cpp_type.h"
#include "interfaces/kits/c/neural_network_runtime.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
using LiteGraphTensorPtr = std::unique_ptr<void, void(*)(void*)>;

void DestroyLiteGraphTensor(void* tensor);

class NNTensor {
public:
    NNTensor() = default;
    ~NNTensor();
    NNTensor(NNTensor&& tensor) noexcept;
    NNTensor& operator=(NNTensor&& tensor) noexcept;
    // Copy construction and assignment are not allowed, to avoid double-free of m_buffer.
    NNTensor(const NNTensor& tensor) = delete;
    NNTensor& operator=(const NNTensor& tensor) = delete;

    OH_NN_ReturnCode BuildFromOHNNTensor(const OH_NN_Tensor& nnTensor);
    OH_NN_ReturnCode Build(OH_NN_DataType dataType,
                           const std::vector<int32_t>& dimensions,
                           const std::vector<QuantParam>& quantParam,
                           OH_NN_TensorType type);
    void IdentifyOpParameter();

    void SetName(const std::string& name);
    void SetBuffer(const void* buffer, size_t length);
    OH_NN_ReturnCode SetDimensions(const std::vector<int32_t>& dimensions);

    std::string GetName() const;
    OH_NN_TensorType GetType() const;
    void* GetBuffer() const;
    // Returns the complete buffer length.
    size_t GetBufferLength() const;
    // Returns the actual data length, since the data can be stored in a larger buffer.
    size_t GetDataLength() const;
    OH_NN_DataType GetDataType() const;
    uint32_t GetElementCount() const;
    std::vector<int32_t> GetDimensions() const;
    OH_NN_Format GetFormat() const;
    std::vector<QuantParam> GetQuantParam() const;
    LiteGraphTensorPtr ConvertToLiteGraphTensor() const;
    void ConvertToIOTensor(IOTensor& tensor) const;

    bool IsDynamicShape() const;
    bool IsQuantTensor() const;
    bool IsScalar() const;
    bool IsOpParameter() const;
    bool CompareAttribute(const NNTensor& tensor) const;

private:
    // Used in BuildFromOHNNTensor()
    OH_NN_ReturnCode ParseQuantParams(const OH_NN_QuantParam* quantParams);
    OH_NN_ReturnCode ParseDimensions(const OH_NN_Tensor& nnTensor);
    // Used in Build()
    OH_NN_ReturnCode ParseQuantParams(const std::vector<QuantParam>& quantParams);
    OH_NN_ReturnCode ParseDimensions(const std::vector<int32_t>& dimensions);

private:
    OH_NN_TensorType m_type {OH_NN_TENSOR};
    OH_NN_DataType m_dataType {OH_NN_FLOAT32};
    OH_NN_Format m_format {OH_NN_FORMAT_NHWC};
    std::string m_name;
    std::vector<int32_t> m_dimensions;
    std::vector<QuantParam> m_quantParams;
    uint32_t m_elementCount {0};
    bool m_isDynamicShape {false};
    bool m_isOpParameter {false};
    void* m_buffer {nullptr};
    size_t m_bufferLength {0};
    size_t m_dataLength {0};
};
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // NEURAL_NETWORK_RUNTIME_NN_TENSOR_H