/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef NEURAL_NETWORK_RUNTIME_NN_TENSOR_H
#define NEURAL_NETWORK_RUNTIME_NN_TENSOR_H

#include <memory>
#include <string>
#include <vector>

#include "cpp_type.h"
#include "interfaces/kits/c/neural_network_runtime.h"
#include "interfaces/innerkits/c/neural_network_runtime_inner.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
using LiteGraphTensorPtr = std::unique_ptr<void, void(*)(void*)>;

void DestroyLiteGraphTensor(void* tensor);

class NNTensor {
public:
    NNTensor() = default;
    ~NNTensor();
    NNTensor(NNTensor&& tensor) noexcept;
    NNTensor& operator=(NNTensor&& tensor) noexcept;
    // Copy construction and assignment are not allowed, to avoid double-freeing m_buffer
    NNTensor(const NNTensor& tensor) = delete;
    NNTensor& operator=(const NNTensor& tensor) = delete;

    OH_NN_ReturnCode BuildFromOHNNTensor(const OH_NN_Tensor& nnTensor);
    OH_NN_ReturnCode BuildFromOHNNTensorInfo(const OH_NN_TensorInfo& nnTensorInfo);
    OH_NN_ReturnCode Build(OH_NN_DataType dataType,
                           const std::vector<int32_t>& dimensions,
                           const std::vector<QuantParam>& quantParam,
                           OH_NN_TensorType type);
    void IdentifyOpParameter();

    void SetName(const std::string& name);
    void SetBuffer(const void* buffer, size_t length);
    void SetFormat(const OH_NN_Format& format);
    OH_NN_ReturnCode SetDimensions(const std::vector<int32_t>& dimensions);

    std::string GetName() const;
    OH_NN_TensorType GetType() const;
    void* GetBuffer() const;
    // Returns the complete buffer length
    size_t GetBufferLength() const;
    // Returns the actual data length, since the data can be stored in a larger buffer
    size_t GetDataLength() const;
    OH_NN_DataType GetDataType() const;
    uint32_t GetElementCount() const;
    std::vector<int32_t> GetDimensions() const;
    OH_NN_Format GetFormat() const;
    std::vector<QuantParam> GetQuantParam() const;
    LiteGraphTensorPtr ConvertToLiteGraphTensor() const;
    void ConvertToIOTensor(IOTensor& tensor) const;

    bool IsDynamicShape() const;
    bool IsQuantTensor() const;
    bool IsScalar() const;
    bool IsOpParameter() const;
    bool CompareAttribute(const NNTensor& tensor) const;

private:
    // Used in BuildFromOHNNTensor()
    OH_NN_ReturnCode ParseQuantParams(const OH_NN_QuantParam* quantParams);
    OH_NN_ReturnCode ParseDimensions(const int32_t* dimensions, uint32_t dimensionCount);
    // Used in Build()
    OH_NN_ReturnCode ParseQuantParams(const std::vector<QuantParam>& quantParams);
    OH_NN_ReturnCode ParseDimensions(const std::vector<int32_t>& dimensions);

private:
    OH_NN_TensorType m_type {OH_NN_TENSOR};
    OH_NN_DataType m_dataType {OH_NN_FLOAT32};
    OH_NN_Format m_format {OH_NN_FORMAT_NHWC};
    std::string m_name;
    std::vector<int32_t> m_dimensions;
    std::vector<QuantParam> m_quantParams;
    uint32_t m_elementCount {0};
    bool m_isDynamicShape {false};
    bool m_isOpParameter {false};
    void* m_buffer {nullptr};
    size_t m_bufferLength {0};
    size_t m_dataLength {0};
};
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // NEURAL_NETWORK_RUNTIME_NN_TENSOR_H
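
// Usage sketch (illustrative; not part of the original header). It shows how a caller
// might build an NNTensor with the Build() overload declared above and attach a data
// buffer with SetBuffer(). The tensor name, shape, and the OH_NN_SUCCESS check
// (assumed from the public OH_NN_ReturnCode enum) are examples only, not the
// library's prescribed usage.
//
//     using namespace OHOS::NeuralNetworkRuntime;
//
//     OH_NN_ReturnCode BuildInputTensor(NNTensor& tensor, void* data, size_t dataLength)
//     {
//         tensor.SetName("input0");                         // hypothetical tensor name
//         std::vector<int32_t> dimensions {1, 224, 224, 3}; // example NHWC shape
//         std::vector<QuantParam> quantParams;              // empty: not a quantized tensor
//
//         OH_NN_ReturnCode ret = tensor.Build(OH_NN_FLOAT32, dimensions, quantParams, OH_NN_TENSOR);
//         if (ret != OH_NN_SUCCESS) {
//             return ret;
//         }
//
//         tensor.SetBuffer(data, dataLength); // attach the data buffer
//         return OH_NN_SUCCESS;
//     }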