/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <cstdlib>
#include <new>

#include "nn_tensor.h"
#include "validation.h"
#include "transform.h"
#include "log.h"
#include "mindir.h"
#include "mindir_types.h"
#include "quant_param.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
const uint32_t SUPPORT_NUM_BIT = 8; // Currently support 8-bit quantization only
constexpr size_t DIM_MAX_NUM = 200;

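// Deleter used by LiteGraphTensorPtr: releases a tensor created by MindIR_Tensor_Create.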
void DestroyLiteGraphTensor(void* tensor)
{
    mindspore::lite::MindIR_Tensor_Destroy(&tensor);
}

NNTensor::~NNTensor()
{
    if (m_buffer != nullptr) {
        delete [] reinterpret_cast<char*>(m_buffer);
    }
}

NNTensor::NNTensor(NNTensor&& tensor) noexcept
{
    *this = std::move(tensor);
}

NNTensor& NNTensor::operator=(NNTensor&& tensor) noexcept
{
    if (this == &tensor) {
        return *this;
    }

    m_type = tensor.m_type;
    m_dataType = tensor.m_dataType;
    m_format = tensor.m_format;
    m_name = std::move(tensor.m_name);
    m_dimensions = std::move(tensor.m_dimensions);
    m_quantParams = std::move(tensor.m_quantParams);
    m_elementCount = tensor.m_elementCount;
    m_isDynamicShape = tensor.m_isDynamicShape;
    m_isOpParameter = tensor.m_isOpParameter;
    m_buffer = tensor.m_buffer;
    m_bufferLength = tensor.m_bufferLength;
    m_dataLength = tensor.m_dataLength;

    tensor.m_buffer = nullptr;
    tensor.m_bufferLength = 0;
    tensor.m_dataLength = 0;

    return *this;
}

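// Build validates the data type, dimensions and quantization parameters and stores them in the
// tensor. It does not allocate or copy a data buffer; call SetBuffer() separately.
// Illustrative usage sketch (hypothetical call site, not taken from this file):
//     NNTensor tensor;
//     OH_NN_ReturnCode ret = tensor.Build(OH_NN_FLOAT32, {1, 3, 224, 224}, {}, OH_NN_TENSOR);
//     if (ret == OH_NN_SUCCESS) {
//         tensor.SetBuffer(data, dataLength);
//     }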
OH_NN_ReturnCode NNTensor::Build(OH_NN_DataType dataType,
                                 const std::vector<int32_t>& dimensions,
                                 const std::vector<QuantParam>& quantParams,
                                 OH_NN_TensorType type)
{
    m_type = type;

    if (!Validation::ValidateTensorDataType(dataType)) {
        LOGE("Build failed, passed invalid data type.");
        return OH_NN_INVALID_PARAMETER;
    }
    m_dataType = dataType;

    OH_NN_ReturnCode returnCode = ValidateDimensions(dimensions);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("Build failed, error happened when validating dimensions.");
        return returnCode;
    }
    m_dimensions = dimensions;

    returnCode = ValidateQuantParams(quantParams);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("Build failed, error happened when validating quantParams.");
        return returnCode;
    }
    m_quantParams = quantParams;

    return OH_NN_SUCCESS;
}

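// Builds the tensor from the C API struct OH_NN_Tensor, validating the tensor type, data type,
// dimensions and quantization parameters before storing them.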
OH_NN_ReturnCode NNTensor::BuildFromOHNNTensor(const OH_NN_Tensor& nnTensor)
{
    m_type = nnTensor.type;

    if (!Validation::ValidateTensorDataType(nnTensor.dataType)) {
        LOGE("BuildFromOHNNTensor failed, passed invalid data type: %d.", nnTensor.dataType);
        return OH_NN_INVALID_PARAMETER;
    }
    m_dataType = nnTensor.dataType;

    if (!Validation::ValidateTensorType(nnTensor.type)) {
        LOGE("BuildFromOHNNTensor failed, passed invalid nnTensor type: %d.", nnTensor.type);
        return OH_NN_INVALID_PARAMETER;
    }

    OH_NN_ReturnCode returnCode = ParseDimensions(nnTensor.dimensions, nnTensor.dimensionCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromOHNNTensor failed, passed invalid nnTensor dimensions.");
        return returnCode;
    }

    returnCode = ParseQuantParams(nnTensor.quantParam);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromOHNNTensor failed, please check quantParam in nnTensor.");
        return returnCode;
    }

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNTensor::BuildFromOHNNTensorInfo(const OH_NN_TensorInfo& nnTensorInfo)
{
    if (!Validation::ValidateTensorDataType(nnTensorInfo.dataType)) {
        LOGE("BuildFromOHNNTensorInfo failed, passed invalid data type: %d.", nnTensorInfo.dataType);
        return OH_NN_INVALID_PARAMETER;
    }
    m_dataType = nnTensorInfo.dataType;

    if (!Validation::ValidateTensorFormat(nnTensorInfo.format)) {
        LOGE("BuildFromOHNNTensorInfo failed, passed invalid nnTensorInfo format: %d.", nnTensorInfo.format);
        return OH_NN_INVALID_PARAMETER;
    }
    m_format = nnTensorInfo.format;
    m_name = nnTensorInfo.name;

    OH_NN_ReturnCode returnCode = ParseDimensions(nnTensorInfo.dimensions, nnTensorInfo.dimensionCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromOHNNTensorInfo failed, passed invalid nnTensorInfo dimensions.");
        return returnCode;
    }

    return OH_NN_SUCCESS;
}

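// Builds the tensor from an NN_TensorDesc handle. Only the data type and shape are read from the
// descriptor; quantization parameters and the tensor type are not part of it and should be set
// afterwards through SetQuantParam() and SetTensorType().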
OH_NN_ReturnCode NNTensor::BuildFromTensorDesc(const NN_TensorDesc* tensorDesc)
{
    if (tensorDesc == nullptr) {
        LOGE("BuildFromTensorDesc failed, passed nullptr to tensorDesc.");
        return OH_NN_INVALID_PARAMETER;
    }

    const auto* tensorDescImpl = reinterpret_cast<const OHOS::NeuralNetworkRuntime::TensorDesc*>(tensorDesc);

    // Get data type from TensorDesc
    OH_NN_DataType dataType;
    OH_NN_ReturnCode returnCode = tensorDescImpl->GetDataType(&dataType);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromTensorDesc failed, error happened when getting dataType.");
        return returnCode;
    }
    if (!OHOS::NeuralNetworkRuntime::Validation::ValidateTensorDataType(dataType)) {
        LOGE("BuildFromTensorDesc failed, passed invalid dataType.");
        return OH_NN_INVALID_PARAMETER;
    }

    // Get dimensions from TensorDesc and transform to std::vector
    int32_t* shape {nullptr};
    size_t shapeNum {0};
    returnCode = tensorDescImpl->GetShape(&shape, &shapeNum);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromTensorDesc failed, error happened when getting shape.");
        return returnCode;
    }
    std::vector<int32_t> dimensions(shape, shape + shapeNum);

    // OH_NNCore_TensorDesc does not include quantization parameters or the tensor type;
    // they should be set through the independent interfaces.
    returnCode = Build(dataType, dimensions, {}, OH_NN_TENSOR);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromTensorDesc failed, error happened when building NNTensor.");
    }

    return returnCode;
}

OH_NN_ReturnCode NNTensor::SetQuantParam(const NN_QuantParam* quantParam)
{
    if (quantParam == nullptr) {
        LOGE("SetQuantParam failed, quantParam is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    const auto* quantParamImpl = reinterpret_cast<const OHOS::NeuralNetworkRuntime::QuantParams*>(quantParam);
    m_quantParams.clear();
    OH_NN_ReturnCode returnCode = quantParamImpl->CopyToCompat(m_quantParams);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("SetQuantParam failed, error happened when converting quantization parameters.");
        return returnCode;
    }

    returnCode = ValidateQuantParams(m_quantParams);
    if (returnCode != OH_NN_SUCCESS) {
        m_quantParams.clear();
        LOGE("SetQuantParam failed, error happened when validating quantization parameters.");
    }

    return returnCode;
}

OH_NN_ReturnCode NNTensor::SetTensorType(OH_NN_TensorType tensorType)
{
    m_type = tensorType;
    return OH_NN_SUCCESS;
}

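// Validates the dimension vector and derives m_elementCount and m_dataLength from it. A dimension
// of -1 marks a dynamic axis; any dynamic axis makes the whole tensor dynamic, in which case the
// element count and data length are set to 0. uint64_t intermediates are used so the overflow
// check against UINT32_MAX stays reliable.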
OH_NN_ReturnCode NNTensor::ValidateDimensions(const std::vector<int32_t>& dimensions)
{
    // Temporary variables to check overflow.
    uint64_t absoluteDim {0};
    uint64_t elementCount {1};
    uint64_t dataLength {static_cast<uint64_t>(GetTypeSize(m_dataType))};
    m_isDynamicShape = false;
    if (dimensions.size() > DIM_MAX_NUM) {
        LOGE("ParseDimension failed, dimensions more than 200.");
        return OH_NN_INVALID_PARAMETER;
    }

    for (int32_t dim : dimensions) {
        if (dim < -1 || dim == 0) {
            LOGE("ParseDimension failed, dimension of OH_NN_Tensor cannot be 0 or less than -1, receive %d.", dim);
            return OH_NN_INVALID_PARAMETER;
        }

        m_isDynamicShape = m_isDynamicShape || (dim == -1);
        absoluteDim = static_cast<uint64_t>(abs(dim));
        elementCount *= absoluteDim;
        dataLength *= absoluteDim;

        if (dataLength > UINT32_MAX) {
            LOGE("ParseDimension failed, expected data length of tensor exceed limit %u.", UINT32_MAX);
            return OH_NN_INVALID_PARAMETER;
        }
    }

    if (m_isDynamicShape) {
        // If the tensor has a dynamic shape, m_elementCount and m_dataLength are set to 0.
        m_elementCount = 0;
        m_dataLength = 0;
    } else {
        m_elementCount = static_cast<uint32_t>(elementCount);
        m_dataLength = static_cast<size_t>(dataLength);
    }

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNTensor::ParseDimensions(const int32_t* dimensions, uint32_t dimensionCount)
{
    OH_NN_ReturnCode returnCode = Validation::ValidateArray(dimensions, dimensionCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromOHNNTensor failed, please check dimension and dimensionCount in NNTensor.");
        return returnCode;
    }
    std::vector<int32_t> dimensionsVec = ConstructVectorFromArray(dimensions, dimensionCount);

    returnCode = ValidateDimensions(dimensionsVec);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("BuildFromOHNNTensor failed, passed invalid dimension info.");
        return returnCode;
    }
    m_dimensions = std::move(dimensionsVec);

    return OH_NN_SUCCESS;
}

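// Parses the C API quantization struct. A null quantParam means the tensor is not quantized and is
// accepted as-is; otherwise numBits, scale and zeroPoint must each point to quantCount entries.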
OH_NN_ReturnCode NNTensor::ParseQuantParams(const OH_NN_QuantParam* quantParam)
{
    if (quantParam == nullptr) {
        return OH_NN_SUCCESS;
    }

    if ((quantParam->numBits == nullptr) || (quantParam->scale == nullptr) || (quantParam->zeroPoint == nullptr)) {
        LOGE("ParseQuantParams failed, numBits, scale or zeroPoint is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    std::vector<QuantParam> tmpQuantParam;
    uint32_t numBits{0};
    double scale{0.0};
    int32_t zeroPoint{0};
    for (uint32_t i = 0; i < quantParam->quantCount; i++) {
        numBits = quantParam->numBits[i];
        scale = quantParam->scale[i];
        zeroPoint = quantParam->zeroPoint[i];
        tmpQuantParam.emplace_back((QuantParam){numBits, scale, zeroPoint});
    }

    OH_NN_ReturnCode returnCode = ValidateQuantParams(tmpQuantParam);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("ParseQuantParams failed, error happened when validating quantization parameters.");
        return returnCode;
    }
    m_quantParams = std::move(tmpQuantParam);

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNTensor::ValidateQuantParams(const std::vector<QuantParam>& quantParams)
{
    // Only support 8-bit quantization in NNR version 1.0
    auto paramIt = std::find_if(quantParams.begin(), quantParams.end(), [](QuantParam quant) {
        return quant.numBits != SUPPORT_NUM_BIT;
    });
    if (paramIt != quantParams.end()) {
        LOGE("ValidateQuantParams failed, get invalid numBits %d.", paramIt->numBits);
        return OH_NN_INVALID_PARAMETER;
    }

    return OH_NN_SUCCESS;
}

void NNTensor::IdentifyOpParameter()
{
    m_isOpParameter = true;
}

void NNTensor::SetName(const std::string& name)
{
    m_name = name;
}

// The buffer set inside NNTensor will be released during destruction; make sure the buffer won't be released twice.
void NNTensor::SetBuffer(const void* buffer, size_t length)
{
    // Copy the pointer instead of copying the memory.
    m_buffer = const_cast<void*>(buffer);
    m_bufferLength = length;
}

void NNTensor::SetFormat(const OH_NN_Format& format)
{
    m_format = format;
}

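// Resets the dimensions of an already-built tensor. The new shape must have the same number of
// dimensions as the current one and is re-validated, so element count and data length stay consistent.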
OH_NN_ReturnCode NNTensor::SetDimensions(const std::vector<int32_t>& dimensions)
{
    size_t expectedDimensionCount = m_dimensions.size();
    size_t dimensionCount = dimensions.size();
    if (dimensionCount != expectedDimensionCount) {
        LOGE("Passed dimensions have different dimension counts from NNTensor, expected %zu, but passed %zu.",
             expectedDimensionCount, dimensionCount);
        return OH_NN_INVALID_PARAMETER;
    }

    auto returnCode = ValidateDimensions(dimensions);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("SetDimensions failed, error happened when validating dimensions.");
        return returnCode;
    }

    m_dimensions = dimensions;
    return OH_NN_SUCCESS;
}

OH_NN_TensorType NNTensor::GetType() const
{
    return m_type;
}

std::string NNTensor::GetName() const
{
    return m_name;
}

void* NNTensor::GetBuffer() const
{
    return m_buffer;
}

size_t NNTensor::GetBufferLength() const
{
    return m_bufferLength;
}

size_t NNTensor::GetDataLength() const
{
    return m_dataLength;
}

OH_NN_DataType NNTensor::GetDataType() const
{
    return m_dataType;
}

uint32_t NNTensor::GetElementCount() const
{
    return m_elementCount;
}

std::vector<int32_t> NNTensor::GetDimensions() const
{
    return m_dimensions;
}

OH_NN_Format NNTensor::GetFormat() const
{
    return m_format;
}

std::vector<QuantParam> NNTensor::GetQuantParam() const
{
    return m_quantParams;
}

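// Converts this tensor into a MindIR lite-graph tensor: the buffer contents are copied into a
// temporary vector and the quantization parameters are translated to their MindIR form before
// MindIR_Tensor_Create is called. The result is wrapped in a LiteGraphTensorPtr whose deleter is
// DestroyLiteGraphTensor.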
LiteGraphTensorPtr NNTensor::ConvertToLiteGraphTensor() const
{
    mindspore::lite::DataType dataType = NNToMS::TransformDataType(m_dataType);
    mindspore::lite::Format format = NNToMS::TransformFormat(m_format);
    const uint8_t* buffer = static_cast<const uint8_t*>(m_buffer);
    std::vector<uint8_t> data = ConstructVectorFromArray(buffer, m_dataLength);

    std::vector<mindspore::lite::QuantParam> quantParams;
    mindspore::lite::QuantParam msQuantParam;
    for (const QuantParam& param : m_quantParams) {
        msQuantParam = {param.zeroPoint, param.scale, param.numBits};
        quantParams.emplace_back(std::move(msQuantParam));
    }

    mindspore::lite::TensorPtr tensor = mindspore::lite::MindIR_Tensor_Create(
        m_name.c_str(), dataType, m_dimensions.data(), m_dimensions.size(), format,
        data.data(), data.size(), quantParams.data(), quantParams.size());
    if (tensor == nullptr) {
        LOGE("ConvertToLiteGraphTensor failed, please check attributes of NNTensor.");
        return {nullptr, DestroyLiteGraphTensor};
    }

    LiteGraphTensorPtr liteGraphTensor(tensor, DestroyLiteGraphTensor);
    return liteGraphTensor;
}

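// Fills an IOTensor view of this tensor. Only the pointer is shared; the underlying buffer is not copied.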
void NNTensor::ConvertToIOTensor(IOTensor& tensor) const
{
    tensor.dataType = m_dataType;
    tensor.format = m_format;
    tensor.dimensions = m_dimensions;
    tensor.data = const_cast<void*>(m_buffer);
    tensor.length = m_bufferLength;
}

void NNTensor::ConvertToTensorDesc(TensorDesc& desc) const
{
    desc.SetDataType(m_dataType);
    desc.SetFormat(m_format);
    desc.SetName(m_name.c_str());
    desc.SetShape(m_dimensions.data(), m_dimensions.size());
}

bool NNTensor::IsDynamicShape() const
{
    return m_isDynamicShape;
}

bool NNTensor::IsQuantTensor() const
{
    return (m_quantParams.size() > 0);
}

bool NNTensor::IsScalar() const
{
    return (m_dimensions.empty());
}

bool NNTensor::IsOpParameter() const
{
    return m_isOpParameter;
}

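// Compares data type, format, dimensions and tensor type with another tensor. A dimension of -1 in
// this tensor acts as a wildcard that matches any value in the other tensor; name, buffer and
// quantization parameters are not compared.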
bool NNTensor::CompareAttribute(const NNTensor& tensor) const
{
    if (m_dataType != tensor.GetDataType()) {
        LOGI("Tensors have different data type: %d and %d.", m_dataType, tensor.GetDataType());
        return false;
    }

    if (m_format != tensor.GetFormat()) {
        LOGI("Tensors have different format: %d and %d.", m_format, tensor.GetFormat());
        return false;
    }

    const std::vector<int32_t> dimensions = tensor.GetDimensions();
    if (m_dimensions.size() != dimensions.size()) {
        LOGI("Tensors have different dimension counts: %zu and %zu.", m_dimensions.size(), dimensions.size());
        return false;
    }

    size_t dimensionsSize = dimensions.size();
    for (size_t i = 0; i < dimensionsSize; i++) {
        if ((m_dimensions[i] != -1) && (m_dimensions[i] != dimensions[i])) {
            LOGI("Tensors have different dimension: dimension index: %zu, dimension value: %d and %d.",
                 i, m_dimensions[i], dimensions[i]);
            return false;
        }
    }

    if (m_type != tensor.GetType()) {
        LOGI("Tensors have different type: %d and %d.", m_type, tensor.GetType());
        return false;
    }

    return true;
}
} // NeuralNetworkRuntime
} // OHOS