• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "neural_network_runtime_inner.h"
17 #include "neural_network_runtime/neural_network_runtime.h"
18 
19 #include "compilation.h"
20 #include "executor.h"
21 #include "inner_model.h"
22 #include "log.h"
23 #include "quant_param.h"
24 #include "validation.h"
25 #include "syspara/parameter.h"
26 
27 #include <cstring>
28 #include <fstream>
29 #include <filesystem>
30 #include <sys/stat.h>
31 #include <unistd.h>
32 #include "nlohmann/json.hpp"
33 #include "securec.h"
34 
35 using namespace OHOS::NeuralNetworkRuntime;
36 
37 #define NNRT_API __attribute__((visibility("default")))
38 
namespace {
// Extension keys accepted by OH_NNModel_BuildFromLiteGraph / BuildFromMetaGraph.
// Each key selects one entry in the OH_NN_Extension array passed by the caller.
const std::string EXTENSION_KEY_QUANT_BUFFER = "QuantBuffer";
const std::string EXTENSION_KEY_MODEL_NAME = "ModelName";
const std::string EXTENSION_KEY_IS_PROFILING = "isProfiling";
const std::string EXTENSION_KEY_OP_LAYOUT = "opLayout";
const std::string EXTENSION_KEY_INPUT_DIMS = "InputDims";
const std::string EXTENSION_KEY_DYNAMIC_DIMS = "DynamicDims";
const std::string EXTENSION_KEY_FM_SHARED = "NPU_FM_SHARED";
const std::string EXTENSION_KEY_IS_EXCEED_RAMLIMIT = "isExceedRamLimit";

// System-parameter names used to identify the current device.
const std::string NULL_HARDWARE_NAME = "default";
// NOTE(review): "deivce" looks like a typo, but this string must match the
// actual system parameter key — confirm against the parameter registry
// before changing it.
const std::string NNRT_DEVICE_NAME = "const.ai.nnrt_deivce";
const std::string HARDWARE_NAME = "ohos.boot.hardware";
const std::string HARDWARE_VERSION = "v5_0";
constexpr size_t HARDWARE_NAME_MAX_LENGTH = 128;
constexpr size_t FILE_NUMBER_MAX = 100; // Cap the number of cache files at 100
constexpr size_t EXTENSION_MAX_SIZE = 200; // Cap the number of extensions passed in (from MindSpore) at 200
constexpr size_t INPUT_MAX_COUNT = 200; // Cap the number of model inputs at 200
constexpr int32_t HEX_UNIT = 16;
}
59 
/**
 * Computes a 16-bit ones'-complement checksum (Internet-checksum style) over
 * 'length' bytes of 'buffer'. Used to validate the "data" section of a cache
 * info file against its stored "CheckSum" field.
 *
 * Note: the 16-bit word values depend on host byte order; cache files are
 * written and read on the same device, so this is acceptable.
 */
unsigned short CacheInfoGetCrc16(char* buffer, size_t length)
{
    constexpr int foldShift = 16; // width used to fold the 32-bit sum back into 16 bits

    unsigned int sum = 0;
    while (length > 1) {
        // Load via memcpy instead of reinterpret_cast: safe for unaligned
        // buffers and does not violate strict aliasing.
        unsigned short word = 0;
        std::memcpy(&word, buffer, sizeof(word));
        sum += word;
        length -= sizeof(word);
        buffer += sizeof(word);
    }

    if (length > 0) { // odd trailing byte
        sum += static_cast<unsigned char>(*buffer);
    }

    // Fold carries until the sum fits in 16 bits.
    while (sum >> foldShift) {
        sum = (sum >> foldShift) + (sum & 0xffff);
    }

    return static_cast<unsigned short>(~sum);
}
79 
OH_NNQuantParam_Create()80 NNRT_API NN_QuantParam *OH_NNQuantParam_Create()
81 {
82     auto* quantParamImpl = new (std::nothrow) QuantParams();
83     if (quantParamImpl == nullptr) {
84         LOGE("OH_NNQuantParam_Create failed, please check whether it has enough memory.");
85         return nullptr;
86     }
87 
88     return (NN_QuantParam*)(quantParamImpl);
89 }
90 
OH_NNQuantParam_SetScales(NN_QuantParam * quantParams,const double * scales,size_t quantNum)91 NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetScales(NN_QuantParam* quantParams, const double* scales, size_t quantNum)
92 {
93     if (quantParams == nullptr) {
94         LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to quantParams.");
95         return OH_NN_INVALID_PARAMETER;
96     }
97 
98     if (scales == nullptr) {
99         LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to scales.");
100         return OH_NN_INVALID_PARAMETER;
101     }
102 
103     if (quantNum == 0) {
104         LOGE("OH_NNQuantParam_SetScales failed, passed 0 to quantNum.");
105         return OH_NN_INVALID_PARAMETER;
106     }
107 
108     auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
109     std::vector<double> scaleVector(scales, scales + quantNum);
110     quantParamImpl->SetScales(scaleVector);
111 
112     return OH_NN_SUCCESS;
113 }
114 
OH_NNQuantParam_SetZeroPoints(NN_QuantParam * quantParams,const int32_t * zeroPoints,size_t quantNum)115 NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetZeroPoints(NN_QuantParam* quantParams,
116                                                         const int32_t* zeroPoints,
117                                                         size_t quantNum)
118 {
119     if (quantParams == nullptr) {
120         LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to quantParams.");
121         return OH_NN_INVALID_PARAMETER;
122     }
123 
124     if (zeroPoints == nullptr) {
125         LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to zeroPoints.");
126         return OH_NN_INVALID_PARAMETER;
127     }
128 
129     if (quantNum == 0) {
130         LOGE("OH_NNQuantParam_SetZeroPoints failed, passed 0 to quantNum.");
131         return OH_NN_INVALID_PARAMETER;
132     }
133 
134     auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
135     std::vector<int32_t> zeroPointVector(zeroPoints, zeroPoints + quantNum);
136     quantParamImpl->SetZeroPoints(zeroPointVector);
137 
138     return OH_NN_SUCCESS;
139 }
140 
OH_NNQuantParam_SetNumBits(NN_QuantParam * quantParams,const uint32_t * numBits,size_t quantNum)141 OH_NN_ReturnCode OH_NNQuantParam_SetNumBits(NN_QuantParam* quantParams, const uint32_t* numBits, size_t quantNum)
142 {
143     if (quantParams == nullptr) {
144         LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to quantParams.");
145         return OH_NN_INVALID_PARAMETER;
146     }
147 
148     if (numBits == nullptr) {
149         LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to numBits.");
150         return OH_NN_INVALID_PARAMETER;
151     }
152 
153     if (quantNum == 0) {
154         LOGE("OH_NNQuantParam_SetNumBits failed, passed 0 to quantNum.");
155         return OH_NN_INVALID_PARAMETER;
156     }
157 
158     auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
159     std::vector<uint32_t> numBitVector(numBits, numBits + quantNum);
160     quantParamImpl->SetNumBits(numBitVector);
161 
162     return OH_NN_SUCCESS;
163 }
164 
OH_NNQuantParam_Destroy(NN_QuantParam ** quantParams)165 OH_NN_ReturnCode OH_NNQuantParam_Destroy(NN_QuantParam** quantParams)
166 {
167     if (quantParams == nullptr) {
168         LOGE("OH_NNQuantParam_Destroy failed, passed nullptr to quantParams.");
169         return OH_NN_INVALID_PARAMETER;
170     }
171 
172     if (*quantParams == nullptr) {
173         LOGW("OH_NNQuantParam_Destroy failed, passed nullptr to *quantParams.");
174         return OH_NN_INVALID_PARAMETER;
175     }
176 
177     auto* quantParamImpl = reinterpret_cast<QuantParams*>(*quantParams);
178     delete quantParamImpl;
179     *quantParams = nullptr;
180 
181     return OH_NN_SUCCESS;
182 }
183 
OH_NNModel_AddTensorToModel(OH_NNModel * model,const NN_TensorDesc * tensorDesc)184 OH_NN_ReturnCode OH_NNModel_AddTensorToModel(OH_NNModel* model, const NN_TensorDesc* tensorDesc)
185 {
186     if (model == nullptr) {
187         LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to model.");
188         return OH_NN_INVALID_PARAMETER;
189     }
190 
191     if (tensorDesc == nullptr) {
192         LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to tensorDesc.");
193         return OH_NN_INVALID_PARAMETER;
194     }
195 
196     auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
197     OH_NN_ReturnCode returnCode = innerModel->AddTensorDesc(tensorDesc);
198     if (returnCode != OH_NN_SUCCESS) {
199         LOGE("OH_NNModel_AddTensorToModel failed, error happened when adding tensor to model.");
200     }
201 
202     return returnCode;
203 }
204 
OH_NNModel_SetTensorQuantParams(OH_NNModel * model,uint32_t index,NN_QuantParam * quantParam)205 OH_NN_ReturnCode OH_NNModel_SetTensorQuantParams(OH_NNModel* model, uint32_t index, NN_QuantParam* quantParam)
206 {
207     if (model == nullptr) {
208         LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to model.");
209         return OH_NN_INVALID_PARAMETER;
210     }
211 
212     if (quantParam == nullptr) {
213         LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to quantParam.");
214         return OH_NN_INVALID_PARAMETER;
215     }
216 
217     auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
218     OH_NN_ReturnCode returnCode = innerModel->SetTensorQuantParam((uint32_t)(index), quantParam);
219     if (returnCode != OH_NN_SUCCESS) {
220         LOGE("OH_NNModel_SetTensorQuantParams failed, error happened when setting tensor quantParam.");
221     }
222 
223     return returnCode;
224 }
225 
OH_NNModel_SetTensorType(OH_NNModel * model,uint32_t index,OH_NN_TensorType tensorType)226 OH_NN_ReturnCode OH_NNModel_SetTensorType(OH_NNModel* model, uint32_t index, OH_NN_TensorType tensorType)
227 {
228     if (model == nullptr) {
229         LOGE("OH_NNModel_SetTensorType failed, passed nullptr to model.");
230         return OH_NN_INVALID_PARAMETER;
231     }
232 
233     if (!Validation::ValidateTensorType(tensorType)) {
234         LOGE("OH_NNModel_SetTensorType failed, invalid tensor type.");
235         return OH_NN_INVALID_PARAMETER;
236     }
237 
238     auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
239     OH_NN_ReturnCode returnCode = innerModel->SetTensorType((uint32_t)(index), tensorType);
240     if (returnCode != OH_NN_SUCCESS) {
241         LOGE("OH_NNModel_SetTensorType failed, error happened when setting tensor type.");
242     }
243 
244     return returnCode;
245 }
246 
OH_NNModel_Construct(void)247 NNRT_API OH_NNModel *OH_NNModel_Construct(void)
248 {
249     InnerModel *innerModel = new(std::nothrow) InnerModel();
250     if (innerModel == nullptr) {
251         LOGE("OH_NNModel_Construct failed, please check whether it has enough memory.");
252         return nullptr;
253     }
254 
255     OH_NNModel *nnModel = reinterpret_cast<OH_NNModel*>(innerModel);
256     return nnModel;
257 }
258 
OH_NNModel_AddOperation(OH_NNModel * model,OH_NN_OperationType op,const OH_NN_UInt32Array * paramIndices,const OH_NN_UInt32Array * inputIndices,const OH_NN_UInt32Array * outputIndices)259 NNRT_API OH_NN_ReturnCode OH_NNModel_AddOperation(OH_NNModel *model,
260                                                   OH_NN_OperationType op,
261                                                   const OH_NN_UInt32Array *paramIndices,
262                                                   const OH_NN_UInt32Array *inputIndices,
263                                                   const OH_NN_UInt32Array *outputIndices)
264 {
265     if (model == nullptr) {
266         LOGE("OH_NNModel_AddOperation failed, passed nullptr to model.");
267         return OH_NN_INVALID_PARAMETER;
268     }
269 
270     if (paramIndices == nullptr) {
271         LOGE("OH_NNModel_AddOperation failed, passed nullptr to paramIndices.");
272         return OH_NN_INVALID_PARAMETER;
273     }
274 
275     if (inputIndices == nullptr) {
276         LOGE("OH_NNModel_AddOperation failed, passed nullptr to inputIndices.");
277         return OH_NN_INVALID_PARAMETER;
278     }
279 
280     if (outputIndices == nullptr) {
281         LOGE("OH_NNModel_AddOperation failed, passed nullptr to outputIndices.");
282         return OH_NN_INVALID_PARAMETER;
283     }
284 
285     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
286     return innerModel->AddOperation(op, *paramIndices, *inputIndices, *outputIndices);
287 }
288 
OH_NNModel_SetTensorData(OH_NNModel * model,uint32_t index,const void * dataBuffer,size_t length)289 NNRT_API OH_NN_ReturnCode OH_NNModel_SetTensorData(OH_NNModel *model,
290                                                    uint32_t index,
291                                                    const void *dataBuffer,
292                                                    size_t length)
293 {
294     if (model == nullptr) {
295         LOGE("OH_NNModel_SetTensorData failed, passed nullptr to model.");
296         return OH_NN_INVALID_PARAMETER;
297     }
298 
299     if (dataBuffer == nullptr) {
300         LOGE("OH_NNModel_SetTensorData failed, passed nullptr to dataBuffer, which has no effect.");
301         return OH_NN_INVALID_PARAMETER;
302     }
303 
304     if (length == 0) {
305         LOGE("OH_NNModel_SetTensorData failed, passed dataBuffer with length 0, which has no effect.");
306         return OH_NN_INVALID_PARAMETER;
307     }
308 
309     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
310     return innerModel->SetTensorValue(index, dataBuffer, length);
311 }
312 
OH_NNModel_SpecifyInputsAndOutputs(OH_NNModel * model,const OH_NN_UInt32Array * inputIndices,const OH_NN_UInt32Array * outputIndices)313 NNRT_API OH_NN_ReturnCode OH_NNModel_SpecifyInputsAndOutputs(OH_NNModel *model,
314                                                              const OH_NN_UInt32Array *inputIndices,
315                                                              const OH_NN_UInt32Array *outputIndices)
316 {
317     if (model == nullptr) {
318         LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to model.");
319         return OH_NN_INVALID_PARAMETER;
320     }
321 
322     if (inputIndices == nullptr) {
323         LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to inputIndices.");
324         return OH_NN_INVALID_PARAMETER;
325     }
326 
327     if (outputIndices == nullptr) {
328         LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to outputIndices.");
329         return OH_NN_INVALID_PARAMETER;
330     }
331 
332     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
333     return innerModel->SpecifyInputsAndOutputs(*inputIndices, *outputIndices);
334 }
335 
OH_NNModel_Finish(OH_NNModel * model)336 NNRT_API OH_NN_ReturnCode OH_NNModel_Finish(OH_NNModel *model)
337 {
338     if (model == nullptr) {
339         LOGE("OH_NNModel_Finish failed, passed nullptr to model.");
340         return OH_NN_INVALID_PARAMETER;
341     }
342 
343     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
344     return innerModel->Build();
345 }
346 
// Parses per-input tensor dimensions from a raw extension buffer.
// 'data'/'dataSize' hold a flat int32_t array containing the dims of every
// graph input, concatenated in input order; the expected length of each
// input's slice is taken from the corresponding LiteGraph tensor.
// Each -1 entry marks a dynamic axis and is counted into 'dynamicCount'.
// On any mismatch the partially filled inputDims is cleared and
// OH_NN_INVALID_PARAMETER is returned.
OH_NN_ReturnCode ParseInputDimsFromExtensions(char* data, size_t dataSize, const mindspore::lite::LiteGraph* liteGraph,
    ExtensionConfig& extensionConfig, size_t& dynamicCount)
{
    extensionConfig.inputDims.clear();
    int32_t* dimsValue = reinterpret_cast<int32_t*>(data);
    size_t allDimsSize = dataSize / sizeof(int32_t);

    size_t inputCount = liteGraph->input_indices_.size(); // number of LiteGraph inputs
    size_t allTensorSize = liteGraph->all_tensors_.size(); // total number of tensors in the LiteGraph
    if (inputCount > INPUT_MAX_COUNT) {
        LOGE("ParseInputDimsFromExtensions failed, inputCount more than 200.");
        return OH_NN_INVALID_PARAMETER;
    }

    std::vector<int32_t> inputDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < inputCount; ++i) {
        inputDim.clear();
        if (liteGraph->input_indices_[i] >= allTensorSize) {
            LOGE("ParseInputDimsFromExtensions failed, indice of input %u is out of range.",
                liteGraph->input_indices_[i]);
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Fetch the dimension count of the current input from the graph.
        mindspore::lite::TensorPtr tensor = liteGraph->all_tensors_[liteGraph->input_indices_[i]];
        auto tensorDims = mindspore::lite::MindIR_Tensor_GetDims(tensor);
        size_t inputDimSize = tensorDims.size();
        if (allDimsSize < inputDimSize) {
            LOGE("ParseInputDimsFromExtensions failed, dataSize is invalid.");
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Read this input's dim values from the extension buffer.
        for (size_t j = 0; j < inputDimSize; ++j) {
            inputDim.emplace_back(dimsValue[dataIndex]);
            if (dimsValue[dataIndex] == -1) {
                ++dynamicCount;
            }
            ++dataIndex;
        }
        extensionConfig.inputDims.emplace_back(inputDim);
        allDimsSize -= inputDimSize;
    }
    // The buffer must match the model exactly: after consuming every input's
    // dims, allDimsSize must be exactly 0.
    if (allDimsSize != 0) {
        LOGE("ParseInputDimsFromExtensions failed, allDimsSize is not equal to liteGraph.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    return OH_NN_SUCCESS;
}
399 
// Parses the "InputDims" and "DynamicDims" extension entries into
// extensionConfig.inputDims / dynamicDims.
// Precondition: the caller has verified that both keys exist in
// 'extensionMap' (the .at() calls below would otherwise throw).
// "DynamicDims" is a flat int32_t array holding one value per dynamic (-1)
// axis, repeated for each supported shape "gear"; its length must be an
// exact multiple of the dynamic-axis count found in "InputDims".
OH_NN_ReturnCode ParseDynamicDimsFromExtensions(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    const mindspore::lite::LiteGraph* liteGraph, ExtensionConfig& extensionConfig)
{
    const std::vector<std::pair<char*, size_t>>& inputDims = extensionMap.at(EXTENSION_KEY_INPUT_DIMS);
    if (inputDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, input dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    auto dynamicDims = extensionMap.at(EXTENSION_KEY_DYNAMIC_DIMS);
    if (dynamicDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    // Only the first (buffer, size) entry of each key is consumed.
    if (inputDims[0].first == nullptr || inputDims[0].second == 0 ||
        dynamicDims[0].first == nullptr || dynamicDims[0].second == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, data or dataSize is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }

    // Fill extensionConfig.inputDims and count the -1 (dynamic) axes.
    size_t dynamicCount = 0;
    auto returnCode = ParseInputDimsFromExtensions(
        inputDims[0].first, inputDims[0].second, liteGraph, extensionConfig, dynamicCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("ParseDynamicDimsFromExtensions failed, failed to get input dims from extensions.");
        return returnCode;
    }
    if (dynamicCount == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic count is 0.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }

    extensionConfig.dynamicDims.clear();
    int32_t* dynamicDimsValue = reinterpret_cast<int32_t*>(dynamicDims[0].first);
    size_t dynamicDimsSize = dynamicDims[0].second / sizeof(int32_t);
    // The buffer must contain a whole number of "gears" (one value per
    // dynamic axis in each gear).
    if ((dynamicDimsSize % dynamicCount) != 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dataSize is invalid.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    size_t dynamicSize = dynamicDimsSize / dynamicCount;
    std::vector<int32_t> dynamicDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < dynamicSize; ++i) {
        dynamicDim.clear();
        for (size_t j = 0; j < dynamicCount; ++j) {
            dynamicDim.emplace_back(dynamicDimsValue[dataIndex]);
            ++dataIndex;
        }
        extensionConfig.dynamicDims.emplace_back(dynamicDim);
    }

    return OH_NN_SUCCESS;
}
455 
CheckExtensionConfigs(const std::unordered_map<std::string,std::vector<std::pair<char *,size_t>>> & extensionMap,ExtensionConfig & extensionConfig)456 OH_NN_ReturnCode CheckExtensionConfigs(
457     const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
458     ExtensionConfig& extensionConfig)
459 {
460     if (extensionMap.find(EXTENSION_KEY_QUANT_BUFFER) != extensionMap.end()) {
461         const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_QUANT_BUFFER);
462         if (value.empty()) {
463             LOGE("ParseExtensionConfigs failed, get empty quant buffer value.");
464             return OH_NN_INVALID_PARAMETER;
465         }
466         extensionConfig.quantBuffer.data = value[0].first;
467         extensionConfig.quantBuffer.length = value[0].second;
468     }
469     if (extensionMap.find(EXTENSION_KEY_MODEL_NAME) != extensionMap.end()) {
470         const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_MODEL_NAME);
471         if (value.empty()) {
472             LOGE("ParseExtensionConfigs failed, get empty model name value.");
473             return OH_NN_INVALID_PARAMETER;
474         }
475         extensionConfig.modelName.assign(value[0].first, value[0].first + value[0].second);
476     }
477     if (extensionMap.find(EXTENSION_KEY_IS_PROFILING) != extensionMap.end()) {
478         const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_IS_PROFILING);
479         if (value.empty()) {
480             LOGE("ParseExtensionConfigs failed, get empty isProfiling value.");
481             return OH_NN_INVALID_PARAMETER;
482         }
483         extensionConfig.isProfiling.assign(value[0].first, value[0].first + value[0].second);
484     }
485     if (extensionMap.find(EXTENSION_KEY_OP_LAYOUT) != extensionMap.end()) {
486         const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_OP_LAYOUT);
487         if (value.empty()) {
488             LOGE("ParseExtensionConfigs failed, get empty op layout value.");
489             return OH_NN_INVALID_PARAMETER;
490         }
491         std::string ops;
492         for (auto singleValue : value) {
493             ops.assign(singleValue.first, singleValue.first + singleValue.second);
494             extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
495             LOGI("ParseExtensionConfigs opLayout:%{public}s.", ops.c_str());
496         }
497     }
498     return OH_NN_SUCCESS;
499 }
500 
ParseExtensionConfigs(const std::unordered_map<std::string,std::vector<std::pair<char *,size_t>>> & extensionMap,const mindspore::lite::LiteGraph * pLiteGraph,ExtensionConfig & extensionConfig)501 OH_NN_ReturnCode ParseExtensionConfigs(
502     const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
503     const mindspore::lite::LiteGraph* pLiteGraph, ExtensionConfig& extensionConfig)
504 {
505     extensionConfig.tuningStrategy = TuningStrategy::ON_DEVICE_PREPROCESS_TUNING;
506     OH_NN_ReturnCode ret = CheckExtensionConfigs(extensionMap, extensionConfig);
507     if (ret != OH_NN_SUCCESS) {
508         LOGE("CheckExtensionConfigs failed.");
509         return ret;
510     }
511     if (extensionMap.find(EXTENSION_KEY_INPUT_DIMS) != extensionMap.end() &&
512         extensionMap.find(EXTENSION_KEY_DYNAMIC_DIMS) != extensionMap.end()) {
513         auto returnCode = ParseDynamicDimsFromExtensions(extensionMap, pLiteGraph, extensionConfig);
514         if (returnCode != OH_NN_SUCCESS) {
515             LOGE("ParseExtensionConfigs failed, parse dynamic dims from extensions failed.");
516             return returnCode;
517         }
518         extensionConfig.tuningStrategy = TuningStrategy::OFF; // 分档shape不支持fftl
519     }
520     if (extensionMap.find(EXTENSION_KEY_FM_SHARED) != extensionMap.end()) {
521         extensionConfig.isNpuFmShared = true;
522         LOGI("NNRT enable fm shared success.");
523     }
524     return OH_NN_SUCCESS;
525 }
526 
OH_NNModel_BuildFromLiteGraph(OH_NNModel * model,const void * liteGraph,const OH_NN_Extension * extensions,size_t extensionSize)527 NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromLiteGraph(OH_NNModel *model, const void *liteGraph,
528     const OH_NN_Extension *extensions, size_t extensionSize)
529 {
530     if (model == nullptr) {
531         LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to model.");
532         return OH_NN_INVALID_PARAMETER;
533     }
534 
535     if (liteGraph == nullptr) {
536         LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to liteGraph.");
537         return OH_NN_INVALID_PARAMETER;
538     }
539 
540     if (extensionSize > EXTENSION_MAX_SIZE) {
541         LOGE("OH_NNModel_BuildFromLiteGraph failed, extensionSize more than 200.");
542         return OH_NN_INVALID_PARAMETER;
543     }
544 
545     auto *pLiteGraph = reinterpret_cast<const mindspore::lite::LiteGraph*>(liteGraph);
546     ExtensionConfig extensionConfig;
547     std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>> extensionMap;
548     for (size_t i = 0; i < extensionSize; ++i) {
549         std::string name = extensions[i].name;
550         if (extensionMap.find(name) == extensionMap.end()) {
551             extensionMap.insert({name, {{extensions[i].value, extensions[i].valueSize}}});
552         } else {
553             extensionMap[name].push_back({extensions[i].value, extensions[i].valueSize});
554         }
555     }
556     auto returnCode = ParseExtensionConfigs(extensionMap, pLiteGraph, extensionConfig);
557     if (returnCode != OH_NN_SUCCESS) {
558         LOGE("OH_NNModel_BuildFromLiteGraph failed, parse extension configs failed.");
559         return returnCode;
560     }
561 
562     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
563 
564     // Once the innerModel built from the liteGraph successfully, the innerModel
565     // owns the liteGraph, in which case, the invoker should not delete
566     // the liteGraph actively. Otherwise, the invoker still has the ownership.
567     return innerModel->BuildFromLiteGraph(pLiteGraph, extensionConfig);
568 }
569 
570 namespace {
CheckCacheFileExtension(const std::string & content,int64_t & fileNumber,int64_t & cacheVersion,int64_t & deviceId)571 OH_NN_ReturnCode CheckCacheFileExtension(const std::string& content, int64_t& fileNumber,
572                                          int64_t& cacheVersion, int64_t& deviceId)
573 {
574     if (!nlohmann::json::accept(content)) {
575         LOGE("OH_NNModel_HasCache CheckCacheFile JSON parse error");
576         return OH_NN_INVALID_FILE;
577     }
578 
579     nlohmann::json j = nlohmann::json::parse(content);
580     if (j.find("data") == j.end()) {
581         LOGE("OH_NNModel_HasCache read data from cache info file failed.");
582         return OH_NN_INVALID_FILE;
583     }
584 
585     if (j["data"].find("deviceId") == j["data"].end()) {
586         LOGE("OH_NNModel_HasCache read deviceId from cache info file failed.");
587         return OH_NN_INVALID_FILE;
588     }
589     deviceId = j["data"]["deviceId"].get<int64_t>();
590 
591     if (j["data"].find("fileNumber") == j["data"].end()) {
592         LOGE("OH_NNModel_HasCache read fileNumber from cache info file failed.");
593         return OH_NN_INVALID_FILE;
594     }
595     fileNumber = j["data"]["fileNumber"].get<int>();
596 
597     if (j["data"].find("version") == j["data"].end()) {
598         LOGE("OH_NNModel_HasCache read version from cache info file failed.");
599         return OH_NN_INVALID_FILE;
600     }
601     cacheVersion = j["data"]["version"].get<int>();
602 
603     if (j.find("CheckSum") == j.end()) {
604         LOGE("OH_NNModel_HasCache read CheckSum from cache info file failed.");
605         return OH_NN_INVALID_FILE;
606     }
607     const size_t dataLength = j["data"].dump().length();
608     char jData[dataLength + 1];
609 
610     if (strncpy_s(jData, dataLength+1, j["data"].dump().c_str(), dataLength) != 0) {
611         LOGE("OH_NNModel_HasCache ParseStr failed due to strncpy_s error.");
612         return OH_NN_INVALID_FILE;
613     }
614 
615     if (static_cast<int64_t>(CacheInfoGetCrc16(jData, dataLength)) != j["CheckSum"].get<int64_t>()) {
616         LOGE("OH_NNModel_HasCache cache_info CheckSum is not correct.");
617         return OH_NN_INVALID_FILE;
618     }
619 
620     return OH_NN_SUCCESS;
621 }
622 
CheckCacheFile(const std::string & cacheInfoPath,int64_t & fileNumber,int64_t & cacheVersion,int64_t & deviceId)623 OH_NN_ReturnCode CheckCacheFile(const std::string& cacheInfoPath, int64_t& fileNumber,
624                                 int64_t& cacheVersion, int64_t& deviceId)
625 {
626     char path[PATH_MAX];
627     if (realpath(cacheInfoPath.c_str(), path) == nullptr) {
628         LOGE("OH_NNModel_HasCache get real path of cache info failed.");
629         return OH_NN_INVALID_FILE;
630     }
631 
632     if (access(path, F_OK) != 0) {
633         LOGE("OH_NNModel_HasCache access cache info file failed.");
634         return OH_NN_INVALID_FILE;
635     }
636 
637     std::ifstream ifs(path, std::ios::in | std::ios::binary);
638     if (!ifs) {
639         LOGE("OH_NNModel_HasCache open cache info file failed.");
640         return OH_NN_INVALID_FILE;
641     }
642 
643     // Read the entire file into a string
644     std::string content((std::istreambuf_iterator<char>(ifs)), std::istreambuf_iterator<char>());
645     ifs.close();
646 
647     return CheckCacheFileExtension(content, fileNumber, cacheVersion, deviceId);
648 }
649 
CheckDeviceId(int64_t & deviceId)650 OH_NN_ReturnCode CheckDeviceId(int64_t& deviceId)
651 {
652     std::string deviceName;
653     char cName[HARDWARE_NAME_MAX_LENGTH];
654     int ret = GetParameter(HARDWARE_NAME.c_str(), NULL_HARDWARE_NAME.c_str(), cName, HARDWARE_NAME_MAX_LENGTH);
655     if (ret <= 0) {
656         LOGE("OH_NNModel_HasCache failed to get parameter.");
657         return OH_NN_FAILED;
658     }
659 
660     deviceName = HARDWARE_NAME + "." + cName;
661     if (deviceId != static_cast<int64_t>(std::hash<std::string>{}(deviceName))) {
662         LOGE("OH_NNModel_HasCache the deviceId in the cache files is different from current deviceId.");
663         return OH_NN_FAILED;
664     }
665 
666     return OH_NN_SUCCESS;
667 }
668 }
669 
OH_NNModel_HasCache(const char * cacheDir,const char * modelName,uint32_t version)670 NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName, uint32_t version)
671 {
672     if (cacheDir == nullptr) {
673         LOGI("OH_NNModel_HasCache get empty cache directory.");
674         return false;
675     }
676 
677     if (modelName == nullptr) {
678         LOGI("OH_NNModel_HasCache get empty model name.");
679     }
680 
681     std::string cacheInfoPath = std::string(cacheDir) + "/" + std::string(modelName) + "cache_info.nncache";
682 
683     // determine whether cache info file exists
684     struct stat buffer;
685     bool exist = (stat(cacheInfoPath.c_str(), &buffer) == 0);
686     if (!exist) {
687         return false;
688     }
689 
690     int64_t deviceId{0};
691     int64_t fileNumber{0};
692     int64_t cacheVersion{0};
693     OH_NN_ReturnCode returnCode = CheckCacheFile(cacheInfoPath, fileNumber, cacheVersion, deviceId);
694     if (returnCode != OH_NN_SUCCESS) {
695         LOGE("OH_NNModel_HasCache get fileNumber or cacheVersion fail.");
696         std::filesystem::remove_all(cacheInfoPath);
697         return false;
698     }
699 
700     returnCode = CheckDeviceId(deviceId);
701     if (returnCode != OH_NN_SUCCESS) {
702         LOGE("OH_NNModel_HasCache check deviceId fail.");
703         std::filesystem::remove_all(cacheInfoPath);
704         return false;
705     }
706 
707     if (fileNumber <= 0 || static_cast<size_t>(fileNumber) > FILE_NUMBER_MAX) {
708         LOGE("OH_NNModel_HasCache fileNumber is invalid or more than 100");
709         std::filesystem::remove_all(cacheInfoPath);
710         return false;
711     }
712 
713     // determine whether cache model files exist
714     for (int64_t i = 0; i < fileNumber; ++i) {
715         std::string cacheModelPath =
716             std::string(cacheDir) + "/" + std::string(modelName) + std::to_string(i) + ".nncache";
717         exist = (exist && (stat(cacheModelPath.c_str(), &buffer) == 0));
718         if (!exist) {
719             LOGE("OH_NNModel_HasCache cacheModelPath is not existed.");
720             std::filesystem::remove_all(cacheInfoPath);
721             return false;
722         }
723     }
724 
725     if (cacheVersion != version) {
726         LOGE("OH_NNModel_HasCache version is not match.");
727         exist = false;
728     }
729 
730     return exist;
731 }
732 
OH_NNModel_BuildFromMetaGraph(OH_NNModel * model,const void * metaGraph,const OH_NN_Extension * extensions,size_t extensionSize)733 NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromMetaGraph(OH_NNModel *model, const void *metaGraph,
734     const OH_NN_Extension *extensions, size_t extensionSize)
735 {
736     if (model == nullptr) {
737         LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to model.");
738         return OH_NN_INVALID_PARAMETER;
739     }
740 
741     if (metaGraph == nullptr) {
742         LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to metaGraph.");
743         return OH_NN_INVALID_PARAMETER;
744     }
745 
746     ExtensionConfig extensionConfig;
747     std::string ops;
748     for (size_t i = 0; i < extensionSize; ++i) {
749         std::string name = extensions[i].name;
750         if (name == "QuantBuffer") {
751             extensionConfig.quantBuffer.data = extensions[i].value;
752             extensionConfig.quantBuffer.length = extensions[i].valueSize;
753         } else if (name == "ModelName") {
754             extensionConfig.modelName.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
755         } else if (name == "Profiling") {
756             extensionConfig.isProfiling.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
757             LOGI("OH_NNModel_BuildFromMetaGraph isProfiling enable.");
758         } else if (name == "opLayout") {
759             ops.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
760             extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
761             LOGI("OH_NNModel_BuildFromMetaGraph opLayout:%{public}s.", ops.c_str());
762         }
763     }
764 
765     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
766     return innerModel->BuildFromMetaGraph(metaGraph, extensionConfig);
767 }
768 
OH_NNModel_SetInputsAndOutputsInfo(OH_NNModel * model,const OH_NN_TensorInfo * inputsInfo,size_t inputSize,const OH_NN_TensorInfo * outputsInfo,size_t outputSize)769 NNRT_API OH_NN_ReturnCode OH_NNModel_SetInputsAndOutputsInfo(OH_NNModel *model, const OH_NN_TensorInfo *inputsInfo,
770     size_t inputSize, const OH_NN_TensorInfo *outputsInfo, size_t outputSize)
771 {
772     if (model == nullptr) {
773         LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, passed nullptr to model.");
774         return OH_NN_INVALID_PARAMETER;
775     }
776 
777     if ((inputsInfo == nullptr) || (inputSize == 0)) {
778         LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, inputsInfo is empty.");
779         return OH_NN_INVALID_PARAMETER;
780     }
781 
782     if ((outputsInfo == nullptr) || (outputSize == 0)) {
783         LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, outputsInfo is empty.");
784         return OH_NN_INVALID_PARAMETER;
785     }
786 
787     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
788     return innerModel->SetInputsAndOutputsInfo(inputsInfo, inputSize, outputsInfo, outputSize);
789 }
790 
OH_NNModel_Destroy(OH_NNModel ** model)791 NNRT_API void OH_NNModel_Destroy(OH_NNModel **model)
792 {
793     if (model == nullptr) {
794         LOGW("OH_NNModel_Destroy has no effect, passed nullptr to model.");
795         return;
796     }
797 
798     if (*model == nullptr) {
799         LOGW("OH_NNModel_Destroy has no effect, passed nullptr to *model.");
800         return;
801     }
802 
803     InnerModel *innerModel = reinterpret_cast<InnerModel*>(*model);
804     delete innerModel;
805     *model = nullptr;
806 }
807 
OH_NNModel_GetAvailableOperations(OH_NNModel * model,size_t deviceID,const bool ** isAvailable,uint32_t * opCount)808 NNRT_API OH_NN_ReturnCode OH_NNModel_GetAvailableOperations(OH_NNModel *model,
809                                                             size_t deviceID,
810                                                             const bool **isAvailable,
811                                                             uint32_t *opCount)
812 {
813     if (model == nullptr) {
814         LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to model.");
815         return OH_NN_INVALID_PARAMETER;
816     }
817 
818     if (isAvailable == nullptr) {
819         LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to isAvailable.");
820         return OH_NN_INVALID_PARAMETER;
821     }
822 
823     if (*isAvailable != nullptr) {
824         LOGE("OH_NNModel_GetAvailableOperations failed, *isAvailable is not nullptr.");
825         return OH_NN_INVALID_PARAMETER;
826     }
827 
828     if (opCount == nullptr) {
829         LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to opCount.");
830         return OH_NN_INVALID_PARAMETER;
831     }
832 
833     InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
834     return innerModel->GetSupportedOperations(deviceID, isAvailable, *opCount);
835 }
836 
OH_NN_GetDeviceID(char * nnrtDevice,size_t len)837 NNRT_API OH_NN_ReturnCode OH_NN_GetDeviceID(char *nnrtDevice, size_t len)
838 {
839     if (nnrtDevice == nullptr || len == 0) {
840         LOGE("nnrtDevice is nullptr or len is 0.");
841         return OH_NN_INVALID_PARAMETER;
842     }
843 
844     char cName[HARDWARE_NAME_MAX_LENGTH] = {0};
845     int ret = GetParameter(NNRT_DEVICE_NAME.c_str(), NULL_HARDWARE_NAME.c_str(), cName, HARDWARE_NAME_MAX_LENGTH);
846     // 如果成功获取返回值为硬件名称的字节数
847     if (ret <= 0) {
848         LOGE("GetNNRtDeviceName failed, failed to get parameter.");
849         return OH_NN_FAILED;
850     }
851 
852     std::string deviceName = (std::string)cName + "_" + HARDWARE_VERSION;
853     auto secureRet = strcpy_s(nnrtDevice, len, deviceName.c_str());
854     if (secureRet != EOK) {
855         LOGE("GetNNRtDeviceName failed, failed to get name.");
856         return OH_NN_FAILED;
857     }
858     return OH_NN_SUCCESS;
859 }