/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "neural_network_runtime_inner.h"
#include "neural_network_runtime/neural_network_runtime.h"

#include "compilation.h"
#include "executor.h"
#include "inner_model.h"
#include "log.h"
#include "quant_param.h"
#include "validation.h"
#include "syspara/parameter.h"
#include "securec.h"

#include <cstring>
#include <fstream>
#include <filesystem>
#include <sys/stat.h>
#include <unistd.h>

using namespace OHOS::NeuralNetworkRuntime;

#define NNRT_API __attribute__((visibility("default")))

const std::string EXTENSION_KEY_QUANT_BUFFER = "QuantBuffer";
const std::string EXTENSION_KEY_MODEL_NAME = "ModelName";
const std::string EXTENSION_KEY_IS_PROFILING = "isProfiling";
const std::string EXTENSION_KEY_OP_LAYOUT = "opLayout";
const std::string EXTENSION_KEY_INPUT_DIMS = "InputDims";
const std::string EXTENSION_KEY_DYNAMIC_DIMS = "DynamicDims";
const std::string EXTENSION_KEY_FM_SHARED = "NPU_FM_SHARED";
const std::string EXTENSION_KEY_IS_EXCEED_RAMLIMIT = "isExceedRamLimit";

const std::string NULL_HARDWARE_NAME = "default";
const std::string HARDWARE_NAME = "const.ai.nnrt_deivce";
const std::string HARDWARE_VERSION = "v5_0";
constexpr size_t HARDWARE_NAME_MAX_LENGTH = 128;
constexpr size_t FILE_NUMBER_MAX = 100; // Limit the maximum number of cache files to 100.

NNRT_API NN_QuantParam *OH_NNQuantParam_Create()
{
    auto* quantParamImpl = new (std::nothrow) QuantParams();
    if (quantParamImpl == nullptr) {
        LOGE("OH_NNQuantParam_Create failed, please check whether it has enough memory.");
        return nullptr;
    }

    return (NN_QuantParam*)(quantParamImpl);
}

NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetScales(NN_QuantParam* quantParams, const double* scales, size_t quantNum)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (scales == nullptr) {
        LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to scales.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantNum == 0) {
        LOGE("OH_NNQuantParam_SetScales failed, passed 0 to quantNum.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
    std::vector<double> scaleVector(scales, scales + quantNum);
    quantParamImpl->SetScales(scaleVector);

    return OH_NN_SUCCESS;
}

NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetZeroPoints(NN_QuantParam* quantParams,
                                                        const int32_t* zeroPoints,
                                                        size_t quantNum)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (zeroPoints == nullptr) {
        LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to zeroPoints.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantNum == 0) {
        LOGE("OH_NNQuantParam_SetZeroPoints failed, passed 0 to quantNum.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
    std::vector<int32_t> zeroPointVector(zeroPoints, zeroPoints + quantNum);
    quantParamImpl->SetZeroPoints(zeroPointVector);

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode OH_NNQuantParam_SetNumBits(NN_QuantParam* quantParams, const uint32_t* numBits, size_t quantNum)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (numBits == nullptr) {
        LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to numBits.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantNum == 0) {
        LOGE("OH_NNQuantParam_SetNumBits failed, passed 0 to quantNum.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
    std::vector<uint32_t> numBitVector(numBits, numBits + quantNum);
    quantParamImpl->SetNumBits(numBitVector);

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode OH_NNQuantParam_Destroy(NN_QuantParam** quantParams)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_Destroy failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (*quantParams == nullptr) {
        LOGW("OH_NNQuantParam_Destroy failed, passed nullptr to *quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(*quantParams);
    delete quantParamImpl;
    *quantParams = nullptr;

    return OH_NN_SUCCESS;
}
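
/*
 * Illustrative usage sketch (not part of this translation unit): the typical
 * lifecycle of an NN_QuantParam handle, assuming a model that already holds a
 * tensor at index 0; error handling is abbreviated.
 *
 *     NN_QuantParam* quantParam = OH_NNQuantParam_Create();
 *     const double scales[] = {0.05};
 *     const int32_t zeroPoints[] = {0};
 *     const uint32_t numBits[] = {8};
 *     (void)OH_NNQuantParam_SetScales(quantParam, scales, 1);
 *     (void)OH_NNQuantParam_SetZeroPoints(quantParam, zeroPoints, 1);
 *     (void)OH_NNQuantParam_SetNumBits(quantParam, numBits, 1);
 *     (void)OH_NNModel_SetTensorQuantParams(model, 0, quantParam);
 *     (void)OH_NNQuantParam_Destroy(&quantParam);
 */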

OH_NN_ReturnCode OH_NNModel_AddTensorToModel(OH_NNModel* model, const NN_TensorDesc* tensorDesc)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensorDesc == nullptr) {
        LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to tensorDesc.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
    OH_NN_ReturnCode returnCode = innerModel->AddTensorDesc(tensorDesc);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_AddTensorToModel failed, error happened when adding tensor to model.");
    }

    return returnCode;
}

OH_NN_ReturnCode OH_NNModel_SetTensorQuantParams(OH_NNModel* model, uint32_t index, NN_QuantParam* quantParam)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantParam == nullptr) {
        LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to quantParam.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
    OH_NN_ReturnCode returnCode = innerModel->SetTensorQuantParam((uint32_t)(index), quantParam);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_SetTensorQuantParams failed, error happened when setting tensor quantParam.");
    }

    return returnCode;
}

OH_NN_ReturnCode OH_NNModel_SetTensorType(OH_NNModel* model, uint32_t index, OH_NN_TensorType tensorType)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetTensorType failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!Validation::ValidateTensorType(tensorType)) {
        LOGE("OH_NNModel_SetTensorType failed, invalid tensor type.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
    OH_NN_ReturnCode returnCode = innerModel->SetTensorType((uint32_t)(index), tensorType);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_SetTensorType failed, error happened when setting tensor type.");
    }

    return returnCode;
}

NNRT_API OH_NNModel *OH_NNModel_Construct(void)
{
    InnerModel *innerModel = new(std::nothrow) InnerModel();
    if (innerModel == nullptr) {
        LOGE("OH_NNModel_Construct failed, please check whether it has enough memory.");
        return nullptr;
    }

    OH_NNModel *nnModel = reinterpret_cast<OH_NNModel*>(innerModel);
    return nnModel;
}

NNRT_API OH_NN_ReturnCode OH_NNModel_AddOperation(OH_NNModel *model,
                                                  OH_NN_OperationType op,
                                                  const OH_NN_UInt32Array *paramIndices,
                                                  const OH_NN_UInt32Array *inputIndices,
                                                  const OH_NN_UInt32Array *outputIndices)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (paramIndices == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to paramIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (inputIndices == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to inputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (outputIndices == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to outputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->AddOperation(op, *paramIndices, *inputIndices, *outputIndices);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_SetTensorData(OH_NNModel *model,
                                                   uint32_t index,
                                                   const void *dataBuffer,
                                                   size_t length)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetTensorData failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (dataBuffer == nullptr) {
        LOGE("OH_NNModel_SetTensorData failed, passed nullptr to dataBuffer, which has no effect.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (length == 0) {
        LOGE("OH_NNModel_SetTensorData failed, passed dataBuffer with length 0, which has no effect.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->SetTensorValue(index, dataBuffer, length);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_SpecifyInputsAndOutputs(OH_NNModel *model,
                                                             const OH_NN_UInt32Array *inputIndices,
                                                             const OH_NN_UInt32Array *outputIndices)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (inputIndices == nullptr) {
        LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to inputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (outputIndices == nullptr) {
        LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to outputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->SpecifyInputsAndOutputs(*inputIndices, *outputIndices);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_Finish(OH_NNModel *model)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_Finish failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->Build();
}
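
/*
 * Illustrative usage sketch (not part of this translation unit): the expected
 * call order when assembling a model through this C API. Tensor-descriptor
 * creation and operand data are elided, and the indices below are placeholders
 * assumed to match the order in which tensors were added.
 *
 *     OH_NNModel* model = OH_NNModel_Construct();
 *     // ... add tensors via OH_NNModel_AddTensorToModel(model, tensorDesc) ...
 *     // ... set constant data via OH_NNModel_SetTensorData(model, index, data, size) ...
 *     uint32_t inputs[] = {0, 1};
 *     uint32_t outputs[] = {2};
 *     uint32_t params[] = {3};
 *     OH_NN_UInt32Array inputIndices = {inputs, 2};
 *     OH_NN_UInt32Array outputIndices = {outputs, 1};
 *     OH_NN_UInt32Array paramIndices = {params, 1};
 *     (void)OH_NNModel_AddOperation(model, OH_NN_OPS_ADD, &paramIndices, &inputIndices, &outputIndices);
 *     (void)OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices);
 *     (void)OH_NNModel_Finish(model);
 *     OH_NNModel_Destroy(&model);
 */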

OH_NN_ReturnCode ParseInputDimsFromExtensions(char* data, size_t dataSize, const mindspore::lite::LiteGraph* liteGraph,
    ExtensionConfig& extensionConfig, size_t& dynamicCount)
{
    extensionConfig.inputDims.clear();
    int32_t* dimsValue = reinterpret_cast<int32_t*>(data);
    size_t allDimsSize = dataSize / sizeof(int32_t);

    size_t inputCount = liteGraph->input_indices_.size(); // number of LiteGraph inputs
    size_t allTensorSize = liteGraph->all_tensors_.size(); // number of all LiteGraph tensors
    std::vector<int32_t> inputDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < inputCount; ++i) {
        inputDim.clear();
        if (liteGraph->input_indices_[i] >= allTensorSize) {
            LOGE("ParseInputDimsFromExtensions failed, index of input %u is out of range.",
                liteGraph->input_indices_[i]);
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Get the dimensions of the current input.
        mindspore::lite::TensorPtr tensor = liteGraph->all_tensors_[liteGraph->input_indices_[i]];
        auto tensorDims = mindspore::lite::MindIR_Tensor_GetDims(tensor);
        size_t inputDimSize = tensorDims.size();
        if (allDimsSize < inputDimSize) {
            LOGE("ParseInputDimsFromExtensions failed, dataSize is invalid.");
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Read the dim values of the current input from the extension buffer.
        for (size_t j = 0; j < inputDimSize; ++j) {
            inputDim.emplace_back(dimsValue[dataIndex]);
            if (dimsValue[dataIndex] == -1) {
                ++dynamicCount;
            }
            ++dataIndex;
        }
        extensionConfig.inputDims.emplace_back(inputDim);
        allDimsSize -= inputDimSize;
    }
    // allDimsSize must match the model: after the loop it should be exactly 0.
    if (allDimsSize != 0) {
        LOGE("ParseInputDimsFromExtensions failed, allDimsSize is not equal to liteGraph.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    return OH_NN_SUCCESS;
}
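
/*
 * Layout sketch of the "InputDims" extension buffer consumed above (illustrative
 * values, assuming a liteGraph with two inputs of rank 4 and rank 2): the buffer
 * is a flat int32_t array holding each input's dims back to back, and every -1
 * marks a dynamic axis counted into dynamicCount.
 *
 *     // input 0: [1, 3, -1, -1], input 1: [1, -1]  ->  dynamicCount == 3
 *     int32_t inputDimsBuffer[] = {1, 3, -1, -1, 1, -1};
 *     // passed as data = reinterpret_cast<char*>(inputDimsBuffer),
 *     //           dataSize = sizeof(inputDimsBuffer)
 */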

OH_NN_ReturnCode ParseDynamicDimsFromExtensions(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    const mindspore::lite::LiteGraph* liteGraph, ExtensionConfig& extensionConfig)
{
    const std::vector<std::pair<char*, size_t>>& inputDims = extensionMap.at(EXTENSION_KEY_INPUT_DIMS);
    if (inputDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, input dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    auto dynamicDims = extensionMap.at(EXTENSION_KEY_DYNAMIC_DIMS);
    if (dynamicDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    if (inputDims[0].first == nullptr || inputDims[0].second == 0 ||
        dynamicDims[0].first == nullptr || dynamicDims[0].second == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, data or dataSize is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }

    size_t dynamicCount = 0;
    auto returnCode = ParseInputDimsFromExtensions(
        inputDims[0].first, inputDims[0].second, liteGraph, extensionConfig, dynamicCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("ParseDynamicDimsFromExtensions failed, failed to get input dims from extensions.");
        return returnCode;
    }
    if (dynamicCount == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic count is 0.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }

    extensionConfig.dynamicDims.clear();
    int32_t* dynamicDimsValue = reinterpret_cast<int32_t*>(dynamicDims[0].first);
    size_t dynamicDimsSize = dynamicDims[0].second / sizeof(int32_t);
    if ((dynamicDimsSize % dynamicCount) != 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dataSize is invalid.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    size_t dynamicSize = dynamicDimsSize / dynamicCount;
    std::vector<int32_t> dynamicDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < dynamicSize; ++i) {
        dynamicDim.clear();
        for (size_t j = 0; j < dynamicCount; ++j) {
            dynamicDim.emplace_back(dynamicDimsValue[dataIndex]);
            ++dataIndex;
        }
        extensionConfig.dynamicDims.emplace_back(dynamicDim);
    }

    return OH_NN_SUCCESS;
}
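
/*
 * Layout sketch of the "DynamicDims" extension buffer consumed above, continuing
 * the illustrative example with dynamicCount == 3: the buffer holds one group of
 * dynamicCount values per supported gear, filling the -1 axes in traversal order.
 *
 *     // gear 0 fills the -1 axes with (224, 224, 640); gear 1 with (640, 640, 320)
 *     int32_t dynamicDimsBuffer[] = {224, 224, 640, 640, 640, 320};
 *     // dynamicDimsSize == 6, dynamicSize == 6 / 3 == 2 gears
 */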

OH_NN_ReturnCode CheckExtensionConfigs(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    ExtensionConfig& extensionConfig)
{
    if (extensionMap.find(EXTENSION_KEY_QUANT_BUFFER) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_QUANT_BUFFER);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty quant buffer value.");
            return OH_NN_INVALID_PARAMETER;
        }
        extensionConfig.quantBuffer.data = value[0].first;
        extensionConfig.quantBuffer.length = value[0].second;
    }
    if (extensionMap.find(EXTENSION_KEY_MODEL_NAME) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_MODEL_NAME);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty model name value.");
            return OH_NN_INVALID_PARAMETER;
        }
        extensionConfig.modelName.assign(value[0].first, value[0].first + value[0].second);
    }
    if (extensionMap.find(EXTENSION_KEY_IS_PROFILING) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_IS_PROFILING);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty isProfiling value.");
            return OH_NN_INVALID_PARAMETER;
        }
        extensionConfig.isProfiling.assign(value[0].first, value[0].first + value[0].second);
    }
    if (extensionMap.find(EXTENSION_KEY_OP_LAYOUT) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_OP_LAYOUT);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty op layout value.");
            return OH_NN_INVALID_PARAMETER;
        }
        std::string ops;
        for (auto singleValue : value) {
            ops.assign(singleValue.first, singleValue.first + singleValue.second);
            extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
            LOGI("ParseExtensionConfigs opLayout:%{public}s.", ops.c_str());
        }
    }
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode ParseExtensionConfigs(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    const mindspore::lite::LiteGraph* pLiteGraph, ExtensionConfig& extensionConfig)
{
    extensionConfig.tuningStrategy = TuningStrategy::ON_DEVICE_PREPROCESS_TUNING;
    OH_NN_ReturnCode ret = CheckExtensionConfigs(extensionMap, extensionConfig);
    if (ret != OH_NN_SUCCESS) {
        LOGE("CheckExtensionConfigs failed.");
        return ret;
    }
    if (extensionMap.find(EXTENSION_KEY_INPUT_DIMS) != extensionMap.end() &&
        extensionMap.find(EXTENSION_KEY_DYNAMIC_DIMS) != extensionMap.end()) {
        auto returnCode = ParseDynamicDimsFromExtensions(extensionMap, pLiteGraph, extensionConfig);
        if (returnCode != OH_NN_SUCCESS) {
            LOGE("ParseExtensionConfigs failed, parse dynamic dims from extensions failed.");
            return returnCode;
        }
        extensionConfig.tuningStrategy = TuningStrategy::OFF; // tiered (multi-gear) shapes do not support fftl
    }
    if (extensionMap.find(EXTENSION_KEY_FM_SHARED) != extensionMap.end()) {
        extensionConfig.isNpuFmShared = true;
        LOGI("NNRT enable fm shared success.");
    }
    return OH_NN_SUCCESS;
}

NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromLiteGraph(OH_NNModel *model, const void *liteGraph,
    const OH_NN_Extension *extensions, size_t extensionSize)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (liteGraph == nullptr) {
        LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to liteGraph.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto *pLiteGraph = reinterpret_cast<const mindspore::lite::LiteGraph*>(liteGraph);
    ExtensionConfig extensionConfig;
    std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>> extensionMap;
    for (size_t i = 0; i < extensionSize; ++i) {
        std::string name = extensions[i].name;
        if (extensionMap.find(name) == extensionMap.end()) {
            extensionMap.insert({name, {{extensions[i].value, extensions[i].valueSize}}});
        } else {
            extensionMap[name].push_back({extensions[i].value, extensions[i].valueSize});
        }
    }
    auto returnCode = ParseExtensionConfigs(extensionMap, pLiteGraph, extensionConfig);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_BuildFromLiteGraph failed, parse extension configs failed.");
        return returnCode;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);

    // Once the innerModel is built from the liteGraph successfully, the innerModel
    // owns the liteGraph, and the invoker must not delete the liteGraph itself.
    // Otherwise, the invoker retains ownership of the liteGraph.
    return innerModel->BuildFromLiteGraph(pLiteGraph, extensionConfig);
}
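
/*
 * Illustrative sketch (not part of this translation unit): how the "InputDims"
 * and "DynamicDims" buffers from the examples above would be handed to this
 * entry point. Field names follow the OH_NN_Extension usage in the loop above;
 * the exact initialization of the name field depends on the public header.
 *
 *     OH_NN_Extension exts[2] = {};
 *     // exts[0]: name "InputDims",   value = (char*)inputDimsBuffer,   valueSize = sizeof(inputDimsBuffer)
 *     // exts[1]: name "DynamicDims", value = (char*)dynamicDimsBuffer, valueSize = sizeof(dynamicDimsBuffer)
 *     OH_NN_ReturnCode ret = OH_NNModel_BuildFromLiteGraph(model, liteGraph, exts, 2);
 */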

namespace {
OH_NN_ReturnCode CheckCacheFile(const std::string& cacheInfoPath, int64_t& fileNumber, int64_t& cacheVersion)
{
    // read number of cache models
    char path[PATH_MAX];
    if (realpath(cacheInfoPath.c_str(), path) == nullptr) {
        LOGE("OH_NNModel_HasCache get real path of cache info failed.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (access(path, F_OK) != 0) {
        LOGE("OH_NNModel_HasCache access cache info file failed.");
        return OH_NN_INVALID_PARAMETER;
    }

    std::ifstream ifs(path, std::ios::in | std::ios::binary);
    if (!ifs) {
        LOGE("OH_NNModel_HasCache open cache info file failed.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!ifs.read(reinterpret_cast<char*>(&(fileNumber)), sizeof(fileNumber))) {
        LOGI("OH_NNModel_HasCache read cache info file failed.");
        ifs.close();
        return OH_NN_INVALID_PARAMETER;
    }

    if (!ifs.read(reinterpret_cast<char*>(&(cacheVersion)), sizeof(cacheVersion))) {
        LOGI("OH_NNModel_HasCache read cache info file failed.");
        ifs.close();
        return OH_NN_INVALID_PARAMETER;
    }

    ifs.close();
    return OH_NN_SUCCESS;
}
}
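
/*
 * Illustrative note (not part of this translation unit): CheckCacheFile only
 * consumes the first two fields of <modelName>cache_info.nncache, read as raw
 * binary int64_t values in this order:
 *
 *     int64_t fileNumber;   // number of <modelName><i>.nncache model files
 *     int64_t cacheVersion; // version recorded when the cache was written
 *
 * The writer on the compilation side may append further fields after these two;
 * they are ignored here.
 */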

NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName, uint32_t version)
{
    if (cacheDir == nullptr) {
        LOGI("OH_NNModel_HasCache get empty cache directory.");
        return false;
    }

    if (modelName == nullptr) {
        LOGI("OH_NNModel_HasCache get empty model name.");
        return false;
    }

    std::string cacheInfoPath = std::string(cacheDir) + "/" + std::string(modelName) + "cache_info.nncache";

    // determine whether cache info file exists
    struct stat buffer;
    bool exist = (stat(cacheInfoPath.c_str(), &buffer) == 0);
    if (!exist) {
        return false;
    }

    int64_t fileNumber{0};
    int64_t cacheVersion{0};
    OH_NN_ReturnCode returnCode = CheckCacheFile(cacheInfoPath, fileNumber, cacheVersion);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_HasCache failed to get fileNumber or cacheVersion.");
        return false;
    }

    if (fileNumber <= 0 || fileNumber > FILE_NUMBER_MAX) {
        LOGE("OH_NNModel_HasCache fileNumber is invalid or exceeds 100.");
        std::filesystem::remove_all(cacheInfoPath);
        return false;
    }

    // determine whether cache model files exist
    for (int64_t i = 0; i < fileNumber; ++i) {
        std::string cacheModelPath =
            std::string(cacheDir) + "/" + std::string(modelName) + std::to_string(i) + ".nncache";
        exist = (exist && (stat(cacheModelPath.c_str(), &buffer) == 0));
        if (!exist) {
            LOGE("OH_NNModel_HasCache cacheModelPath does not exist.");
            std::filesystem::remove_all(cacheInfoPath);
            return false;
        }
    }

    if (cacheVersion != version) {
        LOGE("OH_NNModel_HasCache version does not match.");
        exist = false;
    }

    return exist;
}
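
/*
 * Illustrative usage sketch (not part of this translation unit): checking for a
 * reusable cache before deciding whether a full build is needed. The directory,
 * model name, and version values are placeholders.
 *
 *     if (OH_NNModel_HasCache("/data/local/tmp/nncache", "myModel", 1)) {
 *         // a complete, version-matched cache exists; the compilation step can load it
 *     } else {
 *         // build from the model and regenerate the cache
 *     }
 */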

NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromMetaGraph(OH_NNModel *model, const void *metaGraph,
    const OH_NN_Extension *extensions, size_t extensionSize)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (metaGraph == nullptr) {
        LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to metaGraph.");
        return OH_NN_INVALID_PARAMETER;
    }

    ExtensionConfig extensionConfig;
    std::string ops;
    for (size_t i = 0; i < extensionSize; ++i) {
        std::string name = extensions[i].name;
        if (name == "QuantBuffer") {
            extensionConfig.quantBuffer.data = extensions[i].value;
            extensionConfig.quantBuffer.length = extensions[i].valueSize;
        } else if (name == "ModelName") {
            extensionConfig.modelName.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
        } else if (name == "Profiling") {
            extensionConfig.isProfiling.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
            LOGI("OH_NNModel_BuildFromMetaGraph isProfiling enable.");
        } else if (name == "opLayout") {
            ops.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
            extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
            LOGI("OH_NNModel_BuildFromMetaGraph opLayout:%{public}s.", ops.c_str());
        }
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->BuildFromMetaGraph(metaGraph, extensionConfig);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_SetInputsAndOutputsInfo(OH_NNModel *model, const OH_NN_TensorInfo *inputsInfo,
    size_t inputSize, const OH_NN_TensorInfo *outputsInfo, size_t outputSize)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if ((inputsInfo == nullptr) || (inputSize == 0)) {
        LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, inputsInfo is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if ((outputsInfo == nullptr) || (outputSize == 0)) {
        LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, outputsInfo is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->SetInputsAndOutputsInfo(inputsInfo, inputSize, outputsInfo, outputSize);
}

NNRT_API void OH_NNModel_Destroy(OH_NNModel **model)
{
    if (model == nullptr) {
        LOGW("OH_NNModel_Destroy has no effect, passed nullptr to model.");
        return;
    }

    if (*model == nullptr) {
        LOGW("OH_NNModel_Destroy has no effect, passed nullptr to *model.");
        return;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(*model);
    delete innerModel;
    *model = nullptr;
}

NNRT_API OH_NN_ReturnCode OH_NNModel_GetAvailableOperations(OH_NNModel *model,
                                                            size_t deviceID,
                                                            const bool **isAvailable,
                                                            uint32_t *opCount)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (isAvailable == nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to isAvailable.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (*isAvailable != nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, *isAvailable is not nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (opCount == nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to opCount.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->GetSupportedOperations(deviceID, isAvailable, *opCount);
}

NNRT_API OH_NN_ReturnCode OH_NN_GetDeviceID(char *nnrtDevice, size_t len)
{
    if (nnrtDevice == nullptr || len == 0) {
        LOGE("nnrtDevice is nullptr or len is 0.");
        return OH_NN_INVALID_PARAMETER;
    }

    char cName[HARDWARE_NAME_MAX_LENGTH] = {0};
    int ret = GetParameter(HARDWARE_NAME.c_str(), NULL_HARDWARE_NAME.c_str(), cName, HARDWARE_NAME_MAX_LENGTH);
    // On success, the return value is the number of bytes of the hardware name.
    if (ret <= 0) {
        LOGE("GetNNRtDeviceName failed, failed to get parameter.");
        return OH_NN_FAILED;
    }

    std::string deviceName = (std::string)cName + "_" + HARDWARE_VERSION;
    auto secureRet = strcpy_s(nnrtDevice, len, deviceName.c_str());
    if (secureRet != EOK) {
        LOGE("GetNNRtDeviceName failed, failed to get name.");
        return OH_NN_FAILED;
    }
    return OH_NN_SUCCESS;
}
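
/*
 * Illustrative usage sketch (not part of this translation unit): querying the
 * NNRt device identifier. The buffer size is a caller choice and must be large
 * enough for the hardware name plus the version suffix.
 *
 *     char deviceId[128] = {0};
 *     if (OH_NN_GetDeviceID(deviceId, sizeof(deviceId)) == OH_NN_SUCCESS) {
 *         // deviceId now holds "<hardware name>_v5_0"
 *     }
 */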