1 /*
2 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "interfaces/innerkits/c/neural_network_runtime_inner.h"
17 #include "interfaces/kits/c/neural_network_runtime/neural_network_runtime.h"
18
19 #include "compilation.h"
20 #include "executor.h"
21 #include "inner_model.h"
22 #include "common/log.h"
23 #include "quant_param.h"
24 #include "validation.h"
25 #include "syspara/parameter.h"
26 #include "securec.h"
27
28 #include <cstring>
29 #include <fstream>
30 #include <sys/stat.h>
31 #include <unistd.h>
32
33 using namespace OHOS::NeuralNetworkRuntime;
34
35 #define NNRT_API __attribute__((visibility("default")))
36
// Keys recognized in the OH_NN_Extension array passed to OH_NNModel_BuildFromLiteGraph;
// each key selects how the corresponding extension value buffer is interpreted.
const std::string EXTENSION_KEY_QUANT_BUFFER = "QuantBuffer";
const std::string EXTENSION_KEY_MODEL_NAME = "ModelName";
const std::string EXTENSION_KEY_IS_PROFILING = "isProfiling";
const std::string EXTENSION_KEY_OP_LAYOUT = "opLayout";
const std::string EXTENSION_KEY_INPUT_DIMS = "InputDims";
const std::string EXTENSION_KEY_DYNAMIC_DIMS = "DynamicDims";
const std::string EXTENSION_KEY_FM_SHARED = "NPU_FM_SHARED";

// Used by OH_NN_GetDeviceID: HARDWARE_NAME is the system parameter key queried via
// GetParameter, NULL_HARDWARE_NAME the fallback value when the parameter is unset.
// NOTE(review): "nnrt_deivce" looks misspelled, but it is a runtime property key —
// confirm against the device parameter table before changing it.
const std::string NULL_HARDWARE_NAME = "default";
const std::string HARDWARE_NAME = "const.ai.nnrt_deivce";
const std::string HARDWARE_VERSION = "v5_0";
constexpr size_t HARDWARE_NAME_MAX_LENGTH = 128;  // local buffer size handed to GetParameter
49
OH_NNQuantParam_Create()50 NNRT_API NN_QuantParam *OH_NNQuantParam_Create()
51 {
52 auto* quantParamImpl = new (std::nothrow) QuantParams();
53 if (quantParamImpl == nullptr) {
54 LOGE("OH_NNQuantParam_Create failed, please check whether it has enough memory.");
55 return nullptr;
56 }
57
58 return (NN_QuantParam*)(quantParamImpl);
59 }
60
OH_NNQuantParam_SetScales(NN_QuantParam * quantParams,const double * scales,size_t quantNum)61 NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetScales(NN_QuantParam* quantParams, const double* scales, size_t quantNum)
62 {
63 if (quantParams == nullptr) {
64 LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to quantParams.");
65 return OH_NN_INVALID_PARAMETER;
66 }
67
68 if (scales == nullptr) {
69 LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to scales.");
70 return OH_NN_INVALID_PARAMETER;
71 }
72
73 if (quantNum == 0) {
74 LOGE("OH_NNQuantParam_SetScales failed, passed 0 to quantNum.");
75 return OH_NN_INVALID_PARAMETER;
76 }
77
78 auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
79 std::vector<double> scaleVector(scales, scales + quantNum);
80 quantParamImpl->SetScales(scaleVector);
81
82 return OH_NN_SUCCESS;
83 }
84
OH_NNQuantParam_SetZeroPoints(NN_QuantParam * quantParams,const int32_t * zeroPoints,size_t quantNum)85 NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetZeroPoints(NN_QuantParam* quantParams,
86 const int32_t* zeroPoints,
87 size_t quantNum)
88 {
89 if (quantParams == nullptr) {
90 LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to quantParams.");
91 return OH_NN_INVALID_PARAMETER;
92 }
93
94 if (zeroPoints == nullptr) {
95 LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to zeroPoints.");
96 return OH_NN_INVALID_PARAMETER;
97 }
98
99 if (quantNum == 0) {
100 LOGE("OH_NNQuantParam_SetZeroPoints failed, passed 0 to quantNum.");
101 return OH_NN_INVALID_PARAMETER;
102 }
103
104 auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
105 std::vector<int32_t> zeroPointVector(zeroPoints, zeroPoints + quantNum);
106 quantParamImpl->SetZeroPoints(zeroPointVector);
107
108 return OH_NN_SUCCESS;
109 }
110
OH_NNQuantParam_SetNumBits(NN_QuantParam * quantParams,const uint32_t * numBits,size_t quantNum)111 OH_NN_ReturnCode OH_NNQuantParam_SetNumBits(NN_QuantParam* quantParams, const uint32_t* numBits, size_t quantNum)
112 {
113 if (quantParams == nullptr) {
114 LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to quantParams.");
115 return OH_NN_INVALID_PARAMETER;
116 }
117
118 if (numBits == nullptr) {
119 LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to numBits.");
120 return OH_NN_INVALID_PARAMETER;
121 }
122
123 if (quantNum == 0) {
124 LOGE("OH_NNQuantParam_SetNumBits failed, passed 0 to quantNum.");
125 return OH_NN_INVALID_PARAMETER;
126 }
127
128 auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
129 std::vector<uint32_t> numBitVector(numBits, numBits + quantNum);
130 quantParamImpl->SetNumBits(numBitVector);
131
132 return OH_NN_SUCCESS;
133 }
134
OH_NNQuantParam_Destroy(NN_QuantParam ** quantParams)135 OH_NN_ReturnCode OH_NNQuantParam_Destroy(NN_QuantParam** quantParams)
136 {
137 if (quantParams == nullptr) {
138 LOGE("OH_NNQuantParam_Destroy failed, passed nullptr to quantParams.");
139 return OH_NN_INVALID_PARAMETER;
140 }
141
142 if (*quantParams == nullptr) {
143 LOGW("OH_NNQuantParam_Destroy failed, passed nullptr to *quantParams.");
144 return OH_NN_INVALID_PARAMETER;
145 }
146
147 auto* quantParamImpl = reinterpret_cast<QuantParams*>(*quantParams);
148 delete quantParamImpl;
149 *quantParams = nullptr;
150
151 return OH_NN_SUCCESS;
152 }
153
OH_NNModel_AddTensorToModel(OH_NNModel * model,const NN_TensorDesc * tensorDesc)154 OH_NN_ReturnCode OH_NNModel_AddTensorToModel(OH_NNModel* model, const NN_TensorDesc* tensorDesc)
155 {
156 if (model == nullptr) {
157 LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to model.");
158 return OH_NN_INVALID_PARAMETER;
159 }
160
161 if (tensorDesc == nullptr) {
162 LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to tensorDesc.");
163 return OH_NN_INVALID_PARAMETER;
164 }
165
166 auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
167 OH_NN_ReturnCode returnCode = innerModel->AddTensorDesc(tensorDesc);
168 if (returnCode != OH_NN_SUCCESS) {
169 LOGE("OH_NNModel_AddTensorToModel failed, error happened when adding tensor to model.");
170 }
171
172 return returnCode;
173 }
174
OH_NNModel_SetTensorQuantParams(OH_NNModel * model,uint32_t index,NN_QuantParam * quantParam)175 OH_NN_ReturnCode OH_NNModel_SetTensorQuantParams(OH_NNModel* model, uint32_t index, NN_QuantParam* quantParam)
176 {
177 if (model == nullptr) {
178 LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to model.");
179 return OH_NN_INVALID_PARAMETER;
180 }
181
182 if (quantParam == nullptr) {
183 LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to quantParam.");
184 return OH_NN_INVALID_PARAMETER;
185 }
186
187 auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
188 OH_NN_ReturnCode returnCode = innerModel->SetTensorQuantParam((uint32_t)(index), quantParam);
189 if (returnCode != OH_NN_SUCCESS) {
190 LOGE("OH_NNModel_SetTensorQuantParams failed, error happened when setting tensor quantParam.");
191 }
192
193 return returnCode;
194 }
195
OH_NNModel_SetTensorType(OH_NNModel * model,uint32_t index,OH_NN_TensorType tensorType)196 OH_NN_ReturnCode OH_NNModel_SetTensorType(OH_NNModel* model, uint32_t index, OH_NN_TensorType tensorType)
197 {
198 if (model == nullptr) {
199 LOGE("OH_NNModel_SetTensorType failed, passed nullptr to model.");
200 return OH_NN_INVALID_PARAMETER;
201 }
202
203 if (!Validation::ValidateTensorType(tensorType)) {
204 LOGE("OH_NNModel_SetTensorType failed, invalid tensor type.");
205 return OH_NN_INVALID_PARAMETER;
206 }
207
208 auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
209 OH_NN_ReturnCode returnCode = innerModel->SetTensorType((uint32_t)(index), tensorType);
210 if (returnCode != OH_NN_SUCCESS) {
211 LOGE("OH_NNModel_SetTensorType failed, error happened when setting tensor type.");
212 }
213
214 return returnCode;
215 }
216
OH_NNModel_Construct(void)217 NNRT_API OH_NNModel *OH_NNModel_Construct(void)
218 {
219 InnerModel *innerModel = new(std::nothrow) InnerModel();
220 if (innerModel == nullptr) {
221 LOGE("OH_NNModel_Construct failed, please check whether it has enough memory.");
222 return nullptr;
223 }
224
225 OH_NNModel *nnModel = reinterpret_cast<OH_NNModel*>(innerModel);
226 return nnModel;
227 }
228
OH_NNModel_AddOperation(OH_NNModel * model,OH_NN_OperationType op,const OH_NN_UInt32Array * paramIndices,const OH_NN_UInt32Array * inputIndices,const OH_NN_UInt32Array * outputIndices)229 NNRT_API OH_NN_ReturnCode OH_NNModel_AddOperation(OH_NNModel *model,
230 OH_NN_OperationType op,
231 const OH_NN_UInt32Array *paramIndices,
232 const OH_NN_UInt32Array *inputIndices,
233 const OH_NN_UInt32Array *outputIndices)
234 {
235 if (model == nullptr) {
236 LOGE("OH_NNModel_AddOperation failed, passed nullptr to model.");
237 return OH_NN_INVALID_PARAMETER;
238 }
239
240 if (paramIndices == nullptr) {
241 LOGE("OH_NNModel_AddOperation failed, passed nullptr to paramIndices.");
242 return OH_NN_INVALID_PARAMETER;
243 }
244
245 if (inputIndices == nullptr) {
246 LOGE("OH_NNModel_AddOperation failed, passed nullptr to inputIndices.");
247 return OH_NN_INVALID_PARAMETER;
248 }
249
250 if (outputIndices == nullptr) {
251 LOGE("OH_NNModel_AddOperation failed, passed nullptr to outputIndices.");
252 return OH_NN_INVALID_PARAMETER;
253 }
254
255 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
256 return innerModel->AddOperation(op, *paramIndices, *inputIndices, *outputIndices);
257 }
258
OH_NNModel_SetTensorData(OH_NNModel * model,uint32_t index,const void * dataBuffer,size_t length)259 NNRT_API OH_NN_ReturnCode OH_NNModel_SetTensorData(OH_NNModel *model,
260 uint32_t index,
261 const void *dataBuffer,
262 size_t length)
263 {
264 if (model == nullptr) {
265 LOGE("OH_NNModel_SetTensorData failed, passed nullptr to model.");
266 return OH_NN_INVALID_PARAMETER;
267 }
268
269 if (dataBuffer == nullptr) {
270 LOGE("OH_NNModel_SetTensorData failed, passed nullptr to dataBuffer, which has no effect.");
271 return OH_NN_INVALID_PARAMETER;
272 }
273
274 if (length == 0) {
275 LOGE("OH_NNModel_SetTensorData failed, passed dataBuffer with length 0, which has no effect.");
276 return OH_NN_INVALID_PARAMETER;
277 }
278
279 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
280 return innerModel->SetTensorValue(index, dataBuffer, length);
281 }
282
OH_NNModel_SpecifyInputsAndOutputs(OH_NNModel * model,const OH_NN_UInt32Array * inputIndices,const OH_NN_UInt32Array * outputIndices)283 NNRT_API OH_NN_ReturnCode OH_NNModel_SpecifyInputsAndOutputs(OH_NNModel *model,
284 const OH_NN_UInt32Array *inputIndices,
285 const OH_NN_UInt32Array *outputIndices)
286 {
287 if (model == nullptr) {
288 LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to model.");
289 return OH_NN_INVALID_PARAMETER;
290 }
291
292 if (inputIndices == nullptr) {
293 LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to inputIndices.");
294 return OH_NN_INVALID_PARAMETER;
295 }
296
297 if (outputIndices == nullptr) {
298 LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to outputIndices.");
299 return OH_NN_INVALID_PARAMETER;
300 }
301
302 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
303 return innerModel->SpecifyInputsAndOutputs(*inputIndices, *outputIndices);
304 }
305
OH_NNModel_Finish(OH_NNModel * model)306 NNRT_API OH_NN_ReturnCode OH_NNModel_Finish(OH_NNModel *model)
307 {
308 if (model == nullptr) {
309 LOGE("OH_NNModel_Finish failed, passed nullptr to model.");
310 return OH_NN_INVALID_PARAMETER;
311 }
312
313 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
314 return innerModel->Build();
315 }
316
// Splits the flat int32 buffer `data` (dataSize bytes) into one dimension vector per
// LiteGraph input and stores them in extensionConfig.inputDims, in input order.
// Every -1 dimension value is counted into `dynamicCount`.
// Returns OH_NN_INVALID_PARAMETER (and clears inputDims) when an input index is out of
// range or the buffer does not contain exactly the dims required by the model.
OH_NN_ReturnCode ParseInputDimsFromExtensions(char* data, size_t dataSize, const mindspore::lite::LiteGraph* liteGraph,
    ExtensionConfig& extensionConfig, size_t& dynamicCount)
{
    extensionConfig.inputDims.clear();
    int32_t* dimsValue = reinterpret_cast<int32_t*>(data);
    size_t allDimsSize = dataSize / sizeof(int32_t);

    size_t inputCount = liteGraph->input_indices_.size(); // number of LiteGraph inputs
    size_t allTensorSize = liteGraph->all_tensors_.size(); // total number of tensors in the LiteGraph
    std::vector<int32_t> inputDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < inputCount; ++i) {
        inputDim.clear();
        if (liteGraph->input_indices_[i] >= allTensorSize) {
            LOGE("ParseInputDimsFromExtensions failed, indice of input %u is out of range.",
                liteGraph->input_indices_[i]);
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Fetch the rank of the current input from the model itself.
        mindspore::lite::TensorPtr tensor = liteGraph->all_tensors_[liteGraph->input_indices_[i]];
        auto tensorDims = mindspore::lite::MindIR_Tensor_GetDims(tensor);
        size_t inputDimSize = tensorDims.size();
        if (allDimsSize < inputDimSize) {
            LOGE("ParseInputDimsFromExtensions failed, dataSize is invalid.");
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Read this input's dim values from the extension buffer.
        for (size_t j = 0; j < inputDimSize; ++j) {
            inputDim.emplace_back(dimsValue[dataIndex]);
            if (dimsValue[dataIndex] == -1) {
                ++dynamicCount;
            }
            ++dataIndex;
        }
        extensionConfig.inputDims.emplace_back(inputDim);
        allDimsSize -= inputDimSize;
    }
    // The buffer must match the model exactly: after the loop allDimsSize must be 0.
    if (allDimsSize != 0) {
        LOGE("ParseInputDimsFromExtensions failed, allDimsSize is not equal to liteGraph.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    return OH_NN_SUCCESS;
}
364
// Parses the "InputDims" and "DynamicDims" extension entries into extensionConfig.
// First fills extensionConfig.inputDims via ParseInputDimsFromExtensions (counting the
// -1 placeholders as `dynamicCount`), then splits the DynamicDims int32 buffer into
// groups of `dynamicCount` values — one concrete shape choice per group — stored in
// extensionConfig.dynamicDims.
// Precondition: caller has verified both keys exist in extensionMap (at() would throw
// otherwise). Returns OH_NN_INVALID_PARAMETER on any size/content mismatch.
OH_NN_ReturnCode ParseDynamicDimsFromExtensions(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    const mindspore::lite::LiteGraph* liteGraph, ExtensionConfig& extensionConfig)
{
    const std::vector<std::pair<char*, size_t>>& inputDims = extensionMap.at(EXTENSION_KEY_INPUT_DIMS);
    if (inputDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, input dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    auto dynamicDims = extensionMap.at(EXTENSION_KEY_DYNAMIC_DIMS);
    if (dynamicDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    // Only the first entry of each key is consumed; both buffers must be non-empty.
    if (inputDims[0].first == nullptr || inputDims[0].second == 0 ||
        dynamicDims[0].first == nullptr || dynamicDims[0].second == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, data or dataSize is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }

    size_t dynamicCount = 0;
    auto returnCode = ParseInputDimsFromExtensions(
        inputDims[0].first, inputDims[0].second, liteGraph, extensionConfig, dynamicCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("ParseDynamicDimsFromExtensions failed, failed to get input dims from extensions.");
        return returnCode;
    }
    // DynamicDims is meaningless unless at least one input dimension was dynamic (-1).
    if (dynamicCount == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic count is 0.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }

    extensionConfig.dynamicDims.clear();
    int32_t* dynamicDimsValue = reinterpret_cast<int32_t*>(dynamicDims[0].first);
    size_t dynamicDimsSize = dynamicDims[0].second / sizeof(int32_t);
    // The buffer must hold a whole number of shape groups, each dynamicCount long.
    if ((dynamicDimsSize % dynamicCount) != 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dataSize is invalid.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    size_t dynamicSize = dynamicDimsSize / dynamicCount;
    std::vector<int32_t> dynamicDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < dynamicSize; ++i) {
        dynamicDim.clear();
        for (size_t j = 0; j < dynamicCount; ++j) {
            dynamicDim.emplace_back(dynamicDimsValue[dataIndex]);
            ++dataIndex;
        }
        extensionConfig.dynamicDims.emplace_back(dynamicDim);
    }

    return OH_NN_SUCCESS;
}
420
CheckExtensionConfigs(const std::unordered_map<std::string,std::vector<std::pair<char *,size_t>>> & extensionMap,ExtensionConfig & extensionConfig)421 OH_NN_ReturnCode CheckExtensionConfigs(
422 const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
423 ExtensionConfig& extensionConfig)
424 {
425 if (extensionMap.find(EXTENSION_KEY_QUANT_BUFFER) != extensionMap.end()) {
426 const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_QUANT_BUFFER);
427 if (value.empty()) {
428 LOGE("ParseExtensionConfigs failed, get empty quant buffer value.");
429 return OH_NN_INVALID_PARAMETER;
430 }
431 extensionConfig.quantBuffer.data = value[0].first;
432 extensionConfig.quantBuffer.length = value[0].second;
433 }
434 if (extensionMap.find(EXTENSION_KEY_MODEL_NAME) != extensionMap.end()) {
435 const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_MODEL_NAME);
436 if (value.empty()) {
437 LOGE("ParseExtensionConfigs failed, get empty model name value.");
438 return OH_NN_INVALID_PARAMETER;
439 }
440 extensionConfig.modelName.assign(value[0].first, value[0].first + value[0].second);
441 }
442 if (extensionMap.find(EXTENSION_KEY_IS_PROFILING) != extensionMap.end()) {
443 const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_IS_PROFILING);
444 if (value.empty()) {
445 LOGE("ParseExtensionConfigs failed, get empty isProfiling value.");
446 return OH_NN_INVALID_PARAMETER;
447 }
448 extensionConfig.isProfiling.assign(value[0].first, value[0].first + value[0].second);
449 }
450 if (extensionMap.find(EXTENSION_KEY_OP_LAYOUT) != extensionMap.end()) {
451 const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_OP_LAYOUT);
452 if (value.empty()) {
453 LOGE("ParseExtensionConfigs failed, get empty op layout value.");
454 return OH_NN_INVALID_PARAMETER;
455 }
456 std::string ops;
457 for (auto singleValue : value) {
458 ops.assign(singleValue.first, singleValue.first + singleValue.second);
459 extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
460 LOGI("ParseExtensionConfigs opLayout:%{public}s.", ops.c_str());
461 }
462 }
463 return OH_NN_SUCCESS;
464 }
465
ParseExtensionConfigs(const std::unordered_map<std::string,std::vector<std::pair<char *,size_t>>> & extensionMap,const mindspore::lite::LiteGraph * pLiteGraph,ExtensionConfig & extensionConfig)466 OH_NN_ReturnCode ParseExtensionConfigs(
467 const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
468 const mindspore::lite::LiteGraph* pLiteGraph, ExtensionConfig& extensionConfig)
469 {
470 extensionConfig.tuningStrategy = TuningStrategy::ON_DEVICE_PREPROCESS_TUNING;
471 OH_NN_ReturnCode ret = CheckExtensionConfigs(extensionMap, extensionConfig);
472 if (ret != OH_NN_SUCCESS) {
473 LOGE("CheckExtensionConfigs failed.");
474 return ret;
475 }
476 if (extensionMap.find(EXTENSION_KEY_INPUT_DIMS) != extensionMap.end() &&
477 extensionMap.find(EXTENSION_KEY_DYNAMIC_DIMS) != extensionMap.end()) {
478 auto returnCode = ParseDynamicDimsFromExtensions(extensionMap, pLiteGraph, extensionConfig);
479 if (returnCode != OH_NN_SUCCESS) {
480 LOGE("ParseExtensionConfigs failed, parse dynamic dims from extensions failed.");
481 return returnCode;
482 }
483 extensionConfig.tuningStrategy = TuningStrategy::OFF; // 分档shape不支持fftl
484 }
485 if (extensionMap.find(EXTENSION_KEY_FM_SHARED) != extensionMap.end()) {
486 extensionConfig.isNpuFmShared = true;
487 LOGI("NNRT enable fm shared success.");
488 }
489 return OH_NN_SUCCESS;
490 }
491
OH_NNModel_BuildFromLiteGraph(OH_NNModel * model,const void * liteGraph,const OH_NN_Extension * extensions,size_t extensionSize)492 NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromLiteGraph(OH_NNModel *model, const void *liteGraph,
493 const OH_NN_Extension *extensions, size_t extensionSize)
494 {
495 if (model == nullptr) {
496 LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to model.");
497 return OH_NN_INVALID_PARAMETER;
498 }
499
500 if (liteGraph == nullptr) {
501 LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to liteGraph.");
502 return OH_NN_INVALID_PARAMETER;
503 }
504
505 auto *pLiteGraph = reinterpret_cast<const mindspore::lite::LiteGraph*>(liteGraph);
506 ExtensionConfig extensionConfig;
507 std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>> extensionMap;
508 for (size_t i = 0; i < extensionSize; ++i) {
509 std::string name = extensions[i].name;
510 if (extensionMap.find(name) == extensionMap.end()) {
511 extensionMap.insert({name, {{extensions[i].value, extensions[i].valueSize}}});
512 } else {
513 extensionMap[name].push_back({extensions[i].value, extensions[i].valueSize});
514 }
515 }
516 auto returnCode = ParseExtensionConfigs(extensionMap, pLiteGraph, extensionConfig);
517 if (returnCode != OH_NN_SUCCESS) {
518 LOGE("OH_NNModel_BuildFromLiteGraph failed, parse extension configs failed.");
519 return returnCode;
520 }
521
522 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
523
524 // Once the innerModel built from the liteGraph successfully, the innerModel
525 // owns the liteGraph, in which case, the invoker should not delete
526 // the liteGraph actively. Otherwise, the invoker still has the ownership.
527 return innerModel->BuildFromLiteGraph(pLiteGraph, extensionConfig);
528 }
529
530 namespace {
CheckCacheFile(const std::string & cacheInfoPath,int64_t & fileNumber,int64_t & cacheVersion)531 OH_NN_ReturnCode CheckCacheFile(const std::string& cacheInfoPath, int64_t& fileNumber, int64_t& cacheVersion)
532 {
533 // read number of cache models
534 char path[PATH_MAX];
535 if (realpath(cacheInfoPath.c_str(), path) == nullptr) {
536 LOGE("OH_NNModel_HasCache get real path of cache info failed.");
537 return OH_NN_INVALID_PARAMETER;
538 }
539
540 if (access(path, F_OK) != 0) {
541 LOGE("OH_NNModel_HasCache access cache info file failed.");
542 return OH_NN_INVALID_PARAMETER;
543 }
544
545 std::ifstream ifs(path, std::ios::in | std::ios::binary);
546 if (!ifs) {
547 LOGE("OH_NNModel_HasCache open cache info file failed.");
548 return OH_NN_INVALID_PARAMETER;
549 }
550
551 if (!ifs.read(reinterpret_cast<char*>(&(fileNumber)), sizeof(fileNumber))) {
552 LOGI("OH_NNModel_HasCache read cache info file failed.");
553 ifs.close();
554 return OH_NN_INVALID_PARAMETER;
555 }
556
557 if (!ifs.read(reinterpret_cast<char*>(&(cacheVersion)), sizeof(cacheVersion))) {
558 LOGI("OH_NNModel_HasCache read cache info file failed.");
559 ifs.close();
560 return OH_NN_INVALID_PARAMETER;
561 }
562
563 ifs.close();
564 return OH_NN_SUCCESS;
565 }
566 }
567
OH_NNModel_HasCache(const char * cacheDir,const char * modelName,uint32_t version)568 NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName, uint32_t version)
569 {
570 if (cacheDir == nullptr) {
571 LOGI("OH_NNModel_HasCache get empty cache directory.");
572 return false;
573 }
574
575 if (modelName == nullptr) {
576 LOGI("OH_NNModel_HasCache get empty model name.");
577 return false;
578 }
579
580 std::string cacheInfoPath = std::string(cacheDir) + "/" + std::string(modelName) + "cache_info.nncache";
581
582 // determine whether cache info file exists
583 struct stat buffer;
584 bool exist = (stat(cacheInfoPath.c_str(), &buffer) == 0);
585 if (!exist) {
586 return false;
587 }
588
589 int64_t fileNumber{0};
590 int64_t cacheVersion{0};
591 OH_NN_ReturnCode returnCode = CheckCacheFile(cacheInfoPath, fileNumber, cacheVersion);
592 if (returnCode != OH_NN_SUCCESS) {
593 LOGE("OH_NNModel_HasCache get fileNumber or cacheVersion fail.");
594 return false;
595 }
596
597 // determine whether cache model files exist
598 for (int64_t i = 0; i < fileNumber; ++i) {
599 std::string cacheModelPath =
600 std::string(cacheDir) + "/" + std::string(modelName) + std::to_string(i) + ".nncache";
601 exist = (exist && (stat(cacheModelPath.c_str(), &buffer) == 0));
602 }
603
604 if (cacheVersion != version) {
605 LOGE("OH_NNModel_HasCache version is not match.");
606 exist = false;
607 }
608
609 return exist;
610 }
611
OH_NNModel_BuildFromMetaGraph(OH_NNModel * model,const void * metaGraph,const OH_NN_Extension * extensions,size_t extensionSize)612 NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromMetaGraph(OH_NNModel *model, const void *metaGraph,
613 const OH_NN_Extension *extensions, size_t extensionSize)
614 {
615 if (model == nullptr) {
616 LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to model.");
617 return OH_NN_INVALID_PARAMETER;
618 }
619
620 if (metaGraph == nullptr) {
621 LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to metaGraph.");
622 return OH_NN_INVALID_PARAMETER;
623 }
624
625 ExtensionConfig extensionConfig;
626 std::string ops;
627 for (size_t i = 0; i < extensionSize; ++i) {
628 std::string name = extensions[i].name;
629 if (name == "QuantBuffer") {
630 extensionConfig.quantBuffer.data = extensions[i].value;
631 extensionConfig.quantBuffer.length = extensions[i].valueSize;
632 } else if (name == "ModelName") {
633 extensionConfig.modelName.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
634 } else if (name == "Profiling") {
635 extensionConfig.isProfiling.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
636 LOGI("OH_NNModel_BuildFromMetaGraph isProfiling enable.");
637 } else if (name == "opLayout") {
638 ops.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
639 extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
640 LOGI("OH_NNModel_BuildFromMetaGraph opLayout:%{public}s.", ops.c_str());
641 }
642 }
643
644 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
645 return innerModel->BuildFromMetaGraph(metaGraph, extensionConfig);
646 }
647
OH_NNModel_SetInputsAndOutputsInfo(OH_NNModel * model,const OH_NN_TensorInfo * inputsInfo,size_t inputSize,const OH_NN_TensorInfo * outputsInfo,size_t outputSize)648 NNRT_API OH_NN_ReturnCode OH_NNModel_SetInputsAndOutputsInfo(OH_NNModel *model, const OH_NN_TensorInfo *inputsInfo,
649 size_t inputSize, const OH_NN_TensorInfo *outputsInfo, size_t outputSize)
650 {
651 if (model == nullptr) {
652 LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, passed nullptr to model.");
653 return OH_NN_INVALID_PARAMETER;
654 }
655
656 if ((inputsInfo == nullptr) || (inputSize == 0)) {
657 LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, inputsInfo is empty.");
658 return OH_NN_INVALID_PARAMETER;
659 }
660
661 if ((outputsInfo == nullptr) || (outputSize == 0)) {
662 LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, outputsInfo is empty.");
663 return OH_NN_INVALID_PARAMETER;
664 }
665
666 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
667 return innerModel->SetInputsAndOutputsInfo(inputsInfo, inputSize, outputsInfo, outputSize);
668 }
669
OH_NNModel_Destroy(OH_NNModel ** model)670 NNRT_API void OH_NNModel_Destroy(OH_NNModel **model)
671 {
672 if (model == nullptr) {
673 LOGW("OH_NNModel_Destroy has no effect, passed nullptr to model.");
674 return;
675 }
676
677 if (*model == nullptr) {
678 LOGW("OH_NNModel_Destroy has no effect, passed nullptr to *model.");
679 return;
680 }
681
682 InnerModel *innerModel = reinterpret_cast<InnerModel*>(*model);
683 delete innerModel;
684 *model = nullptr;
685 }
686
OH_NNModel_GetAvailableOperations(OH_NNModel * model,size_t deviceID,const bool ** isAvailable,uint32_t * opCount)687 NNRT_API OH_NN_ReturnCode OH_NNModel_GetAvailableOperations(OH_NNModel *model,
688 size_t deviceID,
689 const bool **isAvailable,
690 uint32_t *opCount)
691 {
692 if (model == nullptr) {
693 LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to model.");
694 return OH_NN_INVALID_PARAMETER;
695 }
696
697 if (isAvailable == nullptr) {
698 LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to isAvailable.");
699 return OH_NN_INVALID_PARAMETER;
700 }
701
702 if (*isAvailable != nullptr) {
703 LOGE("OH_NNModel_GetAvailableOperations failed, *isAvailable is not nullptr.");
704 return OH_NN_INVALID_PARAMETER;
705 }
706
707 if (opCount == nullptr) {
708 LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to opCount.");
709 return OH_NN_INVALID_PARAMETER;
710 }
711
712 InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
713 return innerModel->GetSupportedOperations(deviceID, isAvailable, *opCount);
714 }
715
OH_NN_GetDeviceID(char * nnrtDevice,size_t len)716 NNRT_API OH_NN_ReturnCode OH_NN_GetDeviceID(char *nnrtDevice, size_t len)
717 {
718 if (nnrtDevice == nullptr || len == 0) {
719 LOGE("nnrtDevice is nullptr or len is 0.");
720 return OH_NN_INVALID_PARAMETER;
721 }
722
723 char cName[HARDWARE_NAME_MAX_LENGTH] = {0};
724 int ret = GetParameter(HARDWARE_NAME.c_str(), NULL_HARDWARE_NAME.c_str(), cName, HARDWARE_NAME_MAX_LENGTH);
725 // 如果成功获取返回值为硬件名称的字节数
726 if (ret <= 0) {
727 LOGE("GetNNRtDeviceName failed, failed to get parameter.");
728 return OH_NN_FAILED;
729 }
730
731 std::string deviceName = (std::string)cName + "_" + HARDWARE_VERSION;
732 auto secureRet = strcpy_s(nnrtDevice, len, deviceName.c_str());
733 if (secureRet != EOK) {
734 LOGE("GetNNRtDeviceName failed, failed to get name.");
735 return OH_NN_FAILED;
736 }
737 return OH_NN_SUCCESS;
738 }