• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #include "nnrt_utils.h"
16 #include "const.h"
17 #include <fstream>
18 
19 namespace OHOS {
20 namespace NeuralNetworkRuntime {
21 namespace Test {
22 
TransformUInt32Array(const std::vector<uint32_t> & vector)23 OH_NN_UInt32Array TransformUInt32Array(const std::vector<uint32_t>& vector)
24 {
25     uint32_t* data = (vector.empty()) ? nullptr : const_cast<uint32_t*>(vector.data());
26     return {data, vector.size()};
27 }
28 
BuildMultiOpGraph(OH_NNModel * model,const OHNNGraphArgsMulti & graphArgs)29 int BuildMultiOpGraph(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs)
30 {
31     int ret = 0;
32     int opCnt = 0;
33     for (int j = 0; j < graphArgs.operationTypes.size(); j++) {
34         for (int i = 0; i < graphArgs.operands[j].size(); i++) {
35             const OHNNOperandTest &operandTem = graphArgs.operands[j][i];
36             auto quantParam = operandTem.quantParam;
37             OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
38                 operandTem.shape.data(), quantParam, operandTem.type};
39             ret = OH_NNModel_AddTensor(model, &operand);
40             if (ret != OH_NN_SUCCESS) {
41                 LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
42                 return ret;
43             }
44             if (std::find(graphArgs.paramIndices[j].begin(), graphArgs.paramIndices[j].end(), opCnt) !=
45                 graphArgs.paramIndices[j].end()) {
46                 ret = OH_NNModel_SetTensorData(model, opCnt, operandTem.data, operandTem.length);
47             }
48             if (ret != OH_NN_SUCCESS) {
49                 LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
50                 return ret;
51             }
52             opCnt += 1;
53         }
54         auto paramIndices = TransformUInt32Array(graphArgs.paramIndices[j]);
55         auto inputIndices = TransformUInt32Array(graphArgs.inputIndices[j]);
56         auto outputIndices = TransformUInt32Array(graphArgs.outputIndices[j]);
57 
58         ret = OH_NNModel_AddOperation(model, graphArgs.operationTypes[j], &paramIndices, &inputIndices,
59         &outputIndices);
60         if (ret != OH_NN_SUCCESS) {
61             LOGE("[NNRtTest] OH_NNModel_AddOperation failed! ret=%d\n", ret);
62             return ret;
63         }
64     }
65     auto graphInputs = TransformUInt32Array(graphArgs.graphInput);
66     auto graphOutputs = TransformUInt32Array(graphArgs.graphOutput);
67     ret = OH_NNModel_SpecifyInputsAndOutputs(model, &graphInputs, &graphOutputs);
68     if (ret != OH_NN_SUCCESS) {
69         LOGE("[NNRtTest] OH_NNModel_SpecifyInputsAndOutputs failed! ret=%d\n", ret);
70         return ret;
71     }
72     ret = OH_NNModel_Finish(model);
73     if (ret != OH_NN_SUCCESS) {
74         LOGE("[NNRtTest] OH_NNModel_Finish failed! ret=%d\n", ret);
75         return ret;
76     }
77     return ret;
78 }
79 
BuildSingleOpGraph(OH_NNModel * model,const OHNNGraphArgs & graphArgs)80 int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs)
81 {
82     int ret = 0;
83     for (int i = 0; i < graphArgs.operands.size(); i++) {
84         const OHNNOperandTest &operandTem = graphArgs.operands[i];
85         auto quantParam = operandTem.quantParam;
86         OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
87             operandTem.shape.data(), quantParam, operandTem.type};
88         ret = OH_NNModel_AddTensor(model, &operand);
89         if (ret != OH_NN_SUCCESS) {
90             LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
91             return ret;
92         }
93 
94         if (std::find(graphArgs.paramIndices.begin(), graphArgs.paramIndices.end(), i) !=
95             graphArgs.paramIndices.end()) {
96             ret = OH_NNModel_SetTensorData(model, i, operandTem.data, operandTem.length);
97             if (ret != OH_NN_SUCCESS) {
98                 LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
99                 return ret;
100             }
101         }
102     }
103     auto paramIndices = TransformUInt32Array(graphArgs.paramIndices);
104     auto inputIndices = TransformUInt32Array(graphArgs.inputIndices);
105     auto outputIndices = TransformUInt32Array(graphArgs.outputIndices);
106     if (graphArgs.addOperation) {
107         ret = OH_NNModel_AddOperation(model, graphArgs.operationType, &paramIndices, &inputIndices,
108                                       &outputIndices);
109         if (ret != OH_NN_SUCCESS) {
110             LOGE("[NNRtTest] OH_NNModel_AddOperation failed! ret=%d\n", ret);
111             return ret;
112         }
113     }
114     if (graphArgs.specifyIO) {
115         ret = OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices);
116         if (ret != OH_NN_SUCCESS) {
117             LOGE("[NNRtTest] OH_NNModel_SpecifyInputsAndOutputs failed! ret=%d\n", ret);
118             return ret;
119         }
120     }
121     if (graphArgs.build) {
122         ret = OH_NNModel_Finish(model);
123         if (ret != OH_NN_SUCCESS) {
124             LOGE("[NNRtTest] OH_NNModel_Finish failed! ret=%d\n", ret);
125             return ret;
126         }
127     }
128     return ret;
129 }
130 
SetDevice(OH_NNCompilation * compilation)131 int SetDevice(OH_NNCompilation *compilation)
132 {
133     int ret = 0;
134     const size_t *devicesID{nullptr};
135     uint32_t devicesCount{0};
136     ret = OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount);
137     if (ret != OH_NN_SUCCESS) {
138         LOGE("[NNRtTest] OH_NNDevice_GetAllDevicesID failed! ret=%d\n", ret);
139         return ret;
140     }
141     if (devicesCount <= NO_DEVICE_COUNT) {
142         return OH_NN_FAILED;
143     }
144     size_t targetDevice = devicesID[0]; // Use the first device in system test.
145     ret = OH_NNCompilation_SetDevice(compilation, targetDevice);
146     return ret;
147 }
148 
CompileGraphMock(OH_NNCompilation * compilation,const OHNNCompileParam & compileParam)149 int CompileGraphMock(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam)
150 {
151     int ret = 0;
152     ret = SetDevice(compilation);
153     if (ret != OH_NN_SUCCESS) {
154         LOGE("[NNRtTest] OH_NNCompilation_SetDevice failed! ret=%d\n", ret);
155         return ret;
156     }
157     // set cache
158     if (!compileParam.cacheDir.empty()) {
159         ret = OH_NNCompilation_SetCache(compilation, compileParam.cacheDir.c_str(),
160         compileParam.cacheVersion);
161         if (ret != OH_NN_SUCCESS) {
162             LOGE("[NNRtTest] OH_NNCompilation_SetCache failed! ret=%d\n", ret);
163             return ret;
164         }
165     }
166     // set performance
167     if (compileParam.performanceMode != OH_NN_PERFORMANCE_NONE) {
168         ret = OH_NNCompilation_SetPerformanceMode(compilation, compileParam.performanceMode);
169         if (ret != OH_NN_SUCCESS) {
170             LOGE("[NNRtTest] OH_NNCompilation_SetPerformanceMode failed! ret=%d\n", ret);
171             return ret;
172         }
173     }
174     // set priority
175     if (compileParam.priority != OH_NN_PRIORITY_NONE) {
176         ret = OH_NNCompilation_SetPriority(compilation, compileParam.priority);
177         if (ret != OH_NN_SUCCESS) {
178             LOGE("[NNRtTest] OH_NNCompilation_SetPriority failed! ret=%d\n", ret);
179             return ret;
180         }
181     }
182     // enable fp16
183     if (compileParam.enableFp16) {
184         ret = OH_NNCompilation_EnableFloat16(compilation, compileParam.enableFp16);
185         if (ret != OH_NN_SUCCESS) {
186             LOGE("[NNRtTest] OH_NNCompilation_EnableFloat16 failed! ret=%d\n", ret);
187             return ret;
188         }
189     }
190     // build
191     ret = OH_NNCompilation_Build(compilation);
192     return ret;
193 }
194 
195 
ExecuteGraphMock(OH_NNExecutor * executor,const OHNNGraphArgs & graphArgs,float * expect)196 int ExecuteGraphMock(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs,
197     float* expect)
198 {
199     OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
200     int ret = 0;
201     uint32_t inputIndex = 0;
202     uint32_t outputIndex = 0;
203     for (auto i = 0; i < graphArgs.operands.size(); i++) {
204         const OHNNOperandTest &operandTem = graphArgs.operands[i];
205         auto quantParam = operandTem.quantParam;
206         OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
207             operandTem.shape.data(),
208             quantParam, operandTem.type};
209         if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
210             graphArgs.inputIndices.end()) {
211             ret = OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data,
212             operandTem.length);
213             if (ret != OH_NN_SUCCESS) {
214                 LOGE("[NNRtTest] OH_NNExecutor_SetInput failed! ret=%d\n", ret);
215                 return ret;
216             }
217             inputIndex += 1;
218         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
219                    graphArgs.outputIndices.end()) {
220             ret = OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length);
221             if (ret != OH_NN_SUCCESS) {
222                 LOGE("[NNRtTest] OH_NNExecutor_SetOutput failed! ret=%d\n", ret);
223                 return ret;
224             }
225             if(expect!=nullptr){
226                 ret = device->MemoryCopy(expect, operandTem.length);
227             }
228             if (ret != OH_NN_SUCCESS) {
229                 LOGE("[NNRtTest] device set expect output failed! ret=%d\n", ret);
230                 return ret;
231             }
232             outputIndex += 1;
233         }
234     }
235     ret = OH_NNExecutor_Run(executor);
236     return ret;
237 }
238 
ExecutorWithMemory(OH_NNExecutor * executor,const OHNNGraphArgs & graphArgs,OH_NN_Memory * OHNNMemory[],float * expect)239 int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[],
240     float* expect)
241 {
242     OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
243     int ret = 0;
244     uint32_t inputIndex = 0;
245     uint32_t outputIndex = 0;
246     for (auto i = 0; i < graphArgs.operands.size(); i++) {
247         const OHNNOperandTest &operandTem = graphArgs.operands[i];
248         auto quantParam = operandTem.quantParam;
249         OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
250             operandTem.shape.data(),
251             quantParam, operandTem.type};
252         if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
253             graphArgs.inputIndices.end()) {
254             OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex,
255             operandTem.length);
256             ret = OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory);
257             if (ret != OH_NN_SUCCESS) {
258                 LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! ret=%d\n", ret);
259                 return ret;
260             }
261             memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length);
262             OHNNMemory[inputIndex] = inputMemory;
263             inputIndex += 1;
264         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
265                    graphArgs.outputIndices.end()) {
266             OH_NN_Memory *outputMemory = OH_NNExecutor_AllocateOutputMemory(executor, outputIndex,
267             operandTem.length);
268             ret = OH_NNExecutor_SetOutputWithMemory(executor, outputIndex, outputMemory);
269             if (ret != OH_NN_SUCCESS) {
270                 LOGE("[NNRtTest] OH_NNExecutor_SetOutputWithMemory failed! ret=%d\n", ret);
271                 return ret;
272             }
273             ret = device->MemoryCopy(expect, operandTem.length);
274             if (ret != OH_NN_SUCCESS) {
275                 LOGE("[NNRtTest] device set expect output failed! ret=%d\n", ret);
276                 return ret;
277             }
278             OHNNMemory[inputIndex + outputIndex] = outputMemory;
279             outputIndex += 1;
280         }
281     }
282     ret = OH_NNExecutor_Run(executor);
283     return ret;
284 }
285 
286 
// Destroys whichever of the three NNRt handles were created, in the order
// model -> compilation -> executor. Each *_Destroy API nulls the pointer it is
// given; the assertions verify that contract. Null handles are skipped, so
// callers may pass nullptr for objects they never created.
void Free(OH_NNModel *model, OH_NNCompilation *compilation, OH_NNExecutor *executor)
{
    if (model != nullptr) {
        OH_NNModel_Destroy(&model);
        ASSERT_EQ(nullptr, model);
    }
    if (compilation != nullptr) {
        OH_NNCompilation_Destroy(&compilation);
        ASSERT_EQ(nullptr, compilation);
    }
    if (executor != nullptr) {
        OH_NNExecutor_Destroy(&executor);
        ASSERT_EQ(nullptr, executor);
    }
}
302 
CheckPath(const std::string & path)303 PathType CheckPath(const std::string &path)
304 {
305     if (path.empty()) {
306         LOGI("CheckPath: path is null");
307         return PathType::NOT_FOUND;
308     }
309     struct stat buf{};
310     if (stat(path.c_str(), &buf) == 0) {
311         if (buf.st_mode & S_IFDIR) {
312             return PathType::DIR;
313         } else if (buf.st_mode & S_IFREG) {
314             return PathType::FILE;
315         } else {
316             return PathType::UNKNOWN;
317         }
318     }
319     LOGI("%s not found", path.c_str());
320     return PathType::NOT_FOUND;
321 }
322 
DeleteFile(const std::string & path)323 bool DeleteFile(const std::string &path)
324 {
325     if (path.empty()) {
326         LOGI("DeleteFile: path is null");
327         return false;
328     }
329     if (CheckPath(path) == PathType::NOT_FOUND) {
330         LOGI("not found: %s", path.c_str());
331         return true;
332     }
333     if (remove(path.c_str()) == 0) {
334         LOGI("deleted: %s", path.c_str());
335         return true;
336     }
337     LOGI("delete failed: %s", path.c_str());
338     return false;
339 }
340 
/**
 * Copies srcPath to dstPath byte-for-byte.
 *
 * The source is opened first and checked; previously a missing or unreadable
 * source still opened (and truncated) the destination, silently leaving an
 * empty file behind.
 */
void CopyFile(const std::string &srcPath, const std::string &dstPath)
{
    std::ifstream src(srcPath, std::ios::binary);
    if (!src.is_open()) {
        return; // do not touch dst when the source cannot be read
    }
    std::ofstream dst(dstPath, std::ios::binary);
    dst << src.rdbuf();
}
348 
/**
 * Joins two path fragments, inserting '/' unless str1 already ends with a
 * path separator ('/' or '\\'). An empty fragment yields the other one.
 */
std::string ConcatPath(const std::string &str1, const std::string &str2)
{
    if (str2.empty()) {
        return str1;
    }
    if (str1.empty()) {
        return str2;
    }
    const char last = str1.back();
    const bool hasSeparator = (last == '\\' || last == '/');
    return hasSeparator ? (str1 + str2) : (str1 + '/' + str2);
}
366 
DeleteFolder(const std::string & path)367 void DeleteFolder(const std::string &path)
368 {
369     if (path.empty()) {
370         LOGI("DeletePath: path is null");
371         return;
372     }
373 
374     DIR *dir = opendir(path.c_str());
375     // check is dir ?
376     if (dir == nullptr) {
377         LOGE("[NNRtTest] Can not open dir. Check path or permission! path: %s", path.c_str());
378         return;
379     }
380     struct dirent *file;
381     // read all the files in dir
382     std::vector <std::string> pathList;
383     while ((file = readdir(dir)) != nullptr) {
384         // skip "." and ".."
385         if (strcmp(file->d_name, ".") == 0 || strcmp(file->d_name, "..") == 0) {
386             continue;
387         }
388         if (file->d_type == DT_DIR) {
389             std::string filePath = path + "/" + file->d_name;
390             DeleteFolder(filePath); // 递归执行
391         } else {
392             pathList.emplace_back(ConcatPath(path, file->d_name));
393         }
394     }
395     closedir(dir);
396     pathList.emplace_back(path);
397     LOGI("[Common] Delete folder %s", path.c_str());
398     for (auto &i : pathList) {
399         DeleteFile(i);
400     }
401 }
402 
CreateFolder(const std::string & path)403 bool CreateFolder(const std::string &path)
404 {
405     if (path.empty()) {
406         LOGI("CreateFolder: path is empty");
407         return false;
408     }
409     LOGI("CreateFolder:%s", path.c_str());
410     mode_t mode = 0700;
411     for (int i = 1; i < path.size() - 1; i++) {
412         if (path[i] != '/') {
413             continue;
414         }
415         PathType ret = CheckPath(path.substr(0, i));
416         switch (ret) {
417             case PathType::DIR:
418                 continue;
419             case PathType::NOT_FOUND:
420                 LOGI("mkdir: %s", path.substr(0, i).c_str());
421                 mkdir(path.substr(0, i).c_str(), mode);
422                 break;
423             default:
424                 LOGI("error: %s", path.substr(0, i).c_str());
425                 return false;
426         }
427     }
428     mkdir(path.c_str(), mode);
429     return CheckPath(path) == PathType::DIR;
430 }
431 
CheckOutput(const float * output,const float * expect)432 bool CheckOutput(const float* output, const float* expect)
433 {
434     if (output == nullptr || expect == nullptr) {
435         LOGE("[NNRtTest] output or expect is nullptr\n");
436         return false;
437     }
438     for (int i = 0; i < ELEMENT_COUNT; i++) {
439         if (std::abs(float(output[i]) - float(expect[i])) > 1e-8) {
440             for (int j = 0; j < ELEMENT_COUNT; j++) {
441                 LOGE("[NNRtTest] output %d not match: expect:%f, actual:%f\n", j, float(expect[j]), float(output[j]));
442             }
443             return false;
444         }
445     }
446     return true;
447 }
448 
449 } // namespace Test
450 } // namespace NeuralNetworkRuntime
451 } // namespace OHOS
452