/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef NNRT_UTILS_H
#define NNRT_UTILS_H

#include <dirent.h>
#include <sys/stat.h>
#include <gtest/gtest.h>

#include <cstdint>
#include <string>
#include <vector>

#include "interfaces/kits/c/neural_network_runtime.h"
#include "common/log.h"
#include "mock_idevice.h"
#include "const.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Test {
namespace V1_0 = OHOS::HDI::Nnrt::V1_0;

// Describes a single operand (tensor) used when building a test graph.
struct OHNNOperandTest {
    OH_NN_DataType dataType;
    OH_NN_TensorType type;
    std::vector<int32_t> shape;
    void *data{nullptr};
    int32_t length{0};
    const OH_NN_QuantParam *quantParam = nullptr;
};

// Arguments for building a single-operation model; the flags control which
// build steps (adding the operation, specifying inputs/outputs, finishing the build) are performed.
struct OHNNGraphArgs {
    OH_NN_OperationType operationType;
    std::vector<OHNNOperandTest> operands;
    std::vector<uint32_t> paramIndices;
    std::vector<uint32_t> inputIndices;
    std::vector<uint32_t> outputIndices;
    bool build = true;
    bool specifyIO = true;
    bool addOperation = true;
};

// Arguments for building a model that contains multiple operations.
struct OHNNGraphArgsMulti {
    std::vector<OH_NN_OperationType> operationTypes;
    std::vector<std::vector<OHNNOperandTest>> operands;
    std::vector<std::vector<uint32_t>> paramIndices;
    std::vector<std::vector<uint32_t>> inputIndices;
    std::vector<std::vector<uint32_t>> outputIndices;
    std::vector<uint32_t> graphInput;
    std::vector<uint32_t> graphOutput;
};

// Options applied to an OH_NNCompilation before it is built.
struct OHNNCompileParam {
    int32_t deviceId = 0;
    std::string cacheDir;
    uint32_t cacheVersion = 0;
    OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
    OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
    bool enableFp16 = false;
};

// Builds a single-operation model from the given graph arguments.
int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args);

// Runs the executor with OH_NN_Memory-backed inputs/outputs and compares against the expected data.
int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[],
                       float *expect);

// Releases the model, compilation, and executor if they are not null.
void Free(OH_NNModel *model = nullptr, OH_NNCompilation *compilation = nullptr, OH_NNExecutor *executor = nullptr);

// Compiles the model on the mock device with the given compile options.
int CompileGraphMock(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam);

// Executes the compiled graph on the mock device and compares against the expected data.
int ExecuteGraphMock(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, float *expect);

int SetDevice(OH_NNCompilation *compilation);
int BuildMultiOpGraph(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs);
OH_NN_UInt32Array GetUInt32Array(std::vector<uint32_t> indices);

bool CheckOutput(const float *output, const float *expect);

// File-system helpers used by the model-cache test cases.
enum class PathType { FILE, DIR, UNKNOWN, NOT_FOUND };
PathType CheckPath(const std::string &path);
bool DeleteFile(const std::string &path);
void CopyFile(const std::string &srcPath, const std::string &dstPath);
std::string ConcatPath(const std::string &str1, const std::string &str2);
void DeleteFolder(const std::string &path);
bool CreateFolder(const std::string &path);

} // namespace Test
} // namespace NeuralNetworkRuntime
} // namespace OHOS

#endif // NNRT_UTILS_H
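
/*
 * Usage sketch (illustrative only, kept as a comment so the header stays unchanged for the build):
 * a minimal single-operator test built on the helpers above. The operand values, shapes, the
 * OH_NN_OPS_ADD operation, the activation encoding, and the assumption that the helpers return
 * OH_NN_SUCCESS on success are illustrative choices, not part of this interface. The
 * OH_NNModel_Construct / OH_NNCompilation_Construct / OH_NNExecutor_Construct calls come from the
 * NNRT C API header included above.
 *
 *   float inputA[4] = {1, 2, 3, 4};
 *   float inputB[4] = {1, 1, 1, 1};
 *   int8_t activation = 0;  // fused activation: none (assumed encoding)
 *   float output[4] = {0};
 *   float expect[4] = {2, 3, 4, 5};
 *
 *   OHNNOperandTest in0 = {OH_NN_FLOAT32, OH_NN_TENSOR, {1, 2, 2, 1}, inputA, (int32_t)sizeof(inputA)};
 *   OHNNOperandTest in1 = {OH_NN_FLOAT32, OH_NN_TENSOR, {1, 2, 2, 1}, inputB, (int32_t)sizeof(inputB)};
 *   OHNNOperandTest act = {OH_NN_INT8, OH_NN_ADD_ACTIVATIONTYPE, {}, &activation, (int32_t)sizeof(int8_t)};
 *   OHNNOperandTest out = {OH_NN_FLOAT32, OH_NN_TENSOR, {1, 2, 2, 1}, output, (int32_t)sizeof(output)};
 *
 *   OHNNGraphArgs graphArgs;
 *   graphArgs.operationType = OH_NN_OPS_ADD;
 *   graphArgs.operands = {in0, in1, act, out};
 *   graphArgs.paramIndices = {2};
 *   graphArgs.inputIndices = {0, 1};
 *   graphArgs.outputIndices = {3};
 *
 *   OH_NNModel *model = OH_NNModel_Construct();
 *   ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
 *
 *   OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
 *   ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, OHNNCompileParam{}));  // default compile options
 *
 *   OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
 *   ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, expect));
 *
 *   Free(model, compilation, executor);
 */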