• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "gtest/gtest.h"
17 #include <random>
18 #include "../utils/model_utils.h"
19 #include "../utils/common.h"
20 
21 class MSLiteNnrtTest: public testing::Test {
22   protected:
SetUpTestCase(void)23     static void SetUpTestCase(void) {}
TearDownTestCase(void)24     static void TearDownTestCase(void) {}
SetUp()25     virtual void SetUp() {}
TearDown()26     virtual void TearDown() {}
27 };
28 
29 /*
30  * @tc.name: Nnrt_Test
31  * @tc.desc: Verify the NNRT delegate.
32  * @tc.type: FUNC
33  */
34 HWTEST(MSLiteNnrtTest, Nnrt_ContextTest, testing::ext::TestSize.Level0) {
35     std::cout << "==========Get All Nnrt Device Descs==========" << std::endl;
36     size_t num = 0;
37     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
38     if (descs == nullptr) {
39         std::cout << "descs is nullptr , num: " << num << std::endl;
40         ASSERT_EQ(num, 0);
41         return;
42     }
43 
44     std::cout << "found " << num << " nnrt devices" << std::endl;
45     for (size_t i = 0; i < num; i++) {
46         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
47         ASSERT_NE(desc, nullptr);
48         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
49         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
50         auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
51         std::cout << "NNRT device: id = " << id << ", name: " << name << ", type:" << type << std::endl;
52     }
53 
54     for (size_t i = 0; i < num; i++) {
55         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
56         ASSERT_NE(desc, nullptr);
57         OH_AI_DestroyAllNNRTDeviceDescs(&desc);
58         ASSERT_EQ(desc, nullptr);
59     }
60 }
61 
/*
 * @tc.name: Nnrt_CreateNnrtDevice
 * @tc.desc: Verify the NNRT device create function.
 * @tc.type: FUNC
 */
HWTEST(MSLiteNnrtTest, Nnrt_CreateNnrtDevice, testing::ext::TestSize.Level0) {
    std::cout << "==========Get All Nnrt Device Descs==========" << std::endl;
    size_t num = 0;
    // NOTE(review): `desc` is the head of an array of `num` descriptions;
    // the queries below implicitly read the FIRST element only.
    auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (desc == nullptr) {
        // No NNRT devices present: count must be 0 and there is nothing to test.
        std::cout << "descs is nullptr , num: " << num << std::endl;
        ASSERT_EQ(num, 0);
        return;
    }

    std::cout << "found " << num << " nnrt devices" << std::endl;
    // Identity of the first device, used to cross-check every creation path below.
    auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
    auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
    auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
    std::cout << "NNRT device: id = " << id << ", name = " << name << ", type = " << type << std::endl;

    // create by name
    auto nnrtDeviceInfo = OH_AI_CreateNNRTDeviceInfoByName(name);
    ASSERT_NE(nnrtDeviceInfo, nullptr);

    OH_AI_DeviceType deviceType = OH_AI_DeviceInfoGetDeviceType(nnrtDeviceInfo);
    printf("==========deviceType:%d\n", deviceType);
    // The info created by name must resolve to the same device id and be NNRT-typed.
    ASSERT_EQ(OH_AI_DeviceInfoGetDeviceId(nnrtDeviceInfo), id);
    ASSERT_EQ(deviceType, OH_AI_DEVICETYPE_NNRT);
    OH_AI_DeviceInfoDestroy(&nnrtDeviceInfo);
    // Destroy must null out the handle so it can be safely reused below.
    ASSERT_EQ(nnrtDeviceInfo, nullptr);

    // create by type
    nnrtDeviceInfo = OH_AI_CreateNNRTDeviceInfoByType(type);
    ASSERT_NE(nnrtDeviceInfo, nullptr);

    deviceType = OH_AI_DeviceInfoGetDeviceType(nnrtDeviceInfo);
    printf("==========deviceType:%d\n", deviceType);
    ASSERT_EQ(deviceType, OH_AI_DEVICETYPE_NNRT);
    // NOTE(review): assumes type lookup selects the same (first) device id —
    // holds when only one device of this type exists; verify on multi-device rigs.
    ASSERT_EQ(OH_AI_DeviceInfoGetDeviceId(nnrtDeviceInfo), id);
    OH_AI_DeviceInfoDestroy(&nnrtDeviceInfo);
    ASSERT_EQ(nnrtDeviceInfo, nullptr);

    // create by id
    nnrtDeviceInfo = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
    ASSERT_NE(nnrtDeviceInfo, nullptr);
    OH_AI_DeviceInfoSetDeviceId(nnrtDeviceInfo, id);

    deviceType = OH_AI_DeviceInfoGetDeviceType(nnrtDeviceInfo);
    printf("==========deviceType:%d\n", deviceType);
    ASSERT_EQ(deviceType, OH_AI_DEVICETYPE_NNRT);

    // Exercise the setter/getter round-trips for performance mode and priority.
    OH_AI_DeviceInfoSetPerformanceMode(nnrtDeviceInfo, OH_AI_PERFORMANCE_MEDIUM);
    ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrtDeviceInfo), OH_AI_PERFORMANCE_MEDIUM);
    OH_AI_DeviceInfoSetPriority(nnrtDeviceInfo, OH_AI_PRIORITY_MEDIUM);
    ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrtDeviceInfo), OH_AI_PRIORITY_MEDIUM);
    // Extensions are passed as raw key/value byte buffers; values are only set
    // here, there is no getter round-trip to assert against.
    std::string cachePath = "/data/local/tmp/";
    std::string cacheVersion = "1";
    OH_AI_DeviceInfoAddExtension(nnrtDeviceInfo, "CachePath", cachePath.c_str(), cachePath.size());
    OH_AI_DeviceInfoAddExtension(nnrtDeviceInfo, "CacheVersion", cacheVersion.c_str(), cacheVersion.size());
    OH_AI_DeviceInfoDestroy(&nnrtDeviceInfo);
    ASSERT_EQ(nnrtDeviceInfo, nullptr);

    // Release the description array obtained at the top (single call, array head).
    OH_AI_DestroyAllNNRTDeviceDescs(&desc);
}
127 
128 /*
129  * @tc.name: Nnrt_NpuPredict
130  * @tc.desc: Verify the NNRT predict.
131  * @tc.type: FUNC
132  */
133 HWTEST(MSLiteNnrtTest, Nnrt_NpuPredict, testing::ext::TestSize.Level0) {
134     if (!IsNPU()) {
135         printf("NNRt is not NPU, skip this test");
136         return;
137     }
138 
139     printf("==========Init Context==========\n");
140     OH_AI_ContextHandle context = OH_AI_ContextCreate();
141     ASSERT_NE(context, nullptr);
142     AddContextDeviceNNRT(context);
143     printf("==========Create model==========\n");
144     OH_AI_ModelHandle model = OH_AI_ModelCreate();
145     ASSERT_NE(model, nullptr);
146     printf("==========Build model==========\n");
147     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/resource/tinynet.om.ms",
148                                                 OH_AI_MODELTYPE_MINDIR, context);
149     printf("==========build model return code:%d\n", ret);
150     if (ret != OH_AI_STATUS_SUCCESS) {
151         printf("==========build model failed, ret: %d\n", ret);
152         OH_AI_ModelDestroy(&model);
153         return;
154     }
155 
156     printf("==========GetInputs==========\n");
157     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
158     ASSERT_NE(inputs.handle_list, nullptr);
159     for (size_t i = 0; i < inputs.handle_num; ++i) {
160         OH_AI_TensorHandle tensor = inputs.handle_list[i];
161         float *inputData = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
162         size_t elementNum = OH_AI_TensorGetElementNum(tensor);
163         std::random_device rd;
164         std::mt19937 gen(rd());
165         std::uniform_real_distribution<float> dis(0.0f,1.0f);
166         for (size_t z = 0; z < elementNum; z++) {
167             inputData[z] = dis(gen);
168         }
169     }
170     printf("==========Model Predict==========\n");
171     OH_AI_TensorHandleArray outputs;
172     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
173     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
174     OH_AI_ModelDestroy(&model);
175 }
176 
177 /*
178  * @tc.name: Nnrt_NpuCpuPredict
179  * @tc.desc: Verify the NNRT npu/cpu predict.
180  * @tc.type: FUNC
181  */
182 HWTEST(MSLiteNnrtTest, Nnrt_NpuCpuPredict, testing::ext::TestSize.Level0) {
183     printf("==========Init Context==========\n");
184     OH_AI_ContextHandle context = OH_AI_ContextCreate();
185     ASSERT_NE(context, nullptr);
186     AddContextDeviceNNRT(context);
187     AddContextDeviceCPU(context);
188     printf("==========Create model==========\n");
189     OH_AI_ModelHandle model = OH_AI_ModelCreate();
190     ASSERT_NE(model, nullptr);
191     printf("==========Build model==========\n");
192     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/resource/ml_face_isface.ms",
193                                                 OH_AI_MODELTYPE_MINDIR, context);
194     printf("==========build model return code:%d\n", ret);
195     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
196 
197     printf("==========GetInputs==========\n");
198     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
199     ASSERT_NE(inputs.handle_list, nullptr);
200     FillInputsData(inputs, "ml_face_isface", true);
201     printf("==========Model Predict==========\n");
202     OH_AI_TensorHandleArray outputs;
203     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
204     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
205     CompareResult(outputs, "ml_face_isface");
206     OH_AI_ModelDestroy(&model);
207 }
208