/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>
#include <iostream>
#include <random>
#include <string>

#include "gtest/gtest.h"

#include "../utils/common.h"
#include "../utils/model_utils.h"
20 
21 class MSLiteNnrtTest: public testing::Test {
22   protected:
SetUpTestCase(void)23     static void SetUpTestCase(void) {}
TearDownTestCase(void)24     static void TearDownTestCase(void) {}
SetUp()25     virtual void SetUp() {}
TearDown()26     virtual void TearDown() {}
27 };
28 
/*
 * @tc.name: Nnrt_ContextTest
 * @tc.desc: Verify the NNRT delegate.
 * @tc.type: FUNC
 */
34 HWTEST(MSLiteNnrtTest, Nnrt_ContextTest, testing::ext::TestSize.Level0) {
35     std::cout << "==========Get All Nnrt Device Descs==========" << std::endl;
36     size_t num = 0;
37     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
38     if (descs == nullptr) {
39         std::cout << "descs is nullptr , num: " << num << std::endl;
40         ASSERT_EQ(num, 0);
41         return;
42     }
43 
44     std::cout << "found " << num << " nnrt devices" << std::endl;
45     for (size_t i = 0; i < num; i++) {
46         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
47         ASSERT_NE(desc, nullptr);
48         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
49         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
50         auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
51         std::cout << "NNRT device: id = " << id << ", name: " << name << ", type:" << type << std::endl;
52     }
53 
54     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
55     ASSERT_EQ(descs, nullptr);
56 }
57 
/*
 * @tc.name: Nnrt_CreateNnrtDevice
 * @tc.desc: Verify the NNRT device create function.
 * @tc.type: FUNC
 */
63 HWTEST(MSLiteNnrtTest, Nnrt_CreateNnrtDevice, testing::ext::TestSize.Level0) {
64     std::cout << "==========Get All Nnrt Device Descs==========" << std::endl;
65     size_t num = 0;
66     auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
67     if (desc == nullptr) {
68         std::cout << "descs is nullptr , num: " << num << std::endl;
69         ASSERT_EQ(num, 0);
70         return;
71     }
72 
73     std::cout << "found " << num << " nnrt devices" << std::endl;
74     auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
75     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
76     auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
77     std::cout << "NNRT device: id = " << id << ", name = " << name << ", type = " << type << std::endl;
78 
79     // create by name
80     auto nnrtDeviceInfo = OH_AI_CreateNNRTDeviceInfoByName(name);
81     ASSERT_NE(nnrtDeviceInfo, nullptr);
82 
83     OH_AI_DeviceType deviceType = OH_AI_DeviceInfoGetDeviceType(nnrtDeviceInfo);
84     printf("==========deviceType:%d\n", deviceType);
85     ASSERT_EQ(OH_AI_DeviceInfoGetDeviceId(nnrtDeviceInfo), id);
86     ASSERT_EQ(deviceType, OH_AI_DEVICETYPE_NNRT);
87     OH_AI_DeviceInfoDestroy(&nnrtDeviceInfo);
88     ASSERT_EQ(nnrtDeviceInfo, nullptr);
89 
90     // create by type
91     nnrtDeviceInfo = OH_AI_CreateNNRTDeviceInfoByType(type);
92     ASSERT_NE(nnrtDeviceInfo, nullptr);
93 
94     deviceType = OH_AI_DeviceInfoGetDeviceType(nnrtDeviceInfo);
95     printf("==========deviceType:%d\n", deviceType);
96     ASSERT_EQ(deviceType, OH_AI_DEVICETYPE_NNRT);
97     ASSERT_EQ(OH_AI_DeviceInfoGetDeviceId(nnrtDeviceInfo), id);
98     OH_AI_DeviceInfoDestroy(&nnrtDeviceInfo);
99     ASSERT_EQ(nnrtDeviceInfo, nullptr);
100 
101     // create by id
102     nnrtDeviceInfo = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
103     ASSERT_NE(nnrtDeviceInfo, nullptr);
104     OH_AI_DeviceInfoSetDeviceId(nnrtDeviceInfo, id);
105 
106     deviceType = OH_AI_DeviceInfoGetDeviceType(nnrtDeviceInfo);
107     printf("==========deviceType:%d\n", deviceType);
108     ASSERT_EQ(deviceType, OH_AI_DEVICETYPE_NNRT);
109 
110     OH_AI_DeviceInfoSetPerformanceMode(nnrtDeviceInfo, OH_AI_PERFORMANCE_MEDIUM);
111     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrtDeviceInfo), OH_AI_PERFORMANCE_MEDIUM);
112     OH_AI_DeviceInfoSetPriority(nnrtDeviceInfo, OH_AI_PRIORITY_MEDIUM);
113     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrtDeviceInfo), OH_AI_PRIORITY_MEDIUM);
114     std::string cachePath = "/data/local/tmp/";
115     std::string cacheVersion = "1";
116     OH_AI_DeviceInfoAddExtension(nnrtDeviceInfo, "CachePath", cachePath.c_str(), cachePath.size());
117     OH_AI_DeviceInfoAddExtension(nnrtDeviceInfo, "CacheVersion", cacheVersion.c_str(), cacheVersion.size());
118     OH_AI_DeviceInfoDestroy(&nnrtDeviceInfo);
119     ASSERT_EQ(nnrtDeviceInfo, nullptr);
120 
121     OH_AI_DestroyAllNNRTDeviceDescs(&desc);
122 }
123 
/*
 * @tc.name: Nnrt_NpuPredict
 * @tc.desc: Verify the NNRT predict.
 * @tc.type: FUNC
 */
129 HWTEST(MSLiteNnrtTest, Nnrt_NpuPredict, testing::ext::TestSize.Level0) {
130     if (!IsNPU()) {
131         printf("NNRt is not NPU, skip this test");
132         return;
133     }
134 
135     printf("==========Init Context==========\n");
136     OH_AI_ContextHandle context = OH_AI_ContextCreate();
137     ASSERT_NE(context, nullptr);
138     AddContextDeviceNNRT(context);
139     printf("==========Create model==========\n");
140     OH_AI_ModelHandle model = OH_AI_ModelCreate();
141     ASSERT_NE(model, nullptr);
142     printf("==========Build model==========\n");
143     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/resource/tinynet.om.ms",
144                                                 OH_AI_MODELTYPE_MINDIR, context);
145     printf("==========build model return code:%d\n", ret);
146     if (ret != OH_AI_STATUS_SUCCESS) {
147         printf("==========build model failed, ret: %d\n", ret);
148         OH_AI_ModelDestroy(&model);
149         return;
150     }
151 
152     printf("==========GetInputs==========\n");
153     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
154     ASSERT_NE(inputs.handle_list, nullptr);
155     for (size_t i = 0; i < inputs.handle_num; ++i) {
156         OH_AI_TensorHandle tensor = inputs.handle_list[i];
157         float *inputData = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
158         size_t elementNum = OH_AI_TensorGetElementNum(tensor);
159         std::random_device rd;
160         std::mt19937 gen(rd());
161         std::uniform_real_distribution<float> dis(0.0f,1.0f);
162         for (size_t z = 0; z < elementNum; z++) {
163             inputData[z] = dis(gen);
164         }
165     }
166     printf("==========Model Predict==========\n");
167     OH_AI_TensorHandleArray outputs;
168     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
169     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
170     OH_AI_ModelDestroy(&model);
171 }
172 
/*
 * @tc.name: Nnrt_NpuCpuPredict
 * @tc.desc: Verify the NNRT npu/cpu predict.
 * @tc.type: FUNC
 */
178 HWTEST(MSLiteNnrtTest, Nnrt_NpuCpuPredict, testing::ext::TestSize.Level0) {
179     printf("==========Init Context==========\n");
180     OH_AI_ContextHandle context = OH_AI_ContextCreate();
181     ASSERT_NE(context, nullptr);
182     AddContextDeviceNNRT(context);
183     AddContextDeviceCPU(context);
184     printf("==========Create model==========\n");
185     OH_AI_ModelHandle model = OH_AI_ModelCreate();
186     ASSERT_NE(model, nullptr);
187     printf("==========Build model==========\n");
188     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/resource/ml_face_isface.ms",
189                                                 OH_AI_MODELTYPE_MINDIR, context);
190     printf("==========build model return code:%d\n", ret);
191     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
192 
193     printf("==========GetInputs==========\n");
194     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
195     ASSERT_NE(inputs.handle_list, nullptr);
196     FillInputsData(inputs, "ml_face_isface", true);
197     printf("==========Model Predict==========\n");
198     OH_AI_TensorHandleArray outputs;
199     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
200     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
201     CompareResult(outputs, "ml_face_isface");
202     OH_AI_ModelDestroy(&model);
203 }