/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <thread>
#include <random>
#include <inttypes.h>
#include <securec.h>
#include "ohos_common.h"
#include "gtest/gtest.h"
#include "include/c_api/context_c.h"
#include "include/c_api/model_c.h"
#include "include/c_api/types_c.h"
#include "include/c_api/status_c.h"
#include "include/c_api/data_type_c.h"
#include "include/c_api/tensor_c.h"
#include "include/c_api/format_c.h"
#include "syspara/parameter.h"

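// These tests exercise the MindSpore Lite C API (OH_AI_*) for on-device training and
// inference: context/device setup (CPU and NNRT), model build from file or buffer,
// weight update, learning rate, virtual batch, TrainCfg options, and model export.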
using namespace testing::ext;

class MSLiteTest: public testing::Test {
protected:
    static void SetUpTestCase(void) {}
    static void TearDownTestCase(void) {}
    virtual void SetUp() {}
    virtual void TearDown() {}
};

// callback executed before each node is run
bool PrintBeforeCallback(const OH_AI_TensorHandleArray inputs, const OH_AI_TensorHandleArray outputs,
                         const OH_AI_CallBackParam kernel_Info) {
    std::cout << "Before forwarding " << kernel_Info.node_name << " " << kernel_Info.node_type << std::endl;
    return true;
}

// callback executed after each node is run
bool PrintAfterCallback(const OH_AI_TensorHandleArray inputs, const OH_AI_TensorHandleArray outputs,
                        const OH_AI_CallBackParam kernel_Info) {
    std::cout << "After forwarding " << kernel_Info.node_name << " " << kernel_Info.node_type << std::endl;
    return true;
}

// add cpu device info
void AddContextDeviceCPU(OH_AI_ContextHandle context) {
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    ASSERT_NE(cpu_device_info, nullptr);
    OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(cpu_device_info);
    printf("==========device_type:%d\n", device_type);
    ASSERT_EQ(device_type, OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
}

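// returns true when at least one NNRT device is reported and the first entry has
// the expected device type (type value 1)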
bool IsNNRTAvailable() {
    size_t num = 0;
    auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (desc == nullptr) {
        return false;
    }
    auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
    if (type != 1) {
        OH_AI_DestroyAllNNRTDeviceDescs(&desc);
        return false;
    }
    OH_AI_DestroyAllNNRTDeviceDescs(&desc);
    return true;
}

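// returns true when the name of the first reported NNRT device starts with "NPU_"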
bool IsNPU() {
    size_t num = 0;
    auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (desc == nullptr) {
        return false;
    }
    auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
    const std::string npu_name_prefix = "NPU_";
    bool is_npu = strncmp(npu_name_prefix.c_str(), name, npu_name_prefix.size()) == 0;
    OH_AI_DestroyAllNNRTDeviceDescs(&desc);
    return is_npu;
}

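// read the NNRT device name from the system parameter "const.ai.nnrt_deivce"
// ("default" when the parameter is not set) and copy it into nnrtDevice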
OH_AI_Status OH_AI_Test_GetDeviceID(char *nnrtDevice, size_t len)
{
    const std::string nullHardwareName = "default";
    const std::string hardwareName = "const.ai.nnrt_deivce";
    constexpr size_t hardwareNameMaxLength = 128;

    if (nnrtDevice == nullptr || len == 0) {
        std::cout << "nnrtDevice is nullptr or len is 0." << std::endl;
        return OH_AI_STATUS_LITE_ERROR;
    }

    char cName[hardwareNameMaxLength] = {0};
    int ret = GetParameter(hardwareName.c_str(), nullHardwareName.c_str(), cName, hardwareNameMaxLength);
    // on success, the return value is the number of bytes in the hardware name
    if (ret <= 0) {
        std::cout << "GetNNRtDeviceName failed, failed to get parameter." << std::endl;
        return OH_AI_STATUS_LITE_ERROR;
    }

    auto secureRet = strcpy_s(nnrtDevice, len, cName);
    if (secureRet != EOK) {
        std::cout << "GetNNRtDeviceName failed, failed to get name." << std::endl;
        return OH_AI_STATUS_LITE_ERROR;
    }
    return OH_AI_STATUS_SUCCESS;
}

// add nnrt device info
void AddContextDeviceNNRT(OH_AI_ContextHandle context) {
    size_t num = 0;
    auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (descs == nullptr) {
        return;
    }

    std::cout << "found " << num << " nnrt devices" << std::endl;
    for (size_t i = 0; i < num; i++) {
        auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
        ASSERT_NE(desc, nullptr);
        auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
        auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
        auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
        std::cout << "NNRT device: id = " << id << ", name: " << name << ", type:" << type << std::endl;
    }

    auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(descs);

    OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
    ASSERT_NE(nnrt_device_info, nullptr);
    OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id);
    OH_AI_DestroyAllNNRTDeviceDescs(&descs);

    OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(nnrt_device_info);
    printf("==========device_type:%d\n", device_type);
    ASSERT_EQ(device_type, OH_AI_DEVICETYPE_NNRT);

    OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_MEDIUM);
    ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_MEDIUM);
    OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_MEDIUM);
    ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_MEDIUM);

    OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
}

// add nnrt device info with model cache path and version
void AddContextDeviceNNRTWithCache(OH_AI_ContextHandle context, const char *cache_path, const char *cache_version) {
    size_t num = 0;
    auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (descs == nullptr) {
        return;
    }

    std::cout << "found " << num << " nnrt devices" << std::endl;
    for (size_t i = 0; i < num; i++) {
        auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
        ASSERT_NE(desc, nullptr);
        auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
        auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
        auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
        std::cout << "NNRT device: id = " << id << ", name: " << name << ", type:" << type << std::endl;
    }

    auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(descs);

    OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
    ASSERT_NE(nnrt_device_info, nullptr);
    OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id);
    OH_AI_DestroyAllNNRTDeviceDescs(&descs);

    OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(nnrt_device_info);
    printf("==========device_type:%d\n", device_type);
    ASSERT_EQ(device_type, OH_AI_DEVICETYPE_NNRT);

    OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_MEDIUM);
    ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_MEDIUM);
    OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_MEDIUM);
    ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_MEDIUM);
    OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cache_path, strlen(cache_path));
    OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cache_version, strlen(cache_version));

    OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
}

// add nnrt device info by type
void AddContextDeviceNNRTByType(OH_AI_ContextHandle context) {
    size_t num = 0;
    auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
    // desc points to the first element of the NNRTDeviceDesc array
    if (desc == nullptr) {
        return;
    }
    // currently NNRT only ships the rk3568 driver with a single CPU device, so no further check is needed
    std::cout << "found " << num << " nnrt devices" << std::endl;
    auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
    auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
    auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
    std::cout << "NNRT device: id = " << id << ", name: " << name << ", type:" << type << std::endl;

    auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByType(type);
    OH_AI_DestroyAllNNRTDeviceDescs(&desc);
    ASSERT_NE(nnrt_device_info, nullptr);

    OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(nnrt_device_info);
    printf("==========device_type:%d\n", device_type);
    ASSERT_EQ(device_type, OH_AI_DEVICETYPE_NNRT);
    ASSERT_EQ(OH_AI_DeviceInfoGetDeviceId(nnrt_device_info), id);

    OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
}

// add nnrt device info by name
void AddContextDeviceNNRTByName(OH_AI_ContextHandle context) {
    size_t num = 0;
    auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
    // desc points to the first element of the NNRTDeviceDesc array
    if (desc == nullptr) {
        return;
    }
    // currently NNRT only ships the rk3568 driver with a single CPU device, so no further check is needed
    std::cout << "found " << num << " nnrt devices" << std::endl;
    auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
    auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
    auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
    std::cout << "NNRT device: id = " << id << ", name: " << name << ", type:" << type << std::endl;

    auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByName(name);
    OH_AI_DestroyAllNNRTDeviceDescs(&desc);
    ASSERT_NE(nnrt_device_info, nullptr);

    OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(nnrt_device_info);
    printf("==========device_type:%d\n", device_type);
    ASSERT_EQ(device_type, OH_AI_DEVICETYPE_NNRT);
    ASSERT_EQ(OH_AI_DeviceInfoGetDeviceId(nnrt_device_info), id);

    OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
}

// fill data to inputs tensor
void FillInputsData(OH_AI_TensorHandleArray inputs, string model_name, bool is_transpose) {
    for (size_t i = 0; i < inputs.handle_num; ++i) {
        printf("==========ReadFile==========\n");
        size_t size1;
        size_t *ptr_size1 = &size1;
        string input_data_path = "/data/test/" + model_name + "_" + std::to_string(i) + ".input";
        const char *imagePath = input_data_path.c_str();
        char *imageBuf = ReadFile(imagePath, ptr_size1);
        ASSERT_NE(imageBuf, nullptr);
        OH_AI_TensorHandle tensor = inputs.handle_list[i];
        int64_t element_num = OH_AI_TensorGetElementNum(tensor);
        printf("Tensor name: %s. \n", OH_AI_TensorGetName(tensor));
        float *input_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(inputs.handle_list[i]));
        ASSERT_NE(input_data, nullptr);
        if (is_transpose) {
            printf("==========Transpose==========\n");
            size_t shape_num;
            const int64_t *shape = OH_AI_TensorGetShape(tensor, &shape_num);
            auto imageBuf_nhwc = new char[size1];
            PackNCHWToNHWCFp32(imageBuf, imageBuf_nhwc, shape[0], shape[1] * shape[2], shape[3]);
            memcpy_s(input_data, size1, imageBuf_nhwc, size1);
            delete[] imageBuf_nhwc;
        } else {
            memcpy_s(input_data, size1, imageBuf, size1);
        }
        printf("input data after filling is: ");
        for (int j = 0; j < element_num && j <= 20; ++j) {
            printf("%f ", input_data[j]);
        }
        printf("\n");
        delete[] imageBuf;
    }
}

// compare result after predict
void CompareResult(OH_AI_TensorHandleArray outputs, string model_name, float atol = 0.01, float rtol = 0.01,
                   bool isquant = false) {
    printf("==========GetOutput==========\n");
    for (size_t i = 0; i < outputs.handle_num; ++i) {
        OH_AI_TensorHandle tensor = outputs.handle_list[i];
        int64_t element_num = OH_AI_TensorGetElementNum(tensor);
        printf("Tensor name: %s .\n", OH_AI_TensorGetName(tensor));
        float *output_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
        printf("output data is:");
        for (int j = 0; j < element_num && j <= 20; ++j) {
            printf("%f ", output_data[j]);
        }
        printf("\n");
        printf("==========compFp32WithTData==========\n");
        string output_file = "/data/test/" + model_name + std::to_string(i) + ".output";
        bool result = compFp32WithTData(output_data, output_file, atol, rtol, isquant);
        EXPECT_EQ(result, true);
    }
}

// model build and predict
void ModelPredict(OH_AI_ModelHandle model, OH_AI_ContextHandle context, string model_name,
            OH_AI_ShapeInfo shape_infos, bool build_by_graph, bool is_transpose, bool is_callback) {
    string model_path = "/data/test/" + model_name + ".ms";
    const char *graphPath = model_path.c_str();
    OH_AI_Status ret = OH_AI_STATUS_SUCCESS;
    if (build_by_graph) {
        printf("==========Build model by graphBuf==========\n");
        size_t size;
        size_t *ptr_size = &size;
        char *graphBuf = ReadFile(graphPath, ptr_size);
        ASSERT_NE(graphBuf, nullptr);
        ret = OH_AI_ModelBuild(model, graphBuf, size, OH_AI_MODELTYPE_MINDIR, context);
        delete[] graphBuf;
    } else {
        printf("==========Build model==========\n");
        ret = OH_AI_ModelBuildFromFile(model, graphPath, OH_AI_MODELTYPE_MINDIR, context);
    }
    printf("==========build model return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    if (shape_infos.shape_num != 0) {
        printf("==========Resizes==========\n");
        OH_AI_Status resize_ret = OH_AI_ModelResize(model, inputs, &shape_infos, inputs.handle_num);
        printf("==========Resizes return code:%d\n", resize_ret);
        ASSERT_EQ(resize_ret, OH_AI_STATUS_SUCCESS);
    }

    FillInputsData(inputs, model_name, is_transpose);
    OH_AI_TensorHandleArray outputs;
    OH_AI_Status predict_ret = OH_AI_STATUS_SUCCESS;
    if (is_callback) {
        printf("==========Model Predict Callback==========\n");
        OH_AI_KernelCallBack before_call_back = PrintBeforeCallback;
        OH_AI_KernelCallBack after_call_back = PrintAfterCallback;
        predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, before_call_back, after_call_back);
    } else {
        printf("==========Model Predict==========\n");
        predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    }
    printf("==========Model Predict End==========\n");
    ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
    printf("=========CompareResult===========\n");
    CompareResult(outputs, model_name);
    printf("=========OH_AI_ContextDestroy===========\n");
    OH_AI_ContextDestroy(&context);
    printf("=========OH_AI_ContextDestroy End===========\n");
    printf("=========OH_AI_ModelDestroy===========\n");
    OH_AI_ModelDestroy(&model);
    printf("=========OH_AI_ModelDestroy End===========\n");
}

// model build and train (single RunStep)
void ModelTrain(OH_AI_ModelHandle model, OH_AI_ContextHandle context, string model_name,
            OH_AI_ShapeInfo shape_infos, bool build_by_graph, bool is_transpose, bool is_callback) {
    string model_path = "/data/test/" + model_name + ".ms";
    const char *graphPath = model_path.c_str();
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    OH_AI_Status ret = OH_AI_STATUS_SUCCESS;
    if (build_by_graph) {
        printf("==========Build model by graphBuf==========\n");
        size_t size;
        size_t *ptr_size = &size;
        char *graphBuf = ReadFile(graphPath, ptr_size);
        ASSERT_NE(graphBuf, nullptr);
        ret = OH_AI_TrainModelBuild(model, graphBuf, size, OH_AI_MODELTYPE_MINDIR, context, train_cfg);
        delete[] graphBuf;
    } else {
        printf("==========Build model==========\n");
        ret = OH_AI_TrainModelBuildFromFile(model, graphPath, OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    }
    printf("==========build model return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    if (shape_infos.shape_num != 0) {
        printf("==========Resizes==========\n");
        OH_AI_Status resize_ret = OH_AI_ModelResize(model, inputs, &shape_infos, inputs.handle_num);
        printf("==========Resizes return code:%d\n", resize_ret);
        ASSERT_EQ(resize_ret, OH_AI_STATUS_SUCCESS);
    }
    FillInputsData(inputs, model_name, is_transpose);
    ret = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    if (is_callback) {
        printf("==========Model RunStep Callback==========\n");
        OH_AI_KernelCallBack before_call_back = PrintBeforeCallback;
        OH_AI_KernelCallBack after_call_back = PrintAfterCallback;
        ret = OH_AI_RunStep(model, before_call_back, after_call_back);
    } else {
        printf("==========Model RunStep==========\n");
        ret = OH_AI_RunStep(model, nullptr, nullptr);
    }
    printf("==========Model RunStep End==========\n");
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
}
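// convert a vector of std::string into a heap-allocated, NUL-terminated char* array
// (each element and the array itself are malloc'ed; the caller owns and frees them)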
char **TransStrVectorToCharArrays(const std::vector<std::string> &s) {
    char **char_arr = static_cast<char **>(malloc(s.size() * sizeof(char *)));
    for (size_t i = 0; i < s.size(); i++) {
        char_arr[i] = static_cast<char *>(malloc((s[i].size() + 1)));
        strcpy(char_arr[i], s[i].c_str());
    }
    return char_arr;
}
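// convert a char* array holding num entries back into a vector of std::string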
std::vector<std::string> TransCharArraysToStrVector(char **c, const size_t &num) {
    std::vector<std::string> str;
    for (size_t i = 0; i < num; i++) {
        str.push_back(std::string(c[i]));
    }
    return str;
}

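// print the loss node names currently configured in the TrainCfg and release the
// name buffers returned by OH_AI_TrainCfgGetLossName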
void PrintTrainLossName(OH_AI_TrainCfgHandle trainCfg) {
    size_t num = 0;
    char **lossName = OH_AI_TrainCfgGetLossName(trainCfg, &num);
    std::vector<std::string> trainCfgLossName = TransCharArraysToStrVector(lossName, num);
    for (auto ele : trainCfgLossName) {
        std::cout << "loss_name:" << ele << std::endl;
    }
    for (size_t i = 0; i < num; i++) {
        free(lossName[i]);
    }
    // the returned name array itself is heap-allocated as well
    free(lossName);
}

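// The HWTEST cases below cover the training C API: weight update, learning rate,
// virtual batch, optimization level, TrainCfg, model build from file/buffer, and model export.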
// Normal scenario: update weights
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_UpdateWeights_0001, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    auto GenRandomData = [](size_t size, void *data) {
      auto generator = std::uniform_real_distribution<float>(0.0f, 1.0f);
      std::mt19937 random_engine_;
      size_t elements_num = size / sizeof(float);
      (void)std::generate_n(static_cast<float *>(data), elements_num,
                            [&]() { return static_cast<float>(generator(random_engine_)); });
    };
    std::vector<OH_AI_TensorHandle> vec_inputs;
    constexpr size_t create_shape_num = 1;
    int64_t create_shape[create_shape_num] = {10};
    OH_AI_TensorHandle tensor = OH_AI_TensorCreate("fc3.bias", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape, create_shape_num, nullptr, 0);
    ASSERT_NE(tensor, nullptr);
    GenRandomData(OH_AI_TensorGetDataSize(tensor), OH_AI_TensorGetMutableData(tensor));
    vec_inputs.push_back(tensor);
    OH_AI_TensorHandleArray update_weights = {1, vec_inputs.data()};
    status = OH_AI_ModelUpdateWeights(model, update_weights);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

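// helper: print the current "fc3.bias" weight value, then overwrite that weight with a
// freshly created tensor filled with random data via OH_AI_ModelUpdateWeights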
void ModelUpdateWeightsTest(OH_AI_ModelHandle model)
{
    OH_AI_TensorHandleArray get_update_weights = OH_AI_ModelGetWeights(model);
    for (size_t i = 0; i < get_update_weights.handle_num; ++i) {
        OH_AI_TensorHandle weights_tensor = get_update_weights.handle_list[i];
        if (strcmp(OH_AI_TensorGetName(weights_tensor), "fc3.bias") == 0) {
            float *input_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(weights_tensor));
            std::cout << "fc3.bias:" << input_data[0] << std::endl;
        }
    }
    auto genRandomData = [](size_t size, void *data) {
        auto generator = std::uniform_real_distribution<float>(0.0f, 1.0f);
        std::mt19937 randomEngine;
        size_t elementsNum = size / sizeof(float);
        (void)std::generate_n(static_cast<float *>(data), elementsNum,
            [&]() { return static_cast<float>(generator(randomEngine)); });
    };
    std::vector<OH_AI_TensorHandle> vec_inputs;
    constexpr size_t createShapeNum = 1;
    int64_t createShape[createShapeNum] = {10};
    OH_AI_TensorHandle tensor = OH_AI_TensorCreate("fc3.bias", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
        createShapeNum, nullptr, 0);
    genRandomData(OH_AI_TensorGetDataSize(tensor), OH_AI_TensorGetMutableData(tensor));
    vec_inputs.push_back(tensor);
    OH_AI_TensorHandleArray update_weights = {1, vec_inputs.data()};
    OH_AI_ModelUpdateWeights(model, update_weights);
}

// Normal scenario: update weights, export the training graph, then read the weights back
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_UpdateWeights_0002, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR,
        context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    ModelUpdateWeightsTest(model);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ExportModel==========\n");
    status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT,
        true, nullptr, 0);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_TensorHandleArray export_update_weights = OH_AI_ModelGetWeights(model);
    for (size_t i = 0; i < export_update_weights.handle_num; ++i) {
        OH_AI_TensorHandle weights_tensor = export_update_weights.handle_list[i];
        if (strcmp(OH_AI_TensorGetName(weights_tensor), "fc3.bias") == 0) {
            float *input_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(weights_tensor));
            std::cout << "fc3.bias:" << input_data[0] << std::endl;
        }
    }
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: update weights with a wrong tensor name
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_UpdateWeights_0003, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    std::vector<OH_AI_TensorHandle> vec_inputs;
    constexpr size_t create_shape_num = 1;
    int64_t create_shape[create_shape_num] = {10};
    OH_AI_TensorHandle tensor = OH_AI_TensorCreate("aaaaa", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape, create_shape_num, nullptr, 0);
    ASSERT_NE(tensor, nullptr);
    vec_inputs.push_back(tensor);
    OH_AI_TensorHandleArray update_weights = {1, vec_inputs.data()};
    status = OH_AI_ModelUpdateWeights(model, update_weights);
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: update weights with a wrong tensor data type
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_UpdateWeights_0004, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    std::vector<OH_AI_TensorHandle> vec_inputs;
    constexpr size_t create_shape_num = 1;
    int64_t create_shape[create_shape_num] = {10};
    OH_AI_TensorHandle tensor = OH_AI_TensorCreate("fc3.bias", OH_AI_DATATYPE_NUMBERTYPE_FLOAT16, create_shape, create_shape_num, nullptr, 0);
    ASSERT_NE(tensor, nullptr);
    vec_inputs.push_back(tensor);
    OH_AI_TensorHandleArray update_weights = {1, vec_inputs.data()};
    status = OH_AI_ModelUpdateWeights(model, update_weights);
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: update weights with all tensor names wrong
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_UpdateWeights_0005, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_TensorHandleArray get_weights = OH_AI_ModelGetWeights(model);
    std::vector<OH_AI_TensorHandle> feature_maps;
    for (size_t i = 0; i < get_weights.handle_num; i++) {
        auto name = OH_AI_TensorGetName(get_weights.handle_list[i]);
        size_t shape_num;
        auto shape = OH_AI_TensorGetShape(get_weights.handle_list[i], &shape_num);
        OH_AI_DataType data_type = OH_AI_TensorGetDataType(get_weights.handle_list[i]);
        auto data = OH_AI_TensorGetData(get_weights.handle_list[i]);
        auto data_size = OH_AI_TensorGetDataSize(get_weights.handle_list[i]);
        OH_AI_TensorHandle tensor = OH_AI_TensorCreate(name, data_type, shape, shape_num, data, data_size);
        OH_AI_TensorSetName(tensor, "AAAA");
        feature_maps.push_back(tensor);
    }
    printf("==========OH_AI_TrainModel updateWeights==========\n");
    status = OH_AI_ModelUpdateWeights(model, {feature_maps.size(), feature_maps.data()});
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: update weights with one wrong tensor name
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_UpdateWeights_0006, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_TensorHandleArray get_weights = OH_AI_ModelGetWeights(model);
    std::vector<OH_AI_TensorHandle> feature_maps;
    for (size_t i = 0; i < get_weights.handle_num; i++) {
        auto name = OH_AI_TensorGetName(get_weights.handle_list[i]);
        size_t shape_num;
        auto shape = OH_AI_TensorGetShape(get_weights.handle_list[i], &shape_num);
        OH_AI_DataType data_type = OH_AI_TensorGetDataType(get_weights.handle_list[i]);
        auto data = OH_AI_TensorGetData(get_weights.handle_list[i]);
        auto data_size = OH_AI_TensorGetDataSize(get_weights.handle_list[i]);
        OH_AI_TensorHandle tensor = OH_AI_TensorCreate(name, data_type, shape, shape_num, data, data_size);
        feature_maps.push_back(tensor);
    }
    printf("==========OH_AI_TrainModel updateWeights==========\n");
    OH_AI_TensorSetName(feature_maps.at(1), "AAAA");
    status = OH_AI_ModelUpdateWeights(model, {feature_maps.size(), feature_maps.data()});
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: set the learning rate to 0.01
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_SetLearningRate_0001, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    auto learning_rate = OH_AI_ModelGetLearningRate(model);
    std::cout << "learning_rate:" << learning_rate << std::endl;
    status = OH_AI_ModelSetLearningRate(model, 0.01f);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    learning_rate = OH_AI_ModelGetLearningRate(model);
    std::cout << "get_learning_rate:" << learning_rate << std::endl;
    ASSERT_EQ(learning_rate, 0.01f);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: set the learning rate to 1000.0
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_SetLearningRate_0002, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    auto learning_rate = OH_AI_ModelGetLearningRate(model);
    std::cout << "learning_rate:" << learning_rate << std::endl;
    status = OH_AI_ModelSetLearningRate(model, 1000.0f);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    learning_rate = OH_AI_ModelGetLearningRate(model);
    std::cout << "get_learning_rate:" << learning_rate << std::endl;
    ASSERT_EQ(learning_rate, 1000.0f);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: set the virtual batch size to 2
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_SetupVirtualBatch_0001, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    status = OH_AI_ModelSetupVirtualBatch(model, 2, -1.0f, -1.0f);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ExportModel==========\n");
    status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ModelSetTrainMode==========\n");
    status = OH_AI_ModelSetTrainMode(model, false);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ContextCreate2==========\n");
    OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
    ASSERT_NE(context2, nullptr);
    AddContextDeviceCPU(context2);
    printf("==========OH_AI_ModelCreate2==========\n");
    OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
    ASSERT_NE(model2, nullptr);
    printf("==========ModelPredict==========\n");
    ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: set the optimization level
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_SetOptimizationLevel_0001, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    auto opt_level = OH_AI_TrainCfgGetOptimizationLevel(train_cfg);
    std::cout << "opt_level:" << opt_level << std::endl;
    OH_AI_TrainCfgSetOptimizationLevel(train_cfg, OH_AI_KO2);
    auto set_opt_level = OH_AI_TrainCfgGetOptimizationLevel(train_cfg);
    std::cout << "set_opt_level:" << set_opt_level << std::endl;
    printf("==========OH_AI_TrainModelBuildFromFile==========\n");
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ExportModel==========\n");
    status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ModelSetTrainMode==========\n");
    status = OH_AI_ModelSetTrainMode(model, false);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ContextCreate2==========\n");
    OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
    ASSERT_NE(context2, nullptr);
    AddContextDeviceCPU(context2);
    printf("==========OH_AI_ModelCreate2==========\n");
    OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
    ASSERT_NE(model2, nullptr);
    printf("==========ModelPredict==========\n");
    ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: create a TrainCfg object and destroy it
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_TrainCfg_0001, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    OH_AI_TrainCfgDestroy(&train_cfg);
    ASSERT_EQ(train_cfg, nullptr);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: set loss function names that exist in the model
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_TrainCfg_0002, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    PrintTrainLossName(train_cfg);

    std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn"};
    char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
    OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
    PrintTrainLossName(train_cfg);

    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "lenet_train", false);
    status = OH_AI_ModelSetTrainMode(model, true);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========Model RunStep==========\n");
    status = OH_AI_RunStep(model, nullptr, nullptr);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ExportModel==========\n");
    status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ModelSetTrainMode==========\n");
    status = OH_AI_ModelSetTrainMode(model, false);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ContextCreate2==========\n");
    OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
    ASSERT_NE(context2, nullptr);
    AddContextDeviceCPU(context2);
    printf("==========OH_AI_ModelCreate2==========\n");
    OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
    ASSERT_NE(model2, nullptr);
    printf("==========ModelPredict==========\n");
    ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: set loss function names that do not exist in the model
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_TrainCfg_0003, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    PrintTrainLossName(train_cfg);

    std::vector<std::string> set_train_cfg_loss_name = {"aaa", "bbb"};
    char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
    OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
    PrintTrainLossName(train_cfg);

    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: provide more loss function names than the num argument passed
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_TrainCfg_0004, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    PrintTrainLossName(train_cfg);

    std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn"};
    char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
    OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), 1);
    PrintTrainLossName(train_cfg);

    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Normal scenario: load the model from a buffer, run one round of training, and compare accuracy
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ModelBuild_0001, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_RunStep==========\n");
    ModelTrain(model, context, "lenet_train", {}, true, false, false);
    printf("==========OH_AI_ExportModel==========\n");
    auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ModelSetTrainMode==========\n");
    status = OH_AI_ModelSetTrainMode(model, false);
    ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ContextCreate2==========\n");
    OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
    ASSERT_NE(context2, nullptr);
    AddContextDeviceCPU(context2);
    printf("==========OH_AI_ModelCreate2==========\n");
    OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
    ASSERT_NE(model2, nullptr);
    printf("==========ModelPredict==========\n");
    ModelPredict(model2, context2, "lenet_train_infer", {}, true, false, false);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: the model buffer is null
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ModelBuild_0002, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    printf("==========Build model by graphBuf==========\n");
    auto status = OH_AI_TrainModelBuild(model, nullptr, 0, OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: the model file path does not exist
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ModelBuild_0003, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/not_exist/lenet_train.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: the model file path is empty
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ModelBuild_0004, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    auto status = OH_AI_TrainModelBuildFromFile(model, "", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
// Error scenario: the model file path points to a file that is not a valid model
HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ModelBuild_0005, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========OH_AI_ModelCreate==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========OH_AI_TrainCfgCreate==========\n");
    OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
    ASSERT_NE(train_cfg, nullptr);
    auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/lenet_train_0.input", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
    ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}
1040 // 正常场景:训练model导出推理图后对比精度
1041 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0001, Function | MediumTest | Level0) {
1042     printf("==========OH_AI_ContextCreate==========\n");
1043     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1044     ASSERT_NE(context, nullptr);
1045     AddContextDeviceCPU(context);
1046     printf("==========OH_AI_ModelCreate==========\n");
1047     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1048     ASSERT_NE(model, nullptr);
1049     printf("==========OH_AI_RunStep==========\n");
1050     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1051     printf("==========OH_AI_ExportModel==========\n");
1052     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1053     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1054     printf("==========OH_AI_ModelSetTrainMode==========\n");
1055     status = OH_AI_ModelSetTrainMode(model, false);
1056     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1057     printf("==========OH_AI_ContextCreate2==========\n");
1058     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1059     ASSERT_NE(context2, nullptr);
1060     AddContextDeviceCPU(context2);
1061     printf("==========OH_AI_ModelCreate2==========\n");
1062     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1063     ASSERT_NE(model2, nullptr);
1064     printf("==========ModelPredict==========\n");
1065     ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
1066     OH_AI_ContextDestroy(&context);
1067     OH_AI_ModelDestroy(&model);
1068 }
1069 // 正常场景:quantization_type为OH_AI_WEIGHT_QUANT
1070 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0002, Function | MediumTest | Level0) {
1071     printf("==========OH_AI_ContextCreate==========\n");
1072     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1073     ASSERT_NE(context, nullptr);
1074     AddContextDeviceCPU(context);
1075     printf("==========OH_AI_ModelCreate==========\n");
1076     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1077     ASSERT_NE(model, nullptr);
1078     printf("==========OH_AI_RunStep==========\n");
1079     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1080     printf("==========OH_AI_ExportModel==========\n");
1081     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_WEIGHT_QUANT, true, nullptr, 0);
1082     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1083     printf("==========OH_AI_ModelSetTrainMode==========\n");
1084     status = OH_AI_ModelSetTrainMode(model, false);
1085     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1086     printf("==========OH_AI_ContextCreate2==========\n");
1087     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1088     ASSERT_NE(context2, nullptr);
1089     AddContextDeviceCPU(context2);
1090     printf("==========OH_AI_ModelCreate2==========\n");
1091     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1092     ASSERT_NE(model2, nullptr);
1093     printf("==========ModelPredict==========\n");
1094     ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
1095     OH_AI_ContextDestroy(&context);
1096     OH_AI_ModelDestroy(&model);
1097 }
1098 // Normal scenario: quantization_type is OH_AI_FULL_QUANT
1099 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0003, Function | MediumTest | Level0) {
1100     printf("==========OH_AI_ContextCreate==========\n");
1101     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1102     ASSERT_NE(context, nullptr);
1103     AddContextDeviceCPU(context);
1104     printf("==========OH_AI_ModelCreate==========\n");
1105     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1106     ASSERT_NE(model, nullptr);
1107     printf("==========OH_AI_RunStep==========\n");
1108     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1109     printf("==========OH_AI_ExportModel==========\n");
1110     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_FULL_QUANT, true, nullptr, 0);
1111     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1112     printf("==========OH_AI_ModelSetTrainMode==========\n");
1113     status = OH_AI_ModelSetTrainMode(model, false);
1114     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1115     printf("==========OH_AI_ContextCreate2==========\n");
1116     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1117     ASSERT_NE(context2, nullptr);
1118     AddContextDeviceCPU(context2);
1119     printf("==========OH_AI_ModelCreate2==========\n");
1120     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1121     ASSERT_NE(model2, nullptr);
1122     printf("==========ModelPredict==========\n");
1123     ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
1124     OH_AI_ContextDestroy(&context);
1125     OH_AI_ModelDestroy(&model);
1126 }
1127 // Normal scenario: quantization_type is OH_AI_UNKNOWN_QUANT_TYPE
1128 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0004, Function | MediumTest | Level0) {
1129     printf("==========OH_AI_ContextCreate==========\n");
1130     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1131     ASSERT_NE(context, nullptr);
1132     AddContextDeviceCPU(context);
1133     printf("==========OH_AI_ModelCreate==========\n");
1134     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1135     ASSERT_NE(model, nullptr);
1136     printf("==========OH_AI_RunStep==========\n");
1137     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1138     printf("==========OH_AI_ExportModel==========\n");
1139     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_UNKNOWN_QUANT_TYPE, true, nullptr, 0);
1140     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1141     printf("==========OH_AI_ModelSetTrainMode==========\n");
1142     status = OH_AI_ModelSetTrainMode(model, false);
1143     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1144     printf("==========OH_AI_ContextCreate2==========\n");
1145     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1146     ASSERT_NE(context2, nullptr);
1147     AddContextDeviceCPU(context2);
1148     printf("==========OH_AI_ModelCreate2==========\n");
1149     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1150     ASSERT_NE(model2, nullptr);
1151     printf("==========ModelPredict==========\n");
1152     ModelPredict(model2, context2, "lenet_train_infer", {}, false, false, false);
1153     OH_AI_ContextDestroy(&context);
1154     OH_AI_ModelDestroy(&model);
1155 }
1156 // Normal scenario: export_inference_only is false
1157 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0005, Function | MediumTest | Level0) {
1158     printf("==========OH_AI_ContextCreate==========\n");
1159     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1160     ASSERT_NE(context, nullptr);
1161     AddContextDeviceCPU(context);
1162     printf("==========OH_AI_ModelCreate==========\n");
1163     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1164     ASSERT_NE(model, nullptr);
1165     printf("==========OH_AI_RunStep==========\n");
1166     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1167     printf("==========OH_AI_ExportModel==========\n");
1168     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_train.ms", OH_AI_NO_QUANT, false, nullptr, 0);
1169     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1170     printf("==========OH_AI_ContextCreate2==========\n");
1171     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1172     ASSERT_NE(context2, nullptr);
1173     AddContextDeviceCPU(context2);
1174     printf("==========OH_AI_ModelCreate2==========\n");
1175     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1176     ASSERT_NE(model2, nullptr);
1177     printf("==========ModelTrain==========\n");
1178     ModelTrain(model2, context2, "lenet_train_train", {}, false, false, false);
1179     OH_AI_ContextDestroy(&context);
1180     OH_AI_ModelDestroy(&model);
1181 }
1182 // Normal scenario: export_inference_only is false, and output_tensor_name is specified
1183 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0006, Function | MediumTest | Level0) {
1184     printf("==========OH_AI_ContextCreate==========\n");
1185     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1186     ASSERT_NE(context, nullptr);
1187     AddContextDeviceCPU(context);
1188     printf("==========OH_AI_ModelCreate==========\n");
1189     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1190     ASSERT_NE(model, nullptr);
1191     printf("==========OH_AI_RunStep==========\n");
1192     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1193     printf("==========OH_AI_ExportModel==========\n");
1194     const std::vector<std::string> output_name = {"Default/network-WithLossCell/_loss_fn-L1Loss/ReduceMean-op127"};
1195     auto output_tensor_name = TransStrVectorToCharArrays(output_name);
1196     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_train.ms", OH_AI_NO_QUANT, false, output_tensor_name, 1);
1197     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1198     printf("==========OH_AI_ContextCreate2==========\n");
1199     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1200     ASSERT_NE(context2, nullptr);
1201     AddContextDeviceCPU(context2);
1202     printf("==========OH_AI_ModelCreate2==========\n");
1203     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1204     ASSERT_NE(model2, nullptr);
1205     printf("==========ModelTrain==========\n");
1206     ModelTrain(model2, context2, "lenet_train_train", {}, false, false, false);
1207     OH_AI_ContextDestroy(&context);
1208     OH_AI_ModelDestroy(&model);
1209 }
1210 // Abnormal scenario: model type is OH_AI_MODELTYPE_INVALID
1211 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0007, Function | MediumTest | Level0) {
1212     printf("==========OH_AI_ContextCreate==========\n");
1213     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1214     ASSERT_NE(context, nullptr);
1215     AddContextDeviceCPU(context);
1216     printf("==========OH_AI_ModelCreate==========\n");
1217     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1218     ASSERT_NE(model, nullptr);
1219     printf("==========OH_AI_RunStep==========\n");
1220     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1221     printf("==========OH_AI_ExportModel==========\n");
1222     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_INVALID, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1223     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1224     OH_AI_ContextDestroy(&context);
1225     OH_AI_ModelDestroy(&model);
1226 }
1227 // Abnormal scenario: the specified output_tensor_name does not exist in the model
1228 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0008, Function | MediumTest | Level0) {
1229     printf("==========OH_AI_ContextCreate==========\n");
1230     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1231     ASSERT_NE(context, nullptr);
1232     AddContextDeviceCPU(context);
1233     printf("==========OH_AI_ModelCreate==========\n");
1234     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1235     ASSERT_NE(model, nullptr);
1236     printf("==========OH_AI_RunStep==========\n");
1237     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1238     printf("==========OH_AI_ExportModel==========\n");
1239     const std::vector<std::string> output_name = {"aaa"};
1240     auto output_tensor_name = TransStrVectorToCharArrays(output_name);
1241     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, output_tensor_name, 1);
1242     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1243     OH_AI_ContextDestroy(&context);
1244     OH_AI_ModelDestroy(&model);
1245 }
1246 // Normal scenario: the number of entries in output_tensor_name does not match num
1247 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0009, Function | MediumTest | Level0) {
1248     printf("==========OH_AI_ContextCreate==========\n");
1249     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1250     ASSERT_NE(context, nullptr);
1251     AddContextDeviceCPU(context);
1252     printf("==========OH_AI_ModelCreate==========\n");
1253     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1254     ASSERT_NE(model, nullptr);
1255     printf("==========OH_AI_RunStep==========\n");
1256     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1257     printf("==========OH_AI_ExportModel==========\n");
1258     const std::vector<std::string> output_name = {"Default/network-WithLossCell/_loss_fn-L1Loss/ReduceMean-op127"};
1259     auto output_tensor_name = TransStrVectorToCharArrays(output_name);
1260     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, output_tensor_name, 0);
1261     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1262     OH_AI_ContextDestroy(&context);
1263     OH_AI_ModelDestroy(&model);
1264 }
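// Note: the export above succeeds even though num (0) does not match the single entry in
// output_tensor_name; judging from this assertion alone, the name list appears to be ignored
// when num is 0. This is an observation from the test, not documented API behavior.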
1265 // Abnormal scenario: the model_file directory does not exist
1266 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0010, Function | MediumTest | Level0) {
1267     printf("==========OH_AI_ContextCreate==========\n");
1268     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1269     ASSERT_NE(context, nullptr);
1270     AddContextDeviceCPU(context);
1271     printf("==========OH_AI_ModelCreate==========\n");
1272     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1273     ASSERT_NE(model, nullptr);
1274     printf("==========OH_AI_RunStep==========\n");
1275     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1276     printf("==========OH_AI_ExportModel==========\n");
1277     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/not_exsit/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1278     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1279     OH_AI_ContextDestroy(&context);
1280     OH_AI_ModelDestroy(&model);
1281 }
1282 // Abnormal scenario: model_file path is empty
1283 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0011, Function | MediumTest | Level0) {
1284     printf("==========OH_AI_ContextCreate==========\n");
1285     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1286     ASSERT_NE(context, nullptr);
1287     AddContextDeviceCPU(context);
1288     printf("==========OH_AI_ModelCreate==========\n");
1289     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1290     ASSERT_NE(model, nullptr);
1291     printf("==========OH_AI_RunStep==========\n");
1292     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1293     printf("==========OH_AI_ExportModel==========\n");
1294     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "", OH_AI_NO_QUANT, true, nullptr, 0);
1295     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1296     OH_AI_ContextDestroy(&context);
1297     OH_AI_ModelDestroy(&model);
1298 }
1299 // Abnormal scenario: model_file path is a directory
1300 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0012, Function | MediumTest | Level0) {
1301     printf("==========OH_AI_ContextCreate==========\n");
1302     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1303     ASSERT_NE(context, nullptr);
1304     AddContextDeviceCPU(context);
1305     printf("==========OH_AI_ModelCreate==========\n");
1306     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1307     ASSERT_NE(model, nullptr);
1308     printf("==========OH_AI_RunStep==========\n");
1309     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1310     printf("==========OH_AI_ExportModel==========\n");
1311     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/", OH_AI_NO_QUANT, true, nullptr, 0);
1312     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1313     OH_AI_ContextDestroy(&context);
1314     OH_AI_ModelDestroy(&model);
1315 }
1316 // Normal scenario: OH_AI_ModelGetTrainMode
1317 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0013, Function | MediumTest | Level0) {
1318     printf("==========OH_AI_ContextCreate==========\n");
1319     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1320     ASSERT_NE(context, nullptr);
1321     AddContextDeviceCPU(context);
1322     printf("==========OH_AI_ModelCreate==========\n");
1323     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1324     ASSERT_NE(model, nullptr);
1325     printf("==========OH_AI_RunStep==========\n");
1326     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1327     printf("==========OH_AI_ExportModel==========\n");
1328     auto status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/lenet_train_infer.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1329     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1330     printf("==========OH_AI_ModelSetTrainMode==========\n");
1331     status = OH_AI_ModelSetTrainMode(model, false);
1332     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1333     auto train_mode = OH_AI_ModelGetTrainMode(model);
1334     ASSERT_EQ(train_mode, false);
1335     printf("=========OH_AI_ModelDestroy===========\n");
1336     OH_AI_ContextDestroy(&context);
1337     OH_AI_ModelDestroy(&model);
1338     printf("=========OH_AI_ModelDestroy End===========\n");
1339 }
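// OH_AI_ModelGetTrainMode simply reflects the value most recently set with
// OH_AI_ModelSetTrainMode, so after switching the model to inference mode the getter is
// expected to return false, as asserted above.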
1340 // Normal scenario: OH_AI_ExportModelBuffer
1341 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportModel_0014, Function | MediumTest | Level0) {
1342     printf("==========OH_AI_ContextCreate==========\n");
1343     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1344     ASSERT_NE(context, nullptr);
1345     AddContextDeviceCPU(context);
1346     printf("==========OH_AI_ModelCreate==========\n");
1347     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1348     ASSERT_NE(model, nullptr);
1349     printf("==========OH_AI_RunStep==========\n");
1350     ModelTrain(model, context, "lenet_train", {}, false, false, false);
1351     printf("==========OH_AI_ExportModel==========\n");
1352     char *modelData = nullptr;
1353     size_t data_size = 0;
1354     auto status = OH_AI_ExportModelBuffer(model, OH_AI_MODELTYPE_MINDIR, &modelData,
1355                                           &data_size, OH_AI_NO_QUANT, true, nullptr, 0);
1356     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1357     ASSERT_NE(modelData, nullptr);
1358     ASSERT_NE(data_size, 0);
1359     printf("==========OH_AI_ModelCreate2==========\n");
1360     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
1361     ASSERT_NE(model2, nullptr);
1362 
1363     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
1364     ASSERT_NE(context2, nullptr);
1365     AddContextDeviceCPU(context2);
1366     printf("==========ModelPredict==========\n");
1367     auto ret = OH_AI_ModelBuild(model2, modelData, data_size, OH_AI_MODELTYPE_MINDIR, context2);
1368     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
1369     printf("==========GetInputs==========\n");
1370     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model2);
1371     ASSERT_NE(inputs.handle_list, nullptr);
1372     FillInputsData(inputs, "lenet_train_infer", false);
1373     printf("==========Model Predict==========\n");
1374     OH_AI_TensorHandleArray outputs;
1375     OH_AI_Status predict_ret = OH_AI_ModelPredict(model2, inputs, &outputs, nullptr, nullptr);
1376     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
1377     printf("=========CompareResult===========\n");
1378     CompareResult(outputs, "lenet_train_infer");
1379     printf("=========model01 context and Model destroy===========\n");
1380     OH_AI_ContextDestroy(&context);
1381     OH_AI_ModelDestroy(&model);
1382     printf("=========model01 context and Model destroy End===========\n");
1383     printf("=========model02 context and Model destroy===========\n");
1384     OH_AI_ContextDestroy(&context2);
1385     OH_AI_ModelDestroy(&model2);
1386     printf("=========model02 context and Model destroy End===========\n");
1387 }
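// Minimal sketch of the in-memory export/rebuild round trip exercised above (names as in
// this test; error handling omitted):
//     char *buf = nullptr; size_t len = 0;
//     OH_AI_ExportModelBuffer(model, OH_AI_MODELTYPE_MINDIR, &buf, &len, OH_AI_NO_QUANT, true, nullptr, 0);
//     OH_AI_ModelBuild(model2, buf, len, OH_AI_MODELTYPE_MINDIR, context2);
// Ownership of the exported buffer is not released here; whether the caller must free it is
// not covered by this test.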
1388 // Normal scenario: export micro weights from a trained model
1389 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportWeights_0001, Function | MediumTest | Level0) {
1390     printf("==========OH_AI_ContextCreate==========\n");
1391     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1392     ASSERT_NE(context, nullptr);
1393     AddContextDeviceCPU(context);
1394     printf("==========OH_AI_ModelCreate==========\n");
1395     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1396     ASSERT_NE(model, nullptr);
1397     printf("==========OH_AI_TrainCfgCreate==========\n");
1398     OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
1399     ASSERT_NE(train_cfg, nullptr);
1400     printf("==========OH_AI_TrainModelBuildFromFile==========\n");
1401     auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/xiaoyi_train_codegen.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
1402     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1403     printf("==========OH_AI_ExportModel==========\n");
1404     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model1.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1405     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1406     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_net1.bin", true, true, nullptr, 0);
1407     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1408     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_net1_fp32.bin", true, false, nullptr, 0);
1409     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1410     OH_AI_ContextDestroy(&context);
1411     OH_AI_ModelDestroy(&model);
1412 }
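// Note: the two OH_AI_ExportWeightsCollaborateWithMicro calls above differ only in the
// enable_fp16 flag (true for the default weight file, false for the *_fp32 variant); both
// are expected to succeed as long as the weight_file path is writable.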
1413 
1414 void ModelExportWeightsTest(OH_AI_ModelHandle model)
1415 {
1416     const std::vector<std::string> changebleWeightsName = {"app_usage_statistic_30_cell.embedding.embedding_table",
1417         "moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1418         "moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1419         "data-57"};
1420     char **setChangebleWeightsName = TransStrVectorToCharArrays(changebleWeightsName);
1421     OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR,
1422         "/data/test/xiaoyi_train_codegen_net1.bin", true, true,
1423         setChangebleWeightsName, changebleWeightsName.size());
1424     OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR,
1425         "/data/test/xiaoyi_train_codegen_net1_fp32.bin", true, false,
1426         setChangebleWeightsName, changebleWeightsName.size());
1427     printf("==================== update weight ==================\n");
1428     auto genRandomData = [](size_t size, void *data) {
1429         auto generator = std::uniform_real_distribution<float>(0.0f, 1.0f);
1430         std::mt19937 randomEngine;
1431         size_t elementsNum = size / sizeof(float);
1432         (void)std::generate_n(static_cast<float *>(data), elementsNum,
1433             [&]() { return static_cast<float>(generator(randomEngine)); });
1434     };
1435     std::vector<OH_AI_TensorHandle> vec_inputs;
1436     constexpr size_t createShapeNum = 2;
1437     int64_t createShape[createShapeNum] = {76, 8};
1438     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("app_usage_statistic_30_cell.embedding.embedding_table",
1439         OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape, createShapeNum, nullptr, 0);
1440     genRandomData(OH_AI_TensorGetDataSize(tensor), OH_AI_TensorGetMutableData(tensor));
1441     vec_inputs.push_back(tensor);
1442     constexpr size_t createShapeNum2 = 2;
1443     int64_t createShape2[createShapeNum2] = {76, 8};
1444     OH_AI_TensorHandle tensor2 = OH_AI_TensorCreate("moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1445         OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape2, createShapeNum2, nullptr, 0);
1446     genRandomData(OH_AI_TensorGetDataSize(tensor2), OH_AI_TensorGetMutableData(tensor2));
1447     vec_inputs.push_back(tensor2);
1448     constexpr size_t createShapeNum3 = 2;
1449     int64_t createShape3[createShapeNum3] = {76, 8};
1450     OH_AI_TensorHandle tensor3 = OH_AI_TensorCreate("moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1451         OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape3, createShapeNum3, nullptr, 0);
1452     genRandomData(OH_AI_TensorGetDataSize(tensor3), OH_AI_TensorGetMutableData(tensor3));
1453     vec_inputs.push_back(tensor3);
1454     OH_AI_TensorHandleArray update_weights = {3, vec_inputs.data()};
1455     OH_AI_ModelUpdateWeights(model, update_weights);
1456 }
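// ModelExportWeightsTest first exports the micro weight files for the changeable-weight
// tensors listed above, then creates three float32 tensors of shape {76, 8}, fills them with
// uniform random values in [0, 1), and passes them to OH_AI_ModelUpdateWeights so that the
// exports performed afterwards by the caller operate on updated embedding tables.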
1457 
1458 // Normal scenario: update weights of a trained model and export micro weights
1459 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportWeights_0002, Function | MediumTest | Level0) {
1460     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1461     ASSERT_NE(context, nullptr);
1462     AddContextDeviceCPU(context);
1463     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1464     ASSERT_NE(model, nullptr);
1465     OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
1466     ASSERT_NE(train_cfg, nullptr);
1467     std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn", "SigmoidCrossEntropy",
1468         "BinaryCrossEntropy"};
1469     char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
1470     OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
1471     auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/xiaoyi_train_codegen.ms", OH_AI_MODELTYPE_MINDIR,
1472         context, train_cfg);
1473     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1474     printf("==========OH_AI_ExportModel==========\n");
1475     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model1.ms",
1476         OH_AI_NO_QUANT, true, nullptr, 0);
1477     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1478     const std::vector<std::string> changebleWeightsName2 = {"app_usage_statistic_30_cell.embedding.embedding_table",
1479                                                  "moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1480                                                  "moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1481                                                  "data-57"};
1482     char **setChangebleWeightsName2 = TransStrVectorToCharArrays(changebleWeightsName2);
1483     ModelExportWeightsTest(model);
1484     status = OH_AI_ModelSetTrainMode(model, true);
1485     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1486     status = OH_AI_RunStep(model, nullptr, nullptr);
1487     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1488     status = OH_AI_ModelSetTrainMode(model, false);
1489     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1490     status = OH_AI_RunStep(model, nullptr, nullptr);
1491     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1492     printf("==========OH_AI_ExportModel2==========\n");
1493     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model2.ms",
1494         OH_AI_NO_QUANT, true, nullptr, 0);
1495     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1496     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR,
1497         "/data/test/xiaoyi_train_codegen_net2.bin", true, true,
1498         setChangebleWeightsName2, changebleWeightsName2.size());
1499     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1500     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR,
1501         "/data/test/xiaoyi_train_codegen_net2_fp32.bin", true, false,
1502         setChangebleWeightsName2, changebleWeightsName2.size());
1503     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1504     OH_AI_ContextDestroy(&context);
1505     OH_AI_ModelDestroy(&model);
1506 }
1507 // Abnormal scenario: the weight_file directory does not exist
1508 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportWeights_0003, Function | MediumTest | Level0) {
1509     printf("==========OH_AI_ContextCreate==========\n");
1510     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1511     ASSERT_NE(context, nullptr);
1512     AddContextDeviceCPU(context);
1513     printf("==========OH_AI_ModelCreate==========\n");
1514     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1515     ASSERT_NE(model, nullptr);
1516     printf("==========OH_AI_TrainCfgCreate==========\n");
1517     OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
1518     ASSERT_NE(train_cfg, nullptr);
1519     std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn", "SigmoidCrossEntropy", "BinaryCrossEntropy"};
1520     char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
1521     OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
1522     printf("==========OH_AI_TrainModelBuildFromFile==========\n");
1523     auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/xiaoyi_train_codegen.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
1524     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1525     printf("==========OH_AI_ExportModel==========\n");
1526     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model1.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1527     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1528     const std::vector<std::string> changeble_weights_name = {"app_usage_statistic_30_cell.embedding.embedding_table",
1529                                                  "moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1530                                                  "moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1531                                                  "data-57"};
1532     char **set_changeble_weights_name = TransStrVectorToCharArrays(changeble_weights_name);
1533     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, "/data/not_exist/xiaoyi_train_codegen_net1.bin", true, true, set_changeble_weights_name, changeble_weights_name.size());
1534     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1535     OH_AI_ContextDestroy(&context);
1536     OH_AI_ModelDestroy(&model);
1537 }
1538 // Abnormal scenario: weight_file path is empty
1539 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportWeights_0004, Function | MediumTest | Level0) {
1540     printf("==========OH_AI_ContextCreate==========\n");
1541     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1542     ASSERT_NE(context, nullptr);
1543     AddContextDeviceCPU(context);
1544     printf("==========OH_AI_ModelCreate==========\n");
1545     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1546     ASSERT_NE(model, nullptr);
1547     printf("==========OH_AI_TrainCfgCreate==========\n");
1548     OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
1549     ASSERT_NE(train_cfg, nullptr);
1550     std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn", "SigmoidCrossEntropy", "BinaryCrossEntropy"};
1551     char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
1552     OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
1553     printf("==========OH_AI_TrainModelBuildFromFile==========\n");
1554     auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/xiaoyi_train_codegen.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
1555     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1556     printf("==========OH_AI_ExportModel==========\n");
1557     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model1.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1558     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1559     const std::vector<std::string> changeble_weights_name = {"app_usage_statistic_30_cell.embedding.embedding_table",
1560                                                  "moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1561                                                  "moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1562                                                  "data-57"};
1563     char **set_changeble_weights_name = TransStrVectorToCharArrays(changeble_weights_name);
1564     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, "", true, true, set_changeble_weights_name, changeble_weights_name.size());
1565     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1566     OH_AI_ContextDestroy(&context);
1567     OH_AI_ModelDestroy(&model);
1568 }
1569 // Abnormal scenario: weight_file path is a directory
1570 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportWeights_0005, Function | MediumTest | Level0) {
1571     printf("==========OH_AI_ContextCreate==========\n");
1572     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1573     ASSERT_NE(context, nullptr);
1574     AddContextDeviceCPU(context);
1575     printf("==========OH_AI_ModelCreate==========\n");
1576     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1577     ASSERT_NE(model, nullptr);
1578     printf("==========OH_AI_TrainCfgCreate==========\n");
1579     OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
1580     ASSERT_NE(train_cfg, nullptr);
1581     std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn", "SigmoidCrossEntropy", "BinaryCrossEntropy"};
1582     char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
1583     OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
1584     printf("==========OH_AI_TrainModelBuildFromFile==========\n");
1585     auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/xiaoyi_train_codegen.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
1586     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1587     printf("==========OH_AI_ExportModel==========\n");
1588     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model1.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1589     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1590     const std::vector<std::string> changeble_weights_name = {"app_usage_statistic_30_cell.embedding.embedding_table",
1591                                                  "moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1592                                                  "moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1593                                                  "data-57"};
1594     char **set_changeble_weights_name = TransStrVectorToCharArrays(changeble_weights_name);
1595     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, "/data/test/", true, true, set_changeble_weights_name, changeble_weights_name.size());
1596     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1597     OH_AI_ContextDestroy(&context);
1598     OH_AI_ModelDestroy(&model);
1599 }
1600 // Abnormal scenario: is_inference is false
1601 HWTEST(MSLiteTest, SUB_AI_MindSpore_Train_ExportWeights_0006, Function | MediumTest | Level0) {
1602     printf("==========OH_AI_ContextCreate==========\n");
1603     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1604     ASSERT_NE(context, nullptr);
1605     AddContextDeviceCPU(context);
1606     printf("==========OH_AI_ModelCreate==========\n");
1607     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1608     ASSERT_NE(model, nullptr);
1609     printf("==========OH_AI_TrainCfgCreate==========\n");
1610     OH_AI_TrainCfgHandle train_cfg = OH_AI_TrainCfgCreate();
1611     ASSERT_NE(train_cfg, nullptr);
1612     std::vector<std::string> set_train_cfg_loss_name = {"loss_fct", "_loss_fn", "SigmoidCrossEntropy", "BinaryCrossEntropy"};
1613     char **set_loss_name = TransStrVectorToCharArrays(set_train_cfg_loss_name);
1614     OH_AI_TrainCfgSetLossName(train_cfg, const_cast<const char **>(set_loss_name), set_train_cfg_loss_name.size());
1615     printf("==========OH_AI_TrainModelBuildFromFile==========\n");
1616     auto status = OH_AI_TrainModelBuildFromFile(model, "/data/test/xiaoyi_train_codegen.ms", OH_AI_MODELTYPE_MINDIR, context, train_cfg);
1617     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1618     printf("==========OH_AI_ExportModel==========\n");
1619     status = OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_gru_model1.ms", OH_AI_NO_QUANT, true, nullptr, 0);
1620     ASSERT_EQ(status, OH_AI_STATUS_SUCCESS);
1621     const std::vector<std::string> changeble_weights_name = {"app_usage_statistic_30_cell.embedding.embedding_table",
1622                                                  "moment1.app_usage_statistic_30_cell.embedding.embedding_table",
1623                                                  "moment2.app_usage_statistic_30_cell.embedding.embedding_table",
1624                                                  "data-57"};
1625     char **set_changeble_weights_name = TransStrVectorToCharArrays(changeble_weights_name);
1626     status = OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, "/data/test/xiaoyi_train_codegen_net1.bin", false, true, set_changeble_weights_name, changeble_weights_name.size());
1627     ASSERT_NE(status, OH_AI_STATUS_SUCCESS);
1628     OH_AI_ContextDestroy(&context);
1629     OH_AI_ModelDestroy(&model);
1630 }
1631 
1632 
1633 // predict on cpu
1634 void Predict_CPU() {
1635     printf("==========Init Context==========\n");
1636     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1637     ASSERT_NE(context, nullptr);
1638     AddContextDeviceCPU(context);
1639     printf("==========Create model==========\n");
1640     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1641     ASSERT_NE(model, nullptr);
1642     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1643 }
1644 
1645 // predict on NPU
1646 void Predict_NPU() {
1647     if (!IsNPU()) {
1648         printf("NNRt device is not NPU, skip this test\n");
1649         return;
1650     }
1651     printf("==========Init Context==========\n");
1652     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1653     ASSERT_NE(context, nullptr);
1654     AddContextDeviceNNRT(context);
1655     printf("==========Create model==========\n");
1656     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1657     ASSERT_NE(model, nullptr);
1658     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1659 }
1660 
1661 // Normal scenario: context uses CPU, default settings, no thread affinity
1662 HWTEST(MSLiteTest, OHOS_Context_CPU_0001, Function | MediumTest | Level0) {
1663     printf("==========Init Context==========\n");
1664     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1665     ASSERT_NE(context, nullptr);
1666     AddContextDeviceCPU(context);
1667     printf("==========Create model==========\n");
1668     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1669     ASSERT_NE(model, nullptr);
1670     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1671 }
1672 
1673 // Normal scenario: context uses CPU with 4 threads
1674 HWTEST(MSLiteTest, OHOS_Context_CPU_0002, Function | MediumTest | Level0) {
1675     printf("==========Init Context==========\n");
1676     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1677     ASSERT_NE(context, nullptr);
1678     OH_AI_ContextSetThreadNum(context, 4);
1679     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1680     printf("==========thread_num:%d\n", thread_num);
1681     ASSERT_EQ(thread_num, 4);
1682     AddContextDeviceCPU(context);
1683     printf("==========Create model==========\n");
1684     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1685     ASSERT_NE(model, nullptr);
1686     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1687 }
1688 
1689 // Normal scenario: context uses CPU with 2 threads
1690 HWTEST(MSLiteTest, OHOS_Context_CPU_0003, Function | MediumTest | Level0) {
1691     printf("==========Init Context==========\n");
1692     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1693     ASSERT_NE(context, nullptr);
1694     OH_AI_ContextSetThreadNum(context, 2);
1695     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1696     printf("==========thread_num:%d\n", thread_num);
1697     ASSERT_EQ(thread_num, 2);
1698     AddContextDeviceCPU(context);
1699     printf("==========Create model==========\n");
1700     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1701     ASSERT_NE(model, nullptr);
1702     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1703 }
1704 
1705 // Normal scenario: context uses CPU with 1 thread
1706 HWTEST(MSLiteTest, OHOS_Context_CPU_0004, Function | MediumTest | Level0) {
1707     printf("==========Init Context==========\n");
1708     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1709     ASSERT_NE(context, nullptr);
1710     OH_AI_ContextSetThreadNum(context, 1);
1711     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1712     printf("==========thread_num:%d\n", thread_num);
1713     ASSERT_EQ(thread_num, 1);
1714     AddContextDeviceCPU(context);
1715     printf("==========Create model==========\n");
1716     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1717     ASSERT_NE(model, nullptr);
1718     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1719 }
1720 
1721 // Abnormal scenario: context uses CPU with 0 threads
1722 HWTEST(MSLiteTest, OHOS_Context_CPU_0005, Function | MediumTest | Level0) {
1723     printf("==========Init Context==========\n");
1724     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1725     ASSERT_NE(context, nullptr);
1726     OH_AI_ContextSetThreadNum(context, 0);
1727     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1728     printf("==========thread_num:%d\n", thread_num);
1729     ASSERT_EQ(thread_num, 0);
1730     AddContextDeviceCPU(context);
1731     printf("==========Create model==========\n");
1732     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1733     ASSERT_NE(model, nullptr);
1734     printf("==========Build model==========\n");
1735     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
1736     printf("==========build model return code:%d\n", ret);
1737     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
1738     OH_AI_ContextDestroy(&context);
1739     OH_AI_ModelDestroy(&model);
1740 }
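// A thread number of 0 is accepted by OH_AI_ContextSetThreadNum (the getter reads it back as
// 0) and OH_AI_ModelBuildFromFile still succeeds; presumably the runtime falls back to a
// default thread count, though that fallback is not asserted here.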
1741 
1742 // Normal scenario: context uses CPU, no core binding
1743 HWTEST(MSLiteTest, OHOS_Context_CPU_0006, Function | MediumTest | Level0) {
1744     printf("==========Init Context==========\n");
1745     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1746     ASSERT_NE(context, nullptr);
1747     OH_AI_ContextSetThreadNum(context, 4);
1748     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1749     printf("==========thread_num:%d\n", thread_num);
1750     ASSERT_EQ(thread_num, 4);
1751     OH_AI_ContextSetThreadAffinityMode(context, 0);
1752     int thread_affinity_mode = OH_AI_ContextGetThreadAffinityMode(context);
1753     printf("==========thread_affinity_mode:%d\n", thread_affinity_mode);
1754     ASSERT_EQ(thread_affinity_mode, 0);
1755     AddContextDeviceCPU(context);
1756     printf("==========Create model==========\n");
1757     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1758     ASSERT_NE(model, nullptr);
1759     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1760 }
1761 
1762 // Normal scenario: context uses CPU, bind to big cores
1763 HWTEST(MSLiteTest, OHOS_Context_CPU_0007, Function | MediumTest | Level0) {
1764     printf("==========Init Context==========\n");
1765     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1766     ASSERT_NE(context, nullptr);
1767     OH_AI_ContextSetThreadNum(context, 4);
1768     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1769     printf("==========thread_num:%d\n", thread_num);
1770     ASSERT_EQ(thread_num, 4);
1771     OH_AI_ContextSetThreadAffinityMode(context, 1);
1772     int thread_affinity_mode = OH_AI_ContextGetThreadAffinityMode(context);
1773     printf("==========thread_affinity_mode:%d\n", thread_affinity_mode);
1774     ASSERT_EQ(thread_affinity_mode, 1);
1775     AddContextDeviceCPU(context);
1776     printf("==========Create model==========\n");
1777     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1778     ASSERT_NE(model, nullptr);
1779     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1780 }
1781 
1782 // Normal scenario: context uses CPU, bind to middle cores
1783 HWTEST(MSLiteTest, OHOS_Context_CPU_0008, Function | MediumTest | Level0) {
1784     printf("==========Init Context==========\n");
1785     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1786     ASSERT_NE(context, nullptr);
1787     OH_AI_ContextSetThreadNum(context, 4);
1788     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1789     printf("==========thread_num:%d\n", thread_num);
1790     ASSERT_EQ(thread_num, 4);
1791     OH_AI_ContextSetThreadAffinityMode(context, 2);
1792     int thread_affinity_mode = OH_AI_ContextGetThreadAffinityMode(context);
1793     printf("==========thread_affinity_mode:%d\n", thread_affinity_mode);
1794     ASSERT_EQ(thread_affinity_mode, 2);
1795     AddContextDeviceCPU(context);
1796     printf("==========Create model==========\n");
1797     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1798     ASSERT_NE(model, nullptr);
1799     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1800 }
1801 
1802 // Abnormal scenario: context uses CPU, core binding fails (invalid affinity mode)
1803 HWTEST(MSLiteTest, OHOS_Context_CPU_0009, Function | MediumTest | Level0) {
1804     printf("==========Init Context==========\n");
1805     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1806     ASSERT_NE(context, nullptr);
1807     OH_AI_ContextSetThreadNum(context, 4);
1808     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1809     printf("==========thread_num:%d\n", thread_num);
1810     ASSERT_EQ(thread_num, 4);
1811     OH_AI_ContextSetThreadAffinityMode(context, 3);
1812     int thread_affinity_mode = OH_AI_ContextGetThreadAffinityMode(context);
1813     printf("==========thread_affinity_mode:%d\n", thread_affinity_mode);
1814     ASSERT_EQ(thread_affinity_mode, 0);
1815     AddContextDeviceCPU(context);
1816     printf("==========Create model==========\n");
1817     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1818     ASSERT_NE(model, nullptr);
1819     printf("==========Build model==========\n");
1820     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
1821     printf("==========build model return code:%d\n", ret);
1822     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
1823     OH_AI_ContextDestroy(&context);
1824     OH_AI_ModelDestroy(&model);
1825 }
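// Affinity mode 3 is outside the range used elsewhere in these tests (0: no binding,
// 1: big cores, 2: middle cores), so the getter is expected to report 0 and model building
// proceeds as if no binding had been requested.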
1826 
1827 // Normal scenario: context uses CPU, core binding list {0,1,2,3}
1828 HWTEST(MSLiteTest, OHOS_Context_CPU_0010, Function | MediumTest | Level0) {
1829     printf("==========Init Context==========\n");
1830     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1831     ASSERT_NE(context, nullptr);
1832     OH_AI_ContextSetThreadNum(context, 4);
1833     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1834     printf("==========thread_num:%d\n", thread_num);
1835     ASSERT_EQ(thread_num, 4);
1836     constexpr size_t core_num = 4;
1837     int32_t core_list[core_num] = {0, 1, 2, 3};
1838     OH_AI_ContextSetThreadAffinityCoreList(context, core_list, core_num);
1839     size_t ret_core_num;
1840     int32_t *ret_core_list = nullptr;
1841     ret_core_list = const_cast<int32_t *>(OH_AI_ContextGetThreadAffinityCoreList(context, &ret_core_num));
1842     ASSERT_EQ(ret_core_num, core_num);
1843     for (size_t i = 0; i < ret_core_num; i++) {
1844         printf("==========ret_core_list:%d\n", ret_core_list[i]);
1845         ASSERT_EQ(ret_core_list[i], core_list[i]);
1846     }
1847     free(ret_core_list);
1848     AddContextDeviceCPU(context);
1849     printf("==========Create model==========\n");
1850     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1851     ASSERT_NE(model, nullptr);
1852     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1853 }
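// OH_AI_ContextGetThreadAffinityCoreList returns the configured core ids together with their
// count; the test releases the returned list with free(), treating it as caller-owned memory.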
1854 
1855 // Normal scenario: context uses CPU, core binding list and affinity mode enabled together
1856 HWTEST(MSLiteTest, OHOS_Context_CPU_0011, Function | MediumTest | Level0) {
1857     printf("==========Init Context==========\n");
1858     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1859     ASSERT_NE(context, nullptr);
1860     OH_AI_ContextSetThreadNum(context, 4);
1861     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1862     printf("==========thread_num:%d\n", thread_num);
1863     ASSERT_EQ(thread_num, 4);
1864     OH_AI_ContextSetThreadAffinityMode(context, 1);
1865     int thread_affinity_mode = OH_AI_ContextGetThreadAffinityMode(context);
1866     printf("==========thread_affinity_mode:%d\n", thread_affinity_mode);
1867     constexpr size_t core_num = 4;
1868     int32_t core_list[core_num] = {0, 1, 3, 4};
1869     OH_AI_ContextSetThreadAffinityCoreList(context, core_list, core_num);
1870     size_t ret_core_num;
1871     int32_t *ret_core_list = nullptr;
1872     ret_core_list = const_cast<int32_t *>(OH_AI_ContextGetThreadAffinityCoreList(context, &ret_core_num));
1873     ASSERT_EQ(ret_core_num, core_num);
1874     for (size_t i = 0; i < ret_core_num; i++) {
1875         printf("==========ret_core_list:%d\n", ret_core_list[i]);
1876         ASSERT_EQ(ret_core_list[i], core_list[i]);
1877     }
1878     free(ret_core_list);
1879     AddContextDeviceCPU(context);
1880     printf("==========Create model==========\n");
1881     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1882     ASSERT_NE(model, nullptr);
1883     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1884 }
1885 
1886 // Normal scenario: context uses CPU, parallelism enabled
1887 HWTEST(MSLiteTest, OHOS_Context_CPU_0012, Function | MediumTest | Level0) {
1888     printf("==========Init Context==========\n");
1889     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1890     ASSERT_NE(context, nullptr);
1891     OH_AI_ContextSetThreadNum(context, 4);
1892     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1893     printf("==========thread_num:%d\n", thread_num);
1894     ASSERT_EQ(thread_num, 4);
1895     OH_AI_ContextSetEnableParallel(context, true);
1896     bool is_parallel = OH_AI_ContextGetEnableParallel(context);
1897     printf("==========is_parallel:%d\n", is_parallel);
1898     ASSERT_EQ(is_parallel, true);
1899     AddContextDeviceCPU(context);
1900     printf("==========Create model==========\n");
1901     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1902     ASSERT_NE(model, nullptr);
1903     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1904 }
1905 
1906 // Normal scenario: context uses CPU, parallelism disabled
1907 HWTEST(MSLiteTest, OHOS_Context_CPU_0013, Function | MediumTest | Level0) {
1908     printf("==========Init Context==========\n");
1909     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1910     ASSERT_NE(context, nullptr);
1911     OH_AI_ContextSetThreadNum(context, 4);
1912     int32_t thread_num = OH_AI_ContextGetThreadNum(context);
1913     printf("==========thread_num:%d\n", thread_num);
1914     ASSERT_EQ(thread_num, 4);
1915     OH_AI_ContextSetEnableParallel(context, false);
1916     bool is_parallel = OH_AI_ContextGetEnableParallel(context);
1917     printf("==========is_parallel:%d\n", is_parallel);
1918     ASSERT_EQ(is_parallel, false);
1919     AddContextDeviceCPU(context);
1920     printf("==========Create model==========\n");
1921     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1922     ASSERT_NE(model, nullptr);
1923     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1924 }
1925 
1926 // Normal scenario: context uses CPU, fp16 enabled
1927 HWTEST(MSLiteTest, OHOS_Context_CPU_0014, Function | MediumTest | Level0) {
1928     printf("==========Init Context==========\n");
1929     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1930     ASSERT_NE(context, nullptr);
1931     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
1932     ASSERT_NE(cpu_device_info, nullptr);
1933     OH_AI_DeviceInfoSetEnableFP16(cpu_device_info, true);
1934     bool is_fp16 = OH_AI_DeviceInfoGetEnableFP16(cpu_device_info);
1935     printf("==========is_fp16:%d\n", is_fp16);
1936     ASSERT_EQ(is_fp16, true);
1937     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
1938     printf("==========Create model==========\n");
1939     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1940     ASSERT_NE(model, nullptr);
1941     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1942 }
1943 
1944 // Normal scenario: context uses CPU, fp16 disabled
1945 HWTEST(MSLiteTest, OHOS_Context_CPU_0015, Function | MediumTest | Level0) {
1946     printf("==========Init Context==========\n");
1947     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1948     ASSERT_NE(context, nullptr);
1949     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
1950     ASSERT_NE(cpu_device_info, nullptr);
1951     OH_AI_DeviceInfoSetEnableFP16(cpu_device_info, false);
1952     bool is_fp16 = OH_AI_DeviceInfoGetEnableFP16(cpu_device_info);
1953     printf("==========is_fp16:%d\n", is_fp16);
1954     ASSERT_EQ(is_fp16, false);
1955     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
1956     printf("==========Create model==========\n");
1957     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1958     ASSERT_NE(model, nullptr);
1959     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1960 }
1961 
1962 // Normal scenario: context uses CPU, set the provider (vendor) name
1963 HWTEST(MSLiteTest, OHOS_Context_CPU_0016, Function | MediumTest | Level0) {
1964     printf("==========Init Context==========\n");
1965     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1966     ASSERT_NE(context, nullptr);
1967     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
1968     ASSERT_NE(cpu_device_info, nullptr);
1969     OH_AI_DeviceInfoSetProvider(cpu_device_info, "vendor_new");
1970     char *proInfo = const_cast<char *>(OH_AI_DeviceInfoGetProvider(cpu_device_info));
1971     ASSERT_EQ(strcmp(proInfo, "vendor_new"), 0);
1972     free(proInfo);
1973     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
1974     printf("==========Create model==========\n");
1975     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1976     ASSERT_NE(model, nullptr);
1977     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1978 }
1979 
1980 // Normal scenario: context uses CPU, set the provider device type
1981 HWTEST(MSLiteTest, OHOS_Context_CPU_0017, Function | MediumTest | Level0) {
1982     printf("==========Init Context==========\n");
1983     OH_AI_ContextHandle context = OH_AI_ContextCreate();
1984     ASSERT_NE(context, nullptr);
1985     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
1986     ASSERT_NE(cpu_device_info, nullptr);
1987     OH_AI_DeviceInfoSetProviderDevice(cpu_device_info, "cpu_new");
1988     char *proInfo = const_cast<char *>(OH_AI_DeviceInfoGetProviderDevice(cpu_device_info));
1989     ASSERT_EQ(strcmp(proInfo, "cpu_new"), 0);
1990     free(proInfo);
1991     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
1992     printf("==========Create model==========\n");
1993     OH_AI_ModelHandle model = OH_AI_ModelCreate();
1994     ASSERT_NE(model, nullptr);
1995     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
1996 }
1997 
1998 // Normal scenario: context uses CPU, destroy MSDeviceInfo
1999 HWTEST(MSLiteTest, OHOS_Context_CPU_0018, Function | MediumTest | Level0) {
2000     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
2001     ASSERT_NE(cpu_device_info, nullptr);
2002     OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(cpu_device_info);
2003     printf("==========device_type:%d\n", device_type);
2004     ASSERT_EQ(device_type, OH_AI_DEVICETYPE_CPU);
2005     OH_AI_DeviceInfoDestroy(&cpu_device_info);
2006     ASSERT_EQ(cpu_device_info, nullptr);
2007 }
2008 
2009 // Normal scenario: context uses CPU, destroy OH_AI_Context
2010 HWTEST(MSLiteTest, OHOS_Context_CPU_0019, Function | MediumTest | Level0) {
2011     printf("==========Init Context==========\n");
2012     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2013     ASSERT_NE(context, nullptr);
2014     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
2015     ASSERT_NE(cpu_device_info, nullptr);
2016     OH_AI_DeviceType device_type = OH_AI_DeviceInfoGetDeviceType(cpu_device_info);
2017     printf("==========device_type:%d\n", device_type);
2018     ASSERT_EQ(device_type, OH_AI_DEVICETYPE_CPU);
2019     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
2020     OH_AI_ContextDestroy(&context);
2021     ASSERT_EQ(context, nullptr);
2022 }
2023 
2024 // Abnormal scenario: context has no device info set
2025 HWTEST(MSLiteTest, OHOS_Context_CPU_0020, Function | MediumTest | Level0) {
2026     printf("==========Init Context==========\n");
2027     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2028     ASSERT_NE(context, nullptr);
2029     printf("==========Create model==========\n");
2030     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2031     ASSERT_NE(model, nullptr);
2032     printf("==========Build model==========\n");
2033     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2034     printf("==========build model return code:%d\n", ret);
2035     ASSERT_EQ(ret, OH_AI_STATUS_LITE_NULLPTR);
2036     OH_AI_ContextDestroy(&context);
2037     OH_AI_ModelDestroy(&model);
2038 }
2039 
2040 // Normal scenario: context uses NPU with frequency 1
2041 HWTEST(MSLiteTest, OHOS_Context_NPU_0002, Function | MediumTest | Level0) {
2042     printf("==========Init Context==========\n");
2043     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2044     ASSERT_NE(context, nullptr);
2045     OH_AI_DeviceInfoHandle npu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_KIRIN_NPU);
2046     ASSERT_NE(npu_device_info, nullptr);
2047     OH_AI_DeviceInfoSetFrequency(npu_device_info, 1);
2048     int frequency = OH_AI_DeviceInfoGetFrequency(npu_device_info);
2049     ASSERT_EQ(frequency, 1);
2050     OH_AI_ContextAddDeviceInfo(context, npu_device_info);
2051     printf("==========Create model==========\n");
2052     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2053     ASSERT_NE(model, nullptr);
2054     OH_AI_ContextDestroy(&context);
2055     OH_AI_ModelDestroy(&model);
2056 }
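// OH_AI_DeviceInfoSetFrequency applies to KIRIN_NPU device info; a value of 1 is assumed
// here to select the lowest frequency level, and the test only verifies the set/get round
// trip (the model is created but never built in this case).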
2057 
2058 // Normal scenario: ModelBuild, build from a memory buffer (pointer API)
2059 HWTEST(MSLiteTest, OHOS_Model_Build_0001, Function | MediumTest | Level0) {
2060     printf("==========Init Context==========\n");
2061     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2062     ASSERT_NE(context, nullptr);
2063     AddContextDeviceCPU(context);
2064     printf("==========Create model==========\n");
2065     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2066     ASSERT_NE(model, nullptr);
2067     ModelPredict(model, context, "ml_face_isface", {}, true, true, false);
2068 }
2069 
2070 // Abnormal scenario: ModelBuild, model_data does not point to model data
2071 HWTEST(MSLiteTest, OHOS_Model_Build_0002, Function | MediumTest | Level0) {
2072     printf("==========ReadFile==========\n");
2073     size_t size1;
2074     size_t *ptr_size1 = &size1;
2075     const char *imagePath = "/data/test/ml_face_isface.input";
2076     char *imageBuf = ReadFile(imagePath, ptr_size1);
2077     ASSERT_NE(imageBuf, nullptr);
2078     printf("==========Init Context==========\n");
2079     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2080     ASSERT_NE(context, nullptr);
2081     AddContextDeviceCPU(context);
2082     printf("==========Create model==========\n");
2083     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2084     ASSERT_NE(model, nullptr);
2085     printf("==========Build model==========\n");
2086     OH_AI_Status ret = OH_AI_ModelBuild(model, imageBuf, size1, OH_AI_MODELTYPE_MINDIR, context);
2087     printf("==========build model return code:%d\n", ret);
2088     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
2089     delete[] imageBuf;
2090     OH_AI_ContextDestroy(&context);
2091     OH_AI_ModelDestroy(&model);
2092 }
2093 
2094 // Abnormal scenario: ModelBuild with model_data set to null
2095 HWTEST(MSLiteTest, OHOS_Model_Build_0003, Function | MediumTest | Level0) {
2096     printf("==========Init Context==========\n");
2097     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2098     ASSERT_NE(context, nullptr);
2099     AddContextDeviceCPU(context);
2100     printf("==========Create model==========\n");
2101     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2102     ASSERT_NE(model, nullptr);
2103     printf("==========Build model==========\n");
2104     OH_AI_Status ret = OH_AI_ModelBuild(model, nullptr, 0, OH_AI_MODELTYPE_MINDIR, context);
2105     printf("==========build model return code:%d\n", ret);
2106     ASSERT_EQ(ret, OH_AI_STATUS_LITE_NULLPTR);
2107     OH_AI_ContextDestroy(&context);
2108     OH_AI_ModelDestroy(&model);
2109 }
2110 
2111 // Abnormal scenario: ModelBuild with data_size of 0
2112 HWTEST(MSLiteTest, OHOS_Model_Build_0004, Function | MediumTest | Level0) {
2113     printf("==========ReadFile==========\n");
2114     size_t size;
2115     size_t *ptr_size = &size;
2116     const char *graphPath = "/data/test/ml_face_isface.ms";
2117     char *graphBuf = ReadFile(graphPath, ptr_size);
2118     ASSERT_NE(graphBuf, nullptr);
2119     printf("==========Init Context==========\n");
2120     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2121     ASSERT_NE(context, nullptr);
2122     AddContextDeviceCPU(context);
2123     printf("==========Create model==========\n");
2124     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2125     ASSERT_NE(model, nullptr);
2126     printf("==========Build model==========\n");
2127     OH_AI_Status ret = OH_AI_ModelBuild(model, graphBuf, 0, OH_AI_MODELTYPE_MINDIR, context);
2128     printf("==========build model return code:%d\n", ret);
2129     ASSERT_EQ(ret, OH_AI_STATUS_LITE_INPUT_PARAM_INVALID);
2130     delete[] graphBuf;
2131     OH_AI_ContextDestroy(&context);
2132     OH_AI_ModelDestroy(&model);
2133 }
2134 
2135 // Abnormal scenario: ModelBuildFromFile where the path does not point to a model file
2136 HWTEST(MSLiteTest, OHOS_Model_Build_0005, Function | MediumTest | Level0) {
2137     printf("==========Init Context==========\n");
2138     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2139     ASSERT_NE(context, nullptr);
2140     AddContextDeviceCPU(context);
2141     printf("==========Create model==========\n");
2142     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2143     ASSERT_NE(model, nullptr);
2144     printf("==========Build model==========\n");
2145     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.input", OH_AI_MODELTYPE_MINDIR, context);
2146     printf("==========build model return code:%d\n", ret);
2147     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
2148     OH_AI_ContextDestroy(&context);
2149     OH_AI_ModelDestroy(&model);
2150 }
2151 
2152 // Abnormal scenario: ModelBuildFromFile with an empty path
2153 HWTEST(MSLiteTest, OHOS_Model_Build_0006, Function | MediumTest | Level0) {
2154     printf("==========Init Context==========\n");
2155     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2156     ASSERT_NE(context, nullptr);
2157     AddContextDeviceCPU(context);
2158     printf("==========Create model==========\n");
2159     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2160     ASSERT_NE(model, nullptr);
2161     printf("==========Build model==========\n");
2162     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "", OH_AI_MODELTYPE_MINDIR, context);
2163     printf("==========build model return code:%d\n", ret);
2164     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
2165     OH_AI_ContextDestroy(&context);
2166     OH_AI_ModelDestroy(&model);
2167 }
2168 
2169 // Abnormal scenario: ModelBuild with an unsupported model_type
2170 HWTEST(MSLiteTest, OHOS_Model_Build_0007, Function | MediumTest | Level0) {
2171     printf("==========Init Context==========\n");
2172     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2173     ASSERT_NE(context, nullptr);
2174     AddContextDeviceCPU(context);
2175     printf("==========Create model==========\n");
2176     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2177     ASSERT_NE(model, nullptr);
2178     printf("==========Build model==========\n");
2179     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_INVALID, context);
2180     printf("==========build model return code:%d\n", ret);
2181     ASSERT_EQ(ret, OH_AI_STATUS_LITE_PARAM_INVALID);
2182     OH_AI_ContextDestroy(&context);
2183     OH_AI_ModelDestroy(&model);
2184 }
2185 
2186 // Abnormal scenario: ModelBuild with model_context set to null
2187 HWTEST(MSLiteTest, OHOS_Model_Build_0008, Function | MediumTest | Level0) {
2188     printf("==========Init Context==========\n");
2189     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2190     ASSERT_NE(context, nullptr);
2191     AddContextDeviceCPU(context);
2192     printf("==========Create model==========\n");
2193     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2194     ASSERT_NE(model, nullptr);
2195     printf("==========Build model==========\n");
2196     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, nullptr);
2197     printf("==========build model return code:%d\n", ret);
2198     ASSERT_EQ(ret, OH_AI_STATUS_LITE_NULLPTR);
2199     OH_AI_ContextDestroy(&context);
2200     OH_AI_ModelDestroy(&model);
2201 }
2202 
2203 // Normal scenario: ModelBuild, then fetch the outputs via GetOutputs
2204 HWTEST(MSLiteTest, OHOS_Model_Build_0009, Function | MediumTest | Level0) {
2205     printf("==========Init Context==========\n");
2206     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2207     ASSERT_NE(context, nullptr);
2208     AddContextDeviceCPU(context);
2209     printf("==========Create model==========\n");
2210     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2211     ASSERT_NE(model, nullptr);
2212     printf("==========Model build==========\n");
2213     OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2214     printf("==========Model Predict==========\n");
2215     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2216     OH_AI_TensorHandleArray output;
2217     FillInputsData(inputs, "ml_face_isface", false);
2218     OH_AI_Status ret = OH_AI_ModelPredict(model, inputs, &output, nullptr, nullptr);
2219     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2220     printf("==========GetOutput==========\n");
2221     OH_AI_TensorHandleArray outputs = OH_AI_ModelGetOutputs(model);
2222     for (size_t i = 0; i < outputs.handle_num; ++i) {
2223         OH_AI_TensorHandle tensor = outputs.handle_list[i];
2224         int64_t element_num = OH_AI_TensorGetElementNum(tensor);
2225         printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
2226         float *output_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
2227         printf("output data is:");
2228         for (int j = 0; j < element_num && j <= 20; ++j) {
2229             printf("%f ", output_data[j]);
2230         }
2231         printf("\n");
2232         printf("==========compFp32WithTData==========\n");
2233         string expectedDataFile = "/data/test/ml_face_isface" + std::to_string(i) + ".output";
2234         bool result = compFp32WithTData(output_data, expectedDataFile, 0.01, 0.01, false);
2235         EXPECT_EQ(result, true);
2236     }
2237     OH_AI_ContextDestroy(&context);
2238     OH_AI_ModelDestroy(&model);
2239 }
2240 
2241 // Normal scenario: ModelResize with the same shape as before
2242 HWTEST(MSLiteTest, OHOS_Model_Resize_0001, Function | MediumTest | Level0) {
2243     printf("==========Init Context==========\n");
2244     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2245     ASSERT_NE(context, nullptr);
2246     AddContextDeviceCPU(context);
2247     printf("==========Create model==========\n");
2248     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2249     ASSERT_NE(model, nullptr);
2250     ModelPredict(model, context, "ml_ocr_cn", {4, {1, 32, 512, 1}}, false, true, false);
2251 }
2252 
2253 // Normal scenario: ModelResize with a shape different from before
2254 HWTEST(MSLiteTest, OHOS_Model_Resize_0002, Function | MediumTest | Level0) {
2255     printf("==========Init Context==========\n");
2256     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2257     ASSERT_NE(context, nullptr);
2258     AddContextDeviceCPU(context);
2259     printf("==========Create model==========\n");
2260     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2261     ASSERT_NE(model, nullptr);
2262     printf("==========Build model==========\n");
2263     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_ocr_cn.ms", OH_AI_MODELTYPE_MINDIR, context);
2264     printf("==========build model return code:%d\n", ret);
2265     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2266     printf("==========GetInputs==========\n");
2267     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2268     ASSERT_NE(inputs.handle_list, nullptr);
2269     printf("==========Resizes==========\n");
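    // The new shape {1, 64, 256, 1} keeps rank 4 but differs from the shape used in the
    // test above ({1, 32, 512, 1}), exercising OH_AI_ModelResize with changed H/W values.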
2270     OH_AI_ShapeInfo shape_infos = {4, {1, 64, 256, 1}};
2271     OH_AI_Status resize_ret = OH_AI_ModelResize(model, inputs, &shape_infos, inputs.handle_num);
2272     printf("==========Resizes return code:%d\n", resize_ret);
2273     ASSERT_EQ(resize_ret, OH_AI_STATUS_SUCCESS);
2274     FillInputsData(inputs, "ml_ocr_cn", false);
2275     OH_AI_TensorHandleArray outputs;
2276     printf("==========Model Predict==========\n");
2277     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2278     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
2279     OH_AI_ContextDestroy(&context);
2280     OH_AI_ModelDestroy(&model);
2281 }
2282 
2283 // Abnormal scenario: ModelResize with a three-dimensional shape
2284 HWTEST(MSLiteTest, OHOS_Model_Resize_0003, Function | MediumTest | Level0) {
2285     printf("==========Init Context==========\n");
2286     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2287     ASSERT_NE(context, nullptr);
2288     AddContextDeviceCPU(context);
2289     printf("==========Create model==========\n");
2290     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2291     ASSERT_NE(model, nullptr);
2292     printf("==========Build model==========\n");
2293     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_ocr_cn.ms", OH_AI_MODELTYPE_MINDIR, context);
2294     printf("==========build model return code:%d\n", ret);
2295     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2296     printf("==========GetInputs==========\n");
2297     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2298     ASSERT_NE(inputs.handle_list, nullptr);
2299     printf("==========Resizes==========\n");
2300     OH_AI_ShapeInfo shape_infos = {4, {1, 32, 1}};
2301     ret = OH_AI_ModelResize(model, inputs, &shape_infos, inputs.handle_num);
2302     printf("==========Resizes return code:%d\n", ret);
2303     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
2304     OH_AI_ContextDestroy(&context);
2305     OH_AI_ModelDestroy(&model);
2306 }
2307 
2308 // Abnormal scenario: ModelResize with a negative value in the shape
2309 HWTEST(MSLiteTest, OHOS_Model_Resize_0004, Function | MediumTest | Level0) {
2310     printf("==========Init Context==========\n");
2311     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2312     ASSERT_NE(context, nullptr);
2313     AddContextDeviceCPU(context);
2314     printf("==========Create model==========\n");
2315     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2316     ASSERT_NE(model, nullptr);
2317     printf("==========Build model==========\n");
2318     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_ocr_cn.ms", OH_AI_MODELTYPE_MINDIR, context);
2319     printf("==========build model return code:%d\n", ret);
2320     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2321     printf("==========GetInputs==========\n");
2322     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2323     ASSERT_NE(inputs.handle_list, nullptr);
2324     printf("==========Resizes==========\n");
2325     OH_AI_ShapeInfo shape_infos = {4, {1, -32, 32, 1}};
2326     ret = OH_AI_ModelResize(model, inputs, &shape_infos, inputs.handle_num);
2327     printf("==========Resizes return code:%d\n", ret);
2328     ASSERT_EQ(ret, OH_AI_STATUS_LITE_PARAM_INVALID);
2329     OH_AI_ContextDestroy(&context);
2330     OH_AI_ModelDestroy(&model);
2331 }
2332 
2333 // Abnormal scenario: ModelResize on a model that does not support resizing
2334 HWTEST(MSLiteTest, OHOS_Model_Resize_0005, Function | MediumTest | Level0) {
2335     printf("==========Init Context==========\n");
2336     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2337     ASSERT_NE(context, nullptr);
2338     AddContextDeviceCPU(context);
2339     printf("==========Create model==========\n");
2340     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2341     ASSERT_NE(model, nullptr);
2342     printf("==========Build model==========\n");
2343     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2344     printf("==========build model return code:%d\n", ret);
2345     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2346     printf("==========GetInputs==========\n");
2347     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2348     ASSERT_NE(inputs.handle_list, nullptr);
2349     printf("==========Resizes==========\n");
2350     OH_AI_ShapeInfo shape_infos = {4, {1, 96, 96, 1}};
2351     ret = OH_AI_ModelResize(model, inputs, &shape_infos, inputs.handle_num);
2352     printf("==========Resizes return code:%d\n", ret);
2353     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
2354     OH_AI_ContextDestroy(&context);
2355     OH_AI_ModelDestroy(&model);
2356 }
2357 
2358 // Normal scenario: ModelPredict
2359 HWTEST(MSLiteTest, OHOS_Model_Predict_0001, Function | MediumTest | Level0) {
2360     printf("==========Init Context==========\n");
2361     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2362     ASSERT_NE(context, nullptr);
2363     AddContextDeviceCPU(context);
2364     printf("==========Create model==========\n");
2365     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2366     ASSERT_NE(model, nullptr);
2367     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
2368 }
2369 
2370 // Abnormal scenario: ModelPredict after the model has been destroyed
2371 HWTEST(MSLiteTest, OHOS_Model_Predict_0002, Function | MediumTest | Level0) {
2372     printf("==========Init Context==========\n");
2373     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2374     ASSERT_NE(context, nullptr);
2375     AddContextDeviceCPU(context);
2376     printf("==========Create model==========\n");
2377     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2378     ASSERT_NE(model, nullptr);
2379     printf("==========Build model==========\n");
2380     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2381     printf("==========build model return code:%d\n", ret);
2382     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2383     printf("==========GetInputs==========\n");
2384     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2385     ASSERT_NE(inputs.handle_list, nullptr);
2386     FillInputsData(inputs, "ml_face_isface", true);
2387     printf("==========Model Predict==========\n");
2388     OH_AI_TensorHandleArray outputs;
2389     OH_AI_ModelDestroy(&model);
2390     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2391     printf("==========Model Predict return code:%d\n", ret);
2392     ASSERT_EQ(ret, OH_AI_STATUS_LITE_NULLPTR);
2393     OH_AI_ContextDestroy(&context);
2394 }
2395 
2396 // Abnormal scenario: ModelPredict with empty inputs
2397 HWTEST(MSLiteTest, OHOS_Model_Predict_0003, Function | MediumTest | Level0) {
2398     printf("==========Init Context==========\n");
2399     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2400     ASSERT_NE(context, nullptr);
2401     AddContextDeviceCPU(context);
2402     printf("==========Create model==========\n");
2403     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2404     ASSERT_NE(model, nullptr);
2405     printf("==========Build model==========\n");
2406     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2407     printf("==========build model return code:%d\n", ret);
2408     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2409     printf("==========Model Predict==========\n");
2410     OH_AI_TensorHandleArray inputs = {0, nullptr};
2411     OH_AI_TensorHandleArray outputs;
2412     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2413     printf("==========Model Predict return code:%d\n", ret);
2414     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
2415     OH_AI_ContextDestroy(&context);
2416     OH_AI_ModelDestroy(&model);
2417 }
2418 
2419 // Normal scenario: ModelPredict with before/after callbacks
2420 HWTEST(MSLiteTest, OHOS_Model_Predict_0004, Function | MediumTest | Level0) {
2421     printf("==========Init Context==========\n");
2422     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2423     ASSERT_NE(context, nullptr);
2424     AddContextDeviceCPU(context);
2425     printf("==========Create model==========\n");
2426     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2427     ASSERT_NE(model, nullptr);
2428     ModelPredict(model, context, "ml_face_isface", {}, false, true, true);
2429 }
2430 
2431 // Normal scenario: ModelGetInputByTensorName
2432 HWTEST(MSLiteTest, OHOS_Model_GetInputByTensorName_0001, Function | MediumTest | Level0) {
2433     printf("==========ReadFile==========\n");
2434     size_t size1;
2435     size_t *ptr_size1 = &size1;
2436     const char *imagePath = "/data/test/ml_face_isface.input";
2437     char *imageBuf = ReadFile(imagePath, ptr_size1);
2438     ASSERT_NE(imageBuf, nullptr);
2439     printf("==========Init Context==========\n");
2440     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2441     ASSERT_NE(context, nullptr);
2442     AddContextDeviceCPU(context);
2443     printf("==========Create model==========\n");
2444     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2445     ASSERT_NE(model, nullptr);
2446     printf("==========Build model==========\n");
2447     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2448     printf("==========build model return code:%d\n", ret);
2449     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2450     printf("==========GetInputs==========\n");
2451     OH_AI_TensorHandle tensor = OH_AI_ModelGetInputByTensorName(model, "data");
2452     ASSERT_NE(tensor, nullptr);
2453     int64_t element_num = OH_AI_TensorGetElementNum(tensor);
2454     printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
2455     float *input_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
2456     ASSERT_NE(input_data, nullptr);
2457     printf("==========Transpose==========\n");
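    // The .input file stores the image in NCHW order; repack it to NHWC below so that it
    // matches the layout of the model's input tensor before copying it in.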
2458     size_t shape_num;
2459     const int64_t *shape = OH_AI_TensorGetShape(tensor, &shape_num);
2460     auto imageBuf_nhwc = new char[size1];
2461     PackNCHWToNHWCFp32(imageBuf, imageBuf_nhwc, shape[0], shape[1] * shape[2], shape[3]);
2462     memcpy_s(input_data, size1, imageBuf_nhwc, size1);
    delete[] imageBuf_nhwc;
2463     printf("input data is:");
2464     for (int j = 0; j < element_num && j <= 20; ++j) {
2465         printf("%f ", input_data[j]);
2466     }
2467     printf("\n");
2468     printf("==========Model Predict==========\n");
2469     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2470     ASSERT_NE(inputs.handle_list, nullptr);
2471     OH_AI_TensorHandleArray outputs;
2472     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2473     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2474     CompareResult(outputs, "ml_face_isface");
2475     delete[] imageBuf;
2476     OH_AI_ContextDestroy(&context);
2477     OH_AI_ModelDestroy(&model);
2478 }
2479 
2480 // Abnormal scenario: ModelGetInputByTensorName with a non-existent tensor name
2481 HWTEST(MSLiteTest, OHOS_Model_GetInputByTensorName_0002, Function | MediumTest | Level0) {
2482     printf("==========Init Context==========\n");
2483     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2484     ASSERT_NE(context, nullptr);
2485     AddContextDeviceCPU(context);
2486     printf("==========Create model==========\n");
2487     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2488     ASSERT_NE(model, nullptr);
2489     printf("==========Build model==========\n");
2490     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2491     printf("==========build model return code:%d\n", ret);
2492     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2493     printf("==========GetInputs==========\n");
2494     OH_AI_TensorHandle tensor = OH_AI_ModelGetInputByTensorName(model, "aaa");
2495     ASSERT_EQ(tensor, nullptr);
2496     OH_AI_ContextDestroy(&context);
2497     OH_AI_ModelDestroy(&model);
2498 }
2499 
2500 // Normal scenario: ModelGetOutputByTensorName
2501 HWTEST(MSLiteTest, OHOS_Model_GetOutputByTensorName_0001, Function | MediumTest | Level0) {
2502     printf("==========Init Context==========\n");
2503     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2504     ASSERT_NE(context, nullptr);
2505     AddContextDeviceCPU(context);
2506     printf("==========Create model==========\n");
2507     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2508     ASSERT_NE(model, nullptr);
2509     printf("==========Build model==========\n");
2510     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2511     printf("==========build model return code:%d\n", ret);
2512     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2513     printf("==========GetInputs==========\n");
2514     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2515     ASSERT_NE(inputs.handle_list, nullptr);
2516     FillInputsData(inputs, "ml_face_isface", true);
2517     printf("==========Model Predict==========\n");
2518     OH_AI_TensorHandleArray outputs;
2519     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2520     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2521     printf("==========GetOutput==========\n");
2522     OH_AI_TensorHandle tensor = OH_AI_ModelGetOutputByTensorName(model, "prob");
2523     ASSERT_NE(tensor, nullptr);
2524     int64_t element_num = OH_AI_TensorGetElementNum(tensor);
2525     printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
2526     float *output_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
2527     printf("output data is:");
2528     for (int j = 0; j < element_num && j <= 20; ++j) {
2529         printf("%f ", output_data[j]);
2530     }
2531     printf("\n");
2532     printf("==========compFp32WithTData==========\n");
2533     bool result = compFp32WithTData(output_data, "/data/test/ml_face_isface0.output", 0.01, 0.01, false);
2534     EXPECT_EQ(result, true);
2535     OH_AI_ContextDestroy(&context);
2536     OH_AI_ModelDestroy(&model);
2537 }
2538 
2539 // Abnormal scenario: ModelGetOutputByTensorName with a non-existent tensor name
2540 HWTEST(MSLiteTest, OHOS_Model_GetOutputByTensorName_0002, Function | MediumTest | Level0) {
2541     printf("==========Init Context==========\n");
2542     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2543     ASSERT_NE(context, nullptr);
2544     AddContextDeviceCPU(context);
2545     printf("==========Create model==========\n");
2546     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2547     ASSERT_NE(model, nullptr);
2548     printf("==========Build model==========\n");
2549     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2550     printf("==========build model return code:%d\n", ret);
2551     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2552     printf("==========GetInputs==========\n");
2553     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2554     ASSERT_NE(inputs.handle_list, nullptr);
2555     FillInputsData(inputs, "ml_face_isface", true);
2556     printf("==========Model Predict==========\n");
2557     OH_AI_TensorHandleArray outputs;
2558     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2559     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2560     printf("==========GetOutput==========\n");
2561     OH_AI_TensorHandle tensor = OH_AI_ModelGetOutputByTensorName(model, "aaa");
2562     ASSERT_EQ(tensor, nullptr);
2563     OH_AI_ContextDestroy(&context);
2564     OH_AI_ModelDestroy(&model);
2565 }
2566 
2567 // Normal scenario: MSTensorCreate, create a tensor
2568 HWTEST(MSLiteTest, OHOS_Tensor_Create_0001, Function | MediumTest | Level0) {
2569     printf("==========Init Context==========\n");
2570     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2571     ASSERT_NE(context, nullptr);
2572     AddContextDeviceCPU(context);
2573     printf("==========Create model==========\n");
2574     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2575     ASSERT_NE(model, nullptr);
2576     printf("==========Build model==========\n");
2577     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
2578     printf("==========build model return code:%d\n", ret);
2579     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2580     printf("==========GetInputs==========\n");
2581     constexpr size_t create_shape_num = 4;
2582     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2583     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2584                             create_shape_num, nullptr, 0);
2585     ASSERT_NE(tensor, nullptr);
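    // No data buffer is passed to OH_AI_TensorCreate (nullptr, 0); the tensor is plugged
    // into the model's input list and its buffer is filled by FillInputsData afterwards.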
2586     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2587     inputs.handle_list[0] = tensor;
2588     FillInputsData(inputs, "ml_face_isface", true);
2589     printf("==========Model Predict==========\n");
2590     OH_AI_TensorHandleArray outputs;
2591     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2592     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2593     CompareResult(outputs, "ml_face_isface");
2594     OH_AI_ContextDestroy(&context);
2595     OH_AI_ModelDestroy(&model);
2596 }
2597 
2598 // Normal scenario: MSTensorDestroy, destroy a tensor
2599 HWTEST(MSLiteTest, OHOS_Tensor_Create_0002, Function | MediumTest | Level0) {
2600     printf("==========ReadFile==========\n");
2601     size_t size1;
2602     size_t *ptr_size1 = &size1;
2603     const char *imagePath = "/data/test/ml_face_isface.input";
2604     char *imageBuf = ReadFile(imagePath, ptr_size1);
2605     ASSERT_NE(imageBuf, nullptr);
2606     printf("==========OH_AI_TensorCreate==========\n");
2607     constexpr size_t create_shape_num = 4;
2608     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2609     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2610                                            create_shape_num, imageBuf, size1);
2611     ASSERT_NE(tensor, nullptr);
2612     delete[] imageBuf;
2613     OH_AI_TensorDestroy(&tensor);
2614     ASSERT_EQ(tensor, nullptr);
2615 }
2616 
2617 // Normal scenario: MSTensorGetName, get the tensor name
2618 HWTEST(MSLiteTest, OHOS_Tensor_Create_0003, Function | MediumTest | Level0) {
2619     printf("==========ReadFile==========\n");
2620     size_t size1;
2621     size_t *ptr_size1 = &size1;
2622     const char *imagePath = "/data/test/ml_face_isface.input";
2623     char *imageBuf = ReadFile(imagePath, ptr_size1);
2624     ASSERT_NE(imageBuf, nullptr);
2625     printf("==========OH_AI_TensorCreate==========\n");
2626     constexpr size_t create_shape_num = 4;
2627     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2628     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2629                                             create_shape_num, imageBuf, size1);
2630     ASSERT_NE(tensor, nullptr);
2631     const char *tensor_name = OH_AI_TensorGetName(tensor);
2632     ASSERT_EQ(strcmp(tensor_name, "data"), 0);
2633     delete[] imageBuf;
2634     OH_AI_TensorDestroy(&tensor);
2635 }
2636 
2637 // Normal scenario: MSTensorSetName, set the tensor name
2638 HWTEST(MSLiteTest, OHOS_Tensor_Create_0004, Function | MediumTest | Level0) {
2639     printf("==========ReadFile==========\n");
2640     size_t size1;
2641     size_t *ptr_size1 = &size1;
2642     const char *imagePath = "/data/test/ml_face_isface.input";
2643     char *imageBuf = ReadFile(imagePath, ptr_size1);
2644     ASSERT_NE(imageBuf, nullptr);
2645     printf("==========OH_AI_TensorCreate==========\n");
2646     constexpr size_t create_shape_num = 4;
2647     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2648     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2649                                            create_shape_num, imageBuf, size1);
2650     ASSERT_NE(tensor, nullptr);
2651     OH_AI_TensorSetName(tensor, "new_data");
2652     const char *tensor_name = OH_AI_TensorGetName(tensor);
2653     ASSERT_EQ(strcmp(tensor_name, "new_data"), 0);
2654     delete[] imageBuf;
2655     OH_AI_TensorDestroy(&tensor);
2656 }
2657 
2658 // Normal scenario: MSTensorGetDataType, get the tensor data type
2659 HWTEST(MSLiteTest, OHOS_Tensor_Create_0005, Function | MediumTest | Level0) {
2660     printf("==========ReadFile==========\n");
2661     size_t size1;
2662     size_t *ptr_size1 = &size1;
2663     const char *imagePath = "/data/test/ml_face_isface.input";
2664     char *imageBuf = ReadFile(imagePath, ptr_size1);
2665     ASSERT_NE(imageBuf, nullptr);
2666     printf("==========OH_AI_TensorCreate==========\n");
2667     constexpr size_t create_shape_num = 4;
2668     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2669     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2670                                            create_shape_num, imageBuf, size1);
2671     ASSERT_NE(tensor, nullptr);
2672     OH_AI_DataType data_type = OH_AI_TensorGetDataType(tensor);
2673     ASSERT_EQ(data_type, OH_AI_DATATYPE_NUMBERTYPE_FLOAT32);
2674     delete[] imageBuf;
2675     OH_AI_TensorDestroy(&tensor);
2676 }
2677 
2678 // Normal scenario: MSTensorSetDataType, set the tensor data type
2679 HWTEST(MSLiteTest, OHOS_Tensor_Create_0006, Function | MediumTest | Level0) {
2680     printf("==========ReadFile==========\n");
2681     size_t size1;
2682     size_t *ptr_size1 = &size1;
2683     const char *imagePath = "/data/test/ml_face_isface.input";
2684     char *imageBuf = ReadFile(imagePath, ptr_size1);
2685     ASSERT_NE(imageBuf, nullptr);
2686     printf("==========OH_AI_TensorCreate==========\n");
2687     constexpr size_t create_shape_num = 4;
2688     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2689     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2690                                            create_shape_num, imageBuf, size1);
2691     ASSERT_NE(tensor, nullptr);
2692     OH_AI_TensorSetDataType(tensor, OH_AI_DATATYPE_NUMBERTYPE_FLOAT16);
2693     OH_AI_DataType data_type = OH_AI_TensorGetDataType(tensor);
2694     ASSERT_EQ(data_type, OH_AI_DATATYPE_NUMBERTYPE_FLOAT16);
2695     delete[] imageBuf;
2696     OH_AI_TensorDestroy(&tensor);
2697 }
2698 
2699 // Normal scenario: MSTensorGetShape, get the tensor shape
2700 HWTEST(MSLiteTest, OHOS_Tensor_Create_0007, Function | MediumTest | Level0) {
2701     printf("==========ReadFile==========\n");
2702     size_t size1;
2703     size_t *ptr_size1 = &size1;
2704     const char *imagePath = "/data/test/ml_face_isface.input";
2705     char *imageBuf = ReadFile(imagePath, ptr_size1);
2706     ASSERT_NE(imageBuf, nullptr);
2707     printf("==========OH_AI_TensorCreate==========\n");
2708     constexpr size_t create_shape_num = 4;
2709     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2710     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2711                                             create_shape_num, imageBuf, size1);
2712     ASSERT_NE(tensor, nullptr);
2713     size_t ret_shape_num;
2714     const int64_t *ret_shape = OH_AI_TensorGetShape(tensor, &ret_shape_num);
2715     ASSERT_EQ(ret_shape_num, create_shape_num);
2716     for (size_t i = 0; i < ret_shape_num; i++) {
2717         ASSERT_EQ(ret_shape[i], create_shape[i]);
2718     }
2719     delete[] imageBuf;
2720     OH_AI_TensorDestroy(&tensor);
2721 }
2722 
2723 // Normal scenario: MSTensorSetShape, set the tensor shape
2724 HWTEST(MSLiteTest, OHOS_Tensor_Create_0008, Function | MediumTest | Level0) {
2725     printf("==========ReadFile==========\n");
2726     size_t size1;
2727     size_t *ptr_size1 = &size1;
2728     const char *imagePath = "/data/test/ml_face_isface.input";
2729     char *imageBuf = ReadFile(imagePath, ptr_size1);
2730     ASSERT_NE(imageBuf, nullptr);
2731     printf("==========OH_AI_TensorCreate==========\n");
2732     constexpr size_t create_shape_num = 4;
2733     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2734     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2735                                            create_shape_num, imageBuf, size1);
2736     ASSERT_NE(tensor, nullptr);
2737     size_t ret_shape_num;
2738     const int64_t *ret_shape = OH_AI_TensorGetShape(tensor, &ret_shape_num);
2739     ASSERT_EQ(ret_shape_num, create_shape_num);
2740     for (size_t i = 0; i < ret_shape_num; i++) {
2741         ASSERT_EQ(ret_shape[i], create_shape[i]);
2742     }
2743     constexpr size_t new_shape_num = 4;
2744     int64_t new_shape[new_shape_num] = {1, 32, 32, 1};
2745     OH_AI_TensorSetShape(tensor, new_shape, new_shape_num);
2746     size_t new_ret_shape_num;
2747     const int64_t *new_ret_shape = OH_AI_TensorGetShape(tensor, &new_ret_shape_num);
2748     ASSERT_EQ(new_ret_shape_num, new_shape_num);
2749     for (size_t i = 0; i < new_ret_shape_num; i++) {
2750         ASSERT_EQ(new_ret_shape[i], new_shape[i]);
2751     }
2752     delete[] imageBuf;
2753     OH_AI_TensorDestroy(&tensor);
2754 }
2755 
2756 // Normal scenario: MSTensorGetFormat, get the tensor format
2757 HWTEST(MSLiteTest, OHOS_Tensor_Create_0009, Function | MediumTest | Level0) {
2758     printf("==========ReadFile==========\n");
2759     size_t size1;
2760     size_t *ptr_size1 = &size1;
2761     const char *imagePath = "/data/test/ml_face_isface.input";
2762     char *imageBuf = ReadFile(imagePath, ptr_size1);
2763     ASSERT_NE(imageBuf, nullptr);
2764     printf("==========OH_AI_TensorCreate==========\n");
2765     constexpr size_t create_shape_num = 4;
2766     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2767     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2768                                            create_shape_num, imageBuf, size1);
2769     ASSERT_NE(tensor, nullptr);
2770     OH_AI_Format data_format = OH_AI_TensorGetFormat(tensor);
2771     ASSERT_EQ(data_format, OH_AI_FORMAT_NHWC);
2772     delete[] imageBuf;
2773     OH_AI_TensorDestroy(&tensor);
2774 }
2775 
2776 // Normal scenario: MSTensorSetFormat, set the tensor format
2777 HWTEST(MSLiteTest, OHOS_Tensor_Create_0010, Function | MediumTest | Level0) {
2778     printf("==========ReadFile==========\n");
2779     size_t size1;
2780     size_t *ptr_size1 = &size1;
2781     const char *imagePath = "/data/test/ml_face_isface.input";
2782     char *imageBuf = ReadFile(imagePath, ptr_size1);
2783     ASSERT_NE(imageBuf, nullptr);
2784     printf("==========OH_AI_TensorCreate==========\n");
2785     constexpr size_t create_shape_num = 4;
2786     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2787     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2788                                            create_shape_num, imageBuf, size1);
2789     ASSERT_NE(tensor, nullptr);
2790     OH_AI_TensorSetFormat(tensor, OH_AI_FORMAT_NHWC);
2791     OH_AI_Format data_format = OH_AI_TensorGetFormat(tensor);
2792     ASSERT_EQ(data_format, OH_AI_FORMAT_NHWC);
2793     delete[] imageBuf;
2794     OH_AI_TensorDestroy(&tensor);
2795 }
2796 
2797 // Normal scenario: MSTensorGetData, get the tensor data
2798 HWTEST(MSLiteTest, OHOS_Tensor_Create_0011, Function | MediumTest | Level0) {
2799     printf("==========ReadFile==========\n");
2800     size_t size1;
2801     size_t *ptr_size1 = &size1;
2802     const char *imagePath = "/data/test/ml_face_isface.input";
2803     char *imageBuf = ReadFile(imagePath, ptr_size1);
2804     ASSERT_NE(imageBuf, nullptr);
2805     printf("==========OH_AI_TensorCreate==========\n");
2806     constexpr size_t create_shape_num = 4;
2807     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2808     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2809                                            create_shape_num, imageBuf, size1);
2810     ASSERT_NE(tensor, nullptr);
2811     const float *ret_data = static_cast<const float *>(OH_AI_TensorGetData(tensor));
2812     ASSERT_NE(ret_data, nullptr);
2813     printf("return data is:");
2814     for (int i = 0; i < 20; ++i) {
2815         printf("%f ", ret_data[i]);
2816     }
2817     printf("\n");
2818     delete[] imageBuf;
2819     OH_AI_TensorDestroy(&tensor);
2820 }
2821 
2822 // Normal scenario: MSTensorSetData, set the tensor data
2823 HWTEST(MSLiteTest, OHOS_Tensor_Create_0012, Function | MediumTest | Level0) {
2824     printf("==========ReadFile==========\n");
2825     size_t size1;
2826     size_t *ptr_size1 = &size1;
2827     const char *imagePath = "/data/test/ml_face_isface.input";
2828     char *imageBuf = ReadFile(imagePath, ptr_size1);
2829     ASSERT_NE(imageBuf, nullptr);
2830     printf("==========OH_AI_TensorCreate==========\n");
2831     constexpr size_t create_shape_num = 4;
2832     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2833     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2834                                            create_shape_num, imageBuf, size1);
2835     ASSERT_NE(tensor, nullptr);
2836     constexpr size_t data_len = 6;
2837     float data[data_len] = {1, 2, 3, 4, 5, 6};
2838     OH_AI_TensorSetData(tensor, data);
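    // After SetData the tensor should expose the caller-provided buffer, so the values
    // read back via OH_AI_TensorGetData are expected to match `data` element by element.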
2839     const float *ret_data = static_cast<const float *>(OH_AI_TensorGetData(tensor));
2840     ASSERT_NE(ret_data, nullptr);
2841     printf("return data is:");
2842     for (size_t i = 0; i < data_len; i++) {
2843         ASSERT_EQ(ret_data[i], data[i]);
2844         printf("%f ", ret_data[i]);
2845     }
2846     printf("\n");
2847     delete[] imageBuf;
2848     OH_AI_TensorDestroy(&tensor);
2849 }
2850 
2851 // Normal scenario: MSTensorGetElementNum, get the number of tensor elements
2852 HWTEST(MSLiteTest, OHOS_Tensor_Create_0013, Function | MediumTest | Level0) {
2853     printf("==========ReadFile==========\n");
2854     size_t size1;
2855     size_t *ptr_size1 = &size1;
2856     const char *imagePath = "/data/test/ml_face_isface.input";
2857     char *imageBuf = ReadFile(imagePath, ptr_size1);
2858     ASSERT_NE(imageBuf, nullptr);
2859     printf("==========OH_AI_TensorCreate==========\n");
2860     constexpr size_t create_shape_num = 4;
2861     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2862     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2863                                            create_shape_num, imageBuf, size1);
2864     ASSERT_NE(tensor, nullptr);
2865     int64_t element_num = OH_AI_TensorGetElementNum(tensor);
2866     printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
2867     ASSERT_EQ(element_num, 6912);
2868     delete[] imageBuf;
2869     OH_AI_TensorDestroy(&tensor);
2870 }
2871 
2872 // Normal scenario: MSTensorGetDataSize, get the tensor data size
2873 HWTEST(MSLiteTest, OHOS_Tensor_Create_0014, Function | MediumTest | Level0) {
2874     printf("==========ReadFile==========\n");
2875     size_t size1;
2876     size_t *ptr_size1 = &size1;
2877     const char *imagePath = "/data/test/ml_face_isface.input";
2878     char *imageBuf = ReadFile(imagePath, ptr_size1);
2879     ASSERT_NE(imageBuf, nullptr);
2880     printf("==========OH_AI_TensorCreate==========\n");
2881     constexpr size_t create_shape_num = 4;
2882     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2883     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2884                                            create_shape_num, imageBuf, size1);
2885     ASSERT_NE(tensor, nullptr);
2886     size_t data_size = OH_AI_TensorGetDataSize(tensor);
2887     printf("Tensor data size: %zu.\n", data_size);
2888     ASSERT_EQ(data_size, 6912 * sizeof(float));
2889     delete[] imageBuf;
2890     OH_AI_TensorDestroy(&tensor);
2891 }
2892 
2893 // Normal scenario: MSTensorGetMutableData, get the tensor's mutable data pointer
2894 HWTEST(MSLiteTest, OHOS_Tensor_Create_0015, Function | MediumTest | Level0) {
2895     printf("==========ReadFile==========\n");
2896     size_t size1;
2897     size_t *ptr_size1 = &size1;
2898     const char *imagePath = "/data/test/ml_face_isface.input";
2899     char *imageBuf = ReadFile(imagePath, ptr_size1);
2900     ASSERT_NE(imageBuf, nullptr);
2901     printf("==========OH_AI_TensorCreate==========\n");
2902     constexpr size_t create_shape_num = 4;
2903     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2904     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2905                                            create_shape_num, imageBuf, size1);
2906     ASSERT_NE(tensor, nullptr);
2907     float *input_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
2908     ASSERT_NE(input_data, nullptr);
2909     delete[] imageBuf;
2910     OH_AI_TensorDestroy(&tensor);
2911 }
2912 
2913 // Normal scenario: MSTensorClone, clone a tensor
2914 HWTEST(MSLiteTest, OHOS_Tensor_Create_0016, Function | MediumTest | Level0) {
2915     printf("==========ReadFile==========\n");
2916     size_t size1;
2917     size_t *ptr_size1 = &size1;
2918     const char *imagePath = "/data/test/ml_face_isface.input";
2919     char *imageBuf = ReadFile(imagePath, ptr_size1);
2920     ASSERT_NE(imageBuf, nullptr);
2921     printf("==========OH_AI_TensorCreate==========\n");
2922     constexpr size_t create_shape_num = 4;
2923     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
2924     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
2925                                            create_shape_num, imageBuf, size1);
2926     ASSERT_NE(tensor, nullptr);
2927     OH_AI_TensorHandle clone = OH_AI_TensorClone(tensor);
2928     ASSERT_NE(clone, nullptr);
2929     ASSERT_EQ(strcmp(OH_AI_TensorGetName(clone), "data_duplicate"), 0);
2930     delete[] imageBuf;
2931     OH_AI_TensorDestroy(&tensor);
2932     OH_AI_TensorDestroy(&clone);
2933 }
2934 
2935 // Normal scenario: single-input model
2936 HWTEST(MSLiteTest, OHOS_Input_0001, Function | MediumTest | Level0) {
2937     printf("==========Init Context==========\n");
2938     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2939     ASSERT_NE(context, nullptr);
2940     AddContextDeviceCPU(context);
2941     printf("==========Create model==========\n");
2942     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2943     ASSERT_NE(model, nullptr);
2944     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
2945 }
2946 
2947 // Normal scenario: multi-input model
2948 HWTEST(MSLiteTest, OHOS_Input_0002, Function | MediumTest | Level0) {
2949     printf("==========Init Context==========\n");
2950     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2951     ASSERT_NE(context, nullptr);
2952     AddContextDeviceCPU(context);
2953     printf("==========Create model==========\n");
2954     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2955     ASSERT_NE(model, nullptr);
2956     printf("==========Build model==========\n");
2957     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_headpose_pb2tflite.ms",
2958         OH_AI_MODELTYPE_MINDIR, context);
2959     printf("==========build model return code:%d\n", ret);
2960     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2961     printf("==========GetInputs==========\n");
2962     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2963     ASSERT_NE(inputs.handle_list, nullptr);
2964     FillInputsData(inputs, "ml_headpose_pb2tflite", false);
2965     printf("==========Model Predict==========\n");
2966     OH_AI_TensorHandleArray outputs;
2967     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
2968     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2969     CompareResult(outputs, "ml_headpose_pb2tflite", 0.02, 0.02);
2970     OH_AI_ContextDestroy(&context);
2971     OH_AI_ModelDestroy(&model);
2972 }
2973 
2974 // Normal scenario: model with uint8 inputs
2975 HWTEST(MSLiteTest, OHOS_Input_0003, Function | MediumTest | Level0) {
2976     size_t size1;
2977     size_t *ptr_size1 = &size1;
2978     const char *imagePath = "/data/test/aiy_vision_classifier_plants_V1_3.input";
2979     char *imageBuf = ReadFile(imagePath, ptr_size1);
    ASSERT_NE(imageBuf, nullptr);
2980     printf("==========Init Context==========\n");
2981     OH_AI_ContextHandle context = OH_AI_ContextCreate();
2982     AddContextDeviceCPU(context);
2983     printf("==========Create and Build model==========\n");
2984     OH_AI_ModelHandle model = OH_AI_ModelCreate();
2985     ASSERT_NE(model, nullptr);
2986     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/aiy_vision_classifier_plants_V1_3.ms",
2987         OH_AI_MODELTYPE_MINDIR, context);
2988     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
2989     printf("==========GetInputs==========\n");
2990     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
2991     ASSERT_NE(inputs.handle_list, nullptr);
2992     for (size_t i = 0; i < inputs.handle_num; ++i) {
2993         OH_AI_TensorHandle tensor = inputs.handle_list[i];
2994         int64_t element_num = OH_AI_TensorGetElementNum(tensor);
2995         printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
2996         void *input_data = OH_AI_TensorGetMutableData(inputs.handle_list[i]);
2997         ASSERT_NE(input_data, nullptr);
2998         memcpy_s(input_data, size1, imageBuf, size1);
2999     }
3000     printf("==========Model Predict==========\n");
3001     OH_AI_TensorHandleArray outputs;
3002     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3003     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3004     printf("==========GetOutput==========\n");
3005     for (size_t i = 0; i < outputs.handle_num; ++i) {
3006         OH_AI_TensorHandle tensor = outputs.handle_list[i];
3007         int64_t element_num = OH_AI_TensorGetElementNum(tensor);
3008         printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
3009         uint8_t *output_data = reinterpret_cast<uint8_t *>(OH_AI_TensorGetMutableData(tensor));
3010         printf("output data is:");
3011         for (int j = 0; j < element_num && j <= 20; ++j) {
3012             printf("%d ", output_data[j]);
3013         }
3014         printf("\n==========compFp32WithTData==========\n");
3015         string expectedDataFile = "/data/test/aiy_vision_classifier_plants_V1_3" + std::to_string(i) + ".output";
3016         bool result = compUint8WithTData(output_data, expectedDataFile, 0.01, 0.01, false);
3017         EXPECT_EQ(result, true);
3018     }
3019     delete[] imageBuf;
3020     OH_AI_ContextDestroy(&context);
3021     OH_AI_ModelDestroy(&model);
3022 }
3023 
3024 // Normal scenario: quantized model
3025 HWTEST(MSLiteTest, OHOS_Input_0004, Function | MediumTest | Level0) {
3026     printf("==========Init Context==========\n");
3027     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3028     ASSERT_NE(context, nullptr);
3029     AddContextDeviceCPU(context);
3030     printf("==========Create model==========\n");
3031     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3032     ASSERT_NE(model, nullptr);
3033     ModelPredict(model, context, "ml_face_isface_quant", {}, false, true, false);
3034 }
3035 
3036 // Normal scenario: run the full inference flow repeatedly in a loop
3037 HWTEST(MSLiteTest, OHOS_Multiple_0001, Function | MediumTest | Level0) {
3038     for (size_t num = 0; num < 50; ++num) {
3039         Predict_CPU();
3040     }
3041 }
3042 
3043 // Abnormal scenario: create the Model once, Build it multiple times
3044 HWTEST(MSLiteTest, OHOS_Multiple_0002, Function | MediumTest | Level0) {
3045     printf("==========Init Context==========\n");
3046     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3047     ASSERT_NE(context, nullptr);
3048     AddContextDeviceCPU(context);
3049     printf("==========Create model==========\n");
3050     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3051     ASSERT_NE(model, nullptr);
3052     printf("==========Build model==========\n");
3053     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3054     printf("==========build model return code:%d\n", ret);
3055     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3056     printf("==========Build model==========\n");
3057     OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3058     printf("==========build model return code:%d\n", ret2);
3059     ASSERT_EQ(ret2, OH_AI_STATUS_LITE_MODEL_REBUILD);
3060     OH_AI_ContextDestroy(&context);
3061     OH_AI_ModelDestroy(&model);
3062 }
3063 
3064 // Normal scenario: create the Model once, Build once, Predict multiple times
3065 HWTEST(MSLiteTest, OHOS_Multiple_0003, Function | MediumTest | Level0) {
3066     printf("==========Init Context==========\n");
3067     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3068     ASSERT_NE(context, nullptr);
3069     AddContextDeviceCPU(context);
3070     printf("==========Create model==========\n");
3071     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3072     ASSERT_NE(model, nullptr);
3073     printf("==========Build model==========\n");
3074     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3075     printf("==========build model return code:%d\n", ret);
3076     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3077     printf("==========GetInputs==========\n");
3078     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3079     ASSERT_NE(inputs.handle_list, nullptr);
3080     FillInputsData(inputs, "ml_face_isface", true);
3081     OH_AI_TensorHandleArray outputs;
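    // The inputs are filled once and reused for all 50 predictions; only the outputs of
    // the final iteration are compared against the expected data after the loop.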
3082     for (size_t i = 0; i < 50; ++i) {
3083         printf("==========Model Predict==========\n");
3084         OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3085         ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3086     }
3087     CompareResult(outputs, "ml_face_isface");
3088     OH_AI_ContextDestroy(&context);
3089     OH_AI_ModelDestroy(&model);
3090 }
3091 
3092 // Normal scenario: create and destroy the Model multiple times
3093 HWTEST(MSLiteTest, OHOS_Multiple_0004, Function | MediumTest | Level0) {
3094     for (size_t i = 0; i < 50; ++i) {
3095         printf("==========Init Context==========\n");
3096         OH_AI_ContextHandle context = OH_AI_ContextCreate();
3097         ASSERT_NE(context, nullptr);
3098         AddContextDeviceCPU(context);
3099         printf("==========Create model==========\n");
3100         OH_AI_ModelHandle model = OH_AI_ModelCreate();
3101         ASSERT_NE(model, nullptr);
3102         printf("==========Build model==========\n");
3103         OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3104         printf("==========build model return code:%d\n", ret);
3105         ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3106         printf("==========Build model==========\n");
3107         OH_AI_ContextDestroy(&context);
3108         OH_AI_ModelDestroy(&model);
3109     }
3110 }
3111 
3112 // Normal scenario: two models running inference in parallel, both on CPU
3113 HWTEST(MSLiteTest, OHOS_Parallel_0001, Function | MediumTest | Level0) {
3114     std::cout << "run start" << std::endl;
3115     std::thread t1(Predict_CPU);
3116     std::cout << "thread t1 (CPU) started" << std::endl;
3117     std::thread t2(Predict_CPU);
3118     std::cout << "thread t2 (CPU) started" << std::endl;
3119     t1.join();
3120     t2.join();
3121 }
3122 
3123 // Normal scenario: two models running inference in parallel, both on NPU
3124 HWTEST(MSLiteTest, OHOS_Parallel_0002, Function | MediumTest | Level0) {
3125     std::cout << "run start" << std::endl;
3126     std::thread t1(Predict_NPU);
3127     std::cout << "thread t1 (NPU) started" << std::endl;
3128     std::thread t2(Predict_NPU);
3129     std::cout << "thread t2 (NPU) started" << std::endl;
3130     t1.join();
3131     t2.join();
3132 }
3133 
3134 // Normal scenario: two models running inference in parallel, one on CPU and one on NPU
3135 HWTEST(MSLiteTest, OHOS_Parallel_0003, Function | MediumTest | Level0) {
3136     std::cout << "run start" << std::endl;
3137     std::thread t1(Predict_CPU);
3138     std::cout << "thread t1 (CPU) started" << std::endl;
3139     std::thread t2(Predict_NPU);
3140     std::cout << "thread t2 (NPU) started" << std::endl;
3141     t1.join();
3142     t2.join();
3143 }
3144 
3145 // Normal scenario: a model converted with r1.3 runs inference on r1.5
3146 HWTEST(MSLiteTest, OHOS_Compatible_0001, Function | MediumTest | Level0) {
3147     printf("==========Init Context==========\n");
3148     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3149     ASSERT_NE(context, nullptr);
3150     AddContextDeviceCPU(context);
3151     printf("==========Create model==========\n");
3152     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3153     ASSERT_NE(model, nullptr);
3154     printf("==========Build model==========\n");
3155     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_r13.ms",
3156         OH_AI_MODELTYPE_MINDIR, context);
3157     printf("==========build model return code:%d\n", ret);
3158     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3159     printf("==========GetInputs==========\n");
3160     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3161     ASSERT_NE(inputs.handle_list, nullptr);
3162     FillInputsData(inputs, "ml_face_isface", true);
3163     printf("==========Model Predict==========\n");
3164     OH_AI_TensorHandleArray outputs;
3165     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3166     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3167     CompareResult(outputs, "ml_face_isface");
3168     OH_AI_ContextDestroy(&context);
3169     OH_AI_ModelDestroy(&model);
3170 }
3171 
3172 
3173 // Normal scenario: offline model on the NNRT backend, single-input model
3174 HWTEST(MSLiteTest, OHOS_OfflineModel_0001, Function | MediumTest | Level0) {
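    // Offline models are assumed to be pre-converted for a specific NNRT device, so the
    // test bails out early when no NNRT device is registered on this platform.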
3175     if (!IsNNRTAvailable()) {
3176         printf("NNRt is not available, skipping this test.\n");
3177         return;
3178     }
3179 
3180     printf("==========Init Context==========\n");
3181     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3182     ASSERT_NE(context, nullptr);
3183     AddContextDeviceNNRT(context);
3184     printf("==========Create model==========\n");
3185     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3186     ASSERT_NE(model, nullptr);
3187     printf("==========Build model==========\n");
3188     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms",
3189         OH_AI_MODELTYPE_MINDIR, context);
3190     printf("==========build model return code:%d\n", ret);
3191     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3192     printf("==========GetInputs==========\n");
3193     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3194     ASSERT_NE(inputs.handle_list, nullptr);
3195     FillInputsData(inputs, "ml_face_isface", true);
3196     printf("==========Model Predict==========\n");
3197     OH_AI_TensorHandleArray outputs;
3198     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3199     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3200     CompareResult(outputs, "ml_face_isface");
3201     OH_AI_ContextDestroy(&context);
3202     OH_AI_ModelDestroy(&model);
3203 }
3204 
3205 // Normal scenario: offline model on the NNRT backend, multi-input model
3206 HWTEST(MSLiteTest, OHOS_OfflineModel_0002, Function | MediumTest | Level0) {
3207     if (!IsNNRTAvailable()) {
3208         printf("NNRt is not available, skip this test");
3209         return;
3210     }
3211 
3212     printf("==========Init Context==========\n");
3213     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3214     ASSERT_NE(context, nullptr);
3215     AddContextDeviceNNRT(context);
3216     printf("==========Create model==========\n");
3217     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3218     ASSERT_NE(model, nullptr);
3219     printf("==========Build model==========\n");
3220     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_headpose_pb2tflite_offline_model.ms",
3221         OH_AI_MODELTYPE_MINDIR, context);
3222     printf("==========build model return code:%d\n", ret);
3223     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3224     printf("==========GetInputs==========\n");
3225     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3226     ASSERT_NE(inputs.handle_list, nullptr);
3227     FillInputsData(inputs, "ml_headpose_pb2tflite", false);
3228     printf("==========Model Predict==========\n");
3229     OH_AI_TensorHandleArray outputs;
3230     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3231     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3232     CompareResult(outputs, "ml_headpose_pb2tflite", 0.02, 0.02);
3233     OH_AI_ContextDestroy(&context);
3234     OH_AI_ModelDestroy(&model);
3235 }
3236 
3237 
3238 // Normal scenario: offline model on the NNRT backend; Model is created once, built once, and Predict is called multiple times
3239 HWTEST(MSLiteTest, OHOS_OfflineModel_0004, Function | MediumTest | Level0) {
3240     if (!IsNNRTAvailable()) {
3241         printf("NNRt is not available, skip this test");
3242         return;
3243     }
3244 
3245     printf("==========Init Context==========\n");
3246     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3247     ASSERT_NE(context, nullptr);
3248     AddContextDeviceNNRT(context);
3249     printf("==========Create model==========\n");
3250     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3251     ASSERT_NE(model, nullptr);
3252     printf("==========Build model==========\n");
3253     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
3254     printf("==========build model return code:%d\n", ret);
3255     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3256     printf("==========GetInputs==========\n");
3257     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3258     ASSERT_NE(inputs.handle_list, nullptr);
3259     FillInputsData(inputs, "ml_face_isface", true);
3260     OH_AI_TensorHandleArray outputs;
3261     for (size_t i = 0; i < 50; ++i) {
3262         printf("==========Model Predict==========\n");
3263         OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3264         ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3265     }
3266     CompareResult(outputs, "ml_face_isface");
3267     OH_AI_ContextDestroy(&context);
3268     OH_AI_ModelDestroy(&model);
3269 }
3270 
3271 // Normal scenario: offline model on the NNRT backend; Model is created once and built multiple times
3272 HWTEST(MSLiteTest, OHOS_OfflineModel_0005, Function | MediumTest | Level0) {
3273     if (!IsNNRTAvailable()) {
3274         printf("NNRt is not available, skip this test");
3275         return;
3276     }
3277 
3278     printf("==========Init Context==========\n");
3279     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3280     ASSERT_NE(context, nullptr);
3281     AddContextDeviceNNRT(context);
3282     printf("==========Create model==========\n");
3283     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3284     ASSERT_NE(model, nullptr);
3285     printf("==========Build model==========\n");
3286     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
3287     printf("==========build model return code:%d\n", ret);
3288     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3289     printf("==========Build model==========\n");
3290     OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
3291     printf("==========build model return code:%d\n", ret2);
3292     ASSERT_EQ(ret2, OH_AI_STATUS_SUCCESS);
3293     OH_AI_ContextDestroy(&context);
3294     OH_AI_ModelDestroy(&model);
3295 }
3296 
3297 // Error scenario: offline model on the NNRT backend; ModelPredict is called with empty inputs
3298 HWTEST(MSLiteTest, OHOS_OfflineModel_0006, Function | MediumTest | Level0) {
3299     if (!IsNNRTAvailable()) {
3300         printf("NNRt is not available, skip this test");
3301         return;
3302     }
3303 
3304     printf("==========Init Context==========\n");
3305     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3306     ASSERT_NE(context, nullptr);
3307     AddContextDeviceNNRT(context);
3308     printf("==========Create model==========\n");
3309     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3310     ASSERT_NE(model, nullptr);
3311     printf("==========Build model==========\n");
3312     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
3313     printf("==========build model return code:%d\n", ret);
3314     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3315     printf("==========Model Predict==========\n");
3316     OH_AI_TensorHandleArray inputs = {};  // deliberately left empty to exercise the error path
3317     OH_AI_TensorHandleArray outputs;
3318     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3319     printf("==========Model Predict return code:%d\n", ret);
3320     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
3321     OH_AI_ContextDestroy(&context);
3322     OH_AI_ModelDestroy(&model);
3323 }
3324 
3325 // Error scenario: non-offline model with the NNRT backend; the .ms model has not been converted into an NNRT offline model
3326 HWTEST(MSLiteTest, OHOS_OfflineModel_0007, Function | MediumTest | Level0) {
3327     if (!IsNNRTAvailable()) {
3328         printf("NNRt is not available, skip this test");
3329         return;
3330     }
3331 
3332     printf("==========Init Context==========\n");
3333     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3334     ASSERT_NE(context, nullptr);
3335     AddContextDeviceNNRT(context);
3336     AddContextDeviceCPU(context);  // NNRT operator coverage is limited, so add a CPU device for heterogeneous inference
3337     printf("==========Create model==========\n");
3338     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3339     ASSERT_NE(model, nullptr);
3340     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
3341 }
3342 
3343 // Normal scenario: coverage for the ml_ocr_cn model
3344 HWTEST(MSLiteTest, OHOS_OfflineModel_0008, Function | MediumTest | Level0) {
3345     if (!IsNNRTAvailable()) {
3346         printf("NNRt is not available, skip this test");
3347         return;
3348     }
3349 
3350     printf("==========Init Context==========\n");
3351     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3352     ASSERT_NE(context, nullptr);
3353     AddContextDeviceNNRT(context);
3354     printf("==========Create model==========\n");
3355     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3356     ASSERT_NE(model, nullptr);
3357     printf("==========Build model==========\n");
3358     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_ocr_cn_offline_model.ms",
3359         OH_AI_MODELTYPE_MINDIR, context);
3360     printf("==========build model return code:%d\n", ret);
3361     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3362     printf("==========GetInputs==========\n");
3363     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3364     ASSERT_NE(inputs.handle_list, nullptr);
3365     FillInputsData(inputs, "ml_ocr_cn", false);
3366     printf("==========Model Predict==========\n");
3367     OH_AI_TensorHandleArray outputs;
3368     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3369     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3370     CompareResult(outputs, "ml_ocr_cn");
3371     OH_AI_ContextDestroy(&context);
3372     OH_AI_ModelDestroy(&model);
3373 }
3374 
3375 // Normal scenario: offline model coverage on NPU
3376 HWTEST(MSLiteTest, OHOS_OfflineModel_0009, Function | MediumTest | Level0) {
3377     if (!IsNPU()) {
3378         printf("NNRt is not NPU, skip this test");
3379         return;
3380     }
3381 
3382     printf("==========Init Context==========\n");
3383     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3384     ASSERT_NE(context, nullptr);
3385     AddContextDeviceNNRT(context);
3386     printf("==========Create model==========\n");
3387     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3388     ASSERT_NE(model, nullptr);
3389     printf("==========Build model==========\n");
3390     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/tinynet.om.ms",
3391         OH_AI_MODELTYPE_MINDIR, context);
3392     printf("==========build model return code:%d\n", ret);
3393     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3394     printf("==========GetInputs==========\n");
3395     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3396     ASSERT_NE(inputs.handle_list, nullptr);
3397     for (size_t i = 0; i < inputs.handle_num; ++i) {
3398         OH_AI_TensorHandle tensor = inputs.handle_list[i];
3399         float *input_data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
3400         size_t element_num = OH_AI_TensorGetElementNum(tensor);
3401         std::random_device rd;
3402         std::mt19937 gen(rd());
3403         std::uniform_real_distribution<float> dis(0.0f, 1.0f);
3404         for (size_t z = 0; z < element_num; z++) {
3405             input_data[z] = dis(gen);
3406         }
3407     }
3408     printf("==========Model Predict==========\n");
3409     OH_AI_TensorHandleArray outputs;
3410     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3411     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3412     OH_AI_ContextDestroy(&context);
3413     OH_AI_ModelDestroy(&model);
3414 }
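// The per-tensor fill loop above can be captured in a small reusable helper. The sketch below
// is illustrative only (FillTensorWithRandomFloats is not referenced by the tests and assumes
// float32 input tensors); it uses the same OH_AI tensor calls as the loop above.
static void FillTensorWithRandomFloats(OH_AI_TensorHandle tensor) {
    // Obtain a writable pointer to the tensor buffer and its element count.
    float *data = reinterpret_cast<float *>(OH_AI_TensorGetMutableData(tensor));
    size_t element_num = OH_AI_TensorGetElementNum(tensor);
    // Fill the buffer with uniform random values in [0, 1).
    std::mt19937 gen(std::random_device{}());
    std::uniform_real_distribution<float> dis(0.0f, 1.0f);
    for (size_t i = 0; i < element_num; i++) {
        data[i] = dis(gen);
    }
}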
3415 
3416 // Normal scenario: heterogeneous delegate; create the nnrt device info with the low-level API and select the first NNRT device
3417 HWTEST(MSLiteTest, OHOS_NNRT_0001, Function | MediumTest | Level0) {
3418     printf("==========Init Context==========\n");
3419     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3420     ASSERT_NE(context, nullptr);
3421     AddContextDeviceNNRT(context);
3422     AddContextDeviceCPU(context);
3423     printf("==========Create model==========\n");
3424     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3425     ASSERT_NE(model, nullptr);
3426     printf("==========Build model==========\n");
3427     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
3428         OH_AI_MODELTYPE_MINDIR, context);
3429     printf("==========build model return code:%d\n", ret);
3430     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3431     printf("==========GetInputs==========\n");
3432     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3433     ASSERT_NE(inputs.handle_list, nullptr);
3434     FillInputsData(inputs, "ml_face_isface", true);
3435     printf("==========Model Predict==========\n");
3436     OH_AI_TensorHandleArray outputs;
3437     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3438     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3439     CompareResult(outputs, "ml_face_isface");
3440     OH_AI_ContextDestroy(&context);
3441     OH_AI_ModelDestroy(&model);
3442 }
3443 
3444 // Normal scenario: heterogeneous delegate; create the nnrt device info with the high-level API and select the NNRT device by type
3445 HWTEST(MSLiteTest, OHOS_NNRT_0002, Function | MediumTest | Level0) {
3446     printf("==========Init Context==========\n");
3447     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3448     ASSERT_NE(context, nullptr);
3449     AddContextDeviceNNRTByType(context);
3450     AddContextDeviceCPU(context);
3451     printf("==========Create model==========\n");
3452     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3453     ASSERT_NE(model, nullptr);
3454     printf("==========Build model==========\n");
3455     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
3456         OH_AI_MODELTYPE_MINDIR, context);
3457     printf("==========build model return code:%d\n", ret);
3458     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3459     printf("==========GetInputs==========\n");
3460     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3461     ASSERT_NE(inputs.handle_list, nullptr);
3462     FillInputsData(inputs, "ml_face_isface", true);
3463     printf("==========Model Predict==========\n");
3464     OH_AI_TensorHandleArray outputs;
3465     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3466     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3467     CompareResult(outputs, "ml_face_isface");
3468     OH_AI_ContextDestroy(&context);
3469     OH_AI_ModelDestroy(&model);
3470 }
3471 
3472 // Normal scenario: heterogeneous delegate; create the nnrt device info with the high-level API and select the NNRT device by name
3473 HWTEST(MSLiteTest, OHOS_NNRT_0003, Function | MediumTest | Level0) {
3474     printf("==========Init Context==========\n");
3475     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3476     ASSERT_NE(context, nullptr);
3477     AddContextDeviceNNRTByName(context);
3478     AddContextDeviceCPU(context);
3479     printf("==========Create model==========\n");
3480     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3481     ASSERT_NE(model, nullptr);
3482     printf("==========Build model==========\n");
3483     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
3484         OH_AI_MODELTYPE_MINDIR, context);
3485     printf("==========build model return code:%d\n", ret);
3486     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3487     printf("==========GetInputs==========\n");
3488     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3489     ASSERT_NE(inputs.handle_list, nullptr);
3490     FillInputsData(inputs, "ml_face_isface", true);
3491     printf("==========Model Predict==========\n");
3492     OH_AI_TensorHandleArray outputs;
3493     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3494     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3495     CompareResult(outputs, "ml_face_isface");
3496     OH_AI_ContextDestroy(&context);
3497     OH_AI_ModelDestroy(&model);
3498 }
3499 
3500 
3501 // Normal scenario: heterogeneous delegate; set NNRT extension options, including the cache path
3502 HWTEST(MSLiteTest, OHOS_NNRT_0005, Function | MediumTest | Level0) {
3503     printf("==========Init Context==========\n");
3504     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3505     ASSERT_NE(context, nullptr);
3506     AddContextDeviceNNRTWithCache(context, "/data/local/tmp/", "1");
3507     AddContextDeviceCPU(context);
3508 
3509     printf("==========Create model==========\n");
3510     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3511     ASSERT_NE(model, nullptr);
3512 
3513     printf("==========Build model==========\n");
3514     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
3515         OH_AI_MODELTYPE_MINDIR, context);
3516     printf("==========build model return code:%d\n", ret);
3517     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3518 
3519     printf("==========GetInputs==========\n");
3520     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3521     ASSERT_NE(inputs.handle_list, nullptr);
3522     FillInputsData(inputs, "ml_face_isface", true);
3523 
3524     printf("==========Model Predict==========\n");
3525     OH_AI_TensorHandleArray outputs;
3526     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3527     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3528     CompareResult(outputs, "ml_face_isface");
3529     OH_AI_ContextDestroy(&context);
3530     OH_AI_ModelDestroy(&model);
3531 }
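// AddContextDeviceNNRTWithCache is defined earlier in this file; the sketch below is only a
// rough illustration of how a cache-enabled NNRT device could be configured with the C API.
// The extension keys "CachePath" and "CacheVersion" and the choice of the first device are
// assumptions made for this sketch, not something the test above relies on.
static void AddFirstNNRTDeviceWithCacheSketch(OH_AI_ContextHandle context,
                                              const char *cache_path, const char *cache_version) {
    size_t num = 0;
    auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (descs == nullptr || num == 0) {
        return;
    }
    // Pick the first reported NNRT device and bind its id to a new NNRT device info.
    auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
    auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
    OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc));
    // Hand the cache location and version to the backend as extensions (keys assumed).
    OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cache_path, strlen(cache_path));
    OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cache_version, strlen(cache_version));
    OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
    OH_AI_DestroyAllNNRTDeviceDescs(&descs);
}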
3532 
3533 
3534 // Normal scenario: obtain tensors via OH_AI_ModelGetInputs and OH_AI_ModelGetOutputs to achieve copy-free data exchange
3535 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0001, Function | MediumTest | Level0) {
3536     if (!IsNPU()) {
3537         printf("NNRt is not NPU, skip this test");
3538         return;
3539     }
3540     printf("==========Init Context==========\n");
3541     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3542     AddContextDeviceNNRT(context);
3543     printf("==========Build model==========\n");
3544     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3545     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3546     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3547     printf("==========GetInputs==========\n");
3548     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3549     ASSERT_NE(inputs.handle_list, nullptr);
3550     FillInputsData(inputs, "ml_face_isface", true);
3551     printf("==========Model Predict==========\n");
3552     OH_AI_TensorHandleArray outputs = OH_AI_ModelGetOutputs(model);
3553     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3554     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3555     CompareResult(outputs, "ml_face_isface");
3556     OH_AI_ContextDestroy(&context);
3557     OH_AI_ModelDestroy(&model);
3558 }
3559 
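// RunMSLiteModel demonstrates the copy-free pattern exercised by the surrounding tests: for
// every model input and output it creates a user-owned tensor with the same name, data type,
// and shape but no data buffer, then attaches the allocator obtained from the model's own
// tensor (OH_AI_TensorGetAllocator / OH_AI_TensorSetAllocator), so the runtime and the caller
// share the same memory and OH_AI_ModelPredict does not need an extra copy.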
3560 void RunMSLiteModel(OH_AI_ModelHandle model, string model_name, bool is_transpose) {
3561     const size_t MAX_DIMS = 10;
3562     int64_t shape[MAX_DIMS];
3563     size_t shape_num;
3564     OH_AI_TensorHandleArray in_tensor_array;
3565     OH_AI_TensorHandleArray out_tensor_array;
3566     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
3567     OH_AI_TensorHandleArray inputs_handle = OH_AI_ModelGetInputs(model);
3568     in_tensor_array.handle_num = inputs_handle.handle_num;
3569     in_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * in_tensor_array.handle_num);
3570     for (size_t i = 0; i < inputs_handle.handle_num; i++) {
3571         auto ori_tensor = inputs_handle.handle_list[i];
3572         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shape_num);
3573         for (size_t j = 0; j < shape_num; j++) {
3574             shape[j] = shape_ptr[j];
3575         }
3576         void *in_allocator = OH_AI_TensorGetAllocator(ori_tensor);
3577         OH_AI_TensorHandle in_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor), OH_AI_TensorGetDataType(ori_tensor),
3578                                         shape, shape_num, nullptr, 0);
3579         OH_AI_TensorSetAllocator(in_tensor, in_allocator);
3580         in_tensor_array.handle_list[i] = in_tensor;
3581     }
3582     printf("==========FillInputsData==========\n");
3583     FillInputsData(in_tensor_array, model_name, is_transpose);
3584     printf("==========OH_AI_TensorSetAllocator out_tensor==========\n");
3585     OH_AI_TensorHandleArray outputs_handle = OH_AI_ModelGetOutputs(model);
3586     out_tensor_array.handle_num = outputs_handle.handle_num;
3587     out_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * out_tensor_array.handle_num);
3588     for (size_t i = 0; i < outputs_handle.handle_num; i++) {
3589         auto ori_tensor = outputs_handle.handle_list[i];
3590         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shape_num);
3591         for (size_t j = 0; j < shape_num; j++) {
3592             shape[j] = shape_ptr[j];
3593         }
3594         void *out_allocator = OH_AI_TensorGetAllocator(ori_tensor);
3595         OH_AI_TensorHandle out_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor), OH_AI_TensorGetDataType(ori_tensor),
3596                                         shape, shape_num, nullptr, 0);
3597         OH_AI_TensorSetAllocator(out_tensor, out_allocator);
3598         out_tensor_array.handle_list[i] = out_tensor;
3599     }
3600     printf("==========OH_AI_ModelPredict==========\n");
3601     auto ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
3602     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3603     printf("==========OH_AI_TensorDestroy==========\n");
3604     CompareResult(out_tensor_array, model_name);
3605     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
3606         auto ori_tensor = in_tensor_array.handle_list[i];
3607         OH_AI_TensorDestroy(&ori_tensor);
3608     }
3609     free(in_tensor_array.handle_list);
3610     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
3611         auto ori_tensor = out_tensor_array.handle_list[i];
3612         OH_AI_TensorDestroy(&ori_tensor);
3613     }
3614     free(out_tensor_array.handle_list);
3615 }
3616 
3617 void CopyFreeSetAllocator(OH_AI_ModelHandle model, OH_AI_TensorHandleArray *in_tensor_array,
3618     OH_AI_TensorHandleArray *out_tensor_array)
3619 {
3620     const size_t maxDims = 10;
3621     int64_t shape[maxDims];
3622     size_t shapeNum;
3623     OH_AI_TensorHandleArray inputs_handle = OH_AI_ModelGetInputs(model);
3624     in_tensor_array->handle_num = inputs_handle.handle_num;
3625     in_tensor_array->handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) *
3626         in_tensor_array->handle_num);
3627     for (size_t i = 0; i < inputs_handle.handle_num; i++) {
3628         auto ori_tensor = inputs_handle.handle_list[i];
3629         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shapeNum);
3630         for (size_t j = 0; j < shapeNum; j++) {
3631             shape[j] = shape_ptr[j];
3632         }
3633         void *in_allocator = OH_AI_TensorGetAllocator(ori_tensor);
3634         OH_AI_TensorHandle in_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor),
3635             OH_AI_TensorGetDataType(ori_tensor), shape, shapeNum, nullptr, 0);
3636         OH_AI_TensorSetAllocator(in_tensor, in_allocator);
3637         in_tensor_array->handle_list[i] = in_tensor;
3638     }
3639     printf("==========OH_AI_TensorSetAllocator out_tensor==========\n");
3640     OH_AI_TensorHandleArray outputs_handle = OH_AI_ModelGetOutputs(model);
3641     out_tensor_array->handle_num = outputs_handle.handle_num;
3642     out_tensor_array->handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) *
3643         out_tensor_array->handle_num);
3644     for (size_t i = 0; i < outputs_handle.handle_num; i++) {
3645         auto ori_tensor = outputs_handle.handle_list[i];
3646         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shapeNum);
3647         for (size_t j = 0; j < shapeNum; j++) {
3648             shape[j] = shape_ptr[j];
3649         }
3650         void *out_allocator = OH_AI_TensorGetAllocator(ori_tensor);
3651         OH_AI_TensorHandle out_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor),
3652             OH_AI_TensorGetDataType(ori_tensor), shape, shapeNum, nullptr, 0);
3653         OH_AI_TensorSetAllocator(out_tensor, out_allocator);
3654         out_tensor_array->handle_list[i] = out_tensor;
3655     }
3656 }
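// Each copy-free test below tears down the tensor arrays created above with the same
// destroy-and-free loop. A shared helper along these lines would remove the duplication;
// the sketch is illustrative only (DestroyCopyFreeTensorArray is not used by the tests):
static void DestroyCopyFreeTensorArray(OH_AI_TensorHandleArray *tensor_array) {
    for (size_t i = 0; i < tensor_array->handle_num; i++) {
        auto tensor = tensor_array->handle_list[i];
        OH_AI_TensorDestroy(&tensor);
    }
    free(tensor_array->handle_list);
    tensor_array->handle_list = nullptr;
    tensor_array->handle_num = 0;
}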
3657 
3658 // Normal scenario: CPU copy-free scenario with inference in a loop
3659 HWTEST(MSLiteTest, SUB_AI_MindSpore_CPU_copy_free_0002, Function | MediumTest | Level0) {
3660     printf("==========Init Context==========\n");
3661     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3662     AddContextDeviceCPU(context);
3663     printf("==========Build model==========\n");
3664     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3665     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3666     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3667 
3668     OH_AI_TensorHandleArray in_tensor_array;
3669     OH_AI_TensorHandleArray out_tensor_array;
3670     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
3671     CopyFreeSetAllocator(model, &in_tensor_array, &out_tensor_array);
3672     printf("==========FillInputsData==========\n");
3673     FillInputsData(in_tensor_array, "ml_face_isface", true);
3674     for (size_t i = 0; i < 50; ++i) {
3675         printf("==========OH_AI_ModelPredict==========\n");
3676         auto predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
3677         ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3678         CompareResult(out_tensor_array, "ml_face_isface");
3679     }
3680     printf("==========OH_AI_TensorDestroy==========\n");
3681     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
3682         auto ori_tensor = in_tensor_array.handle_list[i];
3683         OH_AI_TensorDestroy(&ori_tensor);
3684     }
3685     free(in_tensor_array.handle_list);
3686     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
3687         auto ori_tensor = out_tensor_array.handle_list[i];
3688         OH_AI_TensorDestroy(&ori_tensor);
3689     }
3690     free(out_tensor_array.handle_list);
3691     printf("==========ContextDestroy and ModelDestroy==========\n");
3692     OH_AI_ContextDestroy(&context);
3693     OH_AI_ModelDestroy(&model);
3694 }
3695 
3696 void CopyFreeNoSetAllocator(OH_AI_ModelHandle model, OH_AI_TensorHandleArray *in_tensor_array,
3697     OH_AI_TensorHandleArray *out_tensor_array)
3698 {
3699     const size_t maxDims = 10;
3700     int64_t shape[maxDims];
3701     size_t shapeNum;
3702     OH_AI_TensorHandleArray inputs_handle = OH_AI_ModelGetInputs(model);
3703     in_tensor_array->handle_num = inputs_handle.handle_num;
3704     in_tensor_array->handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) *
3705                                                                 in_tensor_array->handle_num);
3706     for (size_t i = 0; i < inputs_handle.handle_num; i++) {
3707         auto ori_tensor = inputs_handle.handle_list[i];
3708         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shapeNum);
3709         for (size_t j = 0; j < shapeNum; j++) {
3710             shape[j] = shape_ptr[j];
3711         }
3712         OH_AI_TensorHandle in_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor),
3713             OH_AI_TensorGetDataType(ori_tensor), shape, shapeNum, nullptr, 0);
3714         in_tensor_array->handle_list[i] = in_tensor;
3715     }
3716     printf("==========OH_AI_TensorSetAllocator out_tensor==========\n");
3717     OH_AI_TensorHandleArray outputs_handle = OH_AI_ModelGetOutputs(model);
3718     out_tensor_array->handle_num = outputs_handle.handle_num;
3719     out_tensor_array->handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) *
3720                                                                 out_tensor_array->handle_num);
3721     for (size_t i = 0; i < outputs_handle.handle_num; i++) {
3722         auto ori_tensor = outputs_handle.handle_list[i];
3723         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shapeNum);
3724         for (size_t j = 0; j < shapeNum; j++) {
3725             shape[j] = shape_ptr[j];
3726         }
3727         OH_AI_TensorHandle out_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor),
3728             OH_AI_TensorGetDataType(ori_tensor), shape, shapeNum, nullptr, 0);
3729         out_tensor_array->handle_list[i] = out_tensor;
3730     }
3731 }
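// CopyFreeNoSetAllocator differs from CopyFreeSetAllocator only in skipping
// OH_AI_TensorSetAllocator: the user tensors are created with a null data buffer and no shared
// allocator, so the runtime presumably allocates the buffers itself. The tests below use it to
// confirm that prediction still succeeds in that configuration.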
3732 
3733 // Normal scenario: CPU copy-free scenario without calling OH_AI_TensorSetAllocator
3734 HWTEST(MSLiteTest, SUB_AI_MindSpore_CPU_copy_free_0003, Function | MediumTest | Level0) {
3735     printf("==========Init Context==========\n");
3736     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3737     AddContextDeviceCPU(context);
3738     printf("==========Build model==========\n");
3739     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3740     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3741     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3742     OH_AI_TensorHandleArray in_tensor_array;
3743     OH_AI_TensorHandleArray out_tensor_array;
3744     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
3745     CopyFreeNoSetAllocator(model, &in_tensor_array, &out_tensor_array);
3746     printf("==========FillInputsData==========\n");
3747     FillInputsData(in_tensor_array, "ml_face_isface", true);
3748     printf("==========OH_AI_ModelPredict==========\n");
3749     auto predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
3750     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3751     CompareResult(out_tensor_array, "ml_face_isface");
3752     printf("==========OH_AI_TensorDestroy==========\n");
3753     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
3754         auto ori_tensor = in_tensor_array.handle_list[i];
3755         OH_AI_TensorDestroy(&ori_tensor);
3756     }
3757     free(in_tensor_array.handle_list);
3758     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
3759         auto ori_tensor = out_tensor_array.handle_list[i];
3760         OH_AI_TensorDestroy(&ori_tensor);
3761     }
3762     free(out_tensor_array.handle_list);
3763     printf("==========ContextDestroy and ModelDestroy==========\n");
3764     OH_AI_ContextDestroy(&context);
3765     OH_AI_ModelDestroy(&model);
3766 }
3767 
3768 // Normal scenario: CPU copy-free scenario without OH_AI_TensorSetAllocator, with inference in a loop
3769 HWTEST(MSLiteTest, SUB_AI_MindSpore_CPU_copy_free_0004, Function | MediumTest | Level0) {
3770     printf("==========Init Context==========\n");
3771     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3772     AddContextDeviceCPU(context);
3773     printf("==========Build model==========\n");
3774     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3775     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3776     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3777     OH_AI_TensorHandleArray in_tensor_array;
3778     OH_AI_TensorHandleArray out_tensor_array;
3779     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
3780     CopyFreeNoSetAllocator(model, &in_tensor_array, &out_tensor_array);
3781     printf("==========FillInputsData==========\n");
3782     FillInputsData(in_tensor_array, "ml_face_isface", true);
3783     for (size_t i = 0; i < 50; ++i) {
3784         printf("==========OH_AI_ModelPredict==========\n");
3785         auto predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
3786         ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3787         CompareResult(out_tensor_array, "ml_face_isface");
3788     }
3789     printf("==========OH_AI_TensorDestroy==========\n");
3790     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
3791         auto ori_tensor = in_tensor_array.handle_list[i];
3792         OH_AI_TensorDestroy(&ori_tensor);
3793     }
3794     free(in_tensor_array.handle_list);
3795     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
3796         auto ori_tensor = out_tensor_array.handle_list[i];
3797         OH_AI_TensorDestroy(&ori_tensor);
3798     }
3799     free(out_tensor_array.handle_list);
3800     printf("==========ContextDestroy and ModelDestroy==========\n");
3801     OH_AI_ContextDestroy(&context);
3802     OH_AI_ModelDestroy(&model);
3803 }
3804 
3805 // Normal scenario: create input/output tensors via OH_AI_TensorCreate to achieve copy-free data exchange
3806 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0002, Function | MediumTest | Level0) {
3807     if (!IsNPU()) {
3808         printf("NNRt is not NPU, skip this test");
3809         return;
3810     }
3811     printf("==========Init Context==========\n");
3812     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3813     AddContextDeviceNNRT(context);
3814     printf("==========Build model==========\n");
3815     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3816     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3817     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3818     RunMSLiteModel(model, "ml_face_isface", true);
3819     printf("==========OH_AI_ModelDestroy==========\n");
3820     OH_AI_ContextDestroy(&context);
3821     OH_AI_ModelDestroy(&model);
3822 }
3823 
3824 // Error scenario: OH_AI_TensorSetAllocator is given a null allocator
3825 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0003, Function | MediumTest | Level0) {
3826     if (!IsNPU()) {
3827         printf("NNRt is not NPU, skip this test");
3828         return;
3829     }
3830     printf("==========Init Context==========\n");
3831     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3832     AddContextDeviceNNRT(context);
3833     printf("==========Build model==========\n");
3834     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3835     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3836     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3837     printf("==========GetInputs==========\n");
3838     const size_t MAX_DIMS = 10;
3839     int64_t shape[MAX_DIMS];
3840     size_t shapeNum;
3841     OH_AI_TensorHandleArray in_tensor_array;
3842     OH_AI_TensorHandleArray inputs_handle = OH_AI_ModelGetInputs(model);
3843     in_tensor_array.handle_num = inputs_handle.handle_num;
3844     in_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * in_tensor_array.handle_num);
3845     for (size_t i = 0; i < inputs_handle.handle_num; i++) {
3846         auto ori_tensor = inputs_handle.handle_list[i];
3847         auto shape_ptr = OH_AI_TensorGetShape(ori_tensor, &shapeNum);
3848         for (size_t j = 0; j < shapeNum; j++) {
3849             shape[j] = shape_ptr[j];
3850         }
3851         OH_AI_TensorHandle in_tensor = OH_AI_TensorCreate(OH_AI_TensorGetName(ori_tensor),
3852                                         OH_AI_TensorGetDataType(ori_tensor), shape, shapeNum, nullptr, 0);
3853         OH_AI_Status set_ret = OH_AI_TensorSetAllocator(in_tensor, nullptr);
3854         ASSERT_NE(set_ret, OH_AI_STATUS_SUCCESS);
3855         in_tensor_array.handle_list[i] = in_tensor;
3856     }
3857     OH_AI_ContextDestroy(&context);
3858     OH_AI_ModelDestroy(&model);
3859 }
3860 
3861 // Normal scenario: create input/output tensors via OH_AI_TensorCreate for copy-free data exchange, CPU backend
3862 HWTEST(MSLiteTest, SUB_AI_MindSpore_CPU_copy_free_0001, Function | MediumTest | Level0) {
3863     printf("==========Init Context==========\n");
3864     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3865     AddContextDeviceCPU(context);
3866     printf("==========Build model==========\n");
3867     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3868     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3869     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3870     RunMSLiteModel(model, "ml_face_isface", true);
3871     printf("==========OH_AI_ModelDestroy==========\n");
3872     OH_AI_ContextDestroy(&context);
3873     OH_AI_ModelDestroy(&model);
3874 }
3875 
3876 
3877 // Normal scenario: NPU inference in a loop
3878 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0004, Function | MediumTest | Level0) {
3879     if (!IsNPU()) {
3880         printf("NNRt is not NPU, skip this test");
3881         return;
3882     }
3883     printf("==========Init Context==========\n");
3884     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3885     AddContextDeviceNNRT(context);
3886     printf("==========Build model==========\n");
3887     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3888     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3889     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3890     printf("==========GetInputs==========\n");
3891     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
3892     ASSERT_NE(inputs.handle_list, nullptr);
3893     FillInputsData(inputs, "ml_face_isface", true);
3894     for (size_t i = 0; i < 50; ++i) {
3895         printf("==========Model Predict==========\n");
3896         OH_AI_TensorHandleArray outputs;
3897         ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
3898         ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3899         CompareResult(outputs, "ml_face_isface");
3900     }
3901     OH_AI_ContextDestroy(&context);
3902     OH_AI_ModelDestroy(&model);
3903 }
3904 
3905 // Normal scenario: NPU copy-free scenario with inference in a loop
3906 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0005, Function | MediumTest | Level0) {
3907     if (!IsNPU()) {
3908         printf("NNRt is not NPU, skip this test");
3909         return;
3910     }
3911     printf("==========Init Context==========\n");
3912     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3913     AddContextDeviceNNRT(context);
3914     printf("==========Build model==========\n");
3915     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3916     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
3917     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3918     OH_AI_TensorHandleArray in_tensor_array;
3919     OH_AI_TensorHandleArray out_tensor_array;
3920     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
3921     CopyFreeSetAllocator(model, &in_tensor_array, &out_tensor_array);
3922     printf("==========FillInputsData==========\n");
3923     FillInputsData(in_tensor_array, "ml_face_isface", true);
3924     for (size_t i = 0; i < 50; ++i) {
3925         printf("==========OH_AI_ModelPredict==========\n");
3926         auto predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
3927         ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3928         CompareResult(out_tensor_array, "ml_face_isface");
3929     }
3930     printf("==========OH_AI_TensorDestroy==========\n");
3931     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
3932         auto ori_tensor = in_tensor_array.handle_list[i];
3933         OH_AI_TensorDestroy(&ori_tensor);
3934     }
3935     free(in_tensor_array.handle_list);
3936     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
3937         auto ori_tensor = out_tensor_array.handle_list[i];
3938         OH_AI_TensorDestroy(&ori_tensor);
3939     }
3940     free(out_tensor_array.handle_list);
3941     printf("==========ContextDestroy and ModelDestroy==========\n");
3942     OH_AI_ContextDestroy(&context);
3943     OH_AI_ModelDestroy(&model);
3944 }
3945 
3946 // Normal scenario: NPU copy-free scenario without calling OH_AI_TensorSetAllocator
3947 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0006, Function | MediumTest | Level0) {
3948     if (!IsNPU()) {
3949         printf("NNRt is not NPU, skip this test");
3950         return;
3951     }
3952     printf("==========Init Context==========\n");
3953     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3954     AddContextDeviceNNRT(context);
3955     printf("==========Build model==========\n");
3956     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3957     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
3958         OH_AI_MODELTYPE_MINDIR, context);
3959     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
3960     OH_AI_TensorHandleArray in_tensor_array;
3961     OH_AI_TensorHandleArray out_tensor_array;
3962     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
3963     CopyFreeNoSetAllocator(model, &in_tensor_array, &out_tensor_array);
3964     printf("==========FillInputsData==========\n");
3965     FillInputsData(in_tensor_array, "ml_face_isface", true);
3966     printf("==========OH_AI_ModelPredict==========\n");
3967     auto predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
3968     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
3969     CompareResult(out_tensor_array, "ml_face_isface");
3970     printf("==========OH_AI_TensorDestroy==========\n");
3971     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
3972         auto ori_tensor = in_tensor_array.handle_list[i];
3973         OH_AI_TensorDestroy(&ori_tensor);
3974     }
3975     free(in_tensor_array.handle_list);
3976     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
3977         auto ori_tensor = out_tensor_array.handle_list[i];
3978         OH_AI_TensorDestroy(&ori_tensor);
3979     }
3980     free(out_tensor_array.handle_list);
3981     printf("==========ContextDestroy and ModelDestroy==========\n");
3982     OH_AI_ContextDestroy(&context);
3983     OH_AI_ModelDestroy(&model);
3984 }
3985 
3986 // Normal scenario: NPU copy-free scenario without OH_AI_TensorSetAllocator, with inference in a loop
3987 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_copy_free_0007, Function | MediumTest | Level0) {
3988     if (!IsNPU()) {
3989         printf("NNRt is not NPU, skip this test");
3990         return;
3991     }
3992     printf("==========Init Context==========\n");
3993     OH_AI_ContextHandle context = OH_AI_ContextCreate();
3994     AddContextDeviceNNRT(context);
3995     printf("==========Build model==========\n");
3996     OH_AI_ModelHandle model = OH_AI_ModelCreate();
3997     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
3998         OH_AI_MODELTYPE_MINDIR, context);
3999     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4000 
4001     OH_AI_TensorHandleArray in_tensor_array;
4002     OH_AI_TensorHandleArray out_tensor_array;
4003     printf("==========OH_AI_TensorSetAllocator in_tensor==========\n");
4004     CopyFreeNoSetAllocator(model, &in_tensor_array, &out_tensor_array);
4005     printf("==========FillInputsData==========\n");
4006     FillInputsData(in_tensor_array, "ml_face_isface", true);
4007     for (size_t i = 0; i < 50; ++i) {
4008         printf("==========OH_AI_ModelPredict==========\n");
4009         auto predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &out_tensor_array, NULL, NULL);
4010         ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4011         CompareResult(out_tensor_array, "ml_face_isface");
4012     }
4013     printf("==========OH_AI_TensorDestroy==========\n");
4014     for (size_t i = 0; i < in_tensor_array.handle_num; i++) {
4015         auto ori_tensor = in_tensor_array.handle_list[i];
4016         OH_AI_TensorDestroy(&ori_tensor);
4017     }
4018     free(in_tensor_array.handle_list);
4019     for (size_t i = 0; i < out_tensor_array.handle_num; i++) {
4020         auto ori_tensor = out_tensor_array.handle_list[i];
4021         OH_AI_TensorDestroy(&ori_tensor);
4022     }
4023     free(out_tensor_array.handle_list);
4024     printf("==========ContextDestroy and ModelDestroy==========\n");
4025     OH_AI_ContextDestroy(&context);
4026     OH_AI_ModelDestroy(&model);
4027 }
4028 
4029 // Normal scenario: NPU weight-quantized model
4030 HWTEST(MSLiteTest, OHOS_NNRT_QUANT_0001, Function | MediumTest | Level0) {
4031     if (!IsNPU()) {
4032         printf("NNRt is not NPU, skip this test");
4033         return;
4034     }
4035     printf("==========Init Context==========\n");
4036     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4037     ASSERT_NE(context, nullptr);
4038     AddContextDeviceNNRT(context);
4039     printf("==========Create model==========\n");
4040     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4041     ASSERT_NE(model, nullptr);
4042     ModelPredict(model, context, "ml_face_isface_quant", {}, false, true, false);
4043 }
4044 
4045 
4046 // add nnrt hiai device info
4047 void AddContextDeviceHIAI(OH_AI_ContextHandle context) {
4048     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4049     size_t num = 0;
4050     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4051     std::cout << "found " << num << " nnrt devices" << std::endl;
4052     NNRTDeviceDesc *desc_1 = nullptr;
4053     for (size_t i = 0; i < num; i++) {
4054         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4055         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4056         if (strcmp(name, "HIAI_F") == 0) {
4057             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4058         }
4059     }
4060     ASSERT_NE(desc_1, nullptr);  // fail fast if no device named HIAI_F was found
4061     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4062     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4063     const char *band_mode = "HIAI_BANDMODE_HIGH";
4064     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4065 
4066     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4067 }
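// AddContextDeviceHIAI scans the NNRT device list for the entry named "HIAI_F" but leaves the
// descriptor list alive. The sketch below shows the same lookup with the list released once
// the id has been copied out; FindNNRTDeviceIdByName is illustrative only and is not called by
// the tests in this file.
static bool FindNNRTDeviceIdByName(const char *target_name, size_t *device_id) {
    size_t num = 0;
    auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
    if (descs == nullptr) {
        return false;
    }
    bool found = false;
    for (size_t i = 0; i < num; i++) {
        auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
        if (strcmp(OH_AI_GetNameFromNNRTDeviceDesc(desc), target_name) == 0) {
            // Copy the id out before the descriptor list is destroyed.
            *device_id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
            found = true;
            break;
        }
    }
    OH_AI_DestroyAllNNRTDeviceDescs(&descs);
    return found;
}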
4068 
4069 // Error scenario: HIAI flow, offline model on the NNRT backend; Model is created once and built multiple times
4070 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0004, Function | MediumTest | Level0) {
4071     if (!IsNPU()) {
4072         printf("NNRt is not NPU, skip this test");
4073         return;
4074     }
4075     char nnrtDevice[128];
4076     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4077     printf("nnrtDevice name: %s. \n", nnrtDevice);
4078     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4079     if (strcmp(nnrtDevice, "K5010") == 0) {
4080         printf("nnrtDevice is K5010, skip this test");
4081         return;
4082     }
4083     printf("==========Init Context==========\n");
4084     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4085     AddContextDeviceHIAI(context);
4086     printf("==========Create model==========\n");
4087     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4088     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4089     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4090     printf("==========Build model==========\n");
4091     OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4092     ASSERT_EQ(ret2, OH_AI_STATUS_LITE_MODEL_REBUILD);
4093     OH_AI_ContextDestroy(&context);
4094     OH_AI_ModelDestroy(&model);
4095 }
4096 
4097 // Error scenario: HIAI flow, offline model on the NNRT backend; ModelPredict is called with empty inputs
4098 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0005, Function | MediumTest | Level0) {
4099     if (!IsNPU()) {
4100         printf("NNRt is not NPU, skip this test");
4101         return;
4102     }
4103     char nnrtDevice[128];
4104     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4105     printf("nnrtDevice name: %s. \n", nnrtDevice);
4106     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4107     if (strcmp(nnrtDevice, "K5010") == 0) {
4108         printf("nnrtDevice is K5010, skip this test");
4109         return;
4110     }
4111     printf("==========Init Context==========\n");
4112     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4113     AddContextDeviceHIAI(context);
4114     printf("==========Create model==========\n");
4115     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4116     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4117     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4118     printf("==========Model Predict==========\n");
4119     OH_AI_TensorHandleArray inputs = {};  // deliberately left empty to exercise the error path
4120     OH_AI_TensorHandleArray outputs;
4121     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4122     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4123     OH_AI_ContextDestroy(&context);
4124     OH_AI_ModelDestroy(&model);
4125 }
4126 
4127 // Error scenario: HIAI flow, non-offline model with the NNRT backend; the .ms model has not been converted into an NNRT offline model
4128 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0006, Function | MediumTest | Level0) {
4129     if (!IsNPU()) {
4130         printf("NNRt is not NPU, skip this test");
4131         return;
4132     }
4133     char nnrtDevice[128];
4134     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4135     printf("nnrtDevice name: %s. \n", nnrtDevice);
4136     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4137     if (strcmp(nnrtDevice, "K5010") == 0) {
4138         printf("nnrtDevice is K5010, skip this test");
4139         return;
4140     }
4141     printf("==========Init Context==========\n");
4142     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4143     AddContextDeviceHIAI(context);
4144     printf("==========Create model==========\n");
4145     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4146     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
4147     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4148     OH_AI_ContextDestroy(&context);
4149     OH_AI_ModelDestroy(&model);
4150 }
4151 
4152 // Normal scenario: HIAI flow, offline model with quantization parameters configured
4153 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0007, Function | MediumTest | Level0) {
4154     if (!IsNPU()) {
4155         printf("NNRt is not NPU, skip this test");
4156         return;
4157     }
4158     char nnrtDevice[128];
4159     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4160     printf("nnrtDevice name: %s. \n", nnrtDevice);
4161     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4162     if (strcmp(nnrtDevice, "K5010") == 0) {
4163         printf("nnrtDevice is K5010, skip this test");
4164         return;
4165     }
4166     printf("==========Init Context==========\n");
4167     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4168     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4169     size_t num = 0;
4170     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4171     std::cout << "found " << num << " nnrt devices" << std::endl;
4172     NNRTDeviceDesc *desc_1 = nullptr;
4173     for (size_t i = 0; i < num; i++) {
4174         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4175         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4176         if (strcmp(name, "HIAI_F") == 0) {
4177             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4178         }
4179     }
4180 
4181     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4182     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4183     const char *band_mode = "HIAI_BANDMODE_HIGH";
4184     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4185     size_t q_size;
4186     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4187     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, q_size);
4188 
4189     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4190     printf("==========Create model==========\n");
4191     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4192     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4193     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4194     printf("==========GetInputs==========\n");
4195     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4196     ASSERT_NE(inputs.handle_list, nullptr);
4197     FillInputsData(inputs, "test_model", false);
4198     printf("==========Model Predict==========\n");
4199     OH_AI_TensorHandleArray outputs;
4200     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4201     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4202     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4203     OH_AI_ContextDestroy(&context);
4204     OH_AI_ModelDestroy(&model);
4205 }
4206 
4207 // Normal scenario: HIAI flow; setting the QuantConfigData extension to a null pointer is equivalent to no quantization
4208 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0008, Function | MediumTest | Level0) {
4209     if (!IsNPU()) {
4210         printf("NNRt is not NPU, skip this test");
4211         return;
4212     }
4213     char nnrtDevice[128];
4214     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4215     printf("nnrtDevice name: %s. \n", nnrtDevice);
4216     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4217     if (strcmp(nnrtDevice, "K5010") == 0) {
4218         printf("nnrtDevice is K5010, skip this test");
4219         return;
4220     }
4221     printf("==========Init Context==========\n");
4222     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4223     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4224     size_t num = 0;
4225     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4226     std::cout << "found " << num << " nnrt devices" << std::endl;
4227     NNRTDeviceDesc *desc_1 = nullptr;
4228     for (size_t i = 0; i < num; i++) {
4229         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4230         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4231         if (strcmp(name, "HIAI_F") == 0) {
4232             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4233         }
4234     }
4235 
4236     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4237     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4238     const char *band_mode = "HIAI_BANDMODE_HIGH";
4239     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4240     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", nullptr, 0);
4241 
4242     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4243     printf("==========Create model==========\n");
4244     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4245     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4246     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4247     printf("==========GetInputs==========\n");
4248     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4249     ASSERT_NE(inputs.handle_list, nullptr);
4250     FillInputsData(inputs, "test_model", false);
4251     printf("==========Model Predict==========\n");
4252     OH_AI_TensorHandleArray outputs;
4253     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4254     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4255     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4256     OH_AI_ContextDestroy(&context);
4257     OH_AI_ModelDestroy(&model);
4258 }
4259 
4260 // Error scenario: HIAI flow; QuantConfigData points to an invalid configuration file
4261 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0009, Function | MediumTest | Level0) {
4262     if (!IsNPU()) {
4263         printf("NNRt is not NPU, skip this test");
4264         return;
4265     }
4266     char nnrtDevice[128];
4267     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4268     printf("nnrtDevice name: %s. \n", nnrtDevice);
4269     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4270     if (strcmp(nnrtDevice, "K5010") == 0) {
4271         printf("nnrtDevice is K5010, skip this test");
4272         return;
4273     }
4274     printf("==========Init Context==========\n");
4275     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4276     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4277     size_t num = 0;
4278     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4279     std::cout << "found " << num << " nnrt devices" << std::endl;
4280     NNRTDeviceDesc *desc_1 = nullptr;
4281     for (size_t i = 0; i < num; i++) {
4282         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4283         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4284         if (strcmp(name, "HIAI_F") == 0) {
4285             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4286         }
4287     }
4288 
4289     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4290     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4291     const char *band_mode = "HIAI_BANDMODE_HIGH";
4292     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4293     size_t q_size;
4294     char *quant_config = ReadFile("/data/test/test_model.om.ms", &q_size);
4295     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, q_size);
4296 
4297     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4298     printf("==========Create model==========\n");
4299     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4300     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4301     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4302     OH_AI_ContextDestroy(&context);
4303     OH_AI_ModelDestroy(&model);
4304 }
4305 
4306 // Error scenario: HIAI flow; the quantization config size q_size is set to an invalid value
4307 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0010, Function | MediumTest | Level0) {
4308     if (!IsNPU()) {
4309         printf("NNRt is not NPU, skip this test");
4310         return;
4311     }
4312     char nnrtDevice[128];
4313     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4314     printf("nnrtDevice name: %s. \n", nnrtDevice);
4315     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4316     if (strcmp(nnrtDevice, "K5010") == 0) {
4317         printf("nnrtDevice is K5010, skip this test");
4318         return;
4319     }
4320     printf("==========Init Context==========\n");
4321     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4322     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4323     size_t num = 0;
4324     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4325     std::cout << "found " << num << " nnrt devices" << std::endl;
4326     NNRTDeviceDesc *desc_1 = nullptr;
4327     for (size_t i = 0; i < num; i++) {
4328         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4329         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4330         if (strcmp(name, "HIAI_F") == 0) {
4331             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4332         }
4333     }
4334 
4335     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4336     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4337     const char *band_mode = "HIAI_BANDMODE_HIGH";
4338     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4339     size_t q_size;
4340     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4341     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, 0);
4342 
4343     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4344     printf("==========Create model==========\n");
4345     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4346     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4347     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4348     OH_AI_ContextDestroy(&context);
4349     OH_AI_ModelDestroy(&model);
4350 }
4351 
4352 // Normal scenario: HIAI flow, set BandMode (the bandwidth mode between the NPU and peripheral I/O devices) to HIAI_BANDMODE_NORMAL
4353 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0011, Function | MediumTest | Level0) {
4354     if (!IsNPU()) {
4355         printf("NNRt is not NPU, skip this test");
4356         return;
4357     }
4358     char nnrtDevice[128];
4359     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4360     printf("nnrtDevice name: %s. \n", nnrtDevice);
4361     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4362     if (strcmp(nnrtDevice, "K5010") == 0) {
4363         printf("nnrtDevice is K5010, skip this test");
4364         return;
4365     }
4366     printf("==========Init Context==========\n");
4367     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4368     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4369     size_t num = 0;
4370     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4371     std::cout << "found " << num << " nnrt devices" << std::endl;
4372     NNRTDeviceDesc *desc_1 = nullptr;
4373     for (size_t i = 0; i < num; i++) {
4374         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4375         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4376         if (strcmp(name, "HIAI_F") == 0) {
4377             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4378         }
4379     }
4380 
4381     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4382     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4383     const char *band_mode = "HIAI_BANDMODE_NORMAL";
4384     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4385     size_t q_size;
4386     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4387     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, q_size);
4388 
4389     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4390     printf("==========Create model==========\n");
4391     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4392     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4393     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4394     printf("==========GetInputs==========\n");
4395     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4396     ASSERT_NE(inputs.handle_list, nullptr);
4397     FillInputsData(inputs, "test_model", false);
4398     printf("==========Model Predict==========\n");
4399     OH_AI_TensorHandleArray outputs;
4400     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4401     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4402     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4403     OH_AI_ContextDestroy(&context);
4404     OH_AI_ModelDestroy(&model);
4405 }
4406 
4407 // Normal scenario: HIAI flow, set BandMode (the bandwidth mode between the NPU and peripheral I/O devices) to HIAI_BANDMODE_LOW
4408 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0012, Function | MediumTest | Level0) {
4409     if (!IsNPU()) {
4410         printf("NNRt is not NPU, skip this test");
4411         return;
4412     }
4413     char nnrtDevice[128];
4414     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4415     printf("nnrtDevice name: %s. \n", nnrtDevice);
4416     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4417     if (strcmp(nnrtDevice, "K5010") == 0) {
4418         printf("nnrtDevice is K5010, skip this test");
4419         return;
4420     }
4421     printf("==========Init Context==========\n");
4422     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4423     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4424     size_t num = 0;
4425     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4426     std::cout << "found " << num << " nnrt devices" << std::endl;
4427     NNRTDeviceDesc *desc_1 = nullptr;
4428     for (size_t i = 0; i < num; i++) {
4429         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4430         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4431         if (strcmp(name, "HIAI_F") == 0) {
4432             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4433         }
4434     }
4435 
4436     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4437     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4438     const char *band_mode = "HIAI_BANDMODE_LOW";
4439     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4440     size_t q_size;
4441     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4442     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, q_size);
4443 
4444     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4445     printf("==========Create model==========\n");
4446     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4447     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4448     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4449     printf("==========GetInputs==========\n");
4450     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4451     ASSERT_NE(inputs.handle_list, nullptr);
4452     FillInputsData(inputs, "test_model", false);
4453     printf("==========Model Predict==========\n");
4454     OH_AI_TensorHandleArray outputs;
4455     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4456     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4457     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4458     OH_AI_ContextDestroy(&context);
4459     OH_AI_ModelDestroy(&model);
4460 }
4461 
4462 // Normal scenario: HIAI flow, set BandMode (the bandwidth mode between the NPU and peripheral I/O devices) to HIAI_BANDMODE_UNSET
4463 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0013, Function | MediumTest | Level0) {
4464     if (!IsNPU()) {
4465         printf("NNRt is not NPU, skip this test");
4466         return;
4467     }
4468     char nnrtDevice[128];
4469     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4470     printf("nnrtDevice name: %s. \n", nnrtDevice);
4471     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4472     if (strcmp(nnrtDevice, "K5010") == 0) {
4473         printf("nnrtDevice is K5010, skip this test");
4474         return;
4475     }
4476     printf("==========Init Context==========\n");
4477     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4478     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4479     size_t num = 0;
4480     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4481     std::cout << "found " << num << " nnrt devices" << std::endl;
4482     NNRTDeviceDesc *desc_1 = nullptr;
4483     for (size_t i = 0; i < num; i++) {
4484         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4485         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4486         if (strcmp(name, "HIAI_F") == 0) {
4487             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4488         }
4489     }
4490 
4491     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4492     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4493     const char *band_mode = "HIAI_BANDMODE_UNSET";
4494     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4495     size_t q_size;
4496     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4497     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, q_size);
4498 
4499     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4500     printf("==========Create model==========\n");
4501     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4502     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4503     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4504     printf("==========GetInputs==========\n");
4505     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4506     ASSERT_NE(inputs.handle_list, nullptr);
4507     FillInputsData(inputs, "test_model", false);
4508     printf("==========Model Predict==========\n");
4509     OH_AI_TensorHandleArray outputs;
4510     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4511     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4512     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4513     OH_AI_ContextDestroy(&context);
4514     OH_AI_ModelDestroy(&model);
4515 }
4516 
4517 // Normal scenario: HIAI flow, set BandMode (the bandwidth mode between the NPU and peripheral I/O devices) to HIAI_BANDMODE_HIGH
4518 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0014, Function | MediumTest | Level0) {
4519     if (!IsNPU()) {
4520         printf("NNRt is not NPU, skip this test");
4521         return;
4522     }
4523     char nnrtDevice[128];
4524     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4525     printf("nnrtDevice name: %s. \n", nnrtDevice);
4526     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4527     if (strcmp(nnrtDevice, "K5010") == 0) {
4528         printf("nnrtDevice is K5010, skip this test");
4529         return;
4530     }
4531     printf("==========Init Context==========\n");
4532     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4533     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4534     size_t num = 0;
4535     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4536     std::cout << "found " << num << " nnrt devices" << std::endl;
4537     NNRTDeviceDesc *desc_1 = nullptr;
4538     for (size_t i = 0; i < num; i++) {
4539         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4540         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4541         if (strcmp(name, "HIAI_F") == 0) {
4542             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4543         }
4544     }
4545 
4546     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4547     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4548     const char *band_mode = "HIAI_BANDMODE_HIGH";
4549     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4550     size_t q_size;
4551     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4552     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantConfigData", quant_config, q_size);
4553 
4554     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4555     printf("==========Create model==========\n");
4556     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4557     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4558     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4559     printf("==========GetInputs==========\n");
4560     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4561     ASSERT_NE(inputs.handle_list, nullptr);
4562     FillInputsData(inputs, "test_model", false);
4563     printf("==========Model Predict==========\n");
4564     OH_AI_TensorHandleArray outputs;
4565     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4566     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4567     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4568     OH_AI_ContextDestroy(&context);
4569     OH_AI_ModelDestroy(&model);
4570 }
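
// Note: tests 0011 through 0014 above differ only in the "BandMode" extension value; the
// HIAI_BANDMODE_NORMAL / HIAI_BANDMODE_LOW / HIAI_BANDMODE_UNSET / HIAI_BANDMODE_HIGH strings are
// passed verbatim to OH_AI_DeviceInfoAddExtension, and inference is expected to succeed for every mode.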
4571 
4572 // Normal scenario: HIAI flow, configure the quantization parameter QuantBuffer for the offline model
4573 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0015, Function | MediumTest | Level0) {
4574     if (!IsNPU()) {
4575         printf("NNRt is not NPU, skip this test");
4576         return;
4577     }
4578     char nnrtDevice[128];
4579     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4580     printf("nnrtDevice name: %s. \n", nnrtDevice);
4581     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4582     if (strcmp(nnrtDevice, "K5010") == 0) {
4583         printf("nnrtDevice is K5010, skip this test");
4584         return;
4585     }
4586     printf("==========Init Context==========\n");
4587     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4588     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4589     size_t num = 0;
4590     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4591     std::cout << "found " << num << " nnrt devices" << std::endl;
4592     NNRTDeviceDesc *desc_1 = nullptr;
4593     for (size_t i = 0; i < num; i++) {
4594         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4595         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4596         if (strcmp(name, "HIAI_F") == 0) {
4597             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4598         }
4599     }
4600 
4601     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4602     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4603     const char *band_mode = "HIAI_BANDMODE_HIGH";
4604     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4605     size_t q_size;
4606     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4607     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantBuffer", quant_config, q_size);
4608 
4609     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4610     printf("==========Create model==========\n");
4611     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4612     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4613     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4614     printf("==========GetInputs==========\n");
4615     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4616     ASSERT_NE(inputs.handle_list, nullptr);
4617     FillInputsData(inputs, "test_model", false);
4618     printf("==========Model Predict==========\n");
4619     OH_AI_TensorHandleArray outputs;
4620     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4621     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4622     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4623     OH_AI_ContextDestroy(&context);
4624     OH_AI_ModelDestroy(&model);
4625 }
4626 
4627 // Normal scenario: HIAI flow, setting the quantization config QuantBuffer to a null pointer is equivalent to disabling quantization
4628 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0016, Function | MediumTest | Level0) {
4629     if (!IsNPU()) {
4630         printf("NNRt is not NPU, skip this test");
4631         return;
4632     }
4633     char nnrtDevice[128];
4634     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4635     printf("nnrtDevice name: %s. \n", nnrtDevice);
4636     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4637     if (strcmp(nnrtDevice, "K5010") == 0) {
4638         printf("nnrtDevice is K5010, skip this test");
4639         return;
4640     }
4641     printf("==========Init Context==========\n");
4642     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4643     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4644     size_t num = 0;
4645     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4646     std::cout << "found " << num << " nnrt devices" << std::endl;
4647     NNRTDeviceDesc *desc_1 = nullptr;
4648     for (size_t i = 0; i < num; i++) {
4649         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4650         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4651         if (strcmp(name, "HIAI_F") == 0) {
4652             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4653         }
4654     }
4655 
4656     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4657     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4658     const char *band_mode = "HIAI_BANDMODE_HIGH";
4659     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4660     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantBuffer", nullptr, 0);
4661 
4662     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4663     printf("==========Create model==========\n");
4664     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4665     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4666     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4667     printf("==========GetInputs==========\n");
4668     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4669     ASSERT_NE(inputs.handle_list, nullptr);
4670     FillInputsData(inputs, "test_model", false);
4671     printf("==========Model Predict==========\n");
4672     OH_AI_TensorHandleArray outputs;
4673     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4674     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4675     CompareResult(outputs, "test_model", 0.01, 0.01, true);
4676     OH_AI_ContextDestroy(&context);
4677     OH_AI_ModelDestroy(&model);
4678 }
4679 
4680 // Abnormal scenario: HIAI flow, set the quantization config QuantBuffer to an invalid config file
4681 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0017, Function | MediumTest | Level0) {
4682     if (!IsNPU()) {
4683         printf("NNRt is not NPU, skip this test");
4684         return;
4685     }
4686     char nnrtDevice[128];
4687     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4688     printf("nnrtDevice name: %s. \n", nnrtDevice);
4689     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4690     if (strcmp(nnrtDevice, "K5010") == 0) {
4691         printf("nnrtDevice is K5010, skip this test");
4692         return;
4693     }
4694     printf("==========Init Context==========\n");
4695     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4696     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4697     size_t num = 0;
4698     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4699     std::cout << "found " << num << " nnrt devices" << std::endl;
4700     NNRTDeviceDesc *desc_1 = nullptr;
4701     for (size_t i = 0; i < num; i++) {
4702         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4703         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4704         if (strcmp(name, "HIAI_F") == 0) {
4705             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4706         }
4707     }
4708 
4709     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4710     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4711     const char *band_mode = "HIAI_BANDMODE_HIGH";
4712     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4713     size_t q_size;
4714     char *quant_config = ReadFile("/data/test/test_model.om.ms", &q_size);
4715     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantBuffer", quant_config, q_size);
4716 
4717     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4718     printf("==========Create model==========\n");
4719     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4720     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4721     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4722     OH_AI_ContextDestroy(&context);
4723     OH_AI_ModelDestroy(&model);
4724 }
4725 
4726 // Abnormal scenario: HIAI flow, set the QuantBuffer q_size to an invalid value
4727 HWTEST(MSLiteTest, SUB_AI_MindSpore_HIAI_OfflineModel_0018, Function | MediumTest | Level0) {
4728     if (!IsNPU()) {
4729         printf("NNRt is not NPU, skip this test");
4730         return;
4731     }
4732     char nnrtDevice[128];
4733     OH_AI_Status deviceRet = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
4734     printf("nnrtDevice name: %s. \n", nnrtDevice);
4735     ASSERT_EQ(deviceRet, OH_AI_STATUS_SUCCESS);
4736     if (strcmp(nnrtDevice, "K5010") == 0) {
4737         printf("nnrtDevice is K5010, skip this test");
4738         return;
4739     }
4740     printf("==========Init Context==========\n");
4741     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4742     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4743     size_t num = 0;
4744     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4745     std::cout << "found " << num << " nnrt devices" << std::endl;
4746     NNRTDeviceDesc *desc_1 = nullptr;
4747     for (size_t i = 0; i < num; i++) {
4748         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4749         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
4750         if (strcmp(name, "HIAI_F") == 0) {
4751             desc_1 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
4752         }
4753     }
4754 
4755     auto id_1 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_1);
4756     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_1);
4757     const char *band_mode = "HIAI_BANDMODE_HIGH";
4758     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "BandMode", band_mode, strlen(band_mode));
4759     size_t q_size;
4760     char *quant_config = ReadFile("/data/test/test_model_param", &q_size);
4761     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "QuantBuffer", quant_config, 0);
4762 
4763     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4764     printf("==========Create model==========\n");
4765     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4766     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/test_model.om.ms", OH_AI_MODELTYPE_MINDIR, context);
4767     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4768     OH_AI_ContextDestroy(&context);
4769     OH_AI_ModelDestroy(&model);
4770 }
4771 
4772 void PrintMem(const std::string &position) {
4773     std::string procFile = "/proc/" + std::to_string(getpid()) + "/status";
4774     std::ifstream infile(procFile);
4775     if (infile.good()) {
4776         std::string line;
4777         while (std::getline(infile, line)) {
4778             if (line.find("VmRSS") != std::string::npos) {
4779                 std::cout << position << " mem size: " << line << std::endl;
4780             }
4781         }
4782         infile.close();
4783     }
4784 }
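
// PrintMem parses /proc/<pid>/status and logs every VmRSS line, so the cache tests below can record
// the process resident memory before and after OH_AI_ModelBuildFromFile.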
4785 
4786 
4787 // Normal scenario: configure cache information in the context and run the inference flow
4788 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_Cache_0001, Function | MediumTest | Level0) {
4789     if (!IsNPU()) {
4790         printf("NNRt is not NPU, skip this test");
4791         return;
4792     }
4793     printf("==========OH_AI_ContextCreate==========\n");
4794     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4795     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4796     size_t num = 0;
4797     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4798     auto desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
4799     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc_0);
4800     std::cout << "OH_AI_GetNameFromNNRTDeviceDesc: " << name << std::endl;
4801     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
4802     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
4803     const char *cache_path = "/data/local/tmp";
4804     const char *cache_version = "1";
4805     const char *model_name = "cache_model";
4806     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cache_path, strlen(cache_path));
4807     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cache_version, strlen(cache_version));
4808     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName", model_name, strlen(model_name));
4809     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4810     printf("==========OH_AI_ModelCreate==========\n");
4811     PrintMem("before build");
4812     uint64_t timeStartPrepare = getTimeInUs();
4813     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4814     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
4815     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4816     uint64_t timeEndPrepare = getTimeInUs();
4817     float init_session_time_once = (timeEndPrepare - timeStartPrepare) / 1000.0;
4818     std::cout << "init_session_time_once: " << init_session_time_once << std::endl;
4819     PrintMem("after build");
4820     printf("==========GetInputs==========\n");
4821     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4822     ASSERT_NE(inputs.handle_list, nullptr);
4823     FillInputsData(inputs, "ml_face_isface", true);
4824     printf("==========Model Predict==========\n");
4825     OH_AI_TensorHandleArray outputs;
4826     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4827     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4828     CompareResult(outputs, "ml_face_isface", 0.01, 0.01, true);
4829     OH_AI_ContextDestroy(&context);
4830     OH_AI_ModelDestroy(&model);
4831 }
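
// The cache tests pass three string extensions, "CachePath", "CacheVersion" and "ModelName", through
// OH_AI_DeviceInfoAddExtension. The expectation (not verified here beyond the build status) is that
// the first build writes a compiled-model cache under CachePath and that later builds with the same
// version and model name can reuse it, which is what the timing and PrintMem logging in
// SUB_AI_MindSpore_NNRT_Cache_0001 is meant to observe.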
4832 
4833 // Normal scenario: configure cache information in the context and run inference with a quantized model
4834 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_Cache_0002, Function | MediumTest | Level0) {
4835     if (!IsNPU()) {
4836         printf("NNRt is not NPU, skip this test");
4837         return;
4838     }
4839     printf("==========OH_AI_ContextCreate==========\n");
4840     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4841     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4842     size_t num = 0;
4843     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4844     auto desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
4845     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc_0);
4846     std::cout << "OH_AI_GetNameFromNNRTDeviceDesc: " << name << std::endl;
4847     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
4848     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
4849     const char *cache_path = "/data/local/tmp";
4850     const char *cache_version = "1";
4851     const char *model_name = "cache_model_quant";
4852     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cache_path, strlen(cache_path));
4853     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cache_version, strlen(cache_version));
4854     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName", model_name, strlen(model_name));
4855     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4856     printf("==========OH_AI_ModelCreate==========\n");
4857     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4858     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_quant.ms", OH_AI_MODELTYPE_MINDIR, context);
4859     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4860     printf("==========GetInputs==========\n");
4861     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4862     ASSERT_NE(inputs.handle_list, nullptr);
4863     FillInputsData(inputs, "ml_face_isface_quant", true);
4864     printf("==========Model Predict==========\n");
4865     OH_AI_TensorHandleArray outputs;
4866     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4867     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4868     CompareResult(outputs, "ml_face_isface_quant", 0.01, 0.01, true);
4869     OH_AI_ContextDestroy(&context);
4870     OH_AI_ModelDestroy(&model);
4871 }
4872 
4873 void AddContextCacheDiffPath(OH_AI_ContextHandle context, const char *modelName)
4874 {
4875     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4876     size_t num = 0;
4877     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4878     auto desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
4879     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc_0);
4880     std::cout << "OH_AI_GetNameFromNNRTDeviceDesc: " << name << std::endl;
4881     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
4882     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
4883     const char *cachePath = "/data/local/tmp";
4884     const char *cacheVersion = "1";
4885     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cachePath, strlen(cachePath));
4886     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cacheVersion, strlen(cacheVersion));
4887     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName", modelName, strlen(modelName));
4888     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4889 }
4890 
4891 // Normal scenario: cache several different models under the same path and run the inference flow
4892 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_Cache_0003, Function | MediumTest | Level0) {
4893     if (!IsNPU()) {
4894         printf("NNRt is not NPU, skip this test");
4895         return;
4896     }
4897     printf("==========OH_AI_ContextCreate==========\n");
4898     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4899     AddContextCacheDiffPath(context, "cache_a");
4900     printf("==========OH_AI_ModelCreate==========\n");
4901     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4902     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_ocr_cn.ms", OH_AI_MODELTYPE_MINDIR, context);
4903     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
4904     printf("==========OH_AI_ContextCreate2==========\n");
4905     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
4906     AddContextCacheDiffPath(context2, "cache_b");
4907     printf("==========OH_AI_ModelCreate2==========\n");
4908     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
4909     OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model2, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context2);
4910     ASSERT_EQ(ret2, OH_AI_STATUS_SUCCESS);
4911     printf("==========GetInputs==========\n");
4912     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
4913     ASSERT_NE(inputs.handle_list, nullptr);
4914     FillInputsData(inputs, "ml_ocr_cn", true);
4915     printf("==========Model Predict==========\n");
4916     OH_AI_TensorHandleArray outputs;
4917     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
4918     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
4919     CompareResult(outputs, "ml_ocr_cn", 0.01, 0.01, true);
4920     OH_AI_ContextDestroy(&context);
4921     OH_AI_ModelDestroy(&model);
4922     printf("==========GetInputs2==========\n");
4923     OH_AI_TensorHandleArray inputs2 = OH_AI_ModelGetInputs(model2);
4924     ASSERT_NE(inputs2.handle_list, nullptr);
4925     FillInputsData(inputs2, "ml_face_isface", true);
4926     printf("==========Model Predict2==========\n");
4927     OH_AI_TensorHandleArray outputs2;
4928     OH_AI_Status predict_ret2 = OH_AI_ModelPredict(model2, inputs2, &outputs2, nullptr, nullptr);
4929     ASSERT_EQ(predict_ret2, OH_AI_STATUS_SUCCESS);
4930     CompareResult(outputs2, "ml_face_isface", 0.01, 0.01, true);
4931     OH_AI_ContextDestroy(&context2);
4932     OH_AI_ModelDestroy(&model2);
4933 }
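
// Unlike the failure case further below, Cache_0003 shows that two different models can share the
// same CachePath as long as each one uses its own ModelName ("cache_a" and "cache_b").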
4934 
4935 // Abnormal scenario: CachePath is invalid or does not exist
4936 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_Cache_0004, Function | MediumTest | Level0) {
4937     if (!IsNPU()) {
4938         printf("NNRt is not NPU, skip this test");
4939         return;
4940     }
4941     printf("==========OH_AI_ContextCreate==========\n");
4942     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4943     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4944     size_t num = 0;
4945     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4946     auto desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
4947     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc_0);
4948     std::cout << "OH_AI_GetNameFromNNRTDeviceDesc: " << name << std::endl;
4949     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
4950     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
4951     const char *cache_path = "/data/local/tmp/notexist/";
4952     const char *cache_version = "1";
4953     const char *model_name = "cache_error";
4954     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cache_path, strlen(cache_path));
4955     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cache_version, strlen(cache_version));
4956     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName", model_name, strlen(model_name));
4957     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4958     printf("==========OH_AI_ModelCreate==========\n");
4959     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4960     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
4961     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4962     OH_AI_ContextDestroy(&context);
4963     OH_AI_ModelDestroy(&model);
4964 }
4965 
4966 // Abnormal scenario: CacheVersion is outside the valid range
4967 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_Cache_0005, Function | MediumTest | Level0) {
4968     if (!IsNPU()) {
4969         printf("NNRt is not NPU, skip this test");
4970         return;
4971     }
4972     printf("==========OH_AI_ContextCreate==========\n");
4973     OH_AI_ContextHandle context = OH_AI_ContextCreate();
4974     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
4975     size_t num = 0;
4976     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
4977     auto desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
4978     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc_0);
4979     std::cout << "OH_AI_GetNameFromNNRTDeviceDesc: " << name << std::endl;
4980     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
4981     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
4982     const char *cache_path = "/data/local/tmp";
4983     const char *cache_version = "-1";
4984     const char *model_name = "cache_error";
4985     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cache_path, strlen(cache_path));
4986     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cache_version, strlen(cache_version));
4987     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName", model_name, strlen(model_name));
4988     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
4989     printf("==========OH_AI_ModelCreate==========\n");
4990     OH_AI_ModelHandle model = OH_AI_ModelCreate();
4991     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
4992     ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
4993     OH_AI_ContextDestroy(&context);
4994     OH_AI_ModelDestroy(&model);
4995 }
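
// Cache_0004 and Cache_0005 check the extension validation: a non-existent CachePath or a
// CacheVersion outside the valid range ("-1") makes OH_AI_ModelBuildFromFile return
// OH_AI_STATUS_LITE_ERROR instead of building the model.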
4996 
4997 // add nnrt device info with cache extensions (CachePath/CacheVersion/ModelName)
4998 void AddContextCache(OH_AI_ContextHandle context)
4999 {
5000     auto nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
5001     size_t num = 0;
5002     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
5003     auto desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, 0);
5004     auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc_0);
5005     std::cout << "OH_AI_GetNameFromNNRTDeviceDesc: " << name << std::endl;
5006     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
5007     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
5008     const char *cachePath = "/data/local/tmp";
5009     const char *cacheVersion = "1";
5010     const char *modelName = "cache_same";
5011     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CachePath", cachePath, strlen(cachePath));
5012     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "CacheVersion", cacheVersion, strlen(cacheVersion));
5013     OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName", modelName, strlen(modelName));
5014     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
5015 }
5016 
5017 // Abnormal scenario: model A generates a cache and model B uses the same CachePath, CacheVersion, and ModelName
5018 HWTEST(MSLiteTest, SUB_AI_MindSpore_NNRT_Cache_0006, Function | MediumTest | Level0) {
5019     if (!IsNPU()) {
5020         printf("NNRt is not NPU, skip this test");
5021         return;
5022     }
5023     char nnrtDevice[128];
5024     OH_AI_Status device_ret = OH_AI_Test_GetDeviceID(nnrtDevice, sizeof(nnrtDevice));
5025     printf("nnrtDevice name: %s. \n", nnrtDevice);
5026     ASSERT_EQ(device_ret, OH_AI_STATUS_SUCCESS);
5027     if (strcmp(nnrtDevice, "K5010") == 0) {
5028         printf("nnrtDevice is K5010, skip this test");
5029         return;
5030     }
5031     printf("==========OH_AI_ContextCreate==========\n");
5032     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5033     AddContextCache(context);
5034     printf("==========OH_AI_ModelCreate==========\n");
5035     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5036     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
5037     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
5038     printf("==========OH_AI_ContextCreate2==========\n");
5039     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
5040     AddContextCache(context2);
5041     printf("==========OH_AI_ModelCreate2==========\n");
5042     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
5043     OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model2, "/data/test/ml_ocr_cn.ms", OH_AI_MODELTYPE_MINDIR, context2);
5044     ASSERT_EQ(ret2, OH_AI_STATUS_SUCCESS);
5045     printf("==========GetInputs==========\n");
5046     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5047     ASSERT_NE(inputs.handle_list, nullptr);
5048     FillInputsData(inputs, "ml_face_isface", true);
5049     printf("==========Model Predict==========\n");
5050     OH_AI_TensorHandleArray outputs;
5051     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
5052     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
5053     CompareResult(outputs, "ml_face_isface", 0.01, 0.01, true);
5054     OH_AI_ContextDestroy(&context);
5055     OH_AI_ModelDestroy(&model);
5056     printf("==========GetInputs2==========\n");
5057     OH_AI_TensorHandleArray inputs2 = OH_AI_ModelGetInputs(model2);
5058     ASSERT_NE(inputs2.handle_list, nullptr);
5059     FillInputsData(inputs2, "ml_ocr_cn", true);
5060     printf("==========Model Predict2==========\n");
5061     OH_AI_TensorHandleArray outputs2;
5062     OH_AI_Status predict_ret2 = OH_AI_ModelPredict(model2, inputs2, &outputs2, nullptr, nullptr);
5063     ASSERT_EQ(predict_ret2, OH_AI_STATUS_LITE_ERROR);
5064     OH_AI_ContextDestroy(&context2);
5065     OH_AI_ModelDestroy(&model2);
5066 }
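
// In SUB_AI_MindSpore_NNRT_Cache_0006 the second model reuses the exact CachePath, CacheVersion and
// ModelName of the first one, so its prediction is asserted to fail with OH_AI_STATUS_LITE_ERROR,
// presumably because it is matched against the cache written for ml_face_isface rather than its own graph.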
5067 
5068 // Normal scenario: multi-input model, test the ContextDestroy API with the context destroyed before the model
5069 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextDestroy_0001, Function | MediumTest | Level0) {
5070     printf("==========Init Context==========\n");
5071     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5072     ASSERT_NE(context, nullptr);
5073     AddContextDeviceCPU(context);
5074     printf("==========Create model==========\n");
5075     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5076     ASSERT_NE(model, nullptr);
5077     printf("==========Build model==========\n");
5078     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_headpose_pb2tflite.ms",
5079         OH_AI_MODELTYPE_MINDIR, context);
5080     printf("==========build model return code:%d\n", ret);
5081     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
5082     printf("==========GetInputs==========\n");
5083     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5084     ASSERT_NE(inputs.handle_list, nullptr);
5085     FillInputsData(inputs, "ml_headpose_pb2tflite", false);
5086     printf("==========Model Predict==========\n");
5087     OH_AI_TensorHandleArray outputs;
5088     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
5089     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
5090     CompareResult(outputs, "ml_headpose_pb2tflite", 0.02, 0.02);
5091     OH_AI_ContextDestroy(&context);
5092     OH_AI_ModelDestroy(&model);
5093 }
5094 
5095 // Abnormal scenario: multi-input model, test the ContextDestroy API with the context destroyed after the model
5096 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextDestroy_0002, Function | MediumTest | Level0) {
5097     printf("==========Init Context==========\n");
5098     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5099     ASSERT_NE(context, nullptr);
5100     AddContextDeviceCPU(context);
5101     printf("==========Create model==========\n");
5102     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5103     ASSERT_NE(model, nullptr);
5104     printf("==========Build model==========\n");
5105     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_headpose_pb2tflite.ms",
5106         OH_AI_MODELTYPE_MINDIR, context);
5107     printf("==========build model return code:%d\n", ret);
5108     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
5109     printf("==========GetInputs==========\n");
5110     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5111     ASSERT_NE(inputs.handle_list, nullptr);
5112     FillInputsData(inputs, "ml_headpose_pb2tflite", false);
5113     printf("==========Model Predict==========\n");
5114     OH_AI_TensorHandleArray outputs;
5115     ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
5116     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
5117     CompareResult(outputs, "ml_headpose_pb2tflite", 0.02, 0.02);
5118     OH_AI_ModelDestroy(&model);
5119     OH_AI_ContextDestroy(&context);
5120 }
5121 
5122 // OH_AI_TensorCreate API: name is nullptr
5123 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0001, Function | MediumTest | Level0) {
5124     printf("==========OH_AI_TensorCreate==========\n");
5125     constexpr size_t createShapeNum = 4;
5126     int64_t createShape[createShapeNum] = {1, 48, 48, 3};
5127     OH_AI_TensorHandle tensor = OH_AI_TensorCreate(nullptr, OH_AI_DATATYPE_NUMBERTYPE_FLOAT32,
5128         createShape, createShapeNum, nullptr, 0);
5129     ASSERT_EQ(tensor, nullptr);
5130     OH_AI_TensorDestroy(&tensor);
5131 }
5132 
5133 // OH_AI_TensorCreate API: type is an invalid data type
5134 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0002, Function | MediumTest | Level0) {
5135     printf("==========OH_AI_TensorCreate==========\n");
5136     constexpr size_t createShapeNum = 4;
5137     int64_t createShape[createShapeNum] = {1, 48, 48, 3};
5138     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DataTypeInvalid,
5139         createShape, createShapeNum, nullptr, 0);
5140     ASSERT_EQ(tensor, nullptr);
5141     OH_AI_TensorDestroy(&tensor);
5142 }
5143 
5144 // OH_AI_TensorCreate API: shape array length is 33, exceeding the maximum of 32
5145 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0003, Function | MediumTest | Level0) {
5146     printf("==========OH_AI_TensorCreate==========\n");
5147     constexpr size_t createShapeNum = 33;
5148     int64_t createShape[createShapeNum] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
5149         1, 1, 1, 1, 1, 1, 1, 48, 48, 3};
5150     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5151         createShapeNum, nullptr, 0);
5152     ASSERT_EQ(tensor, nullptr);
5153     OH_AI_TensorDestroy(&tensor);
5154 }
5155 
5156 // OH_AI_TensorCreate API: shape array length is 32 while the shape_num passed in is 3
5157 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0004, Function | MediumTest | Level0) {
5158     printf("==========OH_AI_TensorCreate==========\n");
5159     constexpr size_t createShapeNum = 32;
5160     int64_t createShape[createShapeNum] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
5161         1, 1, 1, 1, 1, 48, 48, 3};
5162     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape, 3,
5163         nullptr, 0);
5164     ASSERT_NE(tensor, nullptr);
5165     OH_AI_TensorDestroy(&tensor);
5166 }
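
// The TensorCreate tests below use two data arguments: nullptr/0, which leaves buffer allocation to
// the runtime (the buffer is obtained later through OH_AI_TensorGetMutableData), and a buffer read
// with ReadFile, which supplies initial data at creation time. A minimal sketch of the first
// pattern, not used by the tests themselves:
void TensorCreateSketch()
{
    constexpr size_t shapeNum = 4;
    int64_t shape[shapeNum] = {1, 48, 48, 3};
    OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32,
        shape, shapeNum, nullptr, 0);
    if (tensor != nullptr) {
        // The runtime provides the backing buffer when the data is first accessed.
        void *buffer = OH_AI_TensorGetMutableData(tensor);
        (void)buffer;
        OH_AI_TensorDestroy(&tensor);
    }
}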
5167 
5168 // Normal scenario: create a tensor via the API and pass it as input for inference
5169 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0005, Function | MediumTest | Level0) {
5170     printf("==========OH_AI_ContextCreate==========\n");
5171     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5172     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5173     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
5174     printf("==========OH_AI_ModelBuildFromFile==========\n");
5175     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5176     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
5177         OH_AI_MODELTYPE_MINDIR, context);
5178     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
5179     printf("==========OH_AI_ModelGetInputs==========\n");
5180     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5181     ASSERT_NE(inputs.handle_list, nullptr);
5182     constexpr size_t create_shape_num = 4;
5183     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5184     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5185         create_shape_num, nullptr, 0);
5186     OH_AI_TensorHandleArray in_tensor_array;
5187     in_tensor_array.handle_num = 1;
5188     in_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * 1);
5189     in_tensor_array.handle_list[0] = tensor;
5190     FillInputsData(in_tensor_array, "ml_face_isface", true);
5191     printf("==========OH_AI_ModelPredict==========\n");
5192     OH_AI_TensorHandleArray outputs;
5193     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &outputs, nullptr, nullptr);
5194     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
5195     printf("==========CompareModelOutputs==========\n");
5196     CompareResult(outputs, "ml_face_isface");
5197     printf("==========OH_AI_ModelDestroy==========\n");
5198     OH_AI_ContextDestroy(&context);
5199     OH_AI_ModelDestroy(&model);
5200 }
5201 
5202 // Normal scenario: create a tensor with a different OH_AI_DataType and pass it as input for inference
5203 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0006, Function | MediumTest | Level0) {
5204     printf("==========OH_AI_ContextCreate==========\n");
5205     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5206     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5207     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
5208     printf("==========OH_AI_ModelBuildFromFile==========\n");
5209     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5210     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/aiy_vision_classifier_plants_V1_3.ms",
5211         OH_AI_MODELTYPE_MINDIR, context);
5212     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
5213     printf("==========OH_AI_ModelGetInputs==========\n");
5214     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5215     ASSERT_NE(inputs.handle_list, nullptr);
5216     constexpr size_t create_shape_num = 4;
5217     int64_t createShape[create_shape_num] = {1, 224, 224, 3};
5218     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_UINT8, createShape,
5219         create_shape_num, nullptr, 0);
5220     OH_AI_TensorHandleArray in_tensor_array;
5221     in_tensor_array.handle_num = 1;
5222     in_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * 1);
5223     in_tensor_array.handle_list[0] = tensor;
5224     FillInputsData(in_tensor_array, "aiy_vision_classifier_plants_V1_3", false);
5225     printf("==========OH_AI_ModelPredict==========\n");
5226     OH_AI_TensorHandleArray outputs;
5227     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &outputs, nullptr, nullptr);
5228     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
5229     printf("==========CompareModelOutputs==========\n");
5230     for (size_t i = 0; i < outputs.handle_num; ++i) {
5231         OH_AI_TensorHandle tensor = outputs.handle_list[i];
5232         int64_t element_num = OH_AI_TensorGetElementNum(tensor);
5233         printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
5234         uint8_t *output_data = reinterpret_cast<uint8_t *>(OH_AI_TensorGetMutableData(tensor));
5235         printf("output data is:");
5236         for (int j = 0; j < element_num && j <= 20; ++j) {
5237             printf("%d ", output_data[j]);
5238         }
5239         printf("\n");
5240         printf("==========compFp32WithTData==========\n");
5241         string expectedDataFile = "/data/test/aiy_vision_classifier_plants_V1_3" + std::to_string(i) + ".output";
5242         bool result = compUint8WithTData(output_data, expectedDataFile, 0.01, 0.01, false);
5243         ASSERT_EQ(result, true);
5244     }
5245     printf("==========OH_AI_ModelDestroy==========\n");
5246     OH_AI_ContextDestroy(&context);
5247     OH_AI_ModelDestroy(&model);
5248 }
5249 
5250 // OH_AI_TensorCreate API: iterate over the OH_AI_DataType values with a fixed shape array length
5251 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0007, Function | MediumTest | Level0) {
5252     constexpr size_t datatypeNum = 12;
5253     OH_AI_DataType datatype[datatypeNum] = {
5254         OH_AI_DATATYPE_NUMBERTYPE_BOOL,
5255         OH_AI_DATATYPE_NUMBERTYPE_INT8,
5256         OH_AI_DATATYPE_NUMBERTYPE_INT16,
5257         OH_AI_DATATYPE_NUMBERTYPE_INT32,
5258         OH_AI_DATATYPE_NUMBERTYPE_INT64,
5259         OH_AI_DATATYPE_NUMBERTYPE_UINT8,
5260         OH_AI_DATATYPE_NUMBERTYPE_UINT16,
5261         OH_AI_DATATYPE_NUMBERTYPE_UINT32,
5262         OH_AI_DATATYPE_NUMBERTYPE_UINT64,
5263         OH_AI_DATATYPE_NUMBERTYPE_FLOAT16,
5264         OH_AI_DATATYPE_NUMBERTYPE_FLOAT32,
5265         OH_AI_DATATYPE_NUMBERTYPE_FLOAT64
5266     };
5267     constexpr size_t createShapeNum = 4;
5268     int64_t createShape[createShapeNum] = {1, 48, 48, 3};
5269     for (size_t i = 0; i < datatypeNum; ++i) {
5270         OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", datatype[i], createShape, createShapeNum, nullptr, 0);
5271         ASSERT_NE(tensor, nullptr);
5272         OH_AI_TensorDestroy(&tensor);
5273     }
5274 }
5275 
5276 void CreateAndSetTensor(OH_AI_ModelHandle model, OH_AI_ContextHandle context,
5277     OH_AI_TensorHandleArray *in_tensor_array, bool isFirst)
5278 {
5279     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5280     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
5281     printf("==========OH_AI_ModelBuildFromFile==========\n");
5282     OH_AI_TensorHandle tensor;
5283     if (isFirst) {
5284         OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
5285             OH_AI_MODELTYPE_MINDIR, context);
5286         ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
5287         printf("==========OH_AI_ModelGetInputs==========\n");
5288         OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5289         ASSERT_NE(inputs.handle_list, nullptr);
5290         OH_AI_TensorSetFormat(inputs.handle_list[0], OH_AI_FORMAT_NHWC);
5291         constexpr size_t createShapeNum = 4;
5292         int64_t createShape[createShapeNum] = {1, 48, 48, 3};
5293         tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32,
5294             createShape, createShapeNum, nullptr, 0);
5295     } else {
5296         OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/aiy_vision_classifier_plants_V1_3.ms",
5297             OH_AI_MODELTYPE_MINDIR, context);
5298         ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
5299         printf("==========OH_AI_ModelGetInputs==========\n");
5300         OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5301         ASSERT_NE(inputs.handle_list, nullptr);
5302         constexpr size_t createShapeNum = 4;
5303         int64_t createShape[createShapeNum] = {1, 224, 224, 3};
5304         tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_UINT8, createShape,
5305             createShapeNum, nullptr, 0);
5306     }
5307 
5308     in_tensor_array->handle_num = 1;
5309     in_tensor_array->handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * 1);
5310     in_tensor_array->handle_list[0] = tensor;
5311 }
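
// CreateAndSetTensor builds either the float32 ml_face_isface model (isFirst == true) or the uint8
// aiy_vision_classifier_plants_V1_3 model, creates a matching input tensor, and hands it back
// through in_tensor_array; the caller owns the malloc'd handle_list.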
5312 
5313 // Normal scenario: create multiple tensors via the API and pass them as inputs for inference
5314 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0008, Function | MediumTest | Level0) {
5315     printf("==========OH_AI_ContextCreate==========\n");
5316     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5317     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5318     OH_AI_TensorHandleArray in_tensor_array;
5319     CreateAndSetTensor(model, context, &in_tensor_array, true);
5320     FillInputsData(in_tensor_array, "ml_face_isface", true);
5321     printf("==========OH_AI_ModelPredict==========\n");
5322     OH_AI_TensorHandleArray outputs;
5323     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &outputs, nullptr, nullptr);
5324     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
5325     printf("==========CompareModelOutputs==========\n");
5326     CompareResult(outputs, "ml_face_isface");
5327     printf("==========OH_AI_ModelDestroy==========\n");
5328     OH_AI_ContextDestroy(&context);
5329     OH_AI_ModelDestroy(&model);
5330 
5331     printf("==========OH_AI_ContextCreate==========\n");
5332     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
5333     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
5334     OH_AI_TensorHandleArray in_tensor_array2;
5335     CreateAndSetTensor(model2, context2, &in_tensor_array2, false);
5336     FillInputsData(in_tensor_array2, "aiy_vision_classifier_plants_V1_3", false);
5337     printf("==========OH_AI_ModelPredict==========\n");
5338     OH_AI_TensorHandleArray outputs2;
5339     OH_AI_Status predict_ret2 = OH_AI_ModelPredict(model2, in_tensor_array2, &outputs2, nullptr, nullptr);
5340     ASSERT_EQ(predict_ret2, OH_AI_STATUS_SUCCESS);
5341     printf("==========CompareModelOutputs==========\n");
5342     for (size_t i = 0; i < outputs2.handle_num; ++i) {
5343         OH_AI_TensorHandle tensor = outputs2.handle_list[i];
5344         int64_t element_num = OH_AI_TensorGetElementNum(tensor);
5345         printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
5346         uint8_t *output_data = reinterpret_cast<uint8_t *>(OH_AI_TensorGetMutableData(tensor));
5347         printf("output data is:");
5348         for (int j = 0; j < element_num && j <= 20; ++j) {
5349             printf("%d ", output_data[j]);
5350         }
5351         printf("\n");
5352         printf("==========compFp32WithTData==========\n");
5353         string expectedDataFile = "/data/test/aiy_vision_classifier_plants_V1_3" + std::to_string(i) + ".output";
5354         bool result = compUint8WithTData(output_data, expectedDataFile, 0.01, 0.01, false);
5355         ASSERT_EQ(result, true);
5356     }
5357     printf("==========OH_AI_ModelDestroy==========\n");
5358     OH_AI_ContextDestroy(&context2);
5359     OH_AI_ModelDestroy(&model2);
5360 }
5361 
5362 // OH_AI_TensorDestroy API: the tensor passed in is nullptr
5363 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0009, Function | MediumTest | Level0) {
5364     printf("==========OH_AI_TensorCreate==========\n");
5365     OH_AI_TensorDestroy(nullptr);
5366 }
5367 
5368 // OH_AI_TensorDestroy API: destroy a created tensor multiple times
5369 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0010, Function | MediumTest | Level0) {
5370     printf("==========OH_AI_TensorCreate==========\n");
5371     constexpr size_t create_shape_num = 4;
5372     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5373     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5374         create_shape_num, nullptr, 0);
5375     OH_AI_TensorDestroy(&tensor);
5376     OH_AI_TensorDestroy(&tensor);
5377 }
5378 
5379 // OH_AI_TensorClone API: the tensor is nullptr
5380 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0011, Function | MediumTest | Level0) {
5381     printf("==========OH_AI_TensorCreate==========\n");
5382     OH_AI_TensorHandle clone = OH_AI_TensorClone(nullptr);
5383     ASSERT_EQ(clone, nullptr);
5384 }
5385 
5386 // OH_AI_TensorClone (deep copy of a tensor): destroy the original tensor after cloning
5387 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0012, Function | MediumTest | Level0) {
5388     printf("==========ReadFile==========\n");
5389     size_t size1;
5390     size_t *ptr_size1 = &size1;
5391     const char *imagePath = "/data/test/ml_face_isface.input";
5392     char *imageBuf = ReadFile(imagePath, ptr_size1);
5393     ASSERT_NE(imageBuf, nullptr);
5394     printf("==========OH_AI_TensorCreate==========\n");
5395     constexpr size_t create_shape_num = 4;
5396     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5397     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5398                                            create_shape_num, imageBuf, size1);
5399     ASSERT_NE(tensor, nullptr);
5400     OH_AI_TensorHandle clone = OH_AI_TensorClone(tensor);
5401     ASSERT_NE(clone, nullptr);
5402     ASSERT_EQ(strcmp(OH_AI_TensorGetName(clone), "data_duplicate"), 0);
5403     delete[] imageBuf;
5404     OH_AI_TensorDestroy(&tensor);
5405     OH_AI_TensorDestroy(&clone);
5406 }
5407 
5408 // OH_AI_TensorClone: clone multiple tensors from the same source
5409 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0013, Function | MediumTest | Level0) {
5410     printf("==========ReadFile==========\n");
5411     size_t size1;
5412     size_t *ptr_size1 = &size1;
5413     const char *imagePath = "/data/test/ml_face_isface.input";
5414     char *imageBuf = ReadFile(imagePath, ptr_size1);
5415     ASSERT_NE(imageBuf, nullptr);
5416     printf("==========OH_AI_TensorCreate==========\n");
5417     constexpr size_t create_shape_num = 4;
5418     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5419     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5420                                            create_shape_num, imageBuf, size1);
5421     ASSERT_NE(tensor, nullptr);
5422     OH_AI_TensorHandle clone = OH_AI_TensorClone(tensor);
5423     ASSERT_NE(clone, nullptr);
5424     ASSERT_EQ(strcmp(OH_AI_TensorGetName(clone), "data_duplicate"), 0);
5425     OH_AI_TensorHandle clone2 = OH_AI_TensorClone(tensor);
5426     ASSERT_NE(clone2, nullptr);
5427     ASSERT_EQ(strcmp(OH_AI_TensorGetName(clone2), "data_duplicate"), 0);
5428     delete[] imageBuf;
5429     OH_AI_TensorDestroy(&tensor);
5430     OH_AI_TensorDestroy(&clone);
5431     OH_AI_TensorDestroy(&clone2);
5432 }
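
// Sketch of the clone contract the two cases above rely on (illustration only, placeholder
// names): OH_AI_TensorClone is expected to return an independent deep copy named
// "<source name>_duplicate", so the source and every clone are destroyed separately:
//
//     OH_AI_TensorHandle src = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32,
//                                                 create_shape, create_shape_num, imageBuf, size1);
//     OH_AI_TensorHandle copy = OH_AI_TensorClone(src);   // named "data_duplicate"
//     OH_AI_TensorDestroy(&src);                          // the clone stays usable
//     OH_AI_TensorDestroy(&copy);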
5433 
5434 // set the tensor name normally
5435 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0014, Function | MediumTest | Level0) {
5436     printf("==========OH_AI_TensorCreate==========\n");
5437     constexpr size_t create_shape_num = 4;
5438     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5439     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5440         create_shape_num, nullptr, 0);
5441     ASSERT_NE(tensor, nullptr);
5442     OH_AI_TensorSetName(tensor, "new_data");
5443     const char *tensorName = OH_AI_TensorGetName(tensor);
5444     ASSERT_EQ(strcmp(tensorName, "new_data"), 0);
5445     OH_AI_TensorDestroy(&tensor);
5446 }
5447 
5448 // set the tensor name multiple times
5449 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0015, Function | MediumTest | Level0) {
5450     printf("==========OH_AI_TensorCreate==========\n");
5451     constexpr size_t create_shape_num = 4;
5452     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5453     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5454         create_shape_num, nullptr, 0);
5455     ASSERT_NE(tensor, nullptr);
5456     OH_AI_TensorSetName(tensor, "new_data");
5457     const char *tensorName = OH_AI_TensorGetName(tensor);
5458     ASSERT_EQ(strcmp(tensorName, "new_data"), 0);
5459     OH_AI_TensorSetName(tensor, "new_data2");
5460     const char *tensorName2 = OH_AI_TensorGetName(tensor);
5461     ASSERT_EQ(strcmp(tensorName2, "new_data2"), 0);
5462     OH_AI_TensorDestroy(&tensor);
5463 }
5464 
5465 // OH_AI_TensorSetName: tensor is nullptr
5466 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0016, Function | MediumTest | Level0) {
5467     printf("==========OH_AI_TensorCreate==========\n");
5468     OH_AI_TensorSetName(nullptr, "data");
5469 }
5470 
5471 // OH_AI_TensorSetName: name is an empty string or contains invalid characters
5472 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0017, Function | MediumTest | Level0) {
5473     printf("==========OH_AI_TensorCreate==========\n");
5474     constexpr size_t create_shape_num = 4;
5475     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5476     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5477         create_shape_num, nullptr, 0);
5478     ASSERT_NE(tensor, nullptr);
5479     OH_AI_TensorSetName(tensor, "");
5480     printf("Tensor name: %s. \n", OH_AI_TensorGetName(tensor));
5481     OH_AI_TensorDestroy(&tensor);
5482 }
5483 
5484 // OH_AI_TensorGetName: tensor is nullptr
5485 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0018, Function | MediumTest | Level0) {
5486     printf("==========OH_AI_TensorCreate==========\n");
5487     const char *tensorName = OH_AI_TensorGetName(nullptr);
5488     ASSERT_EQ(tensorName, nullptr);
5489 }
5490 
5491 // OH_AI_TensorSetDataType: tensor is nullptr
5492 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0019, Function | MediumTest | Level0) {
5493     printf("==========OH_AI_TensorCreate==========\n");
5494     OH_AI_TensorSetDataType(nullptr, OH_AI_DATATYPE_NUMBERTYPE_FLOAT32);
5495 }
5496 
5497 // OH_AI_TensorSetDataType: set OH_AI_DataType to an invalid value
5498 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0020, Function | MediumTest | Level0) {
5499     printf("==========OH_AI_TensorCreate==========\n");
5500     constexpr size_t create_shape_num = 4;
5501     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5502     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5503         create_shape_num, nullptr, 0);
5504     ASSERT_NE(tensor, nullptr);
5505     OH_AI_TensorSetDataType(tensor, OH_AI_DataTypeInvalid);
5506     OH_AI_TensorDestroy(&tensor);
5507 }
5508 
5509 // set the data type normally
5510 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0021, Function | MediumTest | Level0) {
5511     printf("==========OH_AI_TensorCreate==========\n");
5512     constexpr size_t create_shape_num = 4;
5513     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5514     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5515         create_shape_num, nullptr, 0);
5516     ASSERT_NE(tensor, nullptr);
5517     OH_AI_TensorSetDataType(tensor, OH_AI_DATATYPE_NUMBERTYPE_FLOAT16);
5518     ASSERT_EQ(OH_AI_TensorGetDataType(tensor), OH_AI_DATATYPE_NUMBERTYPE_FLOAT16);
5519     OH_AI_TensorDestroy(&tensor);
5520 }
5521 
5522 // set the data type multiple times
5523 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0022, Function | MediumTest | Level0) {
5524     printf("==========OH_AI_TensorCreate==========\n");
5525     constexpr size_t create_shape_num = 4;
5526     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5527     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5528         create_shape_num, nullptr, 0);
5529     ASSERT_NE(tensor, nullptr);
5530     OH_AI_TensorSetDataType(tensor, OH_AI_DATATYPE_NUMBERTYPE_FLOAT16);
5531     ASSERT_EQ(OH_AI_TensorGetDataType(tensor), OH_AI_DATATYPE_NUMBERTYPE_FLOAT16);
5532     OH_AI_TensorSetDataType(tensor, OH_AI_DATATYPE_NUMBERTYPE_UINT8);
5533     ASSERT_EQ(OH_AI_TensorGetDataType(tensor), OH_AI_DATATYPE_NUMBERTYPE_UINT8);
5534     OH_AI_TensorDestroy(&tensor);
5535 }
5536 
5537 // OH_AI_TensorGetDataType: tensor is nullptr
5538 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0023, Function | MediumTest | Level0) {
5539     printf("==========OH_AI_TensorCreate==========\n");
5540     OH_AI_TensorGetDataType(nullptr);
5541 }
5542 
5543 // OH_AI_TensorSetShape: tensor is nullptr
5544 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0024, Function | MediumTest | Level0) {
5545     printf("==========OH_AI_TensorCreate==========\n");
5546     constexpr size_t create_shape_num = 4;
5547     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5548     OH_AI_TensorSetShape(nullptr, create_shape, create_shape_num);
5549 }
5550 
5551 // OH_AI_TensorSetShape: shape array length is 33
5552 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0025, Function | MediumTest | Level0) {
5553     printf("==========OH_AI_TensorCreate==========\n");
5554     constexpr size_t create_shape_num = 4;
5555     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5556     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5557         create_shape_num, nullptr, 0);
5558     ASSERT_NE(tensor, nullptr);
5559     OH_AI_TensorSetShape(tensor, create_shape, 33);
5560     OH_AI_TensorDestroy(&tensor);
5561 }
5562 
5563 // set the shape normally
5564 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0026, Function | MediumTest | Level0) {
5565     printf("==========OH_AI_TensorCreate==========\n");
5566     constexpr size_t create_shape_num = 4;
5567     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5568     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5569         create_shape_num, nullptr, 0);
5570     ASSERT_NE(tensor, nullptr);
5571     constexpr size_t new_shape_num = 4;
5572     int64_t new_shape[new_shape_num] = {1, 32, 32, 1};
5573     OH_AI_TensorSetShape(tensor, new_shape, new_shape_num);
5574     size_t new_ret_shape_num;
5575     const int64_t *new_ret_shape = OH_AI_TensorGetShape(tensor, &new_ret_shape_num);
5576     ASSERT_EQ(new_ret_shape_num, new_shape_num);
5577     for (size_t i = 0; i < new_ret_shape_num; i++) {
5578         ASSERT_EQ(new_ret_shape[i], new_shape[i]);
5579     }
5580     OH_AI_TensorDestroy(&tensor);
5581 }
5582 
5583 // set the shape multiple times
5584 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0027, Function | MediumTest | Level0) {
5585     printf("==========OH_AI_TensorCreate==========\n");
5586     constexpr size_t create_shape_num = 4;
5587     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5588     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5589         create_shape_num, nullptr, 0);
5590     ASSERT_NE(tensor, nullptr);
5591     constexpr size_t new_shape_num = 4;
5592     int64_t new_shape[new_shape_num] = {1, 32, 32, 1};
5593     OH_AI_TensorSetShape(tensor, new_shape, new_shape_num);
5594     size_t new_ret_shape_num;
5595     const int64_t *new_ret_shape = OH_AI_TensorGetShape(tensor, &new_ret_shape_num);
5596     ASSERT_EQ(new_ret_shape_num, new_shape_num);
5597     for (size_t i = 0; i < new_ret_shape_num; i++) {
5598         ASSERT_EQ(new_ret_shape[i], new_shape[i]);
5599     }
5600     constexpr size_t newShapeNum2 = 4;
5601     int64_t newShape2[newShapeNum2] = {1, 16, 16, 1};
5602     OH_AI_TensorSetShape(tensor, newShape2, newShapeNum2);
5603     size_t newRetShapeNum2;
5604     const int64_t *newRetShape2 = OH_AI_TensorGetShape(tensor, &newRetShapeNum2);
5605     ASSERT_EQ(newRetShapeNum2, newShapeNum2);
5606     for (size_t i = 0; i < newRetShapeNum2; i++) {
5607         ASSERT_EQ(newRetShape2[i], newShape2[i]);
5608     }
5609     OH_AI_TensorDestroy(&tensor);
5610 }
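
// Sketch of the set/get shape round trip used above (illustration only, placeholder names).
// OH_AI_TensorGetShape writes the dimension count through the out parameter; the returned
// pointer is assumed to be owned by the tensor, since none of the cases above free it:
//
//     int64_t dims[4] = {1, 32, 32, 1};
//     OH_AI_TensorSetShape(tensor, dims, 4);
//     size_t dim_num = 0;
//     const int64_t *ret = OH_AI_TensorGetShape(tensor, &dim_num);   // dim_num == 4, ret[i] == dims[i]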
5611 
5612 // OH_AI_TensorGetShape: tensor is nullptr
5613 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0028, Function | MediumTest | Level0) {
5614     printf("==========OH_AI_TensorCreate==========\n");
5615     size_t shape_num;
5616     OH_AI_TensorGetShape(nullptr, &shape_num);
5617 }
5618 
5619 // OH_AI_TensorSetFormat: tensor is nullptr
5620 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0029, Function | MediumTest | Level0) {
5621     printf("==========OH_AI_TensorCreate==========\n");
5622     OH_AI_TensorSetFormat(nullptr, OH_AI_FORMAT_NCHW);
5623 }
5624 
5625 // set the format normally
5626 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0030, Function | MediumTest | Level0) {
5627     printf("==========OH_AI_TensorCreate==========\n");
5628     constexpr size_t create_shape_num = 4;
5629     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5630     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5631         create_shape_num, nullptr, 0);
5632     ASSERT_NE(tensor, nullptr);
5633     OH_AI_TensorSetFormat(tensor, OH_AI_FORMAT_NCHW);
5634     ASSERT_EQ(OH_AI_TensorGetFormat(tensor), OH_AI_FORMAT_NCHW);
5635     OH_AI_TensorDestroy(&tensor);
5636 }
5637 
5638 // set the format multiple times
5639 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0031, Function | MediumTest | Level0) {
5640     printf("==========OH_AI_TensorCreate==========\n");
5641     constexpr size_t create_shape_num = 4;
5642     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5643     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5644         create_shape_num, nullptr, 0);
5645     ASSERT_NE(tensor, nullptr);
5646     OH_AI_TensorSetFormat(tensor, OH_AI_FORMAT_NCHW);
5647     ASSERT_EQ(OH_AI_TensorGetFormat(tensor), OH_AI_FORMAT_NCHW);
5648     OH_AI_TensorSetFormat(tensor, OH_AI_FORMAT_NHWC);
5649     ASSERT_EQ(OH_AI_TensorGetFormat(tensor), OH_AI_FORMAT_NHWC);
5650     OH_AI_TensorDestroy(&tensor);
5651 }
5652 
5653 // loop over all supported format types
5654 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0032, Function | MediumTest | Level0) {
5655     printf("==========OH_AI_TensorCreate==========\n");
5656     constexpr size_t create_shape_num = 4;
5657     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5658     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5659         create_shape_num, nullptr, 0);
5660     ASSERT_NE(tensor, nullptr);
5661     constexpr size_t formatNum = 17;
5662     OH_AI_Format format[formatNum] = {
5663         OH_AI_FORMAT_NCHW,
5664         OH_AI_FORMAT_NHWC,
5665         OH_AI_FORMAT_NHWC4,
5666         OH_AI_FORMAT_HWKC,
5667         OH_AI_FORMAT_HWCK,
5668         OH_AI_FORMAT_KCHW,
5669         OH_AI_FORMAT_CKHW,
5670         OH_AI_FORMAT_KHWC,
5671         OH_AI_FORMAT_CHWK,
5672         OH_AI_FORMAT_HW,
5673         OH_AI_FORMAT_HW4,
5674         OH_AI_FORMAT_NC,
5675         OH_AI_FORMAT_NC4,
5676         OH_AI_FORMAT_NC4HW4,
5677         OH_AI_FORMAT_NCDHW,
5678         OH_AI_FORMAT_NWC,
5679         OH_AI_FORMAT_NCW
5680     };
5681     for (size_t i = 0; i < formatNum; ++i) {
5682         OH_AI_TensorSetFormat(tensor, format[i]);
5683         ASSERT_EQ(OH_AI_TensorGetFormat(tensor), format[i]);
5684     }
5685     OH_AI_TensorDestroy(&tensor);
5686 }
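
// Note on the loop above: the array enumerates the 17 OH_AI_Format values exercised by this
// suite, and the case only checks that OH_AI_TensorSetFormat / OH_AI_TensorGetFormat round-trip
// each of them, e.g. for a single value:
//
//     OH_AI_TensorSetFormat(tensor, OH_AI_FORMAT_NHWC);
//     ASSERT_EQ(OH_AI_TensorGetFormat(tensor), OH_AI_FORMAT_NHWC);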
5687 
5688 // OH_AI_TensorGetFormat: tensor is nullptr
5689 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0033, Function | MediumTest | Level0) {
5690     printf("==========OH_AI_TensorCreate==========\n");
5691     OH_AI_TensorGetFormat(nullptr);
5692 }
5693 
5694 // OH_AI_TensorSetData: tensor is nullptr
5695 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0034, Function | MediumTest | Level0) {
5696     printf("==========OH_AI_TensorCreate==========\n");
5697     constexpr size_t data_len = 6;
5698     float data[data_len] = {1, 2, 3, 4, 5, 6};
5699     OH_AI_TensorSetData(nullptr, data);
5700 }
5701 
5702 // OH_AI_TensorSetData: data is nullptr
5703 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0035, Function | MediumTest | Level0) {
5704     printf("==========OH_AI_TensorCreate==========\n");
5705     constexpr size_t create_shape_num = 1;
5706     int64_t createShape[create_shape_num] = {6};
5707     constexpr size_t data_len = 6;
5708     float data[data_len] = {1, 2, 3, 4, 5, 6};
5709     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5710         create_shape_num, data, 6*4);
5711     ASSERT_NE(tensor, nullptr);
5712     OH_AI_TensorSetData(tensor, nullptr);
5713     OH_AI_TensorDestroy(&tensor);
5714 }
5715 
5716 // OH_AI_TensorSetData: set data normally
5717 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0036, Function | MediumTest | Level0) {
5718     printf("==========OH_AI_TensorCreate==========\n");
5719     constexpr size_t create_shape_num = 1;
5720     int64_t createShape[create_shape_num] = {6};
5721     constexpr size_t data_len = 6;
5722     float data[data_len] = {1, 2, 3, 4, 5, 6};
5723     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5724         create_shape_num, data, 6*4);
5725     ASSERT_NE(tensor, nullptr);
5726     float data2[data_len] = {12, 22, 32, 42, 52, 62};
5727     OH_AI_TensorSetData(tensor, data2);
5728     const float *ret_data = static_cast<const float *>(OH_AI_TensorGetData(tensor));
5729     ASSERT_NE(ret_data, nullptr);
5730     printf("return data is:");
5731     for (size_t i = 0; i < data_len; i++) {
5732         ASSERT_EQ(ret_data[i], data2[i]);
5733         printf("%f ", ret_data[i]);
5734     }
5735     OH_AI_TensorDestroy(&tensor);
5736 }
5737 
5738 // OH_AI_TensorSetData: set data multiple times
5739 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0037, Function | MediumTest | Level0) {
5740     printf("==========OH_AI_TensorCreate==========\n");
5741     constexpr size_t create_shape_num = 1;
5742     int64_t createShape[create_shape_num] = {6};
5743     constexpr size_t data_len = 6;
5744     float data[data_len] = {1, 2, 3, 4, 5, 6};
5745     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5746         create_shape_num, data, 6*4);
5747     ASSERT_NE(tensor, nullptr);
5748     float data2[data_len] = {12, 22, 32, 42, 52, 62};
5749     float data3[data_len] = {13, 23, 33, 43, 53, 63};
5750     OH_AI_TensorSetData(tensor, data2);
5751     OH_AI_TensorSetData(tensor, data3);
5752     const float *ret_data = static_cast<const float *>(OH_AI_TensorGetData(tensor));
5753     ASSERT_NE(ret_data, nullptr);
5754     printf("return data is:");
5755     for (size_t i = 0; i < data_len; i++) {
5756         ASSERT_EQ(ret_data[i], data3[i]);
5757         printf("%f ", ret_data[i]);
5758     }
5759     OH_AI_TensorDestroy(&tensor);
5760 }
5761 
5762 // OH_AI_TensorGetData: tensor is nullptr
5763 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0038, Function | MediumTest | Level0) {
5764     printf("==========OH_AI_TensorCreate==========\n");
5765     OH_AI_TensorGetData(nullptr);
5766 }
5767 
5768 // OH_AI_TensorGetMutableData: tensor is nullptr
5769 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0039, Function | MediumTest | Level0) {
5770     printf("==========OH_AI_TensorCreate==========\n");
5771     OH_AI_TensorGetMutableData(nullptr);
5772 }
5773 
5774 // OH_AI_TensorGetElementNum: tensor is nullptr
5775 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0040, Function | MediumTest | Level0) {
5776     printf("==========OH_AI_TensorCreate==========\n");
5777     int64_t element = OH_AI_TensorGetElementNum(nullptr);
5778     std::cout << element << std::endl;
5779 }
5780 
5781 // OH_AI_TensorGetDataSize: tensor is nullptr
5782 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0041, Function | MediumTest | Level0) {
5783     printf("==========OH_AI_TensorCreate==========\n");
5784     size_t datasize = OH_AI_TensorGetDataSize(nullptr);
5785     std::cout << datasize << std::endl;
5786 }
5787 
5788 // OH_AI_TensorGetDataSize: no data has been filled in
5789 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0042, Function | MediumTest | Level0) {
5790     printf("==========OH_AI_TensorCreate==========\n");
5791     constexpr size_t create_shape_num = 4;
5792     int64_t create_shape[create_shape_num] = {1, 48, 48, 3};
5793     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, create_shape,
5794                             create_shape_num, nullptr, 0);
5795     ASSERT_NE(tensor, nullptr);
5796     size_t datasize = OH_AI_TensorGetDataSize(tensor);
5797     std::cout << datasize << std::endl;
5798     OH_AI_TensorDestroy(&tensor);
5799 }
5800 
5801 // OH_AI_TensorSetUserData: tensor is nullptr
5802 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0043, Function | MediumTest | Level0) {
5803     printf("==========OH_AI_TensorCreate==========\n");
5804     constexpr size_t data_len = 6;
5805     float data[data_len] = {1, 2, 3, 4, 5, 6};
5806     OH_AI_TensorSetUserData(nullptr, data, 6 * 4);
5807 }
5808 
5809 // OH_AI_TensorSetUserData: data is nullptr
5810 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0044, Function | MediumTest | Level0) {
5811     printf("==========OH_AI_TensorCreate==========\n");
5812     constexpr size_t create_shape_num = 1;
5813     int64_t createShape[create_shape_num] = {6};
5814     constexpr size_t data_len = 6;
5815     float data[data_len] = {1, 2, 3, 4, 5, 6};
5816     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5817         create_shape_num, data, 6*4);
5818     ASSERT_NE(tensor, nullptr);
5819     OH_AI_TensorSetUserData(tensor, nullptr, 0);
5820     OH_AI_TensorDestroy(&tensor);
5821 }
5822 
5823 // OH_AI_TensorSetUserData: set data normally
5824 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0045, Function | MediumTest | Level0) {
5825     printf("==========OH_AI_TensorCreate==========\n");
5826     constexpr size_t create_shape_num = 1;
5827     int64_t createShape[create_shape_num] = {6};
5828     constexpr size_t data_len = 6;
5829     float data[data_len] = {1, 2, 3, 4, 5, 6};
5830     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5831         create_shape_num, data, 6*4);
5832     ASSERT_NE(tensor, nullptr);
5833     float data2[data_len] = {12, 22, 32, 42, 52, 62};
5834     OH_AI_TensorSetUserData(tensor, data2, 6*4);
5835     const float *ret_data = static_cast<const float *>(OH_AI_TensorGetData(tensor));
5836     ASSERT_NE(ret_data, nullptr);
5837     printf("return data is:");
5838     for (size_t i = 0; i < data_len; i++) {
5839         ASSERT_EQ(ret_data[i], data2[i]);
5840         printf("%f ", ret_data[i]);
5841     }
5842     OH_AI_TensorDestroy(&tensor);
5843 }
5844 
5845 // OH_AI_TensorSetUserData: set data multiple times
5846 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0046, Function | MediumTest | Level0) {
5847     printf("==========OH_AI_TensorCreate==========\n");
5848     constexpr size_t create_shape_num = 1;
5849     int64_t createShape[create_shape_num] = {6};
5850     constexpr size_t data_len = 6;
5851     float data[data_len] = {1, 2, 3, 4, 5, 6};
5852     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5853         create_shape_num, data, 6*4);
5854     ASSERT_NE(tensor, nullptr);
5855     float data2[data_len] = {12, 22, 32, 42, 52, 62};
5856     float data3[data_len] = {13, 23, 33, 43, 53, 63};
5857     OH_AI_TensorSetUserData(tensor, data2, 6*4);
5858     OH_AI_TensorSetUserData(tensor, data3, 6*4);
5859     const float *ret_data = static_cast<const float *>(OH_AI_TensorGetData(tensor));
5860     ASSERT_NE(ret_data, nullptr);
5861     printf("return data is:");
5862     for (size_t i = 0; i < data_len; i++) {
5863         ASSERT_EQ(ret_data[i], data3[i]);
5864         printf("%f ", ret_data[i]);
5865     }
5866     OH_AI_TensorDestroy(&tensor);
5867 }
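
// Sketch of the two data setters as the cases above use them (illustration only, placeholder
// name): OH_AI_TensorSetData takes only a pointer, OH_AI_TensorSetUserData takes a pointer plus
// the byte size; in both cases the buffer is caller-owned and must stay alive while the tensor
// is used, and OH_AI_TensorGetData returns whichever pointer was set last:
//
//     float buf[6] = {1, 2, 3, 4, 5, 6};
//     OH_AI_TensorSetData(tensor, buf);                     // pointer only
//     OH_AI_TensorSetUserData(tensor, buf, sizeof(buf));    // pointer plus size in bytes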
5868 
5869 // OH_AI_TensorSetAllocator: tensor is nullptr
5870 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0047, Function | MediumTest | Level0) {
5871     printf("==========OH_AI_TensorCreate==========\n");
5872     constexpr size_t create_shape_num = 1;
5873     int64_t createShape[create_shape_num] = {6};
5874     constexpr size_t data_len = 6;
5875     float data[data_len] = {1, 2, 3, 4, 5, 6};
5876     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5877         create_shape_num, data, 6*4);
5878     ASSERT_NE(tensor, nullptr);
5879     void *inAllocator = OH_AI_TensorGetAllocator(tensor);
5880     OH_AI_TensorSetAllocator(nullptr, inAllocator);
5881     OH_AI_TensorDestroy(&tensor);
5882 }
5883 
5884 // OH_AI_TensorSetAllocator: set allocator normally
5885 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0048, Function | MediumTest | Level0) {
5886     printf("==========OH_AI_TensorCreate==========\n");
5887     constexpr size_t create_shape_num = 1;
5888     int64_t createShape[create_shape_num] = {6};
5889     constexpr size_t data_len = 6;
5890     float data[data_len] = {1, 2, 3, 4, 5, 6};
5891     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5892         create_shape_num, data, 6*4);
5893     ASSERT_NE(tensor, nullptr);
5894     void *inAllocator = OH_AI_TensorGetAllocator(tensor);
5895     OH_AI_TensorSetAllocator(tensor, inAllocator);
5896     OH_AI_TensorDestroy(&tensor);
5897 }
5898 
5899 // OH_AI_TensorSetAllocator: set allocator multiple times
5900 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0049, Function | MediumTest | Level0) {
5901     printf("==========OH_AI_TensorCreate==========\n");
5902     constexpr size_t create_shape_num = 1;
5903     int64_t createShape[create_shape_num] = {6};
5904     constexpr size_t data_len = 6;
5905     float data[data_len] = {1, 2, 3, 4, 5, 6};
5906     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
5907         create_shape_num, data, 6*4);
5908     ASSERT_NE(tensor, nullptr);
5909     void *inAllocator = OH_AI_TensorGetAllocator(tensor);
5910     OH_AI_TensorSetAllocator(tensor, inAllocator);
5911     OH_AI_TensorSetAllocator(tensor, inAllocator);
5912     OH_AI_TensorDestroy(&tensor);
5913 }
5914 
5915 // OH_AI_TensorGetAllocator: tensor is nullptr
5916 HWTEST(MSLiteTest, SUB_AI_MindSpore_TensorCreate_0050, Function | MediumTest | Level0) {
5917     printf("==========OH_AI_TensorCreate==========\n");
5918     OH_AI_TensorGetAllocator(nullptr);
5919 }
5920 
5921 // create multiple contexts
5922 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0001, Function | MediumTest | Level0) {
5923     printf("==========OH_AI_ContextCreate==========\n");
5924     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5925     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5926     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
5927     printf("==========OH_AI_ModelBuildFromFile==========\n");
5928     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5929     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
5930         OH_AI_MODELTYPE_MINDIR, context);
5931     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
5932     printf("==========OH_AI_ModelGetInputs==========\n");
5933     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
5934     ASSERT_NE(inputs.handle_list, nullptr);
5935     FillInputsData(inputs, "ml_face_isface", true);
5936     printf("==========OH_AI_ModelPredict==========\n");
5937     OH_AI_TensorHandleArray outputs;
5938     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
5939     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
5940     printf("==========CompareModelOutputs==========\n");
5941     CompareResult(outputs, "ml_face_isface");
5942     printf("==========OH_AI_ModelDestroy==========\n");
5943     OH_AI_ContextDestroy(&context);
5944     OH_AI_ModelDestroy(&model);
5945     OH_AI_ContextDestroy(&context);
5946     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
5947     OH_AI_DeviceInfoHandle cpu_device_info2 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5948     OH_AI_ContextAddDeviceInfo(context2, cpu_device_info2);
5949     OH_AI_ContextHandle context3 = OH_AI_ContextCreate();
5950     OH_AI_DeviceInfoHandle cpu_device_info3 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5951     OH_AI_ContextAddDeviceInfo(context3, cpu_device_info3);
5952     OH_AI_ContextHandle context4 = OH_AI_ContextCreate();
5953     OH_AI_DeviceInfoHandle cpu_device_info4 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5954     OH_AI_ContextAddDeviceInfo(context4, cpu_device_info4);
5955     OH_AI_ContextHandle context5 = OH_AI_ContextCreate();
5956     OH_AI_DeviceInfoHandle cpu_device_info5 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5957     OH_AI_ContextAddDeviceInfo(context5, cpu_device_info5);
5958     OH_AI_ContextDestroy(&context2);
5959     OH_AI_ContextDestroy(&context2);
5960     OH_AI_ContextDestroy(&context3);
5961     OH_AI_ContextDestroy(&context3);
5962     OH_AI_ContextDestroy(&context4);
5963     OH_AI_ContextDestroy(&context4);
5964     OH_AI_ContextDestroy(&context5);
5965     OH_AI_ContextDestroy(&context5);
5966 }
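
// Sketch of the context/model lifecycle used throughout this file (illustration only,
// placeholder names). The repeated destroy calls above are assumed to be safe because the
// destroy APIs take the handle by address and null it out on the first call:
//
//     OH_AI_ContextHandle ctx = OH_AI_ContextCreate();
//     OH_AI_ContextAddDeviceInfo(ctx, OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU));
//     OH_AI_ModelHandle m = OH_AI_ModelCreate();
//     OH_AI_ModelBuildFromFile(m, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, ctx);
//     OH_AI_ContextDestroy(&ctx);   // ctx becomes nullptr
//     OH_AI_ModelDestroy(&m);
//     OH_AI_ContextDestroy(&ctx);   // no-op on a null handle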
5967 
5968 // OH_AI_ContextDestroy: context is nullptr
5969 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0002, Function | MediumTest | Level0) {
5970     printf("==========OH_AI_ContextCreate==========\n");
5971     OH_AI_ContextDestroy(nullptr);
5972 }
5973 
5974 // OH_AI_ContextSetThreadNum: context is nullptr
5975 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0003, Function | MediumTest | Level0) {
5976     printf("==========OH_AI_ContextCreate==========\n");
5977     OH_AI_ContextSetThreadNum(nullptr, 2);
5978 }
5979 
5980 // OH_AI_ContextSetThreadNum: thread number set to -100
5981 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0004, Function | MediumTest | Level0) {
5982     printf("==========OH_AI_ContextCreate==========\n");
5983     OH_AI_ContextHandle context = OH_AI_ContextCreate();
5984     OH_AI_ContextSetThreadNum(context, -100);
5985     int32_t threadNum = OH_AI_ContextGetThreadNum(context);
5986     std::cout << threadNum << std::endl;
5987     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
5988     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
5989     printf("==========OH_AI_ModelBuildFromFile==========\n");
5990     OH_AI_ModelHandle model = OH_AI_ModelCreate();
5991     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
5992         OH_AI_MODELTYPE_MINDIR, context);
5993     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
5994     OH_AI_ContextDestroy(&context);
5995 }
5996 
5997 // OH_AI_ContextSetThreadNum: setting the thread number repeatedly succeeds every time
5998 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0005, Function | MediumTest | Level0) {
5999     printf("==========OH_AI_ContextCreate==========\n");
6000     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6001     OH_AI_ContextSetThreadNum(context, 2);
6002     int32_t threadNum = OH_AI_ContextGetThreadNum(context);
6003     ASSERT_EQ(threadNum, 2);
6004     OH_AI_ContextSetThreadNum(context, 10);
6005     threadNum = OH_AI_ContextGetThreadNum(context);
6006     ASSERT_EQ(threadNum, 10);
6007     OH_AI_ContextDestroy(&context);
6008 }
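
// Sketch of thread-number configuration as exercised above (illustration only, placeholder
// name): the thread number is set on the context before the model is built with it, and
// OH_AI_ContextGetThreadNum reflects the most recently set value:
//
//     OH_AI_ContextHandle ctx = OH_AI_ContextCreate();
//     OH_AI_ContextSetThreadNum(ctx, 2);
//     // ... add device info and pass ctx to OH_AI_ModelBuildFromFile ...
//     OH_AI_ContextDestroy(&ctx);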
6009 
6010 // OH_AI_ContextSetThreadNum: the context has already been destroyed before it is passed in
6011 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0006, Function | MediumTest | Level0) {
6012     printf("==========OH_AI_ContextCreate==========\n");
6013     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6014     OH_AI_ContextDestroy(&context);
6015     OH_AI_ContextSetThreadNum(context, 2);
6016 }
6017 
6018 // OH_AI_ContextGetThreadNum: context is nullptr
6019 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0007, Function | MediumTest | Level0) {
6020     printf("==========OH_AI_ContextCreate==========\n");
6021     int32_t threadNum = OH_AI_ContextGetThreadNum(nullptr);
6022     std::cout << threadNum << std::endl;
6023 }
6024 
6025 // OH_AI_ContextSetThreadAffinityMode: context is nullptr
6026 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0008, Function | MediumTest | Level0) {
6027     printf("==========OH_AI_ContextCreate==========\n");
6028     OH_AI_ContextSetThreadAffinityMode(nullptr, 1);
6029 }
6030 
6031 // OH_AI_ContextSetThreadAffinityMode: mode set to an out-of-range value (-100)
6032 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0009, Function | MediumTest | Level0) {
6033     printf("==========OH_AI_ContextCreate==========\n");
6034     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6035     OH_AI_ContextSetThreadAffinityMode(context, -100);
6036     int threadAffinityMode = OH_AI_ContextGetThreadAffinityMode(context);
6037     std::cout << threadAffinityMode << std::endl;
6038     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6039     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
6040     printf("==========OH_AI_ModelBuildFromFile==========\n");
6041     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6042     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6043         OH_AI_MODELTYPE_MINDIR, context);
6044     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
6045     threadAffinityMode = OH_AI_ContextGetThreadAffinityMode(context);
6046     std::cout << threadAffinityMode << std::endl;
6047     printf("==========OH_AI_ModelGetInputs==========\n");
6048     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
6049     ASSERT_NE(inputs.handle_list, nullptr);
6050     FillInputsData(inputs, "ml_face_isface", true);
6051     printf("==========OH_AI_ModelPredict==========\n");
6052     OH_AI_TensorHandleArray outputs;
6053     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
6054     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
6055     printf("==========CompareModelOutputs==========\n");
6056     CompareResult(outputs, "ml_face_isface");
6057     OH_AI_ContextDestroy(&context);
6058 }
6059 
6060 // OH_AI_ContextSetThreadAffinityMode: setting different core-binding modes repeatedly succeeds every time
6061 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0010, Function | MediumTest | Level0) {
6062     printf("==========OH_AI_ContextCreate==========\n");
6063     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6064     OH_AI_ContextSetThreadAffinityMode(context, 2);
6065     int threadAffinityMode = OH_AI_ContextGetThreadAffinityMode(context);
6066     ASSERT_EQ(threadAffinityMode, 2);
6067     OH_AI_ContextSetThreadAffinityMode(context, 1);
6068     threadAffinityMode = OH_AI_ContextGetThreadAffinityMode(context);
6069     ASSERT_EQ(threadAffinityMode, 1);
6070     OH_AI_ContextDestroy(&context);
6071 }
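
// Note on the affinity modes used above: the cases treat 1 and 2 as valid core-binding modes and
// -100 as out of range. The conventional meaning (0 = no binding, 1 = prefer big cores,
// 2 = prefer little cores) is assumed here and not asserted by the tests themselves:
//
//     OH_AI_ContextSetThreadAffinityMode(ctx, 1);                 // placeholder handle "ctx"
//     ASSERT_EQ(OH_AI_ContextGetThreadAffinityMode(ctx), 1);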
6072 
6073 // OH_AI_ContextGetThreadAffinityMode: context is nullptr
6074 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0011, Function | MediumTest | Level0) {
6075     printf("==========OH_AI_ContextCreate==========\n");
6076     int threadAffinityMode = OH_AI_ContextGetThreadAffinityMode(nullptr);
6077     std::cout << threadAffinityMode << std::endl;
6078 }
6079 
6080 // OH_AI_ContextSetThreadAffinityCoreList: context is nullptr
6081 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0012, Function | MediumTest | Level0) {
6082     printf("==========OH_AI_ContextCreate==========\n");
6083     constexpr size_t core_num = 4;
6084     int32_t coreList[core_num] = {0, 1, 2, 3};
6085     OH_AI_ContextSetThreadAffinityCoreList(nullptr, coreList, core_num);
6086 }
6087 
6088 // OH_AI_ContextSetThreadAffinityCoreList: core_num is invalid (0) or inconsistent with the length of coreList
6089 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0013, Function | MediumTest | Level0) {
6090     printf("==========OH_AI_ContextCreate==========\n");
6091     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6092     constexpr size_t core_num = 4;
6093     int32_t coreList[core_num] = {0, 1, 2, 3};
6094     OH_AI_ContextSetThreadAffinityCoreList(context, coreList, 0);
6095     OH_AI_ContextDestroy(&context);
6096 }
6097 
6098 // OH_AI_ContextSetThreadAffinityCoreList: coreList contains invalid core ids (negative or extremely large)
6099 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0014, Function | MediumTest | Level0) {
6100     printf("==========OH_AI_ContextCreate==========\n");
6101     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6102     constexpr size_t core_num = 4;
6103     int32_t coreList[core_num] = {0, 1, 2, -3};
6104     OH_AI_ContextSetThreadAffinityCoreList(context, coreList, 4);
6105     size_t ret_core_num;
6106     int32_t *retCoreList = nullptr;
6107     retCoreList = const_cast<int32_t *>(OH_AI_ContextGetThreadAffinityCoreList(context, &ret_core_num));
6108     ASSERT_EQ(ret_core_num, core_num);
6109     for (size_t i = 0; i < ret_core_num; i++) {
6110         printf("==========retCoreList:%d\n", retCoreList[i]);
6111         ASSERT_EQ(retCoreList[i], coreList[i]);
6112     }
6113     printf("==========OH_AI_ModelBuildFromFile==========\n");
6114     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6115     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6116         OH_AI_MODELTYPE_MINDIR, context);
6117     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
6118     OH_AI_ContextDestroy(&context);
6119 }
6120 
6121 // OH_AI_ContextSetThreadAffinityCoreList: setting a different core_list each time succeeds every time
6122 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0015, Function | MediumTest | Level0) {
6123     printf("==========OH_AI_ContextCreate==========\n");
6124     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6125     constexpr size_t core_num = 4;
6126     int32_t core_list[core_num] = {0, 1, 2, 3};
6127     OH_AI_ContextSetThreadAffinityCoreList(context, core_list, 4);
6128     constexpr size_t coreNum2 = 4;
6129     int32_t coreList2[coreNum2] = {4, 5, 6, 7};
6130     OH_AI_ContextSetThreadAffinityCoreList(context, coreList2, 4);
6131     size_t ret_core_num;
6132     int32_t *retCoreList = nullptr;
6133     retCoreList = const_cast<int32_t *>(OH_AI_ContextGetThreadAffinityCoreList(context, &ret_core_num));
6134     ASSERT_EQ(ret_core_num, coreNum2);
6135     for (size_t i = 0; i < ret_core_num; i++) {
6136         printf("==========retCoreList:%d\n", retCoreList[i]);
6137         ASSERT_EQ(retCoreList[i], coreList2[i]);
6138     }
6139     free(retCoreList);
6140     OH_AI_ContextDestroy(&context);
6141 }
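
// Note on OH_AI_ContextGetThreadAffinityCoreList as used above: the case reads the element count
// through the out parameter and then releases the returned list with free(), which suggests the
// caller owns the returned buffer. A minimal read-then-free sketch (placeholder names):
//
//     size_t n = 0;
//     const int32_t *cores = OH_AI_ContextGetThreadAffinityCoreList(ctx, &n);
//     // ... inspect cores[0] .. cores[n - 1] ...
//     free(const_cast<int32_t *>(cores));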
6142 
6143 // OH_AI_ContextGetThreadAffinityCoreList: context is nullptr
6144 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0016, Function | MediumTest | Level0) {
6145     printf("==========OH_AI_ContextCreate==========\n");
6146     size_t ret_core_num;
6147     const int32_t *retCoreList = OH_AI_ContextGetThreadAffinityCoreList(nullptr, &ret_core_num);
6148     std::cout << retCoreList << std::endl;
6149 }
6150 
6151 // OH_AI_ContextGetThreadAffinityCoreList: core_num is nullptr
6152 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0017, Function | MediumTest | Level0) {
6153     printf("==========OH_AI_ContextCreate==========\n");
6154     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6155     const int32_t *retCoreList = OH_AI_ContextGetThreadAffinityCoreList(context, nullptr);
6156     std::cout << retCoreList << std::endl;
6157     OH_AI_ContextDestroy(&context);
6158 }
6159 
6160 // OH_AI_DeviceInfoCreate: pass in an invalid device type
6161 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0018, Function | MediumTest | Level0) {
6162     printf("==========OH_AI_ContextCreate==========\n");
6163     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_INVALID);
6164     ASSERT_EQ(cpu_device_info, nullptr);
6165 }
6166 
6167 // OH_AI_DeviceInfoSetProvider: device_info is nullptr
6168 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0019, Function | MediumTest | Level0) {
6169     printf("==========OH_AI_ContextCreate==========\n");
6170     OH_AI_DeviceInfoSetProvider(nullptr, "aaa");
6171 }
6172 
6173 // OH_AI_DeviceInfoSetProvider: provider is nullptr
6174 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0020, Function | MediumTest | Level0) {
6175     printf("==========OH_AI_ContextCreate==========\n");
6176     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6177     ASSERT_NE(cpu_device_info, nullptr);
6178     OH_AI_DeviceInfoSetProvider(cpu_device_info, nullptr);
6179 }
6180 
6181 // OH_AI_DeviceInfoGetProvider: device_info is nullptr
6182 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0021, Function | MediumTest | Level0) {
6183     printf("==========OH_AI_ContextCreate==========\n");
6184     OH_AI_DeviceInfoGetProvider(nullptr);
6185 }
6186 
6187 // OH_AI_DeviceInfoGetProvider: read the default value without setting a provider first
6188 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0022, Function | MediumTest | Level0) {
6189     printf("==========OH_AI_ContextCreate==========\n");
6190     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6191     ASSERT_NE(cpu_device_info, nullptr);
6192     char *proInfo = const_cast<char *>(OH_AI_DeviceInfoGetProvider(cpu_device_info));
6193     std::cout << proInfo << std::endl;
6194 }
6195 
6196 // OH_AI_DeviceInfoSetProviderDevice: device_info is nullptr
6197 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0023, Function | MediumTest | Level0) {
6198     printf("==========OH_AI_ContextCreate==========\n");
6199     OH_AI_DeviceInfoSetProviderDevice(nullptr, "aaa");
6200 }
6201 
6202 // OH_AI_DeviceInfoSetProviderDevice: device is nullptr
6203 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0024, Function | MediumTest | Level0) {
6204     printf("==========OH_AI_ContextCreate==========\n");
6205     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6206     ASSERT_NE(cpu_device_info, nullptr);
6207     OH_AI_DeviceInfoSetProviderDevice(cpu_device_info, nullptr);
6208 }
6209 
6210 // OH_AI_DeviceInfoGetProviderDevice: device_info is nullptr
6211 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0025, Function | MediumTest | Level0) {
6212     printf("==========OH_AI_ContextCreate==========\n");
6213     OH_AI_DeviceInfoGetProviderDevice(nullptr);
6214 }
6215 
6216 // OH_AI_DeviceInfoGetProviderDevice: read the default value without setting a device first
6217 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0026, Function | MediumTest | Level0) {
6218     printf("==========OH_AI_ContextCreate==========\n");
6219     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6220     ASSERT_NE(cpu_device_info, nullptr);
6221     char *proInfo = const_cast<char *>(OH_AI_DeviceInfoGetProviderDevice(cpu_device_info));
6222     std::cout << proInfo << std::endl;
6223 }
6224 
6225 // OH_AI_ContextAddDeviceInfo: context is nullptr
6226 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0027, Function | MediumTest | Level0) {
6227     printf("==========OH_AI_ContextCreate==========\n");
6228     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6229     OH_AI_ContextAddDeviceInfo(nullptr, cpu_device_info);
6230 }
6231 
6232 // OH_AI_ContextAddDeviceInfo: add the same device info multiple times
6233 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0028, Function | MediumTest | Level0) {
6234     printf("==========OH_AI_ContextCreate==========\n");
6235     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6236     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6237     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
6238     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
6239 }
6240 
6241 // OH_AI_ContextAddDeviceInfo: add different device infos multiple times
6242 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0029, Function | MediumTest | Level0) {
6243     printf("==========OH_AI_ContextCreate==========\n");
6244     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6245     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6246     OH_AI_DeviceInfoHandle npu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6247     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
6248     OH_AI_ContextAddDeviceInfo(context, npu_device_info);
6249 }
6250 
6251 // OH_AI_DeviceInfoCreate: OH_AI_DEVICETYPE_INVALID
6252 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0030, Function | MediumTest | Level0) {
6253     printf("==========OH_AI_ContextCreate==========\n");
6254     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_INVALID);
6255     ASSERT_EQ(cpu_device_info, nullptr);
6256 }
6257 
6258 // OH_AI_DeviceInfoSetEnableFP16: device_info is an NNRT device, then call this interface
6259 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0031, Function | MediumTest | Level0) {
6260     if (!IsNPU()) {
6261         printf("NNRt is not NPU, skip this test");
6262         return;
6263     }
6264     printf("==========OH_AI_ContextCreate==========\n");
6265     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6266     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6267     size_t num = 0;
6268     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6269     NNRTDeviceDesc *desc_0 = nullptr;
6270     for (size_t i = 0; i < num; i++) {
6271         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6272         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6273         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6274         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6275         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6276         const std::string npuNamePrefix = "NPU_";
6277         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6278             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6279         }
6280     }
6281     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6282     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6283     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6284     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6285     OH_AI_DeviceInfoSetEnableFP16(nnrt_device_info, false);
6286     bool isFp16 = OH_AI_DeviceInfoGetEnableFP16(nnrt_device_info);
6287     printf("==========isFp16:%d\n", isFp16);
6288     ASSERT_EQ(isFp16, false);
6289     OH_AI_DeviceInfoSetEnableFP16(nnrt_device_info, true);
6290     isFp16 = OH_AI_DeviceInfoGetEnableFP16(nnrt_device_info);
6291     printf("==========isFp16:%d\n", isFp16);
6292     ASSERT_EQ(isFp16, true);
6293     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6294     printf("==========Create model==========\n");
6295     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6296     ASSERT_NE(model, nullptr);
6297     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
6298 }
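
// Sketch of the NPU selection pattern repeated in the NNRT cases around here (illustration only,
// placeholder names): enumerate the NNRT device descriptors once, pick the entry whose name
// starts with "NPU_", copy its id onto the NNRT device info, then destroy the descriptor list:
//
//     size_t n = 0;
//     NNRTDeviceDesc *all = OH_AI_GetAllNNRTDeviceDescs(&n);
//     // loop over OH_AI_GetElementOfNNRTDeviceDescs(all, i), match the "NPU_" name prefix
//     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, OH_AI_GetDeviceIdFromNNRTDeviceDesc(npu_desc));
//     OH_AI_DestroyAllNNRTDeviceDescs(&all);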
6299 
6300 // OH_AI_CreateNNRTDeviceInfoByName
6301 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0032, Function | MediumTest | Level0) {
6302     if (!IsNPU()) {
6303         printf("NNRt is not NPU, skip this test");
6304         return;
6305     }
6306     printf("==========OH_AI_ContextCreate==========\n");
6307     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6308     size_t num = 0;
6309     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6310     const char *npuName = nullptr;
6311     for (size_t i = 0; i < num; i++) {
6312         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6313         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6314         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6315         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6316         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6317         const std::string npuNamePrefix = "NPU_";
6318         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6319             npuName = name;
6320         }
6321     }
6322     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByName(npuName);
6323     ASSERT_NE(nnrt_device_info, nullptr);
6324     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6325     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6326     printf("==========Create model==========\n");
6327     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6328     ASSERT_NE(model, nullptr);
6329     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
6330 }
6331 
6332 // OH_AI_CreateNNRTDeviceInfoByName: create multiple times
6333 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0033, Function | MediumTest | Level0) {
6334     if (!IsNPU()) {
6335         printf("NNRt is not NPU, skip this test");
6336         return;
6337     }
6338     printf("==========OH_AI_ContextCreate==========\n");
6339     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6340     size_t num = 0;
6341     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6342     const char *npuName = nullptr;
6343     for (size_t i = 0; i < num; i++) {
6344         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6345         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6346         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6347         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6348         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6349         const std::string npuNamePrefix = "NPU_";
6350         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6351             npuName = name;
6352         }
6353     }
6354     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByName(npuName);
6355     ASSERT_NE(nnrt_device_info, nullptr);
6356     nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByName(npuName);
6357     ASSERT_NE(nnrt_device_info, nullptr);
6358     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6359     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6360     printf("==========Create model==========\n");
6361     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6362     ASSERT_NE(model, nullptr);
6363     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
6364 }
6365 
6366 // OH_AI_CreateNNRTDeviceInfoByName: name does not exist
6367 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0034, Function | MediumTest | Level0) {
6368     if (!IsNPU()) {
6369         printf("NNRt is not NPU, skip this test");
6370         return;
6371     }
6372     printf("==========OH_AI_ContextCreate==========\n");
6373     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByName("aaa");
6374     ASSERT_EQ(nnrt_device_info, nullptr);
6375 }
6376 
6377 // OH_AI_CreateNNRTDeviceInfoByName: name is nullptr
6378 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0035, Function | MediumTest | Level0) {
6379     if (!IsNPU()) {
6380         printf("NNRt is not NPU, skip this test");
6381         return;
6382     }
6383     printf("==========OH_AI_ContextCreate==========\n");
6384     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByName(nullptr);
6385     ASSERT_EQ(nnrt_device_info, nullptr);
6386 }
6387 
6388 // OH_AI_CreateNNRTDeviceInfoByType
6389 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0036, Function | MediumTest | Level0) {
6390     if (!IsNPU()) {
6391         printf("NNRt is not NPU, skip this test");
6392         return;
6393     }
6394     printf("==========OH_AI_ContextCreate==========\n");
6395     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6396     size_t num = 0;
6397     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6398     OH_AI_NNRTDeviceType npu_type;
6399     for (size_t i = 0; i < num; i++) {
6400         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6401         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6402         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6403         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6404         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6405         auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
6406         std::cout << "OH_AI_GetTypeFromNNRTDeviceDesc " << type << std::endl;
6407         const std::string npuNamePrefix = "NPU_";
6408         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6409             npu_type = type;
6410         }
6411     }
6412     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByType(npu_type);
6413     ASSERT_NE(nnrt_device_info, nullptr);
6414     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6415     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6416     printf("==========Create model==========\n");
6417     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6418     ASSERT_NE(model, nullptr);
6419     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
6420 }
6421 
6422 // OH_AI_CreateNNRTDeviceInfoByType: call multiple times
6423 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0037, Function | MediumTest | Level0) {
6424     if (!IsNPU()) {
6425         printf("NNRt is not NPU, skip this test");
6426         return;
6427     }
6428     printf("==========OH_AI_ContextCreate==========\n");
6429     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6430     size_t num = 0;
6431     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6432     OH_AI_NNRTDeviceType npu_type;
6433     for (size_t i = 0; i < num; i++) {
6434         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6435         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6436         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6437         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6438         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6439         auto type = OH_AI_GetTypeFromNNRTDeviceDesc(desc);
6440         std::cout << "OH_AI_GetTypeFromNNRTDeviceDesc " << type << std::endl;
6441         const std::string npuNamePrefix = "NPU_";
6442         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6443             npu_type = type;
6444         }
6445     }
6446     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByType(npu_type);
6447     ASSERT_NE(nnrt_device_info, nullptr);
6448     nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByType(npu_type);
6449     ASSERT_NE(nnrt_device_info, nullptr);
6450     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6451     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6452     printf("==========Create model==========\n");
6453     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6454     ASSERT_NE(model, nullptr);
6455     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
6456 }
6457 
6458 // OH_AI_CreateNNRTDeviceInfoByType: OH_AI_NNRTDeviceType is invalid
6459 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0038, Function | MediumTest | Level0) {
6460     if (!IsNPU()) {
6461         printf("NNRt is not NPU, skip this test");
6462         return;
6463     }
6464     printf("==========OH_AI_ContextCreate==========\n");
6465     auto nnrt_device_info = OH_AI_CreateNNRTDeviceInfoByType(OH_AI_NNRTDEVICE_OTHERS);
6466     ASSERT_EQ(nnrt_device_info, nullptr);
6467 }
6468 
6469 // OH_AI_GetAllNNRTDeviceDescs: num is nullptr
6470 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0039, Function | MediumTest | Level0) {
6471     if (!IsNPU()) {
6472         printf("NNRt is not NPU, skip this test");
6473         return;
6474     }
6475     printf("==========OH_AI_ContextCreate==========\n");
6476     OH_AI_GetAllNNRTDeviceDescs(nullptr);
6477 }
6478 
6479 // OH_AI_GetElementOfNNRTDeviceDescs: descs is nullptr
6480 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0040, Function | MediumTest | Level0) {
6481     if (!IsNPU()) {
6482         printf("NNRt is not NPU, skip this test");
6483         return;
6484     }
6485     printf("==========OH_AI_ContextCreate==========\n");
6486     OH_AI_GetElementOfNNRTDeviceDescs(nullptr, 0);
6487 }
6488 
6489 // OH_AI_GetNameFromNNRTDeviceDesc: desc is nullptr
6490 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0041, Function | MediumTest | Level0) {
6491     if (!IsNPU()) {
6492         printf("NNRt is not NPU, skip this test");
6493         return;
6494     }
6495     printf("==========OH_AI_ContextCreate==========\n");
6496     OH_AI_GetNameFromNNRTDeviceDesc(nullptr);
6497 }
6498 
6499 // OH_AI_GetTypeFromNNRTDeviceDesc: desc is nullptr
6500 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0042, Function | MediumTest | Level0) {
6501     if (!IsNPU()) {
6502         printf("NNRt is not NPU, skip this test");
6503         return;
6504     }
6505     printf("==========OH_AI_ContextCreate==========\n");
6506     OH_AI_GetTypeFromNNRTDeviceDesc(nullptr);
6507 }
6508 
6509 // OH_AI_GetDeviceIdFromNNRTDeviceDesc: desc is nullptr
6510 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0043, Function | MediumTest | Level0) {
6511     if (!IsNPU()) {
6512         printf("NNRt is not NPU, skip this test");
6513         return;
6514     }
6515     printf("==========OH_AI_ContextCreate==========\n");
6516     OH_AI_GetDeviceIdFromNNRTDeviceDesc(nullptr);
6517 }
6518 
6519 // OH_AI_DeviceInfoSetDeviceId called multiple times
6520 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0044, Function | MediumTest | Level0) {
6521     if (!IsNPU()) {
6522         printf("NNRt is not NPU, skip this test");
6523         return;
6524     }
6525     printf("==========OH_AI_ContextCreate==========\n");
6526     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6527     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6528     size_t num = 0;
6529     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
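    // Walk every NNRt device descriptor and keep the one whose name starts with "NPU_";
    // its device id is what gets bound to the NNRt device info below.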
6530     NNRTDeviceDesc *desc_0 = nullptr;
6531     for (size_t i = 0; i < num; i++) {
6532         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6533         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6534         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6535         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6536         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6537         const std::string npuNamePrefix = "NPU_";
6538         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6539             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6540         }
6541     }
6542     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6543     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
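    // Deliberately set the device id twice on the same device info to exercise repeated
    // calls; the second call is expected to be harmless.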
6544     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6545     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6546     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6547     printf("==========Create model==========\n");
6548     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6549     ASSERT_NE(model, nullptr);
6550     ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
6551 }
6552 
6553 // OH_AI_DeviceInfoSetDeviceId with an invalid id
6554 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0045, Function | MediumTest | Level0) {
6555     if (!IsNPU()) {
6556         printf("NNRt is not NPU, skip this test");
6557         return;
6558     }
6559     printf("==========OH_AI_ContextCreate==========\n");
6560     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6561     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
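    // 12345 is a bogus device id, so building the model on the NNRt backend is expected to fail.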
6562     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, 12345);
6563     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6564     printf("==========OH_AI_ModelBuildFromFile==========\n");
6565     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6566     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6567         OH_AI_MODELTYPE_MINDIR, context);
6568     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_ERROR);
6569     OH_AI_ContextDestroy(&context);
6570     OH_AI_ModelDestroy(&model);
6571 }
6572 
6573 // OH_AI_DeviceInfoSetDeviceId on the wrong device_info type (CPU)
6574 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0046, Function | MediumTest | Level0) {
6575     if (!IsNPU()) {
6576         printf("NNRt is not NPU, skip this test");
6577         return;
6578     }
6579     printf("==========OH_AI_ContextCreate==========\n");
6580     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6581     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6582     size_t num = 0;
6583     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6584     NNRTDeviceDesc *desc_0 = nullptr;
6585     for (size_t i = 0; i < num; i++) {
6586         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6587         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6588         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6589         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6590         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6591         const std::string npuNamePrefix = "NPU_";
6592         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6593             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6594         }
6595     }
6596     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6597     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
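    // Applying an NNRt device id to a CPU device info is the "wrong device_info" case;
    // the id has no meaning for the CPU backend, so the build is still expected to succeed.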
6598     OH_AI_DeviceInfoSetDeviceId(cpu_device_info, id_0);
6599     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
6600     printf("==========OH_AI_ModelBuildFromFile==========\n");
6601     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6602     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6603         OH_AI_MODELTYPE_MINDIR, context);
6604     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
6605     OH_AI_ContextDestroy(&context);
6606     OH_AI_ModelDestroy(&model);
6607 }
6608 
6609 // OH_AI_DeviceInfoSetDeviceId with device set to nullptr
6610 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0047, Function | MediumTest | Level0) {
6611     if (!IsNPU()) {
6612         printf("NNRt is not NPU, skip this test");
6613         return;
6614     }
6615     printf("==========OH_AI_ContextCreate==========\n");
6616     size_t num = 0;
6617     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6618     NNRTDeviceDesc *desc_0 = nullptr;
6619     for (size_t i = 0; i < num; i++) {
6620         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6621         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6622         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6623         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6624         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6625         const std::string npuNamePrefix = "NPU_";
6626         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6627             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6628         }
6629     }
6630     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6631     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6632     OH_AI_DeviceInfoSetDeviceId(nullptr, id_0);
6633 }
6634 
6635 // OH_AI_DeviceInfoGetDeviceType on a CPU device info
6636 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0048, Function | MediumTest | Level0) {
6637     if (!IsNPU()) {
6638         printf("NNRt is not NPU, skip this test");
6639         return;
6640     }
6641     printf("==========OH_AI_ContextCreate==========\n");
6642     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6643     auto id = OH_AI_DeviceInfoGetDeviceType(cpu_device_info);
6644     std::cout << "OH_AI_DeviceInfoGetDeviceType " << id << std::endl;
6645 }
6646 
6647 // OH_AI_DeviceInfoGetDeviceType with device set to nullptr
6648 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0049, Function | MediumTest | Level0) {
6649     if (!IsNPU()) {
6650         printf("NNRt is not NPU, skip this test");
6651         return;
6652     }
6653     printf("==========OH_AI_ContextCreate==========\n");
6654     auto id = OH_AI_DeviceInfoGetDeviceType(nullptr);
6655     std::cout << "OH_AI_DeviceInfoGetDeviceType " << id << std::endl;
6656 }
6657 
6658 // OH_AI_DeviceInfoSetPerformanceMode
6659 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0050, Function | MediumTest | Level0) {
6660     if (!IsNPU()) {
6661         printf("NNRt is not NPU, skip this test");
6662         return;
6663     }
6664     printf("==========OH_AI_ContextCreate==========\n");
6665     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6666     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6667     size_t num = 0;
6668     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6669     ASSERT_NE(descs, nullptr);
6670     NNRTDeviceDesc *desc_0 = nullptr;
6671     for (size_t i = 0; i < num; i++) {
6672         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6673         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6674         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6675         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6676         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6677         const std::string npuNamePrefix = "NPU_";
6678         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6679             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6680         }
6681     }
6682     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6683     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6684     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6685     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
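    // A freshly created NNRt device info is expected to report OH_AI_PERFORMANCE_NONE and
    // OH_AI_PRIORITY_NONE until the corresponding setters are called.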
6686     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_NONE);
6687     OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_MEDIUM);
6688     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_MEDIUM);
6689     OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_MEDIUM);
6690     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_MEDIUM);
6691     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6692     printf("==========OH_AI_ModelBuildFromFile==========\n");
6693     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6694     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6695         OH_AI_MODELTYPE_MINDIR, context);
6696     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
6697     printf("==========OH_AI_ModelDestroy==========\n");
6698     OH_AI_ContextDestroy(&context);
6699     OH_AI_ModelDestroy(&model);
6700 }
6701 
6702 // OH_AI_DeviceInfoSetPerformanceMode covering all enum values
6703 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0051, Function | MediumTest | Level0) {
6704     if (!IsNPU()) {
6705         printf("NNRt is not NPU, skip this test");
6706         return;
6707     }
6708     printf("==========OH_AI_ContextCreate==========\n");
6709     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6710     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6711     size_t num = 0;
6712     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6713     ASSERT_NE(descs, nullptr);
6714     NNRTDeviceDesc *desc_0 = nullptr;
6715     for (size_t i = 0; i < num; i++) {
6716         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6717         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6718         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6719         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6720         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6721         const std::string npuNamePrefix = "NPU_";
6722         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6723             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6724         }
6725     }
6726     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6727     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6728     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6729     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6730     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_NONE);
6731     OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_LOW);
6732     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_LOW);
6733     OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_MEDIUM);
6734     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_MEDIUM);
6735     OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_HIGH);
6736     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_HIGH);
6737     OH_AI_DeviceInfoSetPerformanceMode(nnrt_device_info, OH_AI_PERFORMANCE_EXTREME);
6738     ASSERT_EQ(OH_AI_DeviceInfoGetPerformanceMode(nnrt_device_info), OH_AI_PERFORMANCE_EXTREME);
6739     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6740     printf("==========OH_AI_ModelBuildFromFile==========\n");
6741     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6742     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6743         OH_AI_MODELTYPE_MINDIR, context);
6744     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
6745     printf("==========OH_AI_ModelDestroy==========\n");
6746     OH_AI_ContextDestroy(&context);
6747     OH_AI_ModelDestroy(&model);
6748 }
6749 
6750 // OH_AI_DeviceInfoSetPerformanceMode on the wrong device_info type (CPU)
6751 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0052, Function | MediumTest | Level0) {
6752     if (!IsNPU()) {
6753         printf("NNRt is not NPU, skip this test");
6754         return;
6755     }
6756     printf("==========OH_AI_ContextCreate==========\n");
6757     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6758     OH_AI_DeviceInfoSetPerformanceMode(cpu_device_info, OH_AI_PERFORMANCE_MEDIUM);
6759 }
6760 
6761 // OH_AI_DeviceInfoSetPerformanceMode with device set to nullptr
6762 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0053, Function | MediumTest | Level0) {
6763     if (!IsNPU()) {
6764         printf("NNRt is not NPU, skip this test");
6765         return;
6766     }
6767     printf("==========OH_AI_ContextCreate==========\n");
6768     OH_AI_DeviceInfoSetPerformanceMode(nullptr, OH_AI_PERFORMANCE_MEDIUM);
6769 }
6770 
6771 // OH_AI_DeviceInfoGetPerformanceMode on the wrong device_info type (CPU)
6772 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0054, Function | MediumTest | Level0) {
6773     if (!IsNPU()) {
6774         printf("NNRt is not NPU, skip this test");
6775         return;
6776     }
6777     printf("==========OH_AI_ContextCreate==========\n");
6778     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6779     OH_AI_DeviceInfoGetPerformanceMode(cpu_device_info);
6780 }
6781 
6782 // OH_AI_DeviceInfoGetPerformanceMode with device set to nullptr
6783 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0055, Function | MediumTest | Level0) {
6784     if (!IsNPU()) {
6785         printf("NNRt is not NPU, skip this test");
6786         return;
6787     }
6788     printf("==========OH_AI_ContextCreate==========\n");
6789     OH_AI_DeviceInfoGetPerformanceMode(nullptr);
6790 }
6791 
6792 // OH_AI_DeviceInfoSetPriority
6793 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0056, Function | MediumTest | Level0) {
6794     if (!IsNPU()) {
6795         printf("NNRt is not NPU, skip this test");
6796         return;
6797     }
6798     printf("==========OH_AI_ContextCreate==========\n");
6799     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6800     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6801     size_t num = 0;
6802     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6803     ASSERT_NE(descs, nullptr);
6804     NNRTDeviceDesc *desc_0 = nullptr;
6805     for (size_t i = 0; i < num; i++) {
6806         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6807         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6808         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6809         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6810         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6811         const std::string npuNamePrefix = "NPU_";
6812         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6813             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6814         }
6815     }
6816     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6817     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6818     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6819     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6820     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_NONE);
6821     OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_MEDIUM);
6822     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_MEDIUM);
6823     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6824     printf("==========OH_AI_ModelBuildFromFile==========\n");
6825     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6826     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6827         OH_AI_MODELTYPE_MINDIR, context);
6828     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
6829     printf("==========OH_AI_ModelDestroy==========\n");
6830     OH_AI_ContextDestroy(&context);
6831     OH_AI_ModelDestroy(&model);
6832 }
6833 
6834 // OH_AI_DeviceInfoSetPriority covering all enum values
6835 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0057, Function | MediumTest | Level0) {
6836     if (!IsNPU()) {
6837         printf("NNRt is not NPU, skip this test");
6838         return;
6839     }
6840     printf("==========OH_AI_ContextCreate==========\n");
6841     OH_AI_ContextHandle context = OH_AI_ContextCreate();
6842     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6843     size_t num = 0;
6844     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6845     ASSERT_NE(descs, nullptr);
6846     NNRTDeviceDesc *desc_0 = nullptr;
6847     for (size_t i = 0; i < num; i++) {
6848         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6849         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6850         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6851         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6852         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6853         const std::string npuNamePrefix = "NPU_";
6854         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6855             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6856         }
6857     }
6858     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6859     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6860     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6861     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
6862     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_NONE);
6863     OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_LOW);
6864     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_LOW);
6865     OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_MEDIUM);
6866     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_MEDIUM);
6867     OH_AI_DeviceInfoSetPriority(nnrt_device_info, OH_AI_PRIORITY_HIGH);
6868     ASSERT_EQ(OH_AI_DeviceInfoGetPriority(nnrt_device_info), OH_AI_PRIORITY_HIGH);
6869     OH_AI_ContextAddDeviceInfo(context, nnrt_device_info);
6870     printf("==========OH_AI_ModelBuildFromFile==========\n");
6871     OH_AI_ModelHandle model = OH_AI_ModelCreate();
6872     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
6873         OH_AI_MODELTYPE_MINDIR, context);
6874     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
6875     printf("==========OH_AI_ModelDestroy==========\n");
6876     OH_AI_ContextDestroy(&context);
6877     OH_AI_ModelDestroy(&model);
6878 }
6879 
6880 // OH_AI_DeviceInfoSetPriority on the wrong device_info type (CPU)
6881 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0058, Function | MediumTest | Level0) {
6882     if (!IsNPU()) {
6883         printf("NNRt is not NPU, skip this test");
6884         return;
6885     }
6886     printf("==========OH_AI_ContextCreate==========\n");
6887     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6888     OH_AI_DeviceInfoSetPriority(cpu_device_info, OH_AI_PRIORITY_MEDIUM);
6889 }
6890 
6891 // OH_AI_DeviceInfoSetPriority with device set to nullptr
6892 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0059, Function | MediumTest | Level0) {
6893     if (!IsNPU()) {
6894         printf("NNRt is not NPU, skip this test");
6895         return;
6896     }
6897     printf("==========OH_AI_ContextCreate==========\n");
6898     OH_AI_DeviceInfoSetPriority(nullptr, OH_AI_PRIORITY_MEDIUM);
6899 }
6900 
6901 // OH_AI_DeviceInfoGetPriority on the wrong device_info type (CPU)
6902 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0060, Function | MediumTest | Level0) {
6903     if (!IsNPU()) {
6904         printf("NNRt is not NPU, skip this test");
6905         return;
6906     }
6907     printf("==========OH_AI_ContextCreate==========\n");
6908     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6909     OH_AI_DeviceInfoGetPriority(cpu_device_info);
6910 }
6911 
6912 // OH_AI_DeviceInfoGetPriority with device set to nullptr
6913 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0061, Function | MediumTest | Level0) {
6914     if (!IsNPU()) {
6915         printf("NNRt is not NPU, skip this test");
6916         return;
6917     }
6918     printf("==========OH_AI_ContextCreate==========\n");
6919     OH_AI_DeviceInfoGetPriority(nullptr);
6920 }
6921 
6922 // OH_AI_DeviceInfoAddExtension on the wrong device_info type (CPU)
6923 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0062, Function | MediumTest | Level0) {
6924     if (!IsNPU()) {
6925         printf("NNRt is not NPU, skip this test");
6926         return;
6927     }
6928     printf("==========OH_AI_ContextCreate==========\n");
6929     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
6930     const char *model_name = "cache_model";
6931     OH_AI_Status extension_ret = OH_AI_DeviceInfoAddExtension(cpu_device_info, "ModelName",
6932         model_name, strlen(model_name));
6933     ASSERT_EQ(extension_ret, OH_AI_STATUS_LITE_ERROR);
6934 }
6935 
6936 // OH_AI_DeviceInfoAddExtension with device set to nullptr
6937 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0063, Function | MediumTest | Level0) {
6938     if (!IsNPU()) {
6939         printf("NNRt is not NPU, skip this test");
6940         return;
6941     }
6942     printf("==========OH_AI_ContextCreate==========\n");
6943     const char *model_name = "cache_model";
6944     OH_AI_Status extension_ret = OH_AI_DeviceInfoAddExtension(nullptr, "ModelName", model_name, strlen(model_name));
6945     ASSERT_EQ(extension_ret, OH_AI_STATUS_LITE_NULLPTR);
6946 }
6947 
6948 // OH_AI_DeviceInfoAddExtension with a name that is not in the whitelist
6949 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0064, Function | MediumTest | Level0) {
6950     if (!IsNPU()) {
6951         printf("NNRt is not NPU, skip this test");
6952         return;
6953     }
6954     printf("==========OH_AI_ContextCreate==========\n");
6955     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6956     size_t num = 0;
6957     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6958     ASSERT_NE(descs, nullptr);
6959     NNRTDeviceDesc *desc_0 = nullptr;
6960     for (size_t i = 0; i < num; i++) {
6961         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6962         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6963         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6964         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6965         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6966         const std::string npuNamePrefix = "NPU_";
6967         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
6968             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6969         }
6970     }
6971     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
6972     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
6973     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
6974     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
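    // "aaa" is not a whitelisted extension name, so OH_AI_DeviceInfoAddExtension is expected
    // to reject it with OH_AI_STATUS_LITE_ERROR.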
6975     const char *model_name = "cache_model";
6976     OH_AI_Status extension_ret = OH_AI_DeviceInfoAddExtension(nnrt_device_info, "aaa", model_name, strlen(model_name));
6977     ASSERT_EQ(extension_ret, OH_AI_STATUS_LITE_ERROR);
6978 }
6979 
6980 // OH_AI_DeviceInfoAddExtension with name set to nullptr
6981 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0065, Function | MediumTest | Level0) {
6982     if (!IsNPU()) {
6983         printf("NNRt is not NPU, skip this test");
6984         return;
6985     }
6986     printf("==========OH_AI_ContextCreate==========\n");
6987     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
6988     size_t num = 0;
6989     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
6990     ASSERT_NE(descs, nullptr);
6991     NNRTDeviceDesc *desc_0 = nullptr;
6992     for (size_t i = 0; i < num; i++) {
6993         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
6994         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
6995         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
6996         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
6997         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
6998         const std::string npuNamePrefix = "NPU_";
6999         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
7000             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
7001         }
7002     }
7003     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
7004     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
7005     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
7006     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
7007     const char *model_name = "cache_model";
7008     OH_AI_Status extension_ret = OH_AI_DeviceInfoAddExtension(nnrt_device_info, nullptr,
7009         model_name, strlen(model_name));
7010     ASSERT_EQ(extension_ret, OH_AI_STATUS_LITE_NULLPTR);
7011 }
7012 
7013 // OH_AI_DeviceInfoAddExtension with value set to nullptr
7014 HWTEST(MSLiteTest, SUB_AI_MindSpore_ContextCreate_0066, Function | MediumTest | Level0) {
7015     if (!IsNPU()) {
7016         printf("NNRt is not NPU, skip this test");
7017         return;
7018     }
7019     printf("==========OH_AI_ContextCreate==========\n");
7020     OH_AI_DeviceInfoHandle nnrt_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
7021     size_t num = 0;
7022     auto descs = OH_AI_GetAllNNRTDeviceDescs(&num);
7023     ASSERT_NE(descs, nullptr);
7024     NNRTDeviceDesc *desc_0 = nullptr;
7025     for (size_t i = 0; i < num; i++) {
7026         auto desc = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
7027         auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
7028         std::cout << "OH_AI_GetNameFromNNRTDeviceDesc " << name << std::endl;
7029         auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
7030         std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id << std::endl;
7031         const std::string npuNamePrefix = "NPU_";
7032         if (strncmp(npuNamePrefix.c_str(), name, npuNamePrefix.size()) == 0) {
7033             desc_0 = OH_AI_GetElementOfNNRTDeviceDescs(descs, i);
7034         }
7035     }
7036     auto id_0 = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc_0);
7037     std::cout << "OH_AI_GetDeviceIdFromNNRTDeviceDesc " << id_0 << std::endl;
7038     OH_AI_DeviceInfoSetDeviceId(nnrt_device_info, id_0);
7039     OH_AI_DestroyAllNNRTDeviceDescs(&descs);
7040     const char *model_name = "cache_model";
7041     OH_AI_Status extension_ret = OH_AI_DeviceInfoAddExtension(nnrt_device_info, "ModelName",
7042         nullptr, strlen(model_name));
7043     ASSERT_EQ(extension_ret, OH_AI_STATUS_LITE_NULLPTR);
7044 }
7045 
7046 // Normal call: create a model object
7047 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0001, Function | MediumTest | Level0) {
7048     printf("==========OH_AI_ContextCreate==========\n");
7049     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7050     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7051     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7052     printf("==========OH_AI_ModelBuildFromFile==========\n");
7053     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7054     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7055         OH_AI_MODELTYPE_MINDIR, context);
7056     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7057     printf("==========OH_AI_ModelDestroy==========\n");
7058     OH_AI_ContextDestroy(&context);
7059     OH_AI_ModelDestroy(&model);
7060 }
7061 
7062 // Normal call: create multiple model objects
7063 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0002, Function | MediumTest | Level0) {
7064     printf("==========OH_AI_ContextCreate==========\n");
7065     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7066     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7067     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7068     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
7069     OH_AI_DeviceInfoHandle cpu_device_info2 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7070     OH_AI_ContextAddDeviceInfo(context2, cpu_device_info2);
7071     printf("==========OH_AI_ModelBuildFromFile==========\n");
7072     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7073     OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
7074     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7075         OH_AI_MODELTYPE_MINDIR, context);
7076     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7077     OH_AI_Status build_ret2 = OH_AI_ModelBuildFromFile(model2, "/data/test/ml_face_isface.ms",
7078         OH_AI_MODELTYPE_MINDIR, context2);
7079     ASSERT_EQ(build_ret2, OH_AI_STATUS_SUCCESS);
7080     printf("==========OH_AI_ModelDestroy==========\n");
7081     OH_AI_ContextDestroy(&context);
7082     OH_AI_ContextDestroy(&context2);
7083     OH_AI_ModelDestroy(&model);
7084     OH_AI_ModelDestroy(&model2);
7085 }
7086 
7087 // Normal call: create and destroy model objects in a loop
7088 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0003, Function | MediumTest | Level0) {
7089     for (size_t i = 0; i < 10; i++) {
7090         printf("==========OH_AI_ContextCreate==========\n");
7091         OH_AI_ContextHandle context = OH_AI_ContextCreate();
7092         OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7093         OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7094         printf("==========OH_AI_ModelBuildFromFile==========\n");
7095         OH_AI_ModelHandle model = OH_AI_ModelCreate();
7096         OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7097             OH_AI_MODELTYPE_MINDIR, context);
7098         ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7099         printf("==========OH_AI_ModelDestroy==========\n");
7100         OH_AI_ContextDestroy(&context);
7101         OH_AI_ModelDestroy(&model);
7102     }
7103 }
7104 
7105 // Normal call: destroy the model object multiple times
7106 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0005, Function | MediumTest | Level0) {
7107     printf("==========OH_AI_ContextCreate==========\n");
7108     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7109     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7110     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7111     printf("==========OH_AI_ModelBuildFromFile==========\n");
7112     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7113     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7114         OH_AI_MODELTYPE_MINDIR, context);
7115     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7116     printf("==========OH_AI_ModelDestroy==========\n");
7117     OH_AI_ContextDestroy(&context);
7118     OH_AI_ModelDestroy(&model);
7119     OH_AI_ModelDestroy(&model);
7120 }
7121 
7122 // OH_AI_ModelBuild: normal call
7123 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0006, Function | MediumTest | Level0) {
7124     printf("==========ReadFile==========\n");
7125     size_t size;
7126     const char *modelPath = "/data/test/ml_face_isface.ms";
7127     char *modelBuf = ReadFile(modelPath, &size);
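    // ReadFile loads the whole .ms file into a heap buffer owned by the caller;
    // it is released with delete[] once the build has finished.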
7128     ASSERT_NE(modelBuf, nullptr);
7129     printf("==========OH_AI_ContextCreate==========\n");
7130     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7131     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7132     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7133     printf("==========OH_AI_ModelBuildFromFile==========\n");
7134     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7135     OH_AI_Status build_ret = OH_AI_ModelBuild(model, modelBuf, size, OH_AI_MODELTYPE_MINDIR, context);
7136     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7137     delete[] modelBuf;
7138     OH_AI_ContextDestroy(&context);
7139     OH_AI_ModelDestroy(&model);
7140 }
7141 
7142 // OH_AI_ModelBuild: build the same model handle multiple times
7143 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0007, Function | MediumTest | Level0) {
7144     printf("==========ReadFile==========\n");
7145     size_t size;
7146     const char *modelPath = "/data/test/ml_face_isface.ms";
7147     char *modelBuf = ReadFile(modelPath, &size);
7148     ASSERT_NE(modelBuf, nullptr);
7149     size_t size2;
7150     const char *modelPath2 = "/data/test/aiy_vision_classifier_plants_V1_3.ms";
7151     char *modelBuf2 = ReadFile(modelPath2, &size2);
7152     ASSERT_NE(modelBuf2, nullptr);
7153     printf("==========OH_AI_ContextCreate==========\n");
7154     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7155     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7156     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7157     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
7158     OH_AI_DeviceInfoHandle cpu_device_info2 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7159     OH_AI_ContextAddDeviceInfo(context2, cpu_device_info2);
7160     printf("==========OH_AI_ModelBuildFromFile==========\n");
7161     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7162     OH_AI_Status build_ret = OH_AI_ModelBuild(model, modelBuf, size, OH_AI_MODELTYPE_MINDIR, context);
7163     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
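    // A second build on the same model handle is expected to be rejected with
    // OH_AI_STATUS_LITE_MODEL_REBUILD.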
7164     OH_AI_Status build_ret2 = OH_AI_ModelBuild(model, modelBuf2, size2, OH_AI_MODELTYPE_MINDIR, context2);
7165     ASSERT_EQ(build_ret2, OH_AI_STATUS_LITE_MODEL_REBUILD);
7166     delete[] modelBuf;
7167     delete[] modelBuf2;
7168     OH_AI_ContextDestroy(&context);
7169     OH_AI_ContextDestroy(&context2);
7170     OH_AI_ModelDestroy(&model);
7171 }
7172 
7173 // OH_AI_ModelBuild with model set to nullptr
7174 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0008, Function | MediumTest | Level0) {
7175     printf("==========ReadFile==========\n");
7176     size_t size;
7177     const char *modelPath = "/data/test/ml_face_isface.ms";
7178     char *modelBuf = ReadFile(modelPath, &size);
7179     ASSERT_NE(modelBuf, nullptr);
7180     printf("==========OH_AI_ContextCreate==========\n");
7181     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7182     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7183     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7184     printf("==========OH_AI_ModelBuildFromFile==========\n");
7185     OH_AI_Status build_ret = OH_AI_ModelBuild(nullptr, modelBuf, size, OH_AI_MODELTYPE_MINDIR, context);
7186     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
7187     delete[] modelBuf;
7188     OH_AI_ContextDestroy(&context);
7189 }
7190 
7191 // OH_AI_ModelBuild with model_data set to nullptr
7192 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0009, Function | MediumTest | Level0) {
7193     printf("==========ReadFile==========\n");
7194     size_t size;
7195     const char *modelPath = "/data/test/ml_face_isface.ms";
7196     char *modelBuf = ReadFile(modelPath, &size);
7197     ASSERT_NE(modelBuf, nullptr);
7198     printf("==========OH_AI_ContextCreate==========\n");
7199     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7200     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7201     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7202     printf("==========OH_AI_ModelBuildFromFile==========\n");
7203     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7204     ASSERT_NE(model, nullptr);
7205     OH_AI_Status build_ret = OH_AI_ModelBuild(model, nullptr, 0, OH_AI_MODELTYPE_MINDIR, context);
7206     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
7207     delete[] modelBuf;
7208     OH_AI_ContextDestroy(&context);
7209     OH_AI_ModelDestroy(&model);
7210 }
7211 
7212 // OH_AI_ModelBuild with an invalid model type (OH_AI_MODELTYPE_INVALID)
7213 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0010, Function | MediumTest | Level0) {
7214     printf("==========ReadFile==========\n");
7215     size_t size;
7216     const char *modelPath = "/data/test/ml_face_isface.ms";
7217     char *modelBuf = ReadFile(modelPath, &size);
7218     ASSERT_NE(modelBuf, nullptr);
7219     printf("==========OH_AI_ContextCreate==========\n");
7220     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7221     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7222     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7223     printf("==========OH_AI_ModelBuildFromFile==========\n");
7224     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7225     ASSERT_NE(model, nullptr);
7226     OH_AI_Status build_ret = OH_AI_ModelBuild(model, modelBuf, size, OH_AI_MODELTYPE_INVALID, context);
7227     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_PARAM_INVALID);
7228     delete[] modelBuf;
7229     OH_AI_ContextDestroy(&context);
7230     OH_AI_ModelDestroy(&model);
7231 }
7232 
7233 // OH_AI_ModelBuild with context set to nullptr
7234 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0011, Function | MediumTest | Level0) {
7235     printf("==========ReadFile==========\n");
7236     size_t size;
7237     const char *modelPath = "/data/test/ml_face_isface.ms";
7238     char *modelBuf = ReadFile(modelPath, &size);
7239     ASSERT_NE(modelBuf, nullptr);
7240     printf("==========OH_AI_ContextCreate==========\n");
7241     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7242     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7243     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7244     printf("==========OH_AI_ModelBuildFromFile==========\n");
7245     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7246     ASSERT_NE(model, nullptr);
7247     OH_AI_Status build_ret = OH_AI_ModelBuild(model, modelBuf, size, OH_AI_MODELTYPE_MINDIR, nullptr);
7248     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
7249     delete[] modelBuf;
7250     OH_AI_ContextDestroy(&context);
7251     OH_AI_ModelDestroy(&model);
7252 }
7253 
7254 // OH_AI_ModelBuild with data_size smaller than the actual model data
7255 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0012, Function | MediumTest | Level0) {
7256     printf("==========ReadFile==========\n");
7257     size_t size;
7258     const char *modelPath = "/data/test/ml_face_isface.ms";
7259     char *modelBuf = ReadFile(modelPath, &size);
7260     ASSERT_NE(modelBuf, nullptr);
7261     printf("==========OH_AI_ContextCreate==========\n");
7262     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7263     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7264     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7265     printf("==========OH_AI_ModelBuildFromFile==========\n");
7266     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7267     ASSERT_NE(model, nullptr);
7268     OH_AI_Status build_ret = OH_AI_ModelBuild(model, modelBuf, 1, OH_AI_MODELTYPE_MINDIR, context);
7269     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_ERROR);
7270     delete[] modelBuf;
7271     OH_AI_ContextDestroy(&context);
7272     OH_AI_ModelDestroy(&model);
7273 }
7274 
7275 // OH_AI_ModelBuildFromFile with model set to nullptr
7276 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0013, Function | MediumTest | Level0) {
7277     printf("==========OH_AI_ContextCreate==========\n");
7278     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7279     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7280     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7281     printf("==========OH_AI_ModelBuildFromFile==========\n");
7282     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(nullptr, "/data/test/ml_face_isface.ms",
7283         OH_AI_MODELTYPE_MINDIR, context);
7284     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
7285     OH_AI_ContextDestroy(&context);
7286 }
7287 
7288 // OH_AI_ModelBuildFromFile with an invalid model path
7289 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0014, Function | MediumTest | Level0) {
7290     printf("==========OH_AI_ContextCreate==========\n");
7291     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7292     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7293     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7294     printf("==========OH_AI_ModelBuildFromFile==========\n");
7295     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7296     ASSERT_NE(model, nullptr);
7297     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/", OH_AI_MODELTYPE_MINDIR, context);
7298     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_ERROR);
7299     OH_AI_ContextDestroy(&context);
7300     OH_AI_ModelDestroy(&model);
7301 }
7302 
7303 // OH_AI_ModelBuildFromFile with an invalid model type (OH_AI_MODELTYPE_INVALID)
7304 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0015, Function | MediumTest | Level0) {
7305     printf("==========OH_AI_ContextCreate==========\n");
7306     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7307     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7308     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7309     printf("==========OH_AI_ModelBuildFromFile==========\n");
7310     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7311     ASSERT_NE(model, nullptr);
7312     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7313         OH_AI_MODELTYPE_INVALID, context);
7314     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_PARAM_INVALID);
7315     OH_AI_ContextDestroy(&context);
7316     OH_AI_ModelDestroy(&model);
7317 }
7318 
7319 // OH_AI_ModelBuildFromFile with context set to nullptr
7320 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0016, Function | MediumTest | Level0) {
7321     printf("==========OH_AI_ContextCreate==========\n");
7322     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7323     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7324     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7325     printf("==========OH_AI_ModelBuildFromFile==========\n");
7326     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7327     ASSERT_NE(model, nullptr);
7328     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7329         OH_AI_MODELTYPE_MINDIR, nullptr);
7330     ASSERT_EQ(build_ret, OH_AI_STATUS_LITE_NULLPTR);
7331     OH_AI_ContextDestroy(&context);
7332     OH_AI_ModelDestroy(&model);
7333 }
7334 
7336 // OH_AI_ModelBuildFromFile: normal call
7337 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0017, Function | MediumTest | Level0) {
7338     printf("==========OH_AI_ContextCreate==========\n");
7339     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7340     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7341     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7342     printf("==========OH_AI_ModelBuildFromFile==========\n");
7343     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7344     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7345         OH_AI_MODELTYPE_MINDIR, context);
7346     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7347     OH_AI_ContextDestroy(&context);
7348     OH_AI_ModelDestroy(&model);
7349 }
7350 
7351 // OH_AI_ModelBuildFromFile: build the same model handle multiple times
7352 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0018, Function | MediumTest | Level0) {
7353     printf("==========OH_AI_ContextCreate==========\n");
7354     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7355     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7356     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7357     OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
7358     OH_AI_DeviceInfoHandle cpu_device_info2 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7359     OH_AI_ContextAddDeviceInfo(context2, cpu_device_info2);
7360     printf("==========OH_AI_ModelBuildFromFile==========\n");
7361     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7362     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model,
7363         "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
7364     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7365     OH_AI_Status build_ret2 = OH_AI_ModelBuildFromFile(model,
7366         "/data/test/aiy_vision_classifier_plants_V1_3.ms", OH_AI_MODELTYPE_MINDIR, context2);
7367     ASSERT_EQ(build_ret2, OH_AI_STATUS_LITE_MODEL_REBUILD);
7368     OH_AI_ContextDestroy(&context);
7369     OH_AI_ContextDestroy(&context2);
7370     OH_AI_ModelDestroy(&model);
7371 }
7372 
7373 // OH_AI_ModelPredict with model set to nullptr
7374 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0019, Function | MediumTest | Level0) {
7375     printf("==========OH_AI_ContextCreate==========\n");
7376     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7377     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7378     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7379     printf("==========OH_AI_ModelBuildFromFile==========\n");
7380     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7381     ASSERT_NE(model, nullptr);
7382     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7383         OH_AI_MODELTYPE_MINDIR, context);
7384     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7385     printf("==========FillModelInputs==========\n");
7386     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
7387     ASSERT_NE(inputs.handle_list, nullptr);
7388     FillInputsData(inputs, "ml_face_isface", true);
7389     printf("==========Model Predict==========\n");
7390     OH_AI_TensorHandleArray outputs;
7391     OH_AI_Status predict_ret = OH_AI_ModelPredict(nullptr, inputs, &outputs, nullptr, nullptr);
7392     ASSERT_EQ(predict_ret, OH_AI_STATUS_LITE_NULLPTR);
7393     OH_AI_ContextDestroy(&context);
7394     OH_AI_ModelDestroy(&model);
7395 }
7396 
7397 // OH_AI_ModelPredict with an empty (uninitialized) input array
7398 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0020, Function | MediumTest | Level0) {
7399     printf("==========OH_AI_ContextCreate==========\n");
7400     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7401     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7402     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7403     printf("==========OH_AI_ModelBuildFromFile==========\n");
7404     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7405     ASSERT_NE(model, nullptr);
7406     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7407         OH_AI_MODELTYPE_MINDIR, context);
7408     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7409     printf("==========FillModelInputs==========\n");
7410     OH_AI_TensorHandleArray inputs;
7411     printf("==========Model Predict==========\n");
7412     OH_AI_TensorHandleArray outputs;
7413     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
7414     ASSERT_EQ(predict_ret, OH_AI_STATUS_LITE_ERROR);
7415     OH_AI_ContextDestroy(&context);
7416     OH_AI_ModelDestroy(&model);
7417 }
7418 
7420 // OH_AI_ModelPredict with invalid input data (inputs from another model / non-input data)
7421 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0021, Function | MediumTest | Level0) {
7422     printf("==========OH_AI_ContextCreate==========\n");
7423     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7424     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7425     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7426     printf("==========OH_AI_ModelBuildFromFile==========\n");
7427     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7428     ASSERT_NE(model, nullptr);
7429     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7430         OH_AI_MODELTYPE_MINDIR, context);
7431     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7432     printf("==========FillModelInputs==========\n");
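    // Build a deliberately mismatched input: a single 1-D float tensor with 6 elements
    // instead of the model's real input, so predict should report an input tensor error.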
7433     constexpr size_t create_shape_num = 1;
7434     int64_t createShape[create_shape_num] = {6};
7435     float data[6] = {1, 2, 3, 4, 5, 6};
7436     OH_AI_TensorHandle tensor = OH_AI_TensorCreate("data", OH_AI_DATATYPE_NUMBERTYPE_FLOAT32, createShape,
7437                                            create_shape_num, data, sizeof(data));
7438     ASSERT_NE(tensor, nullptr);
7439     OH_AI_TensorHandleArray in_tensor_array;
7440     in_tensor_array.handle_num = 1;
7441     in_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * in_tensor_array.handle_num);
7442     in_tensor_array.handle_list[0] = tensor;
7443     printf("==========Model Predict==========\n");
7444     OH_AI_TensorHandleArray outputs;
7445     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &outputs, nullptr, nullptr);
7446     std::cout << predict_ret << std::endl;
7447     ASSERT_EQ(predict_ret, OH_AI_STATUS_LITE_INPUT_TENSOR_ERROR);
7448     OH_AI_ContextDestroy(&context);
7449     OH_AI_TensorDestroy(&tensor);
    // release the manually allocated handle array (the tensor itself is destroyed above)
    free(in_tensor_array.handle_list);
7450     OH_AI_ModelDestroy(&model);
7451 }
7452 
7453 // OH_AI_ModelPredict with before/after callbacks
7454 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0022, Function | MediumTest | Level0) {
7455     printf("==========OH_AI_ContextCreate==========\n");
7456     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7457     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7458     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7459     printf("==========OH_AI_ModelBuildFromFile==========\n");
7460     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7461     ASSERT_NE(model, nullptr);
7462     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7463         OH_AI_MODELTYPE_MINDIR, context);
7464     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7465     printf("==========FillModelInputs==========\n");
7466     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
7467     ASSERT_NE(inputs.handle_list, nullptr);
7468     FillInputsData(inputs, "ml_face_isface", true);
7469     printf("==========Model Predict Callback==========\n");
7470     OH_AI_TensorHandleArray outputs;
7471     OH_AI_KernelCallBack before_call_back = PrintBeforeCallback;
7472     OH_AI_KernelCallBack after_call_back = PrintAfterCallback;
7473     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, before_call_back, after_call_back);
7474     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
7475     printf("=========CompareResult===========\n");
7476     CompareResult(outputs, "ml_face_isface");
7477     OH_AI_ContextDestroy(&context);
7478     OH_AI_ModelDestroy(&model);
7479 }
7480 
7481 // OH_AI_ModelResize on a static model with a mismatched shape
7482 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0027, Function | MediumTest | Level0) {
7483     printf("==========OH_AI_ContextCreate==========\n");
7484     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7485     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7486     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7487     printf("==========OH_AI_ModelBuildFromFile==========\n");
7488     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7489     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7490         OH_AI_MODELTYPE_MINDIR, context);
7491     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7492     printf("==========OH_AI_ModelGetInputs==========\n");
7493     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
7494     ASSERT_NE(inputs.handle_list, nullptr);
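    // The requested shape {1, 3, 48, 48} does not match the model input, so the resize
    // is expected to return OH_AI_STATUS_LITE_ERROR.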
7495     OH_AI_ShapeInfo shape_infos[] = {{4, {1, 3, 48, 48}}};
7496     OH_AI_Status resize_ret = OH_AI_ModelResize(model, inputs, shape_infos, inputs.handle_num);
7497     ASSERT_EQ(resize_ret, OH_AI_STATUS_LITE_ERROR);
7498     OH_AI_ContextDestroy(&context);
7499     OH_AI_ModelDestroy(&model);
7500 }
7501 
7502 // OH_AI_ModelResize on a static model
7503 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0028, Function | MediumTest | Level0) {
7504     printf("==========OH_AI_ContextCreate==========\n");
7505     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7506     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7507     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7508     printf("==========OH_AI_ModelBuildFromFile==========\n");
7509     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7510     OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7511         OH_AI_MODELTYPE_MINDIR, context);
7512     ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
7513     printf("==========OH_AI_ModelGetInputs==========\n");
7514     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
7515     ASSERT_NE(inputs.handle_list, nullptr);
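    // ml_face_isface is a static model, so the resize request is expected to fail with
    // OH_AI_STATUS_LITE_ERROR.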
7516     OH_AI_ShapeInfo shape_infos[] = {{4, {1, 112, 112, 3}}};
7517     OH_AI_Status resize_ret = OH_AI_ModelResize(model, inputs, shape_infos, inputs.handle_num);
7518     ASSERT_EQ(resize_ret, OH_AI_STATUS_LITE_ERROR);
7519     OH_AI_ContextDestroy(&context);
7520     OH_AI_ModelDestroy(&model);
7521 }
7522 
7523 // Normal call to OH_AI_ModelGetInputs
7524 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0029, Function | MediumTest | Level0) {
7525     printf("==========OH_AI_ContextCreate==========\n");
7526     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7527     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7528     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7529     printf("==========Model Build==========\n");
7530     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7531     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7532         OH_AI_MODELTYPE_MINDIR, context);
7533     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
7534     printf("==========FillModelInputs==========\n");
7535     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
7536     ASSERT_NE(inputs.handle_list, nullptr);
7537     FillInputsData(inputs, "ml_face_isface", true);
7538     printf("==========Model Predict==========\n");
7539     OH_AI_TensorHandleArray outputs;
7540     OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
7541     ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
7542     printf("==========CompareModelOutputs==========\n");
7543     CompareResult(outputs, "ml_face_isface");
7544     printf("==========OH_AI_ModelDestroy==========\n");
7545     OH_AI_ContextDestroy(&context);
7546     OH_AI_ModelDestroy(&model);
7547 }
7548 
7549 // Call OH_AI_ModelGetInputs multiple times
7550 HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0030, Function | MediumTest | Level0) {
7551     printf("==========OH_AI_ContextCreate==========\n");
7552     OH_AI_ContextHandle context = OH_AI_ContextCreate();
7553     OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
7554     OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
7555     printf("==========Model Build==========\n");
7556     OH_AI_ModelHandle model = OH_AI_ModelCreate();
7557     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
7558         OH_AI_MODELTYPE_MINDIR, context);
7559     ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
7560     printf("==========FillModelInputs==========\n");
7561     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
7562     ASSERT_NE(inputs.handle_list, nullptr);
    OH_AI_TensorHandleArray inputs2 = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs2.handle_list, nullptr);
    FillInputsData(inputs2, "ml_face_isface", true);
    printf("==========Model Predict==========\n");
    OH_AI_TensorHandleArray outputs;
    OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs2, &outputs, nullptr, nullptr);
    ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
    printf("==========CompareModelOutputs==========\n");
    CompareResult(outputs, "ml_face_isface");
    printf("==========OH_AI_ModelDestroy==========\n");
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// OH_AI_ModelGetInputs with a nullptr model
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0031, Function | MediumTest | Level0) {
    printf("==========OH_AI_ModelGetInputs==========\n");
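    // Robustness check: passing a null model handle must not crash; the return value is not asserted here.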
    OH_AI_ModelGetInputs(nullptr);
}

// Normal call to OH_AI_ModelGetOutputs
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0032, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========FillModelInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "ml_face_isface", true);
    printf("==========Model Predict==========\n");
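    // Pass the handles returned by OH_AI_ModelGetOutputs to Predict so the results are written into
    // the model's own output tensors.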
    OH_AI_TensorHandleArray outputs = OH_AI_ModelGetOutputs(model);
    OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
    printf("==========CompareModelOutputs==========\n");
    CompareResult(outputs, "ml_face_isface");
    printf("==========OH_AI_ModelDestroy==========\n");
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// Call OH_AI_ModelGetOutputs multiple times
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0033, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========FillModelInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "ml_face_isface", true);
    printf("==========Model Predict==========\n");
    OH_AI_TensorHandleArray outputs = OH_AI_ModelGetOutputs(model);
    ASSERT_NE(outputs.handle_list, nullptr);
    OH_AI_TensorHandleArray outputs2 = OH_AI_ModelGetOutputs(model);
    ASSERT_NE(outputs2.handle_list, nullptr);
    OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs2, nullptr, nullptr);
    ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
    printf("==========CompareModelOutputs==========\n");
    CompareResult(outputs2, "ml_face_isface");
    printf("==========OH_AI_ModelDestroy==========\n");
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// OH_AI_ModelGetOutputs with a nullptr model
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0034, Function | MediumTest | Level0) {
    printf("==========OH_AI_ModelGetOutputs==========\n");
    OH_AI_ModelGetOutputs(nullptr);
}

// OH_AI_ModelGetInputByTensorName
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0035, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ModelGetInputByTensorName==========\n");
    OH_AI_TensorHandle tensor = OH_AI_ModelGetInputByTensorName(model, "data");
    ASSERT_NE(tensor, nullptr);
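    // Wrap the single input tensor in a handle array so it can be passed to OH_AI_ModelPredict.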
    OH_AI_TensorHandleArray in_tensor_array;
    in_tensor_array.handle_num = 1;
    in_tensor_array.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * 1);
    in_tensor_array.handle_list[0] = tensor;
    FillInputsData(in_tensor_array, "ml_face_isface", true);
    printf("==========OH_AI_ModelPredict==========\n");
    OH_AI_TensorHandleArray outputs;
    OH_AI_Status predict_ret = OH_AI_ModelPredict(model, in_tensor_array, &outputs, nullptr, nullptr);
    ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
    printf("==========CompareModelOutputs==========\n");
    CompareResult(outputs, "ml_face_isface");
    printf("==========OH_AI_ModelDestroy==========\n");
    free(in_tensor_array.handle_list);  // release the array allocated above; the tensor handle itself is owned by the model
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// OH_AI_ModelGetInputByTensorName with a null model
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0036, Function | MediumTest | Level0) {
    printf("==========OH_AI_ModelGetInputByTensorName==========\n");
    OH_AI_ModelGetInputByTensorName(nullptr, "data");
}

// OH_AI_ModelGetInputByTensorName with a nonexistent tensor name
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0037, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========OH_AI_ModelGetInputByTensorName==========\n");
    OH_AI_TensorHandle tensor = OH_AI_ModelGetInputByTensorName(model, "aaa");
    ASSERT_EQ(tensor, nullptr);
    printf("==========OH_AI_ModelDestroy==========\n");
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// OH_AI_ModelGetOutputByTensorName
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0038, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========FillModelInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "ml_face_isface", true);
    printf("==========Model Predict==========\n");
    OH_AI_TensorHandle tensor = OH_AI_ModelGetOutputByTensorName(model, "prob");
    ASSERT_NE(tensor, nullptr);
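    // Hand Predict a one-element output array built from the tensor looked up by name above.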
    OH_AI_TensorHandleArray outputs;
    outputs.handle_num = 1;
    outputs.handle_list = (OH_AI_TensorHandle *)malloc(sizeof(OH_AI_TensorHandle) * 1);
    outputs.handle_list[0] = tensor;
    OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
    printf("==========CompareModelOutputs==========\n");
    CompareResult(outputs, "ml_face_isface");
    printf("==========OH_AI_ModelDestroy==========\n");
    free(outputs.handle_list);  // release the array allocated above; the tensor handle itself is owned by the model
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// OH_AI_ModelGetOutputByTensorName with a null model
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0039, Function | MediumTest | Level0) {
    printf("==========OH_AI_ModelGetOutputByTensorName==========\n");
    OH_AI_ModelGetOutputByTensorName(nullptr, "prob");
}

// OH_AI_ModelGetOutputByTensorName with a nonexistent tensor name
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0040, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========FillModelInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    FillInputsData(inputs, "ml_face_isface", true);
    printf("==========Model Predict==========\n");
    OH_AI_TensorHandle tensor = OH_AI_ModelGetOutputByTensorName(model, "aaa");
    ASSERT_EQ(tensor, nullptr);
    printf("==========OH_AI_ModelDestroy==========\n");
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
}

// Two models sharing the same context
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0041, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    printf("==========OH_AI_ModelBuildFromFile==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
    OH_AI_Status build_ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR,
        context);
    ASSERT_EQ(build_ret, OH_AI_STATUS_SUCCESS);
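    // Building a second model with a context that is already bound to the first model is expected to
    // fail with OH_AI_STATUS_LITE_PARAM_INVALID.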
    OH_AI_Status build_ret2 = OH_AI_ModelBuildFromFile(model2, "/data/test/aiy_vision_classifier_plants_V1_3.ms",
        OH_AI_MODELTYPE_MINDIR, context);
    ASSERT_EQ(build_ret2, OH_AI_STATUS_LITE_PARAM_INVALID);
    printf("==========OH_AI_ModelDestroy==========\n");
    OH_AI_ContextDestroy(&context);
    OH_AI_ModelDestroy(&model);
    OH_AI_ModelDestroy(&model2);
}

// OH_AI_ModelCreate creating three models at the same time
HWTEST(MSLiteTest, SUB_AI_MindSpore_ModelCreate_0042, Function | MediumTest | Level0) {
    printf("==========OH_AI_ContextCreate==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
    OH_AI_ContextHandle context2 = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info2 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context2, cpu_device_info2);
    OH_AI_ContextHandle context3 = OH_AI_ContextCreate();
    OH_AI_DeviceInfoHandle cpu_device_info3 = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context3, cpu_device_info3);
    printf("==========Model Build==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
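    // ModelPredict is a shared end-to-end helper of this suite: it builds the named model file with the
    // given context, runs inference, and checks the results. Each model uses its own context here;
    // resource cleanup is assumed to happen inside the helper, which is why there are no explicit
    // destroy calls in this test.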
    ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
    printf("==========Model Build2==========\n");
    OH_AI_ModelHandle model2 = OH_AI_ModelCreate();
    ASSERT_NE(model2, nullptr);
    ModelPredict(model2, context2, "ml_face_isface_quant", {}, false, false, false);
    printf("==========Model Build3==========\n");
    OH_AI_ModelHandle model3 = OH_AI_ModelCreate();
    ASSERT_NE(model3, nullptr);
    OH_AI_ShapeInfo shape_infos = {4, {1, 32, 512, 1}};
    ModelPredict(model3, context3, "ml_ocr_cn", shape_infos, false, false, false);
}
