1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "neural_network_runtime_test.h"
17 
18 #include "mindir.h"
19 
20 #include "common/utils.h"
21 #include "frameworks/native/compilation.h"
22 #include "frameworks/native/device_manager.h"
23 #include "frameworks/native/hdi_device_v2_0.h"
24 #include "test/unittest/common/v2_0/mock_idevice.h"
25 
26 namespace OHOS {
27 namespace NeuralNetworkRuntime {
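// The definitions below are test-local mock implementations of HDIDeviceV2_0 and
// DeviceManager methods, backed by MockIDevice/MockIPreparedModel from mock_idevice.h.
// They stand in for the real HDI-backed code so the test cases further down can drive
// both success and failure paths without real NNRt hardware.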
28 OH_NN_ReturnCode HDIDeviceV2_0::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
29     const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel)
30 {
31     if (model == nullptr) {
32         return OH_NN_INVALID_PARAMETER;
33     }
34 
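    // Reject preparation when float16 is not enabled, so tests can trigger a PrepareModel failure.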
35     if (config.enableFloat16 == false) {
36         return OH_NN_FAILED;
37     }
38 
39     sptr<OHOS::HDI::Nnrt::V2_0::IPreparedModel> iPreparedModel = sptr<OHOS::HDI::Nnrt::V2_0
40         ::MockIPreparedModel>(new OHOS::HDI::Nnrt::V2_0::MockIPreparedModel());
41     if (iPreparedModel == nullptr) {
42         LOGE("HDIDeviceV2_0 mock PrepareModel failed, error happened when new sptr");
43         return OH_NN_NULL_PTR;
44     }
45 
46     preparedModel = CreateSharedPtr<HDIPreparedModelV2_0>(iPreparedModel);
47     return OH_NN_SUCCESS;
48 }
49 
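// Mock GetDevice: wraps a MockIDevice in an HDIDeviceV2_0; a deviceId of 0 is treated
// as invalid so that tests can exercise the failure branch.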
50 std::shared_ptr<Device> DeviceManager::GetDevice(size_t deviceId) const
51 {
52     sptr<OHOS::HDI::Nnrt::V2_0::INnrtDevice> idevice
53         = sptr<OHOS::HDI::Nnrt::V2_0::MockIDevice>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIDevice());
54     if (idevice == nullptr) {
55         LOGE("DeviceManager mock GetDevice failed, error happened when new sptr");
56         return nullptr;
57     }
58 
59     std::shared_ptr<Device> device = CreateSharedPtr<HDIDeviceV2_0>(idevice);
60     if (device == nullptr) {
61         LOGE("DeviceManager mock GetDevice failed, the device is nullptr");
62         return nullptr;
63     }
64 
65     if (deviceId == 0) {
66         LOGE("DeviceManager mock GetDevice failed, the passed parameter deviceId is 0");
67         return nullptr;
68     } else {
69         return device;
70     }
71 }
72 
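// Mock GetDeviceType: reports OH_NN_UNAVALIDABLE_DEVICE for OH_NN_OTHERS and succeeds
// for any other device type.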
73 OH_NN_ReturnCode HDIDeviceV2_0::GetDeviceType(OH_NN_DeviceType& deviceType)
74 {
75     if (deviceType == OH_NN_OTHERS) {
76         return OH_NN_UNAVALIDABLE_DEVICE;
77     }
78 
79     return OH_NN_SUCCESS;
80 }
81 
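// Mock GetDeviceName: returns an empty string for deviceId 0 and the fixed name
// "deviceId" for any other id.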
82 const std::string& DeviceManager::GetDeviceName(size_t deviceId)
83 {
84     static std::string deviceName = "";
85     if (deviceId == 0) {
86         return deviceName;
87     }
88 
89     deviceName = "deviceId";
90     return deviceName;
91 }
92 
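// Mock GetAllDeviceId: normally reports a single device with id 1; when
// MockIPreparedModel::m_ExpectRetCode is OH_NN_FAILED it returns an empty list to
// simulate a system with no available devices.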
93 const std::vector<size_t>& DeviceManager::GetAllDeviceId()
94 {
95     static std::vector<size_t> deviceIds;
96     if (OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
97         // Reset to OH_NN_OPERATION_FORBIDDEN so that other test cases are not affected
98         OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
99         return deviceIds;
100     }
101     std::size_t device = 1;
102     deviceIds.emplace_back(device);
103     return deviceIds;
104 }
105 
106 OH_NN_ReturnCode HDIDeviceV2_0::IsModelCacheSupported(bool& isSupported)
107 {
108     isSupported = true;
109     return OH_NN_SUCCESS;
110 }
111 
112 OH_NN_ReturnCode HDIDeviceV2_0::IsPerformanceModeSupported(bool& isSupported)
113 {
114     isSupported = true;
115     return OH_NN_SUCCESS;
116 }
117 
118 OH_NN_ReturnCode HDIDeviceV2_0::IsPrioritySupported(bool& isSupported)
119 {
120     isSupported = true;
121     return OH_NN_SUCCESS;
122 }
123 
124 OH_NN_ReturnCode HDIDeviceV2_0::IsFloat16PrecisionSupported(bool& isSupported)
125 {
126     isSupported = true;
127     return OH_NN_SUCCESS;
128 }
129 
130 OH_NN_ReturnCode HDIDeviceV2_0::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
131     std::vector<bool>& ops)
132 {
133     if (model == nullptr) {
134         LOGE("HDIDeviceV2_0 mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
135         return OH_NN_NULL_PTR;
136     }
137 
138     ops.emplace_back(true);
139     return OH_NN_SUCCESS;
140 }
141 
142 OH_NN_ReturnCode HDIDeviceV2_0::IsDynamicInputSupported(bool& isSupported)
143 {
144     isSupported = true;
145     return OH_NN_SUCCESS;
146 }
147 } // namespace NeuralNetworkRuntime
148 } // namespace OHOS
149 
150 namespace OHOS {
151 namespace NeuralNetworkRuntime {
152 namespace Unittest {
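// BuildModel assembles a minimal graph containing a single Add operator: two float32
// inputs of shape [3, 4], an int8 activation-type parameter, and one float32 output,
// then builds the model instance.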
153 OH_NN_ReturnCode NeuralNetworkRuntimeTest::BuildModel(InnerModel& model)
154 {
155     int32_t inputDims[2] = {3, 4};
156     OH_NN_Tensor input1 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
157     OH_NN_ReturnCode ret = model.AddTensor(input1);
158     if (ret != OH_NN_SUCCESS) {
159         return ret;
160     }
161 
162     // Add the second input tensor of the Add operator: data type float32, shape [3, 4]
163     OH_NN_Tensor input2 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
164     ret = model.AddTensor(input2);
165     if (ret != OH_NN_SUCCESS) {
166         return ret;
167     }
168 
169     // Add the parameter tensor of the Add operator; it specifies the activation function type and has data type int8.
170     int32_t activationDims = 1;
171     int8_t activationValue = OH_NN_FUSED_NONE;
172     OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
173     ret = model.AddTensor(activation);
174     if (ret != OH_NN_SUCCESS) {
175         return ret;
176     }
177 
178     // Set the activation type to OH_NN_FUSED_NONE, meaning no activation function is fused into the operator.
179     uint32_t index = 2;
180     ret = model.SetTensorValue(index, &activationValue, sizeof(int8_t));
181     if (ret != OH_NN_SUCCESS) {
182         return ret;
183     }
184 
185     // Add the output tensor of the Add operator: data type float32, shape [3, 4]
186     OH_NN_Tensor output = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
187     ret = model.AddTensor(output);
188     if (ret != OH_NN_SUCCESS) {
189         return ret;
190     }
191 
192     // Specify the input, parameter, and output indices of the Add operator
193     uint32_t inputIndicesValues[2] = {0, 1};
194     uint32_t paramIndicesValues = 2;
195     uint32_t outputIndicesValues = 3;
196     OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
197     OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
198     OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};
199 
200     // Add the Add operator to the model instance
201     ret = model.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
202     if (ret != OH_NN_SUCCESS) {
203         return ret;
204     }
205 
206     // Specify the input and output indices of the model instance
207     ret = model.SpecifyInputsAndOutputs(inputIndices, outputIndices);
208     if (ret != OH_NN_SUCCESS) {
209         return ret;
210     }
211 
212     // Finish building the model instance
213     ret = model.Build();
214     if (ret != OH_NN_SUCCESS) {
215         return ret;
216     }
217 
218     return ret;
219 }
220 
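// Shared helpers for the test cases below: InitIndices fills the input/output/parameter
// index arrays, AddModelTensor registers the Add operator's tensors, SetTensor resets
// m_tensor, SetInnerBuild completes the model, and SetInputAndOutput prepares executor I/O.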
221 void NeuralNetworkRuntimeTest::InitIndices()
222 {
223     m_inputIndices.data = m_inputIndexs;
224     m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
225 
226     m_outputIndices.data = m_outputIndexs;
227     m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
228 
229     m_paramIndices.data = m_paramIndexs;
230     m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
231 }
232 
233 void NeuralNetworkRuntimeTest::AddModelTensor(InnerModel& innerModel)
234 {
235     const int dim[2] = {2, 2};
236     const OH_NN_Tensor& tensor = {OH_NN_FLOAT32, 2, dim, nullptr, OH_NN_TENSOR};
237 
238     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
239     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
240     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
241 
242     const OH_NN_Tensor& tensorParam = {OH_NN_INT8, 0, nullptr, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
243     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensorParam));
244 }
245 
246 void NeuralNetworkRuntimeTest::SetTensor()
247 {
248     m_tensor.dataType = OH_NN_INT32;
249     m_tensor.dimensionCount = 0;
250     m_tensor.dimensions = nullptr;
251     m_tensor.quantParam = nullptr;
252     m_tensor.type = OH_NN_TENSOR;
253 }
254 
255 void NeuralNetworkRuntimeTest::SetInnerBuild(InnerModel& innerModel)
256 {
257     uint32_t index = 3;
258     const int8_t activation = 0;
259     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
260         static_cast<const void *>(&activation), sizeof(int8_t)));
261 
262     OH_NN_OperationType opType {OH_NN_OPS_ADD};
263     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
264     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
265     EXPECT_EQ(OH_NN_SUCCESS, innerModel.Build());
266 }
267 
268 void NeuralNetworkRuntimeTest::SetInputAndOutput(Executor& executor)
269 {
270     float input1[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
271     float input2[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
272 
273     uint32_t input1Index = 0;
274     uint32_t input2Index = 1;
275 
276     int32_t inputDims[2] = {3, 4};
277     size_t length = 12 * sizeof(float);
278     m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
279     EXPECT_EQ(OH_NN_SUCCESS, executor.SetInput(input1Index, m_tensor, input1, length));
280     EXPECT_EQ(OH_NN_SUCCESS, executor.SetInput(input2Index, m_tensor, input2, length));
281 
282     float output[12];
283     uint32_t outputIndex = 0;
284     EXPECT_EQ(OH_NN_SUCCESS, executor.SetOutput(outputIndex, output, length));
285     EXPECT_EQ(OH_NN_SUCCESS, executor.Run());
286 }
287 
288 /*
289  * @tc.name: model_construct_001
290  * @tc.desc: Verify the return model of the OH_NNModel_Construct function.
291  * @tc.type: FUNC
292  */
293 HWTEST_F(NeuralNetworkRuntimeTest, model_construct_001, testing::ext::TestSize.Level0)
294 {
295     OH_NNModel* ret = OH_NNModel_Construct();
296     EXPECT_NE(nullptr, ret);
297 }
298 
299 /*
300  * @tc.name: model_add_tensor_001
301  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddTensor function.
302  * @tc.type: FUNC
303  */
304 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_001, testing::ext::TestSize.Level0)
305 {
306     OH_NNModel* model = nullptr;
307     const int32_t dimInput[2] = {2, 2};
308     const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
309     OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
310     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
311 }
312 
313 /*
314  * @tc.name: model_add_tensor_002
315  * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNModel_AddTensor function.
316  * @tc.type: FUNC
317  */
318 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_002, testing::ext::TestSize.Level0)
319 {
320     InnerModel innerModel;
321 
322     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
323     OH_NN_Tensor* tensor = nullptr;
324     OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, tensor);
325     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
326 }
327 
328 /*
329  * @tc.name: model_add_tensor_003
330  * @tc.desc: Verify the success of the OH_NNModel_AddTensor function.
331  * @tc.type: FUNC
332  */
333 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_003, testing::ext::TestSize.Level0)
334 {
335     InnerModel innerModel;
336     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
337 
338     const int32_t dimInput[2] = {2, 2};
339     const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
340     OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
341     EXPECT_EQ(OH_NN_SUCCESS, ret);
342 }
343 
344 /*
345  * @tc.name: model_add_operation_001
346  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddOperation function.
347  * @tc.type: FUNC
348  */
349 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_001, testing::ext::TestSize.Level0)
350 {
351     InnerModel innerModel;
352     OH_NNModel* model = nullptr;
353     OH_NN_OperationType opType {OH_NN_OPS_ADD};
354 
355     InitIndices();
356     AddModelTensor(innerModel);
357 
358     uint32_t index = 3;
359     const int8_t activation = 0;
360     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
361         static_cast<const void *>(&activation), sizeof(int8_t)));
362 
363     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
364     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
365 }
366 
367 /*
368  * @tc.name: model_add_operation_002
369  * @tc.desc: Verify the paramIndices is nullptr of the OH_NNModel_AddOperation function.
370  * @tc.type: FUNC
371  */
372 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_002, testing::ext::TestSize.Level0)
373 {
374     InnerModel innerModel;
375     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
376     OH_NN_OperationType opType {OH_NN_OPS_ADD};
377 
378     m_inputIndices.data = m_inputIndexs;
379     m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
380 
381     m_outputIndices.data = m_outputIndexs;
382     m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
383 
384     AddModelTensor(innerModel);
385     uint32_t index = 3;
386     const int8_t activation = 0;
387     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
388         static_cast<const void *>(&activation), sizeof(int8_t)));
389 
390     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, nullptr, &m_inputIndices, &m_outputIndices);
391     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
392 }
393 
394 /*
395  * @tc.name: model_add_operation_003
396  * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_AddOperation function.
397  * @tc.type: FUNC
398  */
399 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_003, testing::ext::TestSize.Level0)
400 {
401     InnerModel innerModel;
402     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
403     OH_NN_OperationType opType {OH_NN_OPS_ADD};
404 
405     m_paramIndices.data = m_paramIndexs;
406     m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
407 
408     m_outputIndices.data = m_outputIndexs;
409     m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
410 
411     AddModelTensor(innerModel);
412     uint32_t index = 3;
413     const int8_t activation = 0;
414     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
415         static_cast<const void *>(&activation), sizeof(int8_t)));
416 
417     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, nullptr, &m_outputIndices);
418     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
419 }
420 
421 /*
422  * @tc.name: model_add_operation_004
423  * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_AddOperation function.
424  * @tc.type: FUNC
425  */
426 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_004, testing::ext::TestSize.Level0)
427 {
428     InnerModel innerModel;
429     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
430     OH_NN_OperationType opType {OH_NN_OPS_ADD};
431 
432     m_paramIndices.data = m_paramIndexs;
433     m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
434 
435     m_inputIndices.data = m_inputIndexs;
436     m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
437 
438     AddModelTensor(innerModel);
439     uint32_t index = 3;
440     const int8_t activation = 0;
441     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
442         static_cast<const void *>(&activation), sizeof(int8_t)));
443 
444     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, nullptr);
445     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
446 }
447 
448 /*
449  * @tc.name: model_add_operation_005
450  * @tc.desc: Verify the success of the OH_NNModel_AddOperation function.
451  * @tc.type: FUNC
452  */
453 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_005, testing::ext::TestSize.Level0)
454 {
455     InnerModel innerModel;
456     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
457     OH_NN_OperationType opType {OH_NN_OPS_ADD};
458 
459     InitIndices();
460     AddModelTensor(innerModel);
461 
462     uint32_t index = 3;
463     const int8_t activation = 0;
464     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
465         static_cast<const void *>(&activation), sizeof(int8_t)));
466 
467     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
468     EXPECT_EQ(OH_NN_SUCCESS, ret);
469 }
470 
471 /*
472  * @tc.name: model_set_tensor_data_001
473  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SetTensorData function.
474  * @tc.type: FUNC
475  */
476 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_001, testing::ext::TestSize.Level0)
477 {
478     InnerModel innerModel;
479     OH_NNModel* model = nullptr;
480     AddModelTensor(innerModel);
481 
482     uint32_t index = 3;
483     const int8_t activation = 0;
484 
485     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
486         sizeof(int8_t));
487     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
488 }
489 
490 /*
491  * @tc.name: model_set_tensor_data_002
492  * @tc.desc: Verify the data is nullptr of the OH_NNModel_SetTensorData function.
493  * @tc.type: FUNC
494  */
495 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_002, testing::ext::TestSize.Level0)
496 {
497     InnerModel innerModel;
498     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
499     AddModelTensor(innerModel);
500 
501     uint32_t index = 3;
502 
503     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, nullptr, sizeof(int8_t));
504     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
505 }
506 
507 /*
508  * @tc.name: model_set_tensor_data_003
509  * @tc.desc: Verify the length is 0 of the OH_NNModel_SetTensorData function.
510  * @tc.type: FUNC
511  */
512 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_003, testing::ext::TestSize.Level0)
513 {
514     InnerModel innerModel;
515     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
516     AddModelTensor(innerModel);
517 
518     uint32_t index = 3;
519     const int8_t activation = 0;
520 
521     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation), 0);
522     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
523 }
524 
525 /*
526  * @tc.name: model_set_tensor_data_004
527  * @tc.desc: Verify the success of the OH_NNModel_SetTensorData function.
528  * @tc.type: FUNC
529  */
530 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_004, testing::ext::TestSize.Level0)
531 {
532     InnerModel innerModel;
533     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
534     AddModelTensor(innerModel);
535 
536     uint32_t index = 3;
537     const int8_t activation = 0;
538 
539     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
540         sizeof(int8_t));
541     EXPECT_EQ(OH_NN_SUCCESS, ret);
542 }
543 
544 /*
545  * @tc.name: model_specify_inputs_and_outputs_001
546  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
547  * @tc.type: FUNC
548  */
549 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_001, testing::ext::TestSize.Level0)
550 {
551     InnerModel innerModel;
552     OH_NNModel* model = nullptr;
553 
554     InitIndices();
555     AddModelTensor(innerModel);
556 
557     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
558     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
559 }
560 
561 /*
562  * @tc.name: model_specify_inputs_and_outputs_002
563  * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
564  * @tc.type: FUNC
565  */
566 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_002, testing::ext::TestSize.Level0)
567 {
568     InnerModel innerModel;
569     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
570 
571     InitIndices();
572     AddModelTensor(innerModel);
573 
574     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, nullptr, &m_outputIndices);
575     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
576 }
577 
578 /*
579  * @tc.name: model_specify_inputs_and_outputs_003
580  * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
581  * @tc.type: FUNC
582  */
583 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_003, testing::ext::TestSize.Level0)
584 {
585     InnerModel innerModel;
586     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
587 
588     InitIndices();
589     AddModelTensor(innerModel);
590 
591     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, nullptr);
592     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
593 }
594 
595 /*
596  * @tc.name: model_specify_inputs_and_outputs_004
597  * @tc.desc: Verify the success of the OH_NNModel_SpecifyInputsAndOutputs function.
598  * @tc.type: FUNC
599  */
600 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_004, testing::ext::TestSize.Level0)
601 {
602     InnerModel innerModel;
603     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
604 
605     InitIndices();
606     AddModelTensor(innerModel);
607 
608     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
609     EXPECT_EQ(OH_NN_SUCCESS, ret);
610 }
611 
612 /*
613  * @tc.name: model_finish_001
614  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Finish function.
615  * @tc.type: FUNC
616  */
617 HWTEST_F(NeuralNetworkRuntimeTest, model_finish_001, testing::ext::TestSize.Level0)
618 {
619     InnerModel innerModel;
620     OH_NNModel* model = nullptr;
621 
622     OH_NN_OperationType opType {OH_NN_OPS_ADD};
623 
624     InitIndices();
625     AddModelTensor(innerModel);
626 
627     uint32_t index = 3;
628     const int8_t activation = 0;
629     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index, static_cast<const void *>(&activation),
630         sizeof(int8_t)));
631 
632     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
633     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
634 
635     OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
636     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
637 }
638 
639 /*
640  * @tc.name: model_finish_002
641  * @tc.desc: Verify the success of the OH_NNModel_Finish function.
642  * @tc.type: FUNC
643  */
644 HWTEST_F(NeuralNetworkRuntimeTest, model_finish_002, testing::ext::TestSize.Level0)
645 {
646     InnerModel innerModel;
647     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
648 
649     OH_NN_OperationType opType {OH_NN_OPS_ADD};
650 
651     InitIndices();
652     AddModelTensor(innerModel);
653 
654     const int8_t activation = 0;
655     uint32_t index = 3;
656     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
657         static_cast<const void *>(&activation), sizeof(int8_t)));
658 
659     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
660     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
661 
662     OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
663     EXPECT_EQ(OH_NN_SUCCESS, ret);
664 }
665 
666 /*
667  * @tc.name: model_destroy_001
668  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Destroy function.
669  * @tc.type: FUNC
670  */
671 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_001, testing::ext::TestSize.Level0)
672 {
673     InnerModel innerModel;
674     OH_NNModel** pModel = nullptr;
675     OH_NNModel_Destroy(pModel);
676     EXPECT_EQ(nullptr, pModel);
677 }
678 
679 /*
680  * @tc.name: model_destroy_002
681  * @tc.desc: Verify the *OH_NNModel is nullptr of the OH_NNModel_Destroy function.
682  * @tc.type: FUNC
683  */
684 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_002, testing::ext::TestSize.Level0)
685 {
686     InnerModel innerModel;
687     OH_NNModel* model = nullptr;
688     OH_NNModel** pModel = &model;
689     OH_NNModel_Destroy(pModel);
690     EXPECT_EQ(nullptr, model);
691 }
692 
693 /*
694  * @tc.name: model_destroy_003
695  * @tc.desc: Verify the normal model of the OH_NNModel_Destroy function.
696  * @tc.type: FUNC
697  */
698 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_003, testing::ext::TestSize.Level0)
699 {
700     InnerModel* innerModel = new InnerModel();
701     EXPECT_NE(nullptr, innerModel);
702     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
703     OH_NNModel_Destroy(&model);
704     EXPECT_EQ(nullptr, model);
705 }
706 
707 /*
708  * @tc.name: model_get_available_operation_001
709  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_GetAvailableOperations function.
710  * @tc.type: FUNC
711  */
712 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_001, testing::ext::TestSize.Level0)
713 {
714     InnerModel innerModel;
715     OH_NNModel* model = nullptr;
716 
717     uint32_t opCount = 1;
718     const bool *pIsAvailable = nullptr;
719 
720     InitIndices();
721     AddModelTensor(innerModel);
722     SetInnerBuild(innerModel);
723 
724     size_t deviceID = 10;
725     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
726     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
727 }
728 
729 /*
730  * @tc.name: model_get_available_operation_002
731  * @tc.desc: Verify the isAvailable is nullptr of the OH_NNModel_GetAvailableOperations function.
732  * @tc.type: FUNC
733  */
734 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_002, testing::ext::TestSize.Level0)
735 {
736     InnerModel innerModel;
737     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
738 
739     uint32_t opCount = 1;
740     InitIndices();
741     AddModelTensor(innerModel);
742     SetInnerBuild(innerModel);
743 
744     size_t deviceID = 10;
745     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, nullptr, &opCount);
746     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
747 }
748 
749 /*
750  * @tc.name: model_get_available_operation_003
751  * @tc.desc: Verify the *isAvailable is not nullptr of the OH_NNModel_GetAvailableOperations function.
752  * @tc.type: FUNC
753  */
754 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_003, testing::ext::TestSize.Level0)
755 {
756     InnerModel innerModel;
757     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
758 
759     const bool isAvailable = true;
760     const bool *pIsAvailable = &isAvailable;
761     uint32_t opCount = 1;
762 
763     InitIndices();
764     AddModelTensor(innerModel);
765     SetInnerBuild(innerModel);
766 
767     size_t deviceID = 10;
768     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
769     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
770 }
771 
772 /*
773  * @tc.name: model_get_available_operation_004
774  * @tc.desc: Verify the opCount is nullptr of the OH_NNModel_GetAvailableOperations function.
775  * @tc.type: FUNC
776  */
777 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_004, testing::ext::TestSize.Level0)
778 {
779     InnerModel innerModel;
780     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
781 
782     const bool *pIsAvailable = nullptr;
783     uint32_t* opCount = nullptr;
784 
785     InitIndices();
786     AddModelTensor(innerModel);
787     SetInnerBuild(innerModel);
788 
789     size_t deviceID = 10;
790     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, opCount);
791     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
792 }
793 
794 /*
795  * @tc.name: model_get_available_operation_005
796  * @tc.desc: Verify the success of the OH_NNModel_GetAvailableOperations function.
797  * @tc.type: FUNC
798  */
799 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_005, testing::ext::TestSize.Level0)
800 {
801     InnerModel innerModel;
802     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
803 
804     const bool *pIsAvailable = nullptr;
805     uint32_t opCount = 1;
806 
807     InitIndices();
808     AddModelTensor(innerModel);
809     SetInnerBuild(innerModel);
810 
811     size_t deviceID = 10;
812     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
813     EXPECT_EQ(OH_NN_SUCCESS, ret);
814 }
815 
816 /*
817  * @tc.name: compilation_construct_001
818  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
819  * @tc.type: FUNC
820  */
821 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_001, testing::ext::TestSize.Level0)
822 {
823     InnerModel innerModel;
824     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
825     const OH_NNModel* model = nullptr;
826     OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
827     EXPECT_EQ(nullptr, ret);
828 }
829 
830 /*
831  * @tc.name: compilation_construct_002
832  * @tc.desc: Verify the OH_NNCompilation_Construct function when the model has not been built before creating the compilation.
833  * @tc.type: FUNC
834  */
835 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_002, testing::ext::TestSize.Level0)
836 {
837     InnerModel innerModel;
838     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
839     OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
840     EXPECT_EQ(nullptr, ret);
841 }
842 
843 /*
844  * @tc.name: compilation_construct_003
845  * @tc.desc: Verify the normal model of the OH_NNCompilation_Construct function.
846  * @tc.type: FUNC
847  */
848 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_003, testing::ext::TestSize.Level0)
849 {
850     InnerModel innerModel;
851     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
852     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
853     OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
854     EXPECT_NE(nullptr, ret);
855 }
856 
857 /*
858  * @tc.name: compilation_set_device_001
859  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
860  * @tc.type: FUNC
861  */
862 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_001, testing::ext::TestSize.Level0)
863 {
864     OH_NNCompilation* compilation = nullptr;
865     size_t deviceId = 1;
866     OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(compilation, deviceId);
867     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
868 }
869 
870 /*
871  * @tc.name: compilation_set_device_002
872  * @tc.desc: Verify the success of the OH_NNCompilation_SetDevice function.
873  * @tc.type: FUNC
874  */
875 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_002, testing::ext::TestSize.Level0)
876 {
877     InnerModel innerModel;
878     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
879     Compilation compilation(&innerModel);
880     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
881     size_t deviceId = 1;
882     OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId);
883     EXPECT_EQ(OH_NN_SUCCESS, ret);
884 }
885 
886 /*
887  * @tc.name: compilation_set_cache_001
888  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
889  * @tc.type: FUNC
890  */
891 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_001, testing::ext::TestSize.Level0)
892 {
893     InnerModel innerModel;
894     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
895     Compilation compilation(&innerModel);
896     OH_NNCompilation* nnCompilation = nullptr;
897     const char* cacheDir = "../";
898     uint32_t version = 1;
899     std::size_t deviceId = 1;
900     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
901     OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
902     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
903 }
904 
905 /*
906  * @tc.name: compilation_set_cache_002
907  * @tc.desc: Verify the cachePath is nullptr of the OH_NNCompilation_SetCache function.
908  * @tc.type: FUNC
909  */
910 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_002, testing::ext::TestSize.Level0)
911 {
912     InnerModel innerModel;
913     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
914     Compilation compilation(&innerModel);
915     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
916     const char* cacheDir = nullptr;
917     uint32_t version = 1;
918     std::size_t deviceId = 1;
919     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
920     OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
921     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
922 }
923 
924 /*
925  * @tc.name: compilation_set_cache_003
926  * @tc.desc: Verify the success of the OH_NNCompilation_SetCache function.
927  * @tc.type: FUNC
928  */
929 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_003, testing::ext::TestSize.Level0)
930 {
931     InnerModel innerModel;
932     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
933     Compilation compilation(&innerModel);
934     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
935     const char* cacheDir = "../";
936     uint32_t version = 1;
937     std::size_t deviceId = 1;
938     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
939     OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
940     EXPECT_EQ(OH_NN_SUCCESS, ret);
941 }
942 
943 /*
944  * @tc.name: compilation_set_performance_mode_001
945  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPerformanceMode function.
946  * @tc.type: FUNC
947  */
948 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_001, testing::ext::TestSize.Level0)
949 {
950     InnerModel innerModel;
951     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
952     Compilation compilation(&innerModel);
953     OH_NNCompilation* nnCompilation = nullptr;
954     OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
955 
956     std::size_t deviceId = 1;
957     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
958     OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
959     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
960 }
961 
962 /*
963  * @tc.name: compilation_set_performance_mode_002
964  * @tc.desc: Verify the success of the OH_NNCompilation_SetPerformanceMode function.
965  * @tc.type: FUNC
966  */
967 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_002, testing::ext::TestSize.Level0)
968 {
969     InnerModel innerModel;
970     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
971     Compilation compilation(&innerModel);
972     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
973     OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
974 
975     std::size_t deviceId = 1;
976     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
977 
978     OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
979     EXPECT_EQ(OH_NN_SUCCESS, ret);
980 }
981 
982 /*
983  * @tc.name: compilation_set_priority_001
984  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPriority function.
985  * @tc.type: FUNC
986  */
987 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_001, testing::ext::TestSize.Level0)
988 {
989     InnerModel innerModel;
990     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
991     Compilation compilation(&innerModel);
992     OH_NNCompilation* nnCompilation = nullptr;
993     OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
994 
995     std::size_t deviceId = 1;
996     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
997 
998     OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
999     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1000 }
1001 
1002 /*
1003  * @tc.name: compilation_set_priority_002
1004  * @tc.desc: Verify the success of the OH_NNCompilation_SetPriority function.
1005  * @tc.type: FUNC
1006  */
1007 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_002, testing::ext::TestSize.Level0)
1008 {
1009     InnerModel innerModel;
1010     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1011     Compilation compilation(&innerModel);
1012     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
1013     OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
1014 
1015     std::size_t deviceId = 1;
1016     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1017 
1018     OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
1019     EXPECT_EQ(OH_NN_SUCCESS, ret);
1020 }
1021 
1022 /*
1023  * @tc.name: compilation_set_enable_float16_001
1024  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_EnableFloat16 function.
1025  * @tc.type: FUNC
1026  */
1027 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_001, testing::ext::TestSize.Level0)
1028 {
1029     InnerModel innerModel;
1030     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1031     Compilation compilation(&innerModel);
1032     OH_NNCompilation* nnCompilation = nullptr;
1033     bool enableFloat16 = true;
1034 
1035     std::size_t deviceId = 1;
1036     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1037 
1038     OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
1039     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1040 }
1041 
1042 /*
1043  * @tc.name: compilation_set_enable_float16_002
1044  * @tc.desc: Verify the success of the OH_NNCompilation_EnableFloat16 function.
1045  * @tc.type: FUNC
1046  */
1047 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_002, testing::ext::TestSize.Level0)
1048 {
1049     InnerModel innerModel;
1050     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1051     Compilation compilation(&innerModel);
1052     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
1053     bool enableFloat16 = true;
1054 
1055     std::size_t deviceId = 1;
1056     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1057 
1058     OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
1059     EXPECT_EQ(OH_NN_SUCCESS, ret);
1060 }
1061 
1062 /*
1063  * @tc.name: compilation_build_001
1064  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Build function.
1065  * @tc.type: FUNC
1066  */
1067 HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_001, testing::ext::TestSize.Level0)
1068 {
1069     InnerModel innerModel;
1070     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1071     Compilation compilation(&innerModel);
1072     OH_NNCompilation* nnCompilation = nullptr;
1073 
1074     std::size_t deviceId = 1;
1075     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1076     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPerformance(OH_NN_PERFORMANCE_EXTREME));
1077     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPriority(OH_NN_PRIORITY_HIGH));
1078     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetEnableFp16(true));
1079 
1080     OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
1081     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1082 }
1083 
1084 /*
1085  * @tc.name: compilation_build_002
1086  * @tc.desc: Verify the success of the OH_NNCompilation_Build function.
1087  * @tc.type: FUNC
1088  */
1089 HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_002, testing::ext::TestSize.Level0)
1090 {
1091     InnerModel innerModel;
1092     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1093     Compilation compilation(&innerModel);
1094     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
1095 
1096     std::size_t deviceId = 1;
1097     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1098     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPerformance(OH_NN_PERFORMANCE_EXTREME));
1099     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPriority(OH_NN_PRIORITY_HIGH));
1100     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetEnableFp16(true));
1101 
1102     OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
1103     EXPECT_EQ(OH_NN_SUCCESS, ret);
1104 }
1105 
1106 /*
1107  * @tc.name: compilation_destroy_001
1108  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
1109  * @tc.type: FUNC
1110  */
1111 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_001, testing::ext::TestSize.Level0)
1112 {
1113     OH_NNCompilation** pCompilation = nullptr;
1114     OH_NNCompilation_Destroy(pCompilation);
1115     EXPECT_EQ(nullptr, pCompilation);
1116 }
1117 
1118 /*
1119  * @tc.name: compilation_destroy_002
1120  * @tc.desc: Verify the *OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
1121  * @tc.type: FUNC
1122  */
1123 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_002, testing::ext::TestSize.Level0)
1124 {
1125     OH_NNCompilation* compilation = nullptr;
1126     OH_NNCompilation** pCompilation = &compilation;
1127     OH_NNCompilation_Destroy(pCompilation);
1128     EXPECT_EQ(nullptr, compilation);
1129 }
1130 
1131 /*
1132  * @tc.name: compilation_destroy_003
1133  * @tc.desc: Verify the normal model of the OH_NNCompilation_Destroy function.
1134  * @tc.type: FUNC
1135  */
1136 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_003, testing::ext::TestSize.Level0)
1137 {
1138     InnerModel* innerModel = new InnerModel();
1139     EXPECT_NE(nullptr, innerModel);
1140     Compilation* compilation = new(std::nothrow) Compilation(innerModel);
1141     EXPECT_NE(nullptr, compilation);
1142     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
1143     OH_NNCompilation_Destroy(&nnCompilation);
1144     EXPECT_EQ(nullptr, nnCompilation);
1145 }
1146 
1147 /**
1148  * @tc.name: excutor_construct_001
1149  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNExecutor_Construct function
1150  * @tc.type: FUNC
1151  */
1152 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_001, testing::ext::TestSize.Level0)
1153 {
1154     InnerModel innerModel;
1155     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1156     Compilation compilation(&innerModel);
1157 
1158     std::size_t deviceId = 1;
1159     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1160     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetEnableFp16(true));
1161     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPerformance(OH_NN_PERFORMANCE_EXTREME));
1162     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPriority(OH_NN_PRIORITY_HIGH));
1163     EXPECT_EQ(OH_NN_SUCCESS, compilation.Build());
1164 
1165     OH_NNCompilation* nnCompilation = nullptr;
1166     OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
1167     EXPECT_EQ(nullptr, executor);
1168 }
1169 
1170 /**
1171  * @tc.name: excutor_construct_002
1172  * @tc.desc: Verify the OH_NNExecutor_Construct function when the compilation has not been built before creating the executor
1173  * @tc.type: FUNC
1174  */
1175 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_002, testing::ext::TestSize.Level0)
1176 {
1177     InnerModel innerModel;
1178     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1179     Compilation compilation(&innerModel);
1180     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
1181     OH_NNExecutor * executor = OH_NNExecutor_Construct(nnCompilation);
1182     EXPECT_EQ(nullptr, executor);
1183 }
1184 
1185 /**
1186  * @tc.name: excutor_construct_003
1187  * @tc.desc: Verify the success of the OH_NNExecutor_Construct function
1188  * @tc.type: FUNC
1189  */
1190 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_003, testing::ext::TestSize.Level0)
1191 {
1192     InnerModel innerModel;
1193     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1194     Compilation compilation(&innerModel);
1195 
1196     std::size_t deviceId = 1;
1197     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetDevice(deviceId));
1198     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPerformance(OH_NN_PERFORMANCE_EXTREME));
1199     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetPriority(OH_NN_PRIORITY_HIGH));
1200     EXPECT_EQ(OH_NN_SUCCESS, compilation.SetEnableFp16(true));
1201     EXPECT_EQ(OH_NN_SUCCESS, compilation.Build());
1202 
1203     OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(&compilation);
1204     OH_NNExecutor * executor = OH_NNExecutor_Construct(nnCompilation);
1205     EXPECT_NE(nullptr, executor);
1206 }
1207 
1208 /**
1209  * @tc.name: excutor_setinput_001
1210  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInput function
1211  * @tc.type: FUNC
1212  */
1213 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_001, testing::ext::TestSize.Level0)
1214 {
1215     SetTensor();
1216 
1217     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1218     const void *buffer = input;
1219     size_t length = 2 * sizeof(float);
1220     uint32_t inputIndex = 0;
1221     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nullptr, inputIndex, &m_tensor, buffer, length));
1222 }
1223 
1224 /**
1225  * @tc.name: excutor_setinput_002
1226  * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
1227  * @tc.type: FUNC
1228  */
1229 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_002, testing::ext::TestSize.Level0)
1230 {
1231     InnerModel innerModel;
1232     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1233     Compilation innerCompilation(&innerModel);
1234     Executor executor(&innerCompilation);
1235     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1236 
1237     uint32_t inputIndex = 0;
1238     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1239     const void *buffer = input;
1240     size_t length = 2 * sizeof(float);
1241     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length));
1242 }
1243 
1244 /**
1245  * @tc.name: excutor_setinput_003
1246  * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
1247  * @tc.type: FUNC
1248  */
1249 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_003, testing::ext::TestSize.Level0)
1250 {
1251     InnerModel innerModel;
1252     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1253     Compilation innerCompilation(&innerModel);
1254     Executor executor(&innerCompilation);
1255     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1256 
1257     SetTensor();
1258 
1259     uint32_t inputIndex = 0;
1260     const void *buffer = nullptr;
1261     size_t length = 2 * sizeof(float);
1262     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
1263 }
1264 
1265 /**
1266  * @tc.name: excutor_setinput_004
1267  * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
1268  * @tc.type: FUNC
1269  */
1270 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_004, testing::ext::TestSize.Level0)
1271 {
1272     InnerModel innerModel;
1273     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1274     Compilation innerCompilation(&innerModel);
1275     Executor executor(&innerCompilation);
1276     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1277 
1278     uint32_t inputIndex = 0;
1279     SetTensor();
1280 
1281     size_t length = 0;
1282     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1283     const void *buffer = input;
1284     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
1285 }
1286 
1287 /**
1288  * @tc.name: excutor_setinput_005
1289  * @tc.desc: Verify the success of the OH_NNExecutor_SetInput function
1290  * @tc.type: FUNC
1291  */
1292 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_005, testing::ext::TestSize.Level0)
1293 {
1294     InnerModel innerModel;
1295     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1296     Compilation innerCompilation(&innerModel);
1297     Executor executor(&innerCompilation);
1298     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1299 
1300     uint32_t inputIndex = 0;
1301     int32_t dims[2] = {3, 4};
1302     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1303 
1304     float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1305     const void *buffer = input;
1306     size_t length = 12 * sizeof(float);
1307     OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
1308     EXPECT_EQ(OH_NN_SUCCESS, ret);
1309 }
1310 
1311 /**
1312  * @tc.name: excutor_setoutput_001
1313  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutput function
1314  * @tc.type: FUNC
1315  */
1316 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_001, testing::ext::TestSize.Level0)
1317 {
1318     uint32_t outputIndex = 0;
1319     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1320     void *buffer = input;
1321     size_t length = 9 * sizeof(int32_t);
1322     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nullptr, outputIndex, buffer, length));
1323 }
1324 
1325 /**
1326  * @tc.name: excutor_setoutput_002
1327  * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetOutput function
1328  * @tc.type: FUNC
1329  */
1330 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_002, testing::ext::TestSize.Level0)
1331 {
1332     InnerModel innerModel;
1333     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1334     Compilation innerCompilation(&innerModel);
1335     Executor executor(&innerCompilation);
1336     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1337 
1338     uint32_t outputIndex = 0;
1339     void *buffer = nullptr;
1340     size_t length = 9 * sizeof(int32_t);
1341     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1342 }
1343 
1344 /**
1345  * @tc.name: excutor_setoutput_003
1346  * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
1347  * @tc.type: FUNC
1348  */
1349 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_003, testing::ext::TestSize.Level0)
1350 {
1351     InnerModel innerModel;
1352     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1353     Compilation innerCompilation(&innerModel);
1354     Executor executor(&innerCompilation);
1355     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1356 
1357     uint32_t outputIndex = 0;
1358     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1359     void *buffer = input;
1360     size_t length = 0;
1361     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1362 }
1363 
1364 /**
1365  * @tc.name: excutor_setoutput_004
1366  * @tc.desc: Verify the success of the OH_NNExecutor_SetOutput function
1367  * @tc.type: FUNC
1368  */
1369 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_004, testing::ext::TestSize.Level0)
1370 {
1371     InnerModel innerModel;
1372     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1373     Compilation innerCompilation(&innerModel);
1374     Executor executor(&innerCompilation);
1375     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1376 
1377     uint32_t outputIndex = 0;
1378     float output[12];
1379     size_t length = 12 * sizeof(float);
1380     EXPECT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1381 }
1382 
1383 /**
1384  * @tc.name: excutor_getoutputshape_001
1385  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_GetOutputShape function
1386  * @tc.type: FUNC
1387  */
1388 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_001, testing::ext::TestSize.Level0)
1389 {
1390     InnerModel innerModel;
1391     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1392     Compilation innerCompilation(&innerModel);
1393     Executor executor(&innerCompilation);
1394     OH_NNExecutor* nnExecutor = nullptr;
1395 
1396     SetInputAndOutput(executor);
1397 
1398     int32_t* ptr = nullptr;
1399     int32_t** shape = &ptr;
1400     uint32_t length = 2;
1401     uint32_t outputIndex = 0;
1402     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1403         shape, &length));
1404 }
1405 
1406 /**
1407  * @tc.name: excutor_getoutputshape_002
1408  * @tc.desc: Verify the shape is nullptr of the OH_NNExecutor_GetOutputShape function
1409  * @tc.type: FUNC
1410  */
1411 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_002, testing::ext::TestSize.Level0)
1412 {
1413     InnerModel innerModel;
1414     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1415     Compilation innerCompilation(&innerModel);
1416     Executor executor(&innerCompilation);
1417     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1418 
1419     SetInputAndOutput(executor);
1420 
1421     uint32_t outputIndex = 0;
1422     int32_t** shape = nullptr;
1423     uint32_t length = 2;
1424     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1425         shape, &length));
1426 }
1427 
1428 /**
1429  * @tc.name: excutor_getoutputshape_003
1430  * @tc.desc: Verify the *shape is not nullptr of the OH_NNExecutor_GetOutputShape function
1431  * @tc.type: FUNC
1432  */
1433 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_003, testing::ext::TestSize.Level0)
1434 {
1435     InnerModel innerModel;
1436     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1437     Compilation innerCompilation(&innerModel);
1438     Executor executor(&innerCompilation);
1439     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1440 
1441     SetInputAndOutput(executor);
1442 
1443     int32_t expectDim[2] = {3, 3};
1444     int32_t* ptr = expectDim;
1445     int32_t** shape = &ptr;
1446     uint32_t length = 2;
1447     uint32_t outputIndex = 0;
1448     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1449         shape, &length));
1450 }
1451 
1452 /**
1453  * @tc.name: excutor_getoutputshape_004
1454  * @tc.desc: Verify the length is nullptr of the OH_NNExecutor_GetOutputShape function
1455  * @tc.type: FUNC
1456  */
1457 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_004, testing::ext::TestSize.Level0)
1458 {
1459     InnerModel innerModel;
1460     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1461     Compilation innerCompilation(&innerModel);
1462     Executor executor(&innerCompilation);
1463     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1464 
1465     SetInputAndOutput(executor);
1466 
1467     int32_t* ptr = nullptr;
1468     int32_t** shape = &ptr;
1469     uint32_t outputIndex = 0;
1470     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, nullptr));
1471 }
1472 
1473 /**
1474  * @tc.name: excutor_getoutputshape_005
1475  * @tc.desc: Verify the success of the OH_NNExecutor_GetOutputShape function
1476  * @tc.type: FUNC
1477  */
1478 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_005, testing::ext::TestSize.Level0)
1479 {
1480     InnerModel innerModel;
1481     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1482     Compilation innerCompilation(&innerModel);
1483     Executor executor(&innerCompilation);
1484     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1485 
1486     SetInputAndOutput(executor);
1487 
1488     int32_t* ptr = nullptr;
1489     int32_t** shape = &ptr;
1490     uint32_t length = 2;
1491     uint32_t outputIndex = 0;
1492     EXPECT_EQ(OH_NN_SUCCESS, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, &length));
1493 }
1494 
1495 /**
1496  * @tc.name: excutor_run_001
1497  * @tc.desc: Verify that OH_NNExecutor_Run returns OH_NN_INVALID_PARAMETER when the OH_NNExecutor is nullptr
1498  * @tc.type: FUNC
1499  */
1500 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_001, testing::ext::TestSize.Level0)
1501 {
1502     OH_NNExecutor* nnExecutor = nullptr;
1503     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1504 }
1505 
1506 /**
1507  * @tc.name: excutor_run_002
1508  * @tc.desc: Verify the success of the OH_NNExecutor_Run function
1509  * @tc.type: FUNC
1510  */
1511 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_002, testing::ext::TestSize.Level0)
1512 {
1513     InnerModel innerModel;
1514     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1515     Compilation innerCompilation(&innerModel);
1516     Executor executor(&innerCompilation);
1517     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1518 
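    // Prepare two 3 x 4 float inputs and an output buffer so that OH_NNExecutor_Run can execute the built model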
1519     float input1[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1520     float input2[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1521     uint32_t input1Index = 0;
1522     uint32_t input2Index = 1;
1523 
1524     int32_t inputDims[2] = {3, 4};
1525     size_t length = 12 * sizeof(float);
1526     m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
1527     EXPECT_EQ(OH_NN_SUCCESS, executor.SetInput(input1Index, m_tensor, input1, length));
1528     EXPECT_EQ(OH_NN_SUCCESS, executor.SetInput(input2Index, m_tensor, input2, length));
1529 
1530     float output[12];
1531     uint32_t outputIndex = 0;
1532     EXPECT_EQ(OH_NN_SUCCESS, executor.SetOutput(outputIndex, output, length));
1533     EXPECT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(nnExecutor));
1534 }
1535 
1536 /*
1537  * @tc.name: executor_allocate_input_memory_001
1538  * @tc.desc: Verify that OH_NNExecutor_AllocateInputMemory returns nullptr when the OH_NNExecutor is nullptr.
1539  * @tc.type: FUNC
1540  */
1541 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_001, testing::ext::TestSize.Level0)
1542 {
1543     OH_NNExecutor* nnExecutor = nullptr;
1544     uint32_t inputIndex = 0;
1545     size_t length = 9 * sizeof(float);
1546 
1547     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1548     EXPECT_EQ(nullptr, ret);
1549 }
1550 
1551 /*
1552  * @tc.name: executor_allocate_input_memory_002
1553  * @tc.desc: Verify that OH_NNExecutor_AllocateInputMemory returns nullptr when the passed length is 0.
1554  * @tc.type: FUNC
1555  */
1556 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_002, testing::ext::TestSize.Level0)
1557 {
1558     InnerModel innerModel;
1559     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1560     Compilation innerCompilation(&innerModel);
1561     Executor executor(&innerCompilation);
1562     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1563 
1564     uint32_t inputIndex = 0;
1565     size_t length = 0;
1566 
1567     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1568     EXPECT_EQ(nullptr, ret);
1569 }
1570 
1571 /*
1572  * @tc.name: executor_allocate_input_memory_003
1573  * @tc.desc: Verify that OH_NNExecutor_AllocateInputMemory returns nullptr when the executor fails to create the input memory.
1574  * @tc.type: FUNC
1575  */
1576 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_003, testing::ext::TestSize.Level0)
1577 {
1578     InnerModel innerModel;
1579     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1580     Compilation innerCompilation(&innerModel);
1581     Executor executor(&innerCompilation);
1582     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1583 
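    // Input index 6 does not correspond to a valid model input, so creating the memory is expected to fail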
1584     uint32_t inputIndex = 6;
1585     size_t length = 9 * sizeof(float);
1586 
1587     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1588     EXPECT_EQ(nullptr, ret);
1589 }
1590 
1591 /*
1592  * @tc.name: executor_allocate_input_memory_004
1593  * @tc.desc: Verify the success of the OH_NNExecutor_AllocateInputMemory function.
1594  * @tc.type: FUNC
1595  */
1596 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_004, testing::ext::TestSize.Level0)
1597 {
1598     InnerModel innerModel;
1599     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1600     Compilation innerCompilation(&innerModel);
1601     Executor executor(&innerCompilation);
1602     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1603 
1604     uint32_t inputIndex = 0;
1605     size_t length = 9 * sizeof(float);
1606 
1607     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1608     EXPECT_NE(nullptr, ret);
1609 }
1610 
1611 /*
1612  * @tc.name: executor_allocate_output_memory_001
1613  * @tc.desc: Verify that OH_NNExecutor_AllocateOutputMemory returns nullptr when the OH_NNExecutor is nullptr.
1614  * @tc.type: FUNC
1615  */
1616 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_001, testing::ext::TestSize.Level0)
1617 {
1618     OH_NNExecutor* nnExecutor = nullptr;
1619     uint32_t outputIndex = 0;
1620     size_t length = 9 * sizeof(float);
1621 
1622     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1623     EXPECT_EQ(nullptr, ret);
1624 }
1625 
1626 /*
1627  * @tc.name: executor_allocate_output_memory_002
1628  * @tc.desc: Verify that OH_NNExecutor_AllocateOutputMemory returns nullptr when the passed length is 0.
1629  * @tc.type: FUNC
1630  */
1631 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_002, testing::ext::TestSize.Level0)
1632 {
1633     InnerModel innerModel;
1634     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1635     Compilation innerCompilation(&innerModel);
1636     Executor executor(&innerCompilation);
1637     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1638 
1639     uint32_t outputIndex = 0;
1640     size_t length = 0;
1641 
1642     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1643     EXPECT_EQ(nullptr, ret);
1644 }
1645 
1646 /*
1647  * @tc.name: executor_allocate_output_memory_003
1648  * @tc.desc: Verify that OH_NNExecutor_AllocateOutputMemory returns nullptr when the executor fails to create the output memory.
1649  * @tc.type: FUNC
1650  */
1651 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_003, testing::ext::TestSize.Level0)
1652 {
1653     InnerModel innerModel;
1654     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1655     Compilation innerCompilation(&innerModel);
1656     Executor executor(&innerCompilation);
1657     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1658 
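    // Output index 6 does not correspond to a valid model output, so creating the memory is expected to fail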
1659     uint32_t outputIndex = 6;
1660     size_t length = 9 * sizeof(float);
1661 
1662     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1663     EXPECT_EQ(nullptr, ret);
1664 }
1665 
1666 /*
1667  * @tc.name: executor_allocate_output_memory_004
1668  * @tc.desc: Verify the success of the OH_NNExecutor_AllocateOutputMemory function.
1669  * @tc.type: FUNC
1670  */
1671 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_004, testing::ext::TestSize.Level0)
1672 {
1673     InnerModel innerModel;
1674     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1675     Compilation innerCompilation(&innerModel);
1676     Executor executor(&innerCompilation);
1677     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1678 
1679     uint32_t outputIndex = 0;
1680     size_t length = 9 * sizeof(float);
1681 
1682     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1683     EXPECT_NE(nullptr, ret);
1684 }
1685 
1687 /*
1688  * @tc.name: executor_destroy_input_memory_001
1689  * @tc.desc: Verify that OH_NNExecutor_DestroyInputMemory does nothing when the OH_NNExecutor is nullptr.
1690  * @tc.type: FUNC
1691  */
1692 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_001, testing::ext::TestSize.Level0)
1693 {
1694     InnerModel innerModel;
1695     BuildModel(innerModel);
1696     Compilation innerCompilation(&innerModel);
1697     Executor executor(&innerCompilation);
1698     OH_NNExecutor* nnExecutor = nullptr;
1699 
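    // Create the memory through the executor object; calling OH_NNExecutor_DestroyInputMemory with a nullptr executor should be a no-op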
1700     uint32_t inputIndex = 0;
1701     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1702     void* const data = dataArry;
1703     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1704     OH_NN_Memory* pMemory = &memory;
1705     size_t length = 9 * sizeof(float);
1706     EXPECT_EQ(OH_NN_SUCCESS, executor.CreateInputMemory(inputIndex, length, &pMemory));
1707     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
1708     EXPECT_EQ(nullptr, nnExecutor);
1709 }
1710 
1711 /*
1712  * @tc.name: executor_destroy_input_memory_002
1713  * @tc.desc: Verify that OH_NNExecutor_DestroyInputMemory does nothing when the memory pointer is nullptr.
1714  * @tc.type: FUNC
1715  */
1716 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_002, testing::ext::TestSize.Level0)
1717 {
1718     InnerModel innerModel;
1719     BuildModel(innerModel);
1720     Compilation innerCompilation(&innerModel);
1721     Executor executor(&innerCompilation);
1722     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1723 
1724     uint32_t inputIndex = 0;
1725     OH_NN_Memory** memory = nullptr;
1726     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, memory);
1727     EXPECT_EQ(nullptr, memory);
1728 }
1729 
1730 /*
1731  * @tc.name: executor_destroy_input_memory_003
1732  * @tc.desc: Verify that OH_NNExecutor_DestroyInputMemory does nothing when *memory is nullptr.
1733  * @tc.type: FUNC
1734  */
1735 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_003, testing::ext::TestSize.Level0)
1736 {
1737     InnerModel innerModel;
1738     BuildModel(innerModel);
1739     Compilation innerCompilation(&innerModel);
1740     Executor executor(&innerCompilation);
1741     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1742 
1743     uint32_t inputIndex = 0;
1744     OH_NN_Memory* memory = nullptr;
1745     OH_NN_Memory** pMemory = &memory;
1746     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, pMemory);
1747     EXPECT_EQ(nullptr, memory);
1748 }
1749 
1750 /*
1751  * @tc.name: executor_destroy_input_memory_004
1752  * @tc.desc: Verify that OH_NNExecutor_DestroyInputMemory leaves *memory unchanged when destroying the input memory fails.
1753  * @tc.type: FUNC
1754  */
1755 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_004, testing::ext::TestSize.Level0)
1756 {
1757     InnerModel innerModel;
1758     BuildModel(innerModel);
1759     Compilation innerCompilation(&innerModel);
1760     Executor executor(&innerCompilation);
1761     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1762 
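    // No memory was created for input index 6, so the destroy call is expected to fail and leave pMemory non-null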
1763     uint32_t inputIndex = 6;
1764     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1765     void* const data = dataArry;
1766     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1767     OH_NN_Memory* pMemory = &memory;
1768     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
1769     EXPECT_NE(nullptr, pMemory);
1770 }
1771 
1772 /*
1773  * @tc.name: executor_destroy_input_memory_005
1774  * @tc.desc: Verify the success of the OH_NNExecutor_DestroyInputMemory function.
1775  * @tc.type: FUNC
1776  */
1777 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_005, testing::ext::TestSize.Level0)
1778 {
1779     InnerModel innerModel;
1780     BuildModel(innerModel);
1781     Compilation innerCompilation(&innerModel);
1782     Executor executor(&innerCompilation);
1783     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1784 
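    // Create the input memory, then destroy it; a successful destroy resets pMemory to nullptr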
1785     uint32_t inputIndex = 0;
1786     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1787     void* const data = dataArry;
1788     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1789     OH_NN_Memory* pMemory = &memory;
1790     size_t length = 9 * sizeof(float);
1791     EXPECT_EQ(OH_NN_SUCCESS, executor.CreateInputMemory(inputIndex, length, &pMemory));
1792     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
1793     EXPECT_EQ(nullptr, pMemory);
1794 }
1795 
1796 /*
1797  * @tc.name: executor_destroy_output_memory_001
1798  * @tc.desc: Verify that OH_NNExecutor_DestroyOutputMemory does nothing when the OH_NNExecutor is nullptr.
1799  * @tc.type: FUNC
1800  */
1801 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_001, testing::ext::TestSize.Level0)
1802 {
1803     OH_NNExecutor* nnExecutor = nullptr;
1804     uint32_t outputIndex = 0;
1805     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1806     void* const data = dataArry;
1807     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1808     OH_NN_Memory* pMemory = &memory;
1809     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
1810     EXPECT_EQ(nullptr, nnExecutor);
1811 }
1812 
1813 /*
1814  * @tc.name: executor_destroy_output_memory_002
1815  * @tc.desc: Verify that OH_NNExecutor_DestroyOutputMemory does nothing when the memory pointer is nullptr.
1816  * @tc.type: FUNC
1817  */
1818 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_002, testing::ext::TestSize.Level0)
1819 {
1820     InnerModel innerModel;
1821     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1822     Compilation innerCompilation(&innerModel);
1823     Executor executor(&innerCompilation);
1824     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1825 
1826     uint32_t outputIndex = 0;
1827     OH_NN_Memory** memory = nullptr;
1828     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, memory);
1829     EXPECT_EQ(nullptr, memory);
1830 }
1831 
1832 /*
1833  * @tc.name: executor_destroy_output_memory_003
1834  * @tc.desc: Verify that OH_NNExecutor_DestroyOutputMemory does nothing when *memory is nullptr.
1835  * @tc.type: FUNC
1836  */
1837 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_003, testing::ext::TestSize.Level0)
1838 {
1839     InnerModel innerModel;
1840     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1841     Compilation innerCompilation(&innerModel);
1842     Executor executor(&innerCompilation);
1843     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1844 
1845     uint32_t outputIndex = 0;
1846     OH_NN_Memory* memory = nullptr;
1847     OH_NN_Memory** pMemory = &memory;
1848     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, pMemory);
1849     EXPECT_EQ(nullptr, memory);
1850 }
1851 
1852 /*
1853  * @tc.name: executor_destroy_output_memory_004
1854  * @tc.desc: Verify that OH_NNExecutor_DestroyOutputMemory leaves *memory unchanged when destroying the output memory fails.
1855  * @tc.type: FUNC
1856  */
1857 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_004, testing::ext::TestSize.Level0)
1858 {
1859     InnerModel innerModel;
1860     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1861     Compilation innerCompilation(&innerModel);
1862     Executor executor(&innerCompilation);
1863     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1864 
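    // No memory was created for output index 6, so the destroy call is expected to fail and leave pMemory non-null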
1865     uint32_t outputIndex = 6;
1866     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1867     void* const data = dataArry;
1868     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1869     OH_NN_Memory* pMemory = &memory;
1870     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
1871     EXPECT_NE(nullptr, pMemory);
1872 }
1873 
1874 /*
1875  * @tc.name: executor_destroy_output_memory_005
1876  * @tc.desc: Verify the success of the OH_NNExecutor_DestroyOutputMemory function.
1877  * @tc.type: FUNC
1878  */
1879 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_005, testing::ext::TestSize.Level0)
1880 {
1881     InnerModel innerModel;
1882     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1883     Compilation innerCompilation(&innerModel);
1884     Executor executor(&innerCompilation);
1885     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1886 
1887     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1888     void* const data = dataArry;
1889     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1890     OH_NN_Memory* pMemory = &memory;
1891     size_t length = 9 * sizeof(float);
1892     uint32_t outputIndex = 0;
1893     EXPECT_EQ(OH_NN_SUCCESS, executor.CreateOutputMemory(outputIndex, length, &pMemory));
1894     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
1895     EXPECT_EQ(nullptr, pMemory);
1896 }
1897 
1898 /*
1899  * @tc.name: executor_set_input_with_memory_001
1900  * @tc.desc: Verify that OH_NNExecutor_SetInputWithMemory returns OH_NN_INVALID_PARAMETER when the OH_NNExecutor is nullptr.
1901  * @tc.type: FUNC
1902  */
1903 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_001, testing::ext::TestSize.Level0)
1904 {
1905     OH_NNExecutor* nnExecutor = nullptr;
1906 
1907     SetTensor();
1908 
1909     uint32_t inputIndex = 0;
1910     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1911     void* const data = dataArry;
1912     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1913 
1914     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
1915     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1916 }
1917 
1918 /*
1919  * @tc.name: executor_set_input_with_memory_002
1920  * @tc.desc: Verify that OH_NNExecutor_SetInputWithMemory returns OH_NN_INVALID_PARAMETER when the operand is nullptr.
1921  * @tc.type: FUNC
1922  */
1923 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_002, testing::ext::TestSize.Level0)
1924 {
1925     InnerModel innerModel;
1926     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1927     Compilation innerCompilation(&innerModel);
1928     Executor executor(&innerCompilation);
1929     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1930 
1931     OH_NN_Tensor* operand = nullptr;
1932 
1933     uint32_t inputIndex = 0;
1934     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1935     void* const data = dataArry;
1936     OH_NN_Memory memory = {data, 9 * sizeof(float)};
1937 
1938     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, operand, &memory);
1939     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1940 }
1941 
1942 /*
1943  * @tc.name: executor_set_input_with_memory_003
1944  * @tc.desc: Verify that OH_NNExecutor_SetInputWithMemory returns OH_NN_INVALID_PARAMETER when the memory is nullptr.
1945  * @tc.type: FUNC
1946  */
1947 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_003, testing::ext::TestSize.Level0)
1948 {
1949     InnerModel innerModel;
1950     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1951     Compilation innerCompilation(&innerModel);
1952     Executor executor(&innerCompilation);
1953     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1954 
1955     SetTensor();
1956 
1957     uint32_t inputIndex = 0;
1958     OH_NN_Memory* memory = nullptr;
1959     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, memory);
1960     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1961 }
1962 
1963 /*
1964  * @tc.name: executor_set_input_with_memory_004
1965  * @tc.desc: Verify the success of the OH_NNExecutor_SetInputWithMemory function.
1966  * @tc.type: FUNC
1967  */
1968 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_004, testing::ext::TestSize.Level0)
1969 {
1970     InnerModel innerModel;
1971     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1972     Compilation innerCompilation(&innerModel);
1973     Executor executor(&innerCompilation);
1974     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1975 
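    // Describe a 3 x 4 float input tensor and back it with a memory block of the matching 12-float size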
1976     uint32_t inputIndex = 0;
1977     int32_t dims[2] = {3, 4};
1978     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1979 
1980     float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1981     void* const data = dataArry;
1982     OH_NN_Memory memory = {data, 12 * sizeof(float)};
1983 
1984     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
1985     EXPECT_EQ(OH_NN_SUCCESS, ret);
1986 }
1987 
1989 /*
1990  * @tc.name: executor_set_output_with_memory_001
1991  * @tc.desc: Verify that OH_NNExecutor_SetOutputWithMemory returns OH_NN_INVALID_PARAMETER when the OH_NNExecutor is nullptr.
1992  * @tc.type: FUNC
1993  */
1994 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_001, testing::ext::TestSize.Level0)
1995 {
1996     OH_NNExecutor* nnExecutor = nullptr;
1997     uint32_t outputIndex = 0;
1998     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1999     void* const data = dataArry;
2000     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2001     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
2002     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2003 }
2004 
2005 /*
2006  * @tc.name: executor_set_output_with_memory_002
2007  * @tc.desc: Verify that OH_NNExecutor_SetOutputWithMemory returns OH_NN_INVALID_PARAMETER when the memory is nullptr.
2008  * @tc.type: FUNC
2009  */
2010 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_002, testing::ext::TestSize.Level0)
2011 {
2012     InnerModel innerModel;
2013     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
2014     Compilation innerCompilation(&innerModel);
2015     Executor executor(&innerCompilation);
2016     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2017 
2018     uint32_t outputIndex = 0;
2019     OH_NN_Memory* memory = nullptr;
2020     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, memory);
2021     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2022 }
2023 
2024 /*
2025  * @tc.name: executor_set_output_with_memory_003
2026  * @tc.desc: Verify the success of the OH_NNExecutor_SetOutputWithMemory function.
2027  * @tc.type: FUNC
2028  */
2029 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_003, testing::ext::TestSize.Level0)
2030 {
2031     InnerModel innerModel;
2032     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
2033     Compilation innerCompilation(&innerModel);
2034     Executor executor(&innerCompilation);
2035     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2036 
2037     uint32_t outputIndex = 0;
2038     float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
2039     void* const data = dataArry;
2040     OH_NN_Memory memory = {data, 12 * sizeof(float)};
2041     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
2042     EXPECT_EQ(OH_NN_SUCCESS, ret);
2043 }
2044 
2045 /*
2046  * @tc.name: executor_destroy_001
2047  * @tc.desc: Verify that OH_NNExecutor_Destroy does nothing when the passed executor pointer is nullptr.
2048  * @tc.type: FUNC
2049  */
2050 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_001, testing::ext::TestSize.Level0)
2051 {
2052     OH_NNExecutor** pExecutor = nullptr;
2053     OH_NNExecutor_Destroy(pExecutor);
2054     EXPECT_EQ(nullptr, pExecutor);
2055 }
2056 
2057 /*
2058  * @tc.name: executor_destroy_002
2059  * @tc.desc: Verify that OH_NNExecutor_Destroy does nothing when *executor is nullptr.
2060  * @tc.type: FUNC
2061  */
2062 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_002, testing::ext::TestSize.Level0)
2063 {
2064     OH_NNExecutor* nnExecutor = nullptr;
2065     OH_NNExecutor** pExecutor = &nnExecutor;
2066     OH_NNExecutor_Destroy(pExecutor);
2067     EXPECT_EQ(nullptr, nnExecutor);
2068 }
2069 
2070 /*
2071  * @tc.name: executor_destroy_003
2072  * @tc.desc: Verify that OH_NNExecutor_Destroy releases a normally built executor and resets the handle to nullptr.
2073  * @tc.type: FUNC
2074  */
2075 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_003, testing::ext::TestSize.Level0)
2076 {
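    // Build the executor on the heap so that OH_NNExecutor_Destroy can release it and reset the handle to nullptr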
2077     InnerModel* innerModel = new (std::nothrow) InnerModel();
2078     EXPECT_NE(nullptr, innerModel);
2079     Compilation* innerCompilation = new(std::nothrow) Compilation(innerModel);
2080     EXPECT_NE(nullptr, innerCompilation);
2081     Executor* executor = new(std::nothrow) Executor(innerCompilation);
2082     EXPECT_NE(nullptr, executor);
2083 
2084     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2085     OH_NNExecutor_Destroy(&nnExecutor);
2086     EXPECT_EQ(nullptr, nnExecutor);
2087 }
2088 
2089 /*
2090  * @tc.name: device_get_all_devices_id_001
2091  * @tc.desc: Verify that OH_NNDevice_GetAllDevicesID returns OH_NN_INVALID_PARAMETER when allDevicesID is nullptr.
2092  * @tc.type: FUNC
2093  */
2094 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_001, testing::ext::TestSize.Level0)
2095 {
2096     const size_t** allDevicesId = nullptr;
2097     uint32_t deviceCount = 1;
2098     uint32_t* pDeviceCount = &deviceCount;
2099     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(allDevicesId, pDeviceCount);
2100     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2101 }
2102 
2103 /*
2104  * @tc.name: device_get_all_devices_id_002
2105  * @tc.desc: Verify that OH_NNDevice_GetAllDevicesID returns OH_NN_INVALID_PARAMETER when *allDevicesID is not nullptr.
2106  * @tc.type: FUNC
2107  */
2108 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_002, testing::ext::TestSize.Level0)
2109 {
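    // *allDevicesID must be nullptr on input; a pre-set pointer is expected to be rejected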
2110     const size_t devicesId = 1;
2111     const size_t* allDevicesId = &devicesId;
2112     const size_t** pAllDevicesId = &allDevicesId;
2113     uint32_t deviceCount = 1;
2114     uint32_t* pDeviceCount = &deviceCount;
2115     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2116     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2117 }
2118 
2119 /*
2120  * @tc.name: device_get_all_devices_id_003
2121  * @tc.desc: Verify that OH_NNDevice_GetAllDevicesID returns OH_NN_INVALID_PARAMETER when deviceCount is nullptr.
2122  * @tc.type: FUNC
2123  */
2124 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_003, testing::ext::TestSize.Level0)
2125 {
2126     const size_t* allDevicesId = nullptr;
2127     const size_t** pAllDevicesId = &allDevicesId;
2128     uint32_t* pDeviceCount = nullptr;
2129     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2130     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2131 }
2132 
2133 /*
2134  * @tc.name: device_get_all_devices_id_004
2135  * @tc.desc: Verify that OH_NNDevice_GetAllDevicesID still succeeds when no device is available.
2136  * @tc.type: FUNC
2137  */
2138 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_004, testing::ext::TestSize.Level0)
2139 {
2140     const size_t* allDevicesId = nullptr;
2141     const size_t** pAllDevicesId = &allDevicesId;
2142     uint32_t deviceCount = 1;
2143     uint32_t* pDeviceCount = &deviceCount;
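    // Make the mock report a failure so that no device is returned; OH_NNDevice_GetAllDevicesID itself is still expected to succeed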
2144     OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_FAILED;
2145     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2146     EXPECT_EQ(OH_NN_SUCCESS, ret);
2147 }
2148 
2149 /*
2150  * @tc.name: device_get_all_devices_id_005
2151  * @tc.desc: Verify the success of the OH_NNDevice_GetAllDevicesID function.
2152  * @tc.type: FUNC
2153  */
2154 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_005, testing::ext::TestSize.Level0)
2155 {
2156     const size_t* allDevicesId = nullptr;
2157     const size_t** pAllDevicesId = &allDevicesId;
2158     uint32_t deviceCount = 1;
2159     uint32_t* pDeviceCount = &deviceCount;
2160     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2161     EXPECT_EQ(OH_NN_SUCCESS, ret);
2162 }
2163 
2164 /*
2165  * @tc.name: device_get_name_001
2166  * @tc.desc: Verify that OH_NNDevice_GetName returns OH_NN_INVALID_PARAMETER when name is nullptr.
2167  * @tc.type: FUNC
2168  */
2169 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_001, testing::ext::TestSize.Level0)
2170 {
2171     size_t deviceID = 1;
2172     const char **name = nullptr;
2173     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, name);
2174     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2175 }
2176 
2177 /*
2178  * @tc.name: device_get_name_002
2179  * @tc.desc: Verify that OH_NNDevice_GetName returns OH_NN_INVALID_PARAMETER when *name is not nullptr.
2180  * @tc.type: FUNC
2181  */
2182 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_002, testing::ext::TestSize.Level0)
2183 {
2184     size_t deviceID = 1;
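    // *name must be nullptr on input; a pre-set string is expected to be rejected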
2185     const char* name = "deviceId";
2186     const char** pName = &name;
2187     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2188     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2189 }
2190 
2191 /*
2192  * @tc.name: device_get_name_003
2193  * @tc.desc: Verify that OH_NNDevice_GetName returns OH_NN_FAILED when the name of the deviceID cannot be obtained.
2194  * @tc.type: FUNC
2195  */
2196 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_003, testing::ext::TestSize.Level0)
2197 {
2198     size_t deviceID = 0;
2199     const char* name = nullptr;
2200     const char** pName = &name;
2201     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2202     EXPECT_EQ(OH_NN_FAILED, ret);
2203 }
2204 
2205 /*
2206  * @tc.name: device_get_name_004
2207  * @tc.desc: Verify the success of the OH_NNDevice_GetName function.
2208  * @tc.type: FUNC
2209  */
2210 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_004, testing::ext::TestSize.Level0)
2211 {
2212     size_t deviceID = 1;
2213     const char* name = nullptr;
2214     const char** pName = &name;
2215     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2216     EXPECT_EQ(OH_NN_SUCCESS, ret);
2217 }
2218 
2219 /*
2220  * @tc.name: device_get_type_001
2221  * @tc.desc: Verify that OH_NNDevice_GetType returns OH_NN_INVALID_PARAMETER when the device for the given deviceID is nullptr.
2222  * @tc.type: FUNC
2223  */
2224 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_001, testing::ext::TestSize.Level0)
2225 {
2226     size_t deviceID = 0;
2227     OH_NN_DeviceType deviceType = OH_NN_CPU;
2228     OH_NN_DeviceType* pDeviceType = &deviceType;
2229     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2230     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2231 }
2232 
2233 /*
2234  * @tc.name: device_get_type_002
2235  * @tc.desc: Verify that OH_NNDevice_GetType returns OH_NN_INVALID_PARAMETER when the deviceType pointer is nullptr.
2236  * @tc.type: FUNC
2237  */
2238 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_002, testing::ext::TestSize.Level0)
2239 {
2240     size_t deviceID = 1;
2241     OH_NN_DeviceType* pDeviceType = nullptr;
2242     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2243     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2244 }
2245 
2246 /*
2247  * @tc.name: device_get_type_003
2248  * @tc.desc: Verify that OH_NNDevice_GetType returns OH_NN_UNAVALIDABLE_DEVICE when the device type cannot be obtained.
2249  * @tc.type: FUNC
2250  */
2251 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_003, testing::ext::TestSize.Level0)
2252 {
2253     size_t deviceID = 1;
2254     OH_NN_DeviceType deviceType = OH_NN_OTHERS;
2255     OH_NN_DeviceType* pDeviceType = &deviceType;
2256     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2257     EXPECT_EQ(OH_NN_UNAVALIDABLE_DEVICE, ret);
2258 }
2259 
2260 /*
2261  * @tc.name: device_get_type_004
2262  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2263  * @tc.type: FUNC
2264  */
2265 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_004, testing::ext::TestSize.Level0)
2266 {
2267     size_t deviceID = 1;
2268     OH_NN_DeviceType deviceType = OH_NN_CPU;
2269     OH_NN_DeviceType* pDeviceType = &deviceType;
2270     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2271     EXPECT_EQ(OH_NN_SUCCESS, ret);
2272 }
2273 } // namespace Unittest
2274 } // namespace NeuralNetworkRuntime
2275 } // namespace OHOS
2276