/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "neural_network_runtime_test.h"

#include "mindir.h"

#include "utils.h"
#include "compilation.h"
#include "hdi_device_v1_0.h"
#include "test/unittest/common/v1_0/mock_idevice.h"
#include "nnexecutor.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
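// The overrides below replace HDIDeviceV1_0's real HDI calls with lightweight
// stubs so the tests can exercise runtime code paths without a driver. Each
// stub returns a fixed answer; PrepareModel additionally fails when float16 is
// disabled, which gives the tests a deterministic error path to assert on.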
OH_NN_ReturnCode HDIDeviceV1_0::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
    const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel)
{
    if (model == nullptr) {
        return OH_NN_INVALID_PARAMETER;
    }

    if (config.enableFloat16 == false) {
        return OH_NN_FAILED;
    }

    sptr<OHOS::HDI::Nnrt::V1_0::IPreparedModel> iPreparedModel = sptr<OHOS::HDI::Nnrt::V1_0
        ::MockIPreparedModel>(new OHOS::HDI::Nnrt::V1_0::MockIPreparedModel());
    if (iPreparedModel == nullptr) {
        LOGE("HDIDeviceV1_0 mock PrepareModel failed, error happened when new sptr");
        return OH_NN_NULL_PTR;
    }

    preparedModel = CreateSharedPtr<HDIPreparedModelV1_0>(iPreparedModel);
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::GetDeviceType(OH_NN_DeviceType& deviceType)
{
    if (deviceType == OH_NN_OTHERS) {
        return OH_NN_UNAVAILABLE_DEVICE;
    }

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsModelCacheSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsPerformanceModeSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsPrioritySupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsFloat16PrecisionSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
    std::vector<bool>& ops)
{
    if (model == nullptr) {
        LOGE("HDIDeviceV1_0 mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
        return OH_NN_NULL_PTR;
    }

    ops.emplace_back(true);
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsDynamicInputSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Unittest {
OH_NN_ReturnCode NeuralNetworkRuntimeTest::BuildModel(InnerModel& model)
{
    // Add the first input tensor of the Add operator, with data type float32 and shape [3, 4]
    int32_t inputDims[2] = {3, 4};
    OH_NN_Tensor input1 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = model.AddTensor(input1);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the second input tensor of the Add operator, with data type float32 and shape [3, 4]
    OH_NN_Tensor input2 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    ret = model.AddTensor(input2);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the parameter tensor of the Add operator; it specifies the activation function type and has data type int8.
    int32_t activationDims = 1;
    int8_t activationValue = OH_NN_FUSED_NONE;
    OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
    ret = model.AddTensor(activation);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Set the activation type to OH_NN_FUSED_NONE, meaning no activation function is fused into the operator.
    uint32_t index = 2;
    ret = model.SetTensorValue(index, &activationValue, sizeof(int8_t));
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the output tensor of the Add operator, with data type float32 and shape [3, 4]
    OH_NN_Tensor output = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    ret = model.AddTensor(output);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Specify the input, parameter, and output indices of the Add operator
    uint32_t inputIndicesValues[2] = {0, 1};
    uint32_t paramIndicesValues = 2;
    uint32_t outputIndicesValues = 3;
    OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
    OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
    OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};

    // Add the Add operator to the model instance
    ret = model.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Specify the input and output indices of the model instance
    ret = model.SpecifyInputsAndOutputs(inputIndices, outputIndices);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Finish building the model instance
    ret = model.Build();
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    return ret;
}
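
/*
 * Illustrative sketch (not called by any test): the same single-Add graph that
 * BuildModel() assembles through InnerModel, built via the public C API that
 * the cases below exercise. Return-code checks are collapsed for brevity; all
 * entry points used here already appear elsewhere in this file.
 */
static OH_NN_ReturnCode BuildAddModelViaPublicApi()
{
    OH_NNModel* model = OH_NNModel_Construct();
    if (model == nullptr) {
        return OH_NN_NULL_PTR;
    }

    int32_t dims[2] = {3, 4};
    OH_NN_Tensor dataTensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
    OH_NNModel_AddTensor(model, &dataTensor); // tensor 0: first input
    OH_NNModel_AddTensor(model, &dataTensor); // tensor 1: second input

    int32_t activationDims = 1;
    int8_t activationValue = OH_NN_FUSED_NONE;
    OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
    OH_NNModel_AddTensor(model, &activation); // tensor 2: activation-type parameter
    OH_NNModel_SetTensorData(model, 2, &activationValue, sizeof(int8_t));

    OH_NNModel_AddTensor(model, &dataTensor); // tensor 3: output

    uint32_t inputValues[2] = {0, 1};
    uint32_t paramValue = 2;
    uint32_t outputValue = 3;
    OH_NN_UInt32Array inputIndices = {inputValues, 2};
    OH_NN_UInt32Array paramIndices = {&paramValue, 1};
    OH_NN_UInt32Array outputIndices = {&outputValue, 1};
    OH_NNModel_AddOperation(model, OH_NN_OPS_ADD, &paramIndices, &inputIndices, &outputIndices);
    OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices);

    OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
    OH_NNModel_Destroy(&model);
    return ret;
}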

void NeuralNetworkRuntimeTest::InitIndices()
{
    m_inputIndices.data = m_inputIndexs;
    m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);

    m_outputIndices.data = m_outputIndexs;
    m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);

    m_paramIndices.data = m_paramIndexs;
    m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
}

void NeuralNetworkRuntimeTest::AddModelTensor(InnerModel& innerModel)
{
    const int dim[2] = {2, 2};
    const OH_NN_Tensor tensor = {OH_NN_FLOAT32, 2, dim, nullptr, OH_NN_TENSOR};

    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));

    const OH_NN_Tensor tensorParam = {OH_NN_INT8, 0, nullptr, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensorParam));
}

void NeuralNetworkRuntimeTest::SetTensor()
{
    m_tensor.dataType = OH_NN_INT32;
    m_tensor.dimensionCount = 0;
    m_tensor.dimensions = nullptr;
    m_tensor.quantParam = nullptr;
    m_tensor.type = OH_NN_TENSOR;
}

void NeuralNetworkRuntimeTest::SetInnerBuild(InnerModel& innerModel)
{
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_OperationType opType {OH_NN_OPS_ADD};
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.Build());
}

void NeuralNetworkRuntimeTest::SetInputAndOutput(Executor& executor)
{
    size_t input1Index = 0;
    int32_t inputDims[2] = {3, 4};
    size_t lengthSize = 12 * sizeof(float);
    size_t *length = &lengthSize;

    size_t minInputDims = 1;
    size_t maxInputDims = 12;

    // GetInputDimRange returns its results through double pointers, so take the
    // address of each local pointer to form the out-parameters.
    size_t *minInputDimsAddress = &minInputDims;
    size_t **minInputDimsAddressA = &minInputDimsAddress;

    size_t *maxInputDimsAddress = &maxInputDims;
    size_t **maxInputDimsAddressA = &maxInputDimsAddress;

    m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    EXPECT_EQ(OH_NN_SUCCESS,
        executor.GetInputDimRange(input1Index, minInputDimsAddressA, maxInputDimsAddressA, length));

    uint32_t outputIndex = 0;

    int32_t shape = 3;
    int32_t* shapeA = &shape;
    int32_t** shapeAA = &shapeA;
    uint32_t* shapeNum = &outputIndex;
    EXPECT_EQ(OH_NN_SUCCESS, executor.GetOutputShape(outputIndex, shapeAA, shapeNum));
}

class MockIPreparedModel : public PreparedModel {
public:
    MOCK_METHOD1(ExportModelCache, OH_NN_ReturnCode(std::vector<Buffer>&));
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<IOTensor>&,
                                       const std::vector<IOTensor>&,
                                       std::vector<std::vector<int32_t>>&,
                                       std::vector<bool>&));
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<NN_Tensor*>&,
                                       const std::vector<NN_Tensor*>&,
                                       std::vector<std::vector<int32_t>>&,
                                       std::vector<bool>&));
    MOCK_CONST_METHOD1(GetModelID, OH_NN_ReturnCode(uint32_t&));
    MOCK_METHOD2(GetInputDimRanges, OH_NN_ReturnCode(std::vector<std::vector<uint32_t>>&,
                                                     std::vector<std::vector<uint32_t>>&));
    MOCK_METHOD0(ReleaseBuiltModel, OH_NN_ReturnCode());
};
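
// The executor cases below hand this mock straight to NNExecutor and stub
// GetInputDimRanges with EXPECT_CALL(...).WillRepeatedly(Return(...)) to steer
// OH_NNExecutor_SetInput down a chosen success or failure path.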

class MockIDevice : public Device {
public:
    MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
    MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
    MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        std::vector<bool>&));
    MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                                const ModelConfig&,
                                                std::shared_ptr<PreparedModel>&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
                                                const ModelConfig&,
                                                std::shared_ptr<PreparedModel>&));
    MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
                                                              const ModelConfig&,
                                                              std::shared_ptr<PreparedModel>&,
                                                              bool&));
    MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                                       const ModelConfig&,
                                                       std::shared_ptr<PreparedModel>&));
    MOCK_METHOD1(AllocateBuffer, void*(size_t));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
    MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
    MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
    MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
    MOCK_METHOD1(ReadOpVersion, OH_NN_ReturnCode(int&));
};
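
/*
 * Illustrative sketch (not a test): the gMock stubbing pattern these mock
 * classes are built for. The device name used here is made up; only the
 * EXPECT_CALL + WillRepeatedly(Return(...)) mechanics mirror the real tests
 * below.
 */
static void StubMockIDeviceExample()
{
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*device, GetDeviceName(::testing::_))
        .WillRepeatedly(::testing::DoAll(
            ::testing::SetArgReferee<0>("example-device"), // hypothetical name, for illustration only
            ::testing::Return(OH_NN_SUCCESS)));

    std::string name;
    EXPECT_EQ(OH_NN_SUCCESS, device->GetDeviceName(name));
    EXPECT_EQ("example-device", name);
}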

/*
 * @tc.name: model_construct_001
 * @tc.desc: Verify the return model of the OH_NNModel_Construct function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_construct_001, testing::ext::TestSize.Level0)
{
    OH_NNModel* ret = OH_NNModel_Construct();
    EXPECT_NE(nullptr, ret);
}

/*
 * @tc.name: model_add_tensor_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddTensor function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_001, testing::ext::TestSize.Level0)
{
    OH_NNModel* model = nullptr;
    const int32_t dimInput[2] = {2, 2};
    const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_tensor_002
 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNModel_AddTensor function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_Tensor* tensor = nullptr;
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, tensor);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_tensor_003
 * @tc.desc: Verify the success of the OH_NNModel_AddTensor function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const int32_t dimInput[2] = {2, 2};
    const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_add_operation_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_002
 * @tc.desc: Verify the paramIndices is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    m_inputIndices.data = m_inputIndexs;
    m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);

    m_outputIndices.data = m_outputIndexs;
    m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);

    AddModelTensor(innerModel);
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, nullptr, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_003
 * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    m_paramIndices.data = m_paramIndexs;
    m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);

    m_outputIndices.data = m_outputIndexs;
    m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);

    AddModelTensor(innerModel);
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, nullptr, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_004
 * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    m_paramIndices.data = m_paramIndexs;
    m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);

    m_inputIndices.data = m_inputIndexs;
    m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);

    AddModelTensor(innerModel);
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_005
 * @tc.desc: Verify the success of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_set_tensor_data_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
        sizeof(int8_t));
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_set_tensor_data_002
 * @tc.desc: Verify the data is nullptr of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    AddModelTensor(innerModel);

    uint32_t index = 3;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, nullptr, sizeof(int8_t));
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_set_tensor_data_003
 * @tc.desc: Verify the length is 0 of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation), 0);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_set_tensor_data_004
 * @tc.desc: Verify the success of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
        sizeof(int8_t));
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_002
 * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, nullptr, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_003
 * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_004
 * @tc.desc: Verify the success of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_finish_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Finish function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_finish_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;

    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index, static_cast<const void *>(&activation),
        sizeof(int8_t)));

    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));

    OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_finish_002
 * @tc.desc: Verify the success of the OH_NNModel_Finish function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_finish_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    const int8_t activation = 0;
    uint32_t index = 3;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));

    OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_destroy_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel** pModel = nullptr;
    OH_NNModel_Destroy(pModel);
    EXPECT_EQ(nullptr, pModel);
}

/*
 * @tc.name: model_destroy_003
 * @tc.desc: Verify the normal model of the OH_NNModel_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_003, testing::ext::TestSize.Level0)
{
    InnerModel* innerModel = new InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    OH_NNModel_Destroy(&model);
    EXPECT_EQ(nullptr, model);
}

/*
 * @tc.name: model_get_available_operation_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;

    uint32_t opCount = 1;
    const bool *pIsAvailable = nullptr;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_002
 * @tc.desc: Verify the isAvailable is nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    uint32_t opCount = 1;
    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, nullptr, &opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_003
 * @tc.desc: Verify the *isAvailable is not nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const bool isAvailable = true;
    const bool *pIsAvailable = &isAvailable;
    uint32_t opCount = 1;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_004
 * @tc.desc: Verify the opCount is nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const bool *pIsAvailable = nullptr;
    uint32_t* opCount = nullptr;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_005
 * @tc.desc: Verify the failed return of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const bool *pIsAvailable = nullptr;
    uint32_t opCount = 1;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/*
 * @tc.name: compilation_construct_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    const OH_NNModel* model = nullptr;
    OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: compilation_construct_002
 * @tc.desc: Verify the OH_NNCompilation_Construct function when the model has not been built before creating the compilation.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
    EXPECT_NE(nullptr, ret);
}

/*
 * @tc.name: compilation_construct_003
 * @tc.desc: Verify the normal model of the OH_NNCompilation_Construct function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
    EXPECT_NE(nullptr, ret);
}

/*
 * @tc.name: compilation_set_device_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_001, testing::ext::TestSize.Level0)
{
    OH_NNCompilation* compilation = nullptr;
    size_t deviceId = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(compilation, deviceId);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_device_002
 * @tc.desc: Verify the success of the OH_NNCompilation_SetDevice function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    size_t deviceId = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_cache_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    const char* cacheDir = "../";
    uint32_t version = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_cache_002
 * @tc.desc: Verify the cachePath is nullptr of the OH_NNCompilation_SetCache function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    const char* cacheDir = nullptr;
    uint32_t version = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_cache_003
 * @tc.desc: Verify the success of the OH_NNCompilation_SetCache function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    const char* cacheDir = "../";
    uint32_t version = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_performance_mode_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPerformanceMode function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_performance_mode_002
 * @tc.desc: Verify the success of the OH_NNCompilation_SetPerformanceMode function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_priority_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPriority function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    OH_NN_Priority priority = OH_NN_PRIORITY_LOW;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_priority_002
 * @tc.desc: Verify the success of the OH_NNCompilation_SetPriority function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NN_Priority priority = OH_NN_PRIORITY_LOW;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_enable_float16_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_EnableFloat16 function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    bool enableFloat16 = true;

    OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_enable_float16_002
 * @tc.desc: Verify the success of the OH_NNCompilation_EnableFloat16 function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    bool enableFloat16 = true;

    OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_build_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Build function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;

    OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_destroy_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_001, testing::ext::TestSize.Level0)
{
    OH_NNCompilation** pCompilation = nullptr;
    OH_NNCompilation_Destroy(pCompilation);
    EXPECT_EQ(nullptr, pCompilation);
}

/*
 * @tc.name: compilation_destroy_003
 * @tc.desc: Verify the normal model of the OH_NNCompilation_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_003, testing::ext::TestSize.Level0)
{
    InnerModel* innerModel = new InnerModel();
    EXPECT_NE(nullptr, innerModel);

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNCompilation_Destroy(&nnCompilation);
    EXPECT_EQ(nullptr, nnCompilation);
}

/**
 * @tc.name: excutor_construct_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNExecutor_Construct function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNCompilation* nnCompilation = nullptr;
    OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
    EXPECT_EQ(nullptr, executor);
}

/**
 * @tc.name: excutor_construct_002
 * @tc.desc: Verify the OH_NNExecutor_Construct function when the compilation has not been built before creating the executor
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
    EXPECT_EQ(nullptr, executor);
}

/**
 * @tc.name: excutor_construct_003
 * @tc.desc: Verify the nullptr return of the OH_NNExecutor_Construct function when the compilation has not been built
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
    EXPECT_EQ(nullptr, executor);
}

/**
 * @tc.name: excutor_setinput_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_001, testing::ext::TestSize.Level0)
{
    SetTensor();

    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    const void *buffer = input;
    size_t length = 2 * sizeof(float);
    uint32_t inputIndex = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nullptr, inputIndex, &m_tensor, buffer, length));
}

/**
 * @tc.name: excutor_setinput_002
 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    const void *buffer = input;
    size_t length = 2 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length));
}

/**
 * @tc.name: excutor_setinput_003
 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    SetTensor();

    uint32_t inputIndex = 0;
    const void *buffer = nullptr;
    size_t length = 2 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
}

/**
 * @tc.name: excutor_setinput_004
 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    SetTensor();

    size_t length = 0;
    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    const void *buffer = input;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
}

/**
 * @tc.name: excutor_setinput_005
 * @tc.desc: Verify the invalid parameter return of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: excutor_setinput_006
 * @tc.desc: Verify the failed return of the OH_NNExecutor_SetInput function when GetInputDimRanges fails
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_006, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_006");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    ExtensionConfig extensionConfig;
    OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
    OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
        false, performance, priority);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setinput_007
 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_007");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    ExtensionConfig extensionConfig;
    OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
    OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
        false, performance, priority);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setinput_008
 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_008, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_008");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    ExtensionConfig extensionConfig;
    OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
    OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
        false, performance, priority);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, nullptr, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setinput_009
 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_009, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_009");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    ExtensionConfig extensionConfig;
    OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
    OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
        false, performance, priority);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 0;
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}
1377 
1378 /**
1379  * @tc.name: excutor_setoutput_001
1380  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutput function
1381  * @tc.type: FUNC
1382  */
1383 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_001, testing::ext::TestSize.Level0)
1384 {
1385     LOGE("OH_NNExecutor_SetOutput excutor_setoutput_001");
1386     uint32_t outputIndex = 0;
1387     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1388     void *buffer = input;
1389     size_t length = 9 * sizeof(int32_t);
1390     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nullptr, outputIndex, buffer, length));
1391 }
1392 
1393 /**
1394  * @tc.name: excutor_setoutput_002
1395  * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetOutput function
1396  * @tc.type: FUNC
1397  */
1398 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_002, testing::ext::TestSize.Level0)
1399 {
1400     InnerModel innerModel;
1401     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1402 
1403     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1404     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1405     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1406 
1407     uint32_t outputIndex = 0;
1408     void *buffer = nullptr;
1409     size_t length = 9 * sizeof(float);
1410     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1411 }
1412 
1413 /**
1414  * @tc.name: excutor_setoutput_003
1415  * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
1416  * @tc.type: FUNC
1417  */
1418 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_003, testing::ext::TestSize.Level0)
1419 {
1420     InnerModel innerModel;
1421     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1422 
1423     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1424     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1425     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1426 
1427     uint32_t outputIndex = 0;
1428     float output[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1429     void *buffer = output;
1430     size_t length = 0;
1431     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1432 }
1433 
1434 /**
1435  * @tc.name: excutor_setoutput_004
1436  * @tc.desc: Verify the executor is invalid of the OH_NNExecutor_SetOutput function
1437  * @tc.type: FUNC
1438  */
1439 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_004, testing::ext::TestSize.Level0)
1440 {
1441     InnerModel innerModel;
1442     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1443 
1444     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1445     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1446     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1447 
1448     uint32_t outputIndex = 0;
1449     float output[12];
1450     size_t length = 12 * sizeof(float);
1451     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1452 }
1453 
1454 /**
1455  * @tc.name: excutor_setoutput_005
1456  * @tc.desc: Verify the output index is out of range of the OH_NNExecutor_SetOutput function
1457  * @tc.type: FUNC
1458  */
1459 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_005, testing::ext::TestSize.Level0)
1460 {
1461     LOGE("OH_NNExecutor_SetOutput excutor_setinput_006");
1462     size_t m_backendID {0};
1463     std::shared_ptr<Device> m_device {nullptr};
1464     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1465     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1466         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1467     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1468     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1469     ExtensionConfig extensionConfig;
1470     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1471     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1472     NNExecutor* executor = new (std::nothrow) NNExecutor(
1473         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1474         false, performance, priority);
1475     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1476 
1477     uint32_t outputIndex = 0;
1478     float output[12];
1479     size_t length = 12 * sizeof(float);
1480     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1481 
1482     testing::Mock::AllowLeak(mockIPreparedMode.get());
1483 }
1484 
1485 /**
1486  * @tc.name: excutor_setoutput_006
1487  * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetOutput function
1488  * @tc.type: FUNC
1489  */
1490 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_006, testing::ext::TestSize.Level0)
1491 {
1492     LOGE("OH_NNExecutor_SetOutput excutor_setinput_006");
1493     size_t m_backendID {0};
1494     std::shared_ptr<Device> m_device {nullptr};
1495     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1496     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1497         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1498     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1499     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1500     ExtensionConfig extensionConfig;
1501     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1502     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1503     NNExecutor* executor = new (std::nothrow) NNExecutor(
1504         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1505         false, performance, priority);
1506     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1507 
1508     uint32_t outputIndex = 0;
1509     size_t length = 12 * sizeof(float);
1510     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, nullptr, length));
1511 
1512     testing::Mock::AllowLeak(mockIPreparedMode.get());
1513 }
1514 
1515 /**
1516  * @tc.name: excutor_setoutput_007
1517  * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
1518  * @tc.type: FUNC
1519  */
1520 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_007, testing::ext::TestSize.Level0)
1521 {
1522     LOGE("OH_NNExecutor_SetOutput excutor_setoutput_007");
1523     size_t m_backendID {0};
1524     std::shared_ptr<Device> m_device {nullptr};
1525     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1526     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1527         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1528     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1529     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1530     ExtensionConfig extensionConfig;
1531     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1532     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1533     NNExecutor* executor = new (std::nothrow) NNExecutor(
1534         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1535         false, performance, priority);
1536     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1537 
1538     uint32_t outputIndex = 0;
1539     float output[12];
1540     size_t length = 0;
1541     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1542 
1543     testing::Mock::AllowLeak(mockIPreparedMode.get());
1544 }
1545 
1546 /**
1547  * @tc.name: excutor_getoutputshape_001
1548  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_GetOutputShape function
1549  * @tc.type: FUNC
1550  */
1551 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_001, testing::ext::TestSize.Level0)
1552 {
1553     InnerModel innerModel;
1554     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1555     OH_NNExecutor* nnExecutor = nullptr;
1556 
1557     int32_t* ptr = nullptr;
1558     int32_t** shape = &ptr;
1559     uint32_t length = 2;
1560     uint32_t outputIndex = 0;
1561     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1562         shape, &length));
1563 }
1564 
1565 /**
1566  * @tc.name: excutor_getoutputshape_002
1567  * @tc.desc: Verify the shape is nullptr of the OH_NNExecutor_GetOutputShape function
1568  * @tc.type: FUNC
1569  */
1570 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_002, testing::ext::TestSize.Level0)
1571 {
1572     InnerModel innerModel;
1573     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1574 
1575     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1576     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1577     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1578 
1579     uint32_t outputIndex = 0;
1580     int32_t** shape = nullptr;
1581     uint32_t length = 2;
1582     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1583         shape, &length));
1584 }
1585 
1586 /**
1587  * @tc.name: excutor_getoutputshape_003
1588  * @tc.desc: Verify the *shape is not nullptr of the OH_NNExecutor_GetOutputShape function
1589  * @tc.type: FUNC
1590  */
1591 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_003, testing::ext::TestSize.Level0)
1592 {
1593     InnerModel innerModel;
1594     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1595 
1596     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1597     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1598     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1599 
1600     int32_t expectDim[2] = {3, 3};
1601     int32_t* ptr = expectDim;
1602     int32_t** shape = &ptr;
1603     uint32_t length = 2;
1604     uint32_t outputIndex = 0;
1605     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1606         shape, &length));
1607 }
1608 
1609 /**
1610  * @tc.name: excutor_getoutputshape_004
1611  * @tc.desc: Verify the length is nullptr of the OH_NNExecutor_GetOutputShape function
1612  * @tc.type: FUNC
1613  */
1614 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_004, testing::ext::TestSize.Level0)
1615 {
1616     InnerModel innerModel;
1617     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1618 
1619     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1620     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1621     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1622 
1623     int32_t* ptr = nullptr;
1624     int32_t** shape = &ptr;
1625     uint32_t outputIndex = 0;
1626     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, nullptr));
1627 }
1628 
1629 /**
1630  * @tc.name: excutor_getoutputshape_005
1631  * @tc.desc: Verify the executor is invalid of the OH_NNExecutor_GetOutputShape function
1632  * @tc.type: FUNC
1633  */
1634 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_005, testing::ext::TestSize.Level0)
1635 {
1636     InnerModel innerModel;
1637     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1638 
1639     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1640     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1641     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1642 
1643     int32_t* ptr = nullptr;
1644     int32_t** shape = &ptr;
1645     uint32_t length = 2;
1646     uint32_t outputIndex = 0;
1647     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, &length));
1648 }
1649 
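/*
 * Illustrative sketch (not part of the original suite): on success, OH_NNExecutor_GetOutputShape
 * fills *shape with a pointer to dimension data owned by the executor, so *shape must enter as
 * nullptr (test 003 above shows a pre-set pointer is rejected) and must not be freed by the caller.
 */
static int64_t OutputVolumeSketch(OH_NNExecutor* nnExecutor, uint32_t outputIndex)
{
    int32_t* shape = nullptr;      // filled with executor-owned dimensions
    uint32_t shapeLength = 0;      // filled with the rank of the output
    if (OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, &shape, &shapeLength) != OH_NN_SUCCESS) {
        return -1;
    }
    int64_t volume = 1;
    for (uint32_t i = 0; i < shapeLength; ++i) {
        volume *= shape[i];        // element count across all dimensions
    }
    return volume;
}
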
1650 /**
1651  * @tc.name: excutor_run_001
1652  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Run function
1653  * @tc.type: FUNC
1654  */
1655 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_001, testing::ext::TestSize.Level0)
1656 {
1657     OH_NNExecutor* nnExecutor = nullptr;
1658     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1659 }
1660 
1661 /**
1662  * @tc.name: excutor_run_002
1663  * @tc.desc: Verify the executor is invalid of the OH_NNExecutor_Run function
1664  * @tc.type: FUNC
1665  */
1666 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_002, testing::ext::TestSize.Level0)
1667 {
1668     InnerModel innerModel;
1669     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1670 
1671     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1672     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1673     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1674 
1675     int32_t inputDims[2] = {3, 4};
1676     m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
1677     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1678 }
1679 
1680 /**
1681  * @tc.name: excutor_run_003
1682  * @tc.desc: Verify the success of the OH_NNExecutor_Run function
1683  * @tc.type: FUNC
1684  */
1685 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_003, testing::ext::TestSize.Level0)
1686 {
1687     LOGE("OH_NNExecutor_Run excutor_run_003");
1688     size_t m_backendID {0};
1689     std::shared_ptr<Device> m_device {nullptr};
1690     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1691     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1692         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1693     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1694     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1695     ExtensionConfig extensionConfig;
1696     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1697     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1698     NNExecutor* executor = new (std::nothrow) NNExecutor(
1699         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1700         false, performance, priority);
1701     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1702 
1703     int32_t inputDims[2] = {3, 4};
1704     m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
1705     OH_NN_ReturnCode ret = OH_NNExecutor_Run(nnExecutor);
1706     EXPECT_EQ(OH_NN_SUCCESS, ret);
1707 
1708     testing::Mock::AllowLeak(mockIPreparedMode.get());
1709 }
1710 
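/*
 * Illustrative sketch (not part of the original suite): the end-to-end flow that the
 * excutor_run_* cases probe piecewise. Inputs and outputs are bound first, then Run
 * performs the inference; the 3 x 4 float shape mirrors the tensors used above.
 */
static OH_NN_ReturnCode RunOnceSketch(OH_NNExecutor* nnExecutor)
{
    int32_t dims[2] = {3, 4};
    OH_NN_Tensor tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
    float input[12] = {0};
    float output[12] = {0};
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, 0, &tensor, input, sizeof(input));
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }
    ret = OH_NNExecutor_SetOutput(nnExecutor, 0, output, sizeof(output));
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }
    return OH_NNExecutor_Run(nnExecutor);    // blocks until the inference finishes
}
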
1711 /*
1712  * @tc.name: executor_allocate_input_memory_001
1713  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateInputMemory function.
1714  * @tc.type: FUNC
1715  */
1716 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_001, testing::ext::TestSize.Level0)
1717 {
1718     OH_NNExecutor* nnExecutor = nullptr;
1719     uint32_t inputIndex = 0;
1720     size_t length = 9 * sizeof(float);
1721 
1722     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1723     EXPECT_EQ(nullptr, ret);
1724 }
1725 
1726 /*
1727  * @tc.name: executor_allocate_input_memory_002
1728  * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateInputMemory function.
1729  * @tc.type: FUNC
1730  */
1731 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_002, testing::ext::TestSize.Level0)
1732 {
1733     InnerModel innerModel;
1734     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1735 
1736     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1737     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1738     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1739 
1740     uint32_t inputIndex = 0;
1741     size_t length = 0;
1742 
1743     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1744     EXPECT_EQ(nullptr, ret);
1745 }
1746 
1747 /*
1748  * @tc.name: executor_allocate_input_memory_003
1749  * @tc.desc: Verify the error when creating input memory in executor of the OH_NNExecutor_AllocateInputMemory function.
1750  * @tc.type: FUNC
1751  */
1752 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_003, testing::ext::TestSize.Level0)
1753 {
1754     InnerModel innerModel;
1755     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1756 
1757     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1758     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1759     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1760 
1761     uint32_t inputIndex = 6;
1762     size_t length = 9 * sizeof(float);
1763 
1764     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1765     EXPECT_EQ(nullptr, ret);
1766 }
1767 
1768 /*
1769  * @tc.name: executor_allocate_input_memory_004
1770  * @tc.desc: Verify the executor is invalid of the OH_NNExecutor_AllocateInputMemory function.
1771  * @tc.type: FUNC
1772  */
1773 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_004, testing::ext::TestSize.Level0)
1774 {
1775     InnerModel innerModel;
1776     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1777 
1778     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1779     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1780     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1781 
1782     uint32_t inputIndex = 0;
1783     size_t length = 9 * sizeof(float);
1784 
1785     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1786     EXPECT_EQ(nullptr, ret);
1787 }
1788 
1789 /*
1790  * @tc.name: executor_allocate_input_memory_005
1791  * @tc.desc: Verify the input index is out of range of the OH_NNExecutor_AllocateInputMemory function.
1792  * @tc.type: FUNC
1793  */
1794 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_005, testing::ext::TestSize.Level0)
1795 {
1796     LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_005");
1797     size_t m_backendID {0};
1798     std::shared_ptr<Device> m_device {nullptr};
1799     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1800     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1801         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1802     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1803     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1804     ExtensionConfig extensionConfig;
1805     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1806     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1807     NNExecutor* executor = new (std::nothrow) NNExecutor(
1808         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1809         false, performance, priority);
1810     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1811 
1812     uint32_t inputIndex = 0;
1813     size_t length = 9 * sizeof(float);
1814 
1815     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1816     EXPECT_EQ(nullptr, ret);
1817 
1818     testing::Mock::AllowLeak(mockIPreparedMode.get());
1819 }
1820 
1821 /*
1822  * @tc.name: executor_allocate_input_memory_006
1823  * @tc.desc: Verify the success of the OH_NNExecutor_AllocateInputMemory function.
1824  * @tc.type: FUNC
1825  */
1826 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_006, testing::ext::TestSize.Level0)
1827 {
1828     LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_006");
1829     size_t m_backendID {0};
1830     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1831 
1832     std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1833     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1834     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1835 
1836     std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair1;
1837     std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair2;
1838     std::shared_ptr<TensorDesc> tensorDesc = std::make_shared<TensorDesc>();
1839     int32_t expectDim[2] = {3, 3};
1840     int32_t* ptr = expectDim;
1841     uint32_t dimensionCount = 2;
1842     tensorDesc->SetShape(ptr, dimensionCount);
1843     pair1.first = tensorDesc;
1844     pair2.first = tensorDesc;
1845     m_inputTensorDescs.emplace_back(pair1);
1846     m_inputTensorDescs.emplace_back(pair2);
1847     m_outputTensorDescs.emplace_back(pair1);
1848     m_outputTensorDescs.emplace_back(pair2);
1849     ExtensionConfig extensionConfig;
1850     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1851     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1852 
1853     size_t length = 9 * sizeof(float);
1854     EXPECT_CALL(*((MockIDevice *) device.get()), AllocateTensorBuffer(length, m_inputTensorDescs[0].first))
1855         .WillRepeatedly(::testing::Return(reinterpret_cast<void*>(0x1000)));
1856 
1857     NNExecutor* executor = new (std::nothrow) NNExecutor(
1858         m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1859         false, performance, priority);
1860     EXPECT_NE(nullptr, executor);
1861     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1862 
1863     uint32_t inputIndex = 0;
1864 
1865     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1866     EXPECT_NE(nullptr, ret);
1867 
1868     testing::Mock::AllowLeak(device.get());
1869 }
1870 
1871 /*
1872  * @tc.name: executor_allocate_input_memory_007
1873  * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateInputMemory function.
1874  * @tc.type: FUNC
1875  */
1876 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_007, testing::ext::TestSize.Level0)
1877 {
1878     LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_007");
1879     size_t m_backendID {0};
1880     std::shared_ptr<Device> m_device {nullptr};
1881     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1882     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1883         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1884     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1885     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1886     ExtensionConfig extensionConfig;
1887     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1888     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
1889 
1890     NNExecutor* executor = new (std::nothrow) NNExecutor(
1891         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
1892         false, performance, priority);
1893     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1894 
1895     uint32_t inputIndex = 0;
1896     size_t length = 0;
1897 
1898     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
1899     EXPECT_EQ(nullptr, ret);
1900 
1901     testing::Mock::AllowLeak(mockIPreparedMode.get());
1902 }
1903 
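/*
 * Illustrative sketch (not part of the original suite): OH_NN_Memory returned by
 * OH_NNExecutor_AllocateInputMemory is shared with the device and must be released
 * through OH_NNExecutor_DestroyInputMemory, which is expected to reset the caller's
 * pointer to nullptr -- the behavior the destroy tests further below rely on.
 */
static void AllocateAndReleaseInputSketch(OH_NNExecutor* nnExecutor)
{
    OH_NN_Memory* memory = OH_NNExecutor_AllocateInputMemory(nnExecutor, 0, 12 * sizeof(float));
    if (memory == nullptr) {
        return;                    // invalid executor, input index or length
    }
    // memory->data may now be filled with input values before running.
    OH_NNExecutor_DestroyInputMemory(nnExecutor, 0, &memory);    // memory becomes nullptr
}
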
1904 /*
1905  * @tc.name: executor_allocate_output_memory_001
1906  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateOutputMemory function.
1907  * @tc.type: FUNC
1908  */
1909 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_001, testing::ext::TestSize.Level0)
1910 {
1911     OH_NNExecutor* nnExecutor = nullptr;
1912     uint32_t outputIndex = 0;
1913     size_t length = 9 * sizeof(float);
1914 
1915     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1916     EXPECT_EQ(nullptr, ret);
1917 }
1918 
1919 /*
1920  * @tc.name: executor_allocate_output_memory_002
1921  * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateOutputMemory function.
1922  * @tc.type: FUNC
1923  */
1924 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_002, testing::ext::TestSize.Level0)
1925 {
1926     InnerModel innerModel;
1927     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1928 
1929     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1930     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1931     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1932 
1933     uint32_t outputIndex = 0;
1934     size_t length = 0;
1935 
1936     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1937     EXPECT_EQ(nullptr, ret);
1938 }
1939 
1940 /*
1941  * @tc.name: executor_allocate_output_memory_003
1942  * @tc.desc: Verify the error when creating output memory in executor of the OH_NNExecutor_AllocateOutputMemory function.
1943  * @tc.type: FUNC
1944  */
1945 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_003, testing::ext::TestSize.Level0)
1946 {
1947     InnerModel innerModel;
1948     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1949 
1950     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1951     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1952     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1953 
1954     uint32_t outputIndex = 6;
1955     size_t length = 9 * sizeof(float);
1956 
1957     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1958     EXPECT_EQ(nullptr, ret);
1959 }
1960 
1961 /*
1962  * @tc.name: executor_allocate_output_memory_004
1963  * @tc.desc: Verify the executor is invalid of the OH_NNExecutor_AllocateOutputMemory function.
1964  * @tc.type: FUNC
1965  */
1966 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_004, testing::ext::TestSize.Level0)
1967 {
1968     InnerModel innerModel;
1969     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1970 
1971     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1972     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1973     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1974 
1975     uint32_t outputIndex = 0;
1976     size_t length = 9 * sizeof(float);
1977 
1978     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1979     EXPECT_EQ(nullptr, ret);
1980 }
1981 
1982 /*
1983  * @tc.name: executor_allocate_output_memory_005
1984  * @tc.desc: Verify the output index is out of range of the OH_NNExecutor_AllocateOutputMemory function.
1985  * @tc.type: FUNC
1986  */
1987 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_005, testing::ext::TestSize.Level0)
1988 {
1989     LOGE("OH_NNExecutor_AllocateOutputMemory executor_allocate_output_memory_005");
1990     size_t m_backendID {0};
1991     std::shared_ptr<Device> m_device {nullptr};
1992     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1993     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1994         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1995     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1996     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1997     ExtensionConfig extensionConfig;
1998     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
1999     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2000 
2001     NNExecutor* executor = new (std::nothrow) NNExecutor(
2002         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2003         false, performance, priority);
2004     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2005 
2006     uint32_t outputIndex = 0;
2007     size_t length = 9 * sizeof(float);
2008 
2009     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
2010     EXPECT_EQ(nullptr, ret);
2011 
2012     testing::Mock::AllowLeak(mockIPreparedMode.get());
2013 }
2014 
2015 /*
2016  * @tc.name: executor_allocate_output_memory_006
2017  * @tc.desc: Verify the success of the OH_NNExecutor_AllocateOutputMemory function.
2018  * @tc.type: FUNC
2019  */
2020 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_006, testing::ext::TestSize.Level0)
2021 {
2022     LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_output_memory_006");
2023     size_t m_backendID {0};
2024     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2025 
2026     std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2027     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2028     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2029 
2030     std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair1;
2031     std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair2;
2032     std::shared_ptr<TensorDesc> tensorDesc = std::make_shared<TensorDesc>();
2033     int32_t expectDim[2] = {3, 3};
2034     int32_t* ptr = expectDim;
2035     uint32_t dimensionCount = 2;
2036     tensorDesc->SetShape(ptr, dimensionCount);
2037     pair1.first = tensorDesc;
2038     pair2.first = tensorDesc;
2039     m_inputTensorDescs.emplace_back(pair1);
2040     m_inputTensorDescs.emplace_back(pair2);
2041     m_outputTensorDescs.emplace_back(pair1);
2042     m_outputTensorDescs.emplace_back(pair2);
2043     ExtensionConfig extensionConfig;
2044     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2045     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2046 
2047     size_t length = 9 * sizeof(float);
2048     EXPECT_CALL(*((MockIDevice *) device.get()), AllocateTensorBuffer(length, m_outputTensorDescs[0].first))
2049         .WillRepeatedly(::testing::Return(reinterpret_cast<void*>(0x1000)));
2050 
2051     NNExecutor* executor = new (std::nothrow) NNExecutor(
2052         m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2053         false, performance, priority);
2054     EXPECT_NE(nullptr, executor);
2055     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2056 
2057     uint32_t outputIndex = 0;
2058 
2059     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
2060     EXPECT_NE(nullptr, ret);
2061 
2062     testing::Mock::AllowLeak(device.get());
2063 }
2064 
2065 /*
2066  * @tc.name: executor_allocate_output_memory_007
2067  * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateOutputMemory function.
2068  * @tc.type: FUNC
2069  */
2070 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_007, testing::ext::TestSize.Level0)
2071 {
2072     LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_output_memory_007");
2073     size_t m_backendID {0};
2074     std::shared_ptr<Device> m_device {nullptr};
2075     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2076     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2077         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2078     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2079     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2080     ExtensionConfig extensionConfig;
2081     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2082     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2083 
2084     NNExecutor* executor = new (std::nothrow) NNExecutor(
2085         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2086         false, performance, priority);
2087     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2088 
2089     uint32_t outputIndex = 0;
2090     size_t length = 0;
2091 
2092     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
2093     EXPECT_EQ(nullptr, ret);
2094 
2095     testing::Mock::AllowLeak(mockIPreparedMode.get());
2096 }
2097 
2098 /*
2099  * @tc.name: executor_destroy_input_memory_001
2100  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyInputMemory function.
2101  * @tc.type: FUNC
2102  */
2103 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_001, testing::ext::TestSize.Level0)
2104 {
2105     InnerModel innerModel;
2106     BuildModel(innerModel);
2107     OH_NNExecutor* nnExecutor = nullptr;
2108 
2109     uint32_t inputIndex = 0;
2110     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2111     void* const data = dataArry;
2112     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2113     OH_NN_Memory* pMemory = &memory;
2114     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
2115     EXPECT_EQ(nullptr, nnExecutor);
2116 }
2117 
2118 /*
2119  * @tc.name: executor_destroy_input_memory_002
2120  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
2121  * @tc.type: FUNC
2122  */
2123 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_002, testing::ext::TestSize.Level0)
2124 {
2125     LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_002");
2126     size_t m_backendID {0};
2127     std::shared_ptr<Device> m_device {nullptr};
2128     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2129     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2130         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2131     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2132     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2133     ExtensionConfig extensionConfig;
2134     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2135     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2136 
2137     NNExecutor* executor = new (std::nothrow) NNExecutor(
2138         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2139         false, performance, priority);
2140     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2141 
2142     uint32_t inputIndex = 0;
2143     OH_NN_Memory** memory = nullptr;
2144     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, memory);
2145     EXPECT_EQ(nullptr, memory);
2146 
2147     testing::Mock::AllowLeak(mockIPreparedMode.get());
2148 }
2149 
2150 /*
2151  * @tc.name: executor_destroy_input_memory_003
2152  * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
2153  * @tc.type: FUNC
2154  */
2155 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_003, testing::ext::TestSize.Level0)
2156 {
2157     LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_003");
2158     size_t m_backendID {0};
2159     std::shared_ptr<Device> m_device {nullptr};
2160     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2161     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2162         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2163     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2164     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2165     ExtensionConfig extensionConfig;
2166     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2167     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2168 
2169     NNExecutor* executor = new (std::nothrow) NNExecutor(
2170         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2171         false, performance, priority);
2172     EXPECT_NE(executor, nullptr);
2173     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2174 
2175     uint32_t inputIndex = 0;
2176     OH_NN_Memory* memory = nullptr;
2177     OH_NN_Memory** pMemory = &memory;
2178     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, pMemory);
2179 
2180     testing::Mock::AllowLeak(mockIPreparedMode.get());
2181 }
2182 
2183 /*
2184  * @tc.name: executor_destroy_input_memory_004
2185  * @tc.desc: Verify the error happened when destroying input memory of the OH_NNExecutor_DestroyInputMemory function.
2186  * @tc.type: FUNC
2187  */
2188 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_004, testing::ext::TestSize.Level0)
2189 {
2190     LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_004");
2191     size_t m_backendID {0};
2192     std::shared_ptr<Device> m_device {nullptr};
2193     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2194     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2195         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2196     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2197     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2198     ExtensionConfig extensionConfig;
2199     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2200     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2201 
2202     NNExecutor* executor = new (std::nothrow) NNExecutor(
2203         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2204         false, performance, priority);
2205     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2206 
2207     uint32_t inputIndex = 6;
2208     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2209     void* const data = dataArry;
2210     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2211     OH_NN_Memory* pMemory = &memory;
2212     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
2213     EXPECT_NE(nullptr, pMemory);
2214 
2215     testing::Mock::AllowLeak(mockIPreparedMode.get());
2216 }
2217 
2218 /*
2219  * @tc.name: executor_destroy_output_memory_001
2220  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
2221  * @tc.type: FUNC
2222  */
2223 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_001, testing::ext::TestSize.Level0)
2224 {
2225     OH_NNExecutor* nnExecutor = nullptr;
2226     uint32_t outputIndex = 0;
2227     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2228     void* const data = dataArry;
2229     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2230     OH_NN_Memory* pMemory = &memory;
2231     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
2232     EXPECT_EQ(nullptr, nnExecutor);
2233 }
2234 
2235 /*
2236  * @tc.name: executor_destroy_output_memory_002
2237  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
2238  * @tc.type: FUNC
2239  */
2240 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_002, testing::ext::TestSize.Level0)
2241 {
2242     LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_002");
2243     size_t m_backendID {0};
2244     std::shared_ptr<Device> m_device {nullptr};
2245     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2246     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2247         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2248     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2249     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2250     ExtensionConfig extensionConfig;
2251     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2252     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2253 
2254     NNExecutor* executor = new (std::nothrow) NNExecutor(
2255         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2256         false, performance, priority);
2257     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2258 
2259     uint32_t outputIndex = 0;
2260     OH_NN_Memory** memory = nullptr;
2261     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, memory);
2262     EXPECT_EQ(nullptr, memory);
2263 
2264     testing::Mock::AllowLeak(mockIPreparedMode.get());
2265 }
2266 
2267 /*
2268  * @tc.name: executor_destroy_output_memory_003
2269  * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
2270  * @tc.type: FUNC
2271  */
2272 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_003, testing::ext::TestSize.Level0)
2273 {
2274     LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_003");
2275     size_t m_backendID {0};
2276     std::shared_ptr<Device> m_device {nullptr};
2277     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2278     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2279         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2280     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2281     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2282     ExtensionConfig extensionConfig;
2283     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2284     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2285 
2286     NNExecutor* executor = new (std::nothrow) NNExecutor(
2287         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2288         false, performance, priority);
2289     EXPECT_NE(executor, nullptr);
2290     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2291 
2292     uint32_t outputIndex = 0;
2293     OH_NN_Memory* memory = nullptr;
2294     OH_NN_Memory** pMemory = &memory;
2295     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, pMemory);
2296 
2297     testing::Mock::AllowLeak(mockIPreparedMode.get());
2298 }
2299 
2300 /*
2301  * @tc.name: executor_destroy_output_memory_004
2302  * @tc.desc: Verify the error happened when destroying output memory of the OH_NNExecutor_DestroyOutputMemory function.
2303  * @tc.type: FUNC
2304  */
2305 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_004, testing::ext::TestSize.Level0)
2306 {
2307     LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_004");
2308     size_t m_backendID {0};
2309     std::shared_ptr<Device> m_device {nullptr};
2310     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2311     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2312         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2313     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2314     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2315     ExtensionConfig extensionConfig;
2316     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2317     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2318 
2319     NNExecutor* executor = new (std::nothrow) NNExecutor(
2320         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2321         false, performance, priority);
2322     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2323 
2324     uint32_t outputIndex = 6;
2325     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2326     void* const data = dataArry;
2327     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2328     OH_NN_Memory* pMemory = &memory;
2329     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
2330     EXPECT_NE(nullptr, pMemory);
2331 
2332     testing::Mock::AllowLeak(mockIPreparedMode.get());
2333 }
2334 
2335 /*
2336  * @tc.name: executor_destroy_output_memory_005
2337  * @tc.desc: Verify the success of the OH_NNExecutor_DestroyOutputMemory function.
2338  * @tc.type: FUNC
2339  */
2340 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_005, testing::ext::TestSize.Level0)
2341 {
2342     LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_005");
2343     size_t m_backendID {0};
2344     std::shared_ptr<Device> m_device {nullptr};
2345     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2346     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2347         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2348     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2349     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2350     ExtensionConfig extensionConfig;
2351     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2352     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2353 
2354     NNExecutor* executor = new (std::nothrow) NNExecutor(
2355         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2356         false, performance, priority);
2357     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2358 
2359     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2360     void* const data = dataArry;
2361     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2362     OH_NN_Memory* pMemory = &memory;
2363     uint32_t outputIndex = 0;
2364     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
2365     EXPECT_NE(nullptr, pMemory);
2366 
2367     testing::Mock::AllowLeak(mockIPreparedMode.get());
2368 }
2369 
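/*
 * Illustrative sketch (not part of the original suite): the Destroy* entry points return
 * void, so the tests above infer failure from the pointer being left non-null. A caller
 * can apply the same check, as in this hypothetical wrapper.
 */
static bool DestroyOutputMemoryCheckedSketch(OH_NNExecutor* nnExecutor, uint32_t outputIndex, OH_NN_Memory** memory)
{
    if (memory == nullptr || *memory == nullptr) {
        return false;                       // nothing to release
    }
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, memory);
    return *memory == nullptr;              // a nulled pointer signals the release happened
}
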
2370 /*
2371  * @tc.name: executor_set_input_with_memory_001
2372  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInputWithMemory function.
2373  * @tc.type: FUNC
2374  */
2375 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_001, testing::ext::TestSize.Level0)
2376 {
2377     OH_NNExecutor* nnExecutor = nullptr;
2378 
2379     SetTensor();
2380 
2381     uint32_t inputIndex = 0;
2382     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2383     void* const data = dataArry;
2384     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2385 
2386     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
2387     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2388 }
2389 
2390 /*
2391  * @tc.name: executor_set_input_with_memory_002
2392  * @tc.desc: Verify the operand is nullptr of the OH_NNExecutor_SetInputWithMemory function.
2393  * @tc.type: FUNC
2394  */
2395 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_002, testing::ext::TestSize.Level0)
2396 {
2397     LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_002");
2398     size_t m_backendID {0};
2399     std::shared_ptr<Device> m_device {nullptr};
2400     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2401     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2402         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2403     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2404     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2405     ExtensionConfig extensionConfig;
2406     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2407     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2408 
2409     NNExecutor* executor = new (std::nothrow) NNExecutor(
2410         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2411         false, performance, priority);
2412     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2413 
2414     OH_NN_Tensor* operand = nullptr;
2415 
2416     uint32_t inputIndex = 0;
2417     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2418     void* const data = dataArry;
2419     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2420 
2421     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, operand, &memory);
2422     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2423 
2424     testing::Mock::AllowLeak(mockIPreparedMode.get());
2425 }
2426 
2427 /*
2428  * @tc.name: executor_set_input_with_memory_003
2429  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetInputWithMemory function.
2430  * @tc.type: FUNC
2431  */
2432 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_003, testing::ext::TestSize.Level0)
2433 {
2434     LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_003");
2435     size_t m_backendID {0};
2436     std::shared_ptr<Device> m_device {nullptr};
2437     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2438     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2439         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2440     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2441     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2442     ExtensionConfig extensionConfig;
2443     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2444     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2445 
2446     NNExecutor* executor = new (std::nothrow) NNExecutor(
2447         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2448         false, performance, priority);
2449     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2450 
2451     SetTensor();
2452 
2453     uint32_t inputIndex = 0;
2454     OH_NN_Memory* memory = nullptr;
2455     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, memory);
2456     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2457 
2458     testing::Mock::AllowLeak(mockIPreparedMode.get());
2459 }
2460 
2461 /*
2462  * @tc.name: executor_set_input_with_memory_004
2463  * @tc.desc: Verify the success of the OH_NNExecutor_SetInputWithMemory function.
2464  * @tc.type: FUNC
2465  */
2466 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_004, testing::ext::TestSize.Level0)
2467 {
2468     LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_004");
2469     size_t m_backendID {0};
2470     std::shared_ptr<Device> m_device {nullptr};
2471     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2472     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2473         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2474     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2475     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2476     ExtensionConfig extensionConfig;
2477     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2478     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2479 
2480     NNExecutor* executor = new (std::nothrow) NNExecutor(
2481         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2482         false, performance, priority);
2483     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2484 
2485     uint32_t inputIndex = 0;
2486     int32_t dims[2] = {3, 4};
2487     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
2488 
2489     float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
2490     void* const data = dataArry;
2491     OH_NN_Memory memory = {data, 12 * sizeof(float)};
2492 
2493     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
2494     EXPECT_EQ(OH_NN_FAILED, ret);
2495 
2496     testing::Mock::AllowLeak(mockIPreparedMode.get());
2497 }
2498 
2499 
2500 /*
2501  * @tc.name: executor_set_output_with_memory_001
2502  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
2503  * @tc.type: FUNC
2504  */
2505 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_001, testing::ext::TestSize.Level0)
2506 {
2507     OH_NNExecutor* nnExecutor = nullptr;
2508     uint32_t outputIndex = 0;
2509     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2510     void* const data = dataArry;
2511     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2512     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
2513     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2514 }
2515 
2516 /*
2517  * @tc.name: executor_set_output_with_memory_002
2518  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
2519  * @tc.type: FUNC
2520  */
2521 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_002, testing::ext::TestSize.Level0)
2522 {
2523     LOGE("OH_NNExecutor_SetOutputWithMemory executor_set_output_with_memory_002");
2524     size_t m_backendID {0};
2525     std::shared_ptr<Device> m_device {nullptr};
2526     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2527     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2528         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2529     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2530     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2531     ExtensionConfig extensionConfig;
2532     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2533     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2534 
2535     NNExecutor* executor = new (std::nothrow) NNExecutor(
2536         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2537         false, performance, priority);
2538     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2539 
2540     uint32_t outputIndex = 0;
2541     OH_NN_Memory* memory = nullptr;
2542     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, memory);
2543     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2544 
2545     testing::Mock::AllowLeak(mockIPreparedMode.get());
2546 }
2547 
2548 /*
2549  * @tc.name: executor_set_output_with_memory_003
2550  * @tc.desc: Verify the success of the OH_NNExecutor_SetOutputWithMemory function.
2551  * @tc.type: FUNC
2552  */
2553 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_003, testing::ext::TestSize.Level0)
2554 {
2555     LOGE("OH_NNExecutor_SetOutputWithMemory executor_set_output_with_memory_003");
2556     size_t m_backendID {0};
2557     std::shared_ptr<Device> m_device {nullptr};
2558     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
2559     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
2560         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
2561     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2562     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2563     ExtensionConfig extensionConfig;
2564     OH_NN_PerformanceMode performance {OH_NN_PERFORMANCE_EXTREME};
2565     OH_NN_Priority priority {OH_NN_PRIORITY_HIGH};
2566 
2567     NNExecutor* executor = new (std::nothrow) NNExecutor(
2568         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs, "", 0, extensionConfig,
2569         false, performance, priority);
2570     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
2571 
2572     uint32_t outputIndex = 0;
2573     float dataArray[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
2574     void* const data = dataArray;
2575     OH_NN_Memory memory = {data, 12 * sizeof(float)};
2576     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
2577     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2578 
2579     testing::Mock::AllowLeak(mockIPreparedModel.get());
2580 }
2581 
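/*
 * Usage sketch (illustrative only, not an executable test): the cases above
 * cover the failure paths, so this shows the intended call shape. `executor`
 * is assumed to come from a successfully compiled model whose output 0 holds
 * twelve floats.
 *
 *     float buffer[12] = {};
 *     OH_NN_Memory memory = {buffer, sizeof(buffer)};
 *     OH_NN_ReturnCode rc = OH_NNExecutor_SetOutputWithMemory(executor, 0, &memory);
 *     // rc is OH_NN_SUCCESS only if output 0 exists and the length matches.
 */
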
2582 /*
2583  * @tc.name: executor_destroy_001
2584  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Destroy function.
2585  * @tc.type: FUNC
2586  */
2587 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_001, testing::ext::TestSize.Level0)
2588 {
2589     OH_NNExecutor** pExecutor = nullptr;
2590     OH_NNExecutor_Destroy(pExecutor);
2591     EXPECT_EQ(nullptr, pExecutor);
2592 }
2593 
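/*
 * Usage sketch (illustrative only): OH_NNExecutor_Destroy takes the address
 * of the executor pointer so it can reset it. `executor` is assumed to be a
 * previously created instance; passing nullptr, as tested above, is a no-op.
 *
 *     OH_NNExecutor_Destroy(&executor);
 *     // executor is expected to be nullptr after a successful destroy.
 */
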
2594 /*
2595  * @tc.name: device_get_all_devices_id_001
2596  * @tc.desc: Verify the allDevicesID is nullptr of the OH_NNDevice_GetAllDevicesID function.
2597  * @tc.type: FUNC
2598  */
2599 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_001, testing::ext::TestSize.Level0)
2600 {
2601     const size_t** allDevicesId = nullptr;
2602     uint32_t deviceCount = 1;
2603     uint32_t* pDeviceCount = &deviceCount;
2604     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(allDevicesId, pDeviceCount);
2605     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2606 }
2607 
2608 /*
2609  * @tc.name: device_get_all_devices_id_002
2610  * @tc.desc: Verify the *allDevicesID is not nullptr of the OH_NNDevice_GetAllDevicesID function.
2611  * @tc.type: FUNC
2612  */
2613 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_002, testing::ext::TestSize.Level0)
2614 {
2615     const size_t devicesId = 1;
2616     const size_t* allDevicesId = &devicesId;
2617     const size_t** pAllDevicesId = &allDevicesId;
2618     uint32_t deviceCount = 1;
2619     uint32_t* pDeviceCount = &deviceCount;
2620     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2621     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2622 }
2623 
2624 /*
2625  * @tc.name: device_get_all_devices_id_003
2626  * @tc.desc: Verify the deviceCount is nullptr of the OH_NNDevice_GetAllDevicesID function.
2627  * @tc.type: FUNC
2628  */
2629 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_003, testing::ext::TestSize.Level0)
2630 {
2631     const size_t* allDevicesId = nullptr;
2632     const size_t** pAllDevicesId = &allDevicesId;
2633     uint32_t* pDeviceCount = nullptr;
2634     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2635     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2636 }
2637 
2638 /*
2639  * @tc.name: device_get_all_devices_id_004
2640  * @tc.desc: Verify that OH_NNDevice_GetAllDevicesID still succeeds when the mock device layer reports OH_NN_FAILED.
2641  * @tc.type: FUNC
2642  */
2643 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_004, testing::ext::TestSize.Level0)
2644 {
2645     const size_t* allDevicesId = nullptr;
2646     const size_t** pAllDevicesId = &allDevicesId;
2647     uint32_t deviceCount = 1;
2648     uint32_t* pDeviceCount = &deviceCount;
2649     OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_FAILED;
2650     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2651     EXPECT_EQ(OH_NN_SUCCESS, ret);
2652 }
2653 
2654 /*
2655  * @tc.name: device_get_all_devices_id_005
2656  * @tc.desc: Verify the success of the OH_NNDevice_GetAllDevicesID function.
2657  * @tc.type: FUNC
2658  */
2659 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_005, testing::ext::TestSize.Level0)
2660 {
2661     const size_t* allDevicesId = nullptr;
2662     const size_t** pAllDevicesId = &allDevicesId;
2663     uint32_t deviceCount = 1;
2664     uint32_t* pDeviceCount = &deviceCount;
2665     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2666     EXPECT_EQ(OH_NN_SUCCESS, ret);
2667 }
2668 
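/*
 * Usage sketch (illustrative only): as the cases above show, *allDevicesID
 * must start as nullptr and deviceCount must be a valid pointer; the runtime
 * fills both in.
 *
 *     const size_t* allDevicesId = nullptr;
 *     uint32_t deviceCount = 0;
 *     OH_NN_ReturnCode rc = OH_NNDevice_GetAllDevicesID(&allDevicesId, &deviceCount);
 *     for (uint32_t i = 0; rc == OH_NN_SUCCESS && i < deviceCount; ++i) {
 *         // allDevicesId[i] can be passed to OH_NNDevice_GetName/GetType.
 *     }
 */
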
2669 /*
2670  * @tc.name: device_get_name_001
2671  * @tc.desc: Verify the name is nullptr of the OH_NNDevice_GetName function.
2672  * @tc.type: FUNC
2673  */
2674 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_001, testing::ext::TestSize.Level0)
2675 {
2676     size_t deviceID = 1;
2677     const char **name = nullptr;
2678     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, name);
2679     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2680 }
2681 
2682 /*
2683  * @tc.name: device_get_name_002
2684  * @tc.desc: Verify the *name is not nullptr of the OH_NNDevice_GetName function.
2685  * @tc.type: FUNC
2686  */
2687 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_002, testing::ext::TestSize.Level0)
2688 {
2689     size_t deviceID = 1;
2690     const char* name = "deviceId";
2691     const char** pName = &name;
2692     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2693     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2694 }
2695 
2696 /*
2697  * @tc.name: device_get_name_003
2698  * @tc.desc: Verify that an error occurs when getting the name of an unknown deviceID in the OH_NNDevice_GetName function.
2699  * @tc.type: FUNC
2700  */
2701 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_003, testing::ext::TestSize.Level0)
2702 {
2703     size_t deviceID = 12345;
2704     const char* name = nullptr;
2705     const char** pName = &name;
2706     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2707     EXPECT_EQ(OH_NN_FAILED, ret);
2708 }
2709 
2710 /*
2711  * @tc.name: device_get_name_004
2712  * @tc.desc: Verify that OH_NNDevice_GetName fails when no device matches the given deviceID.
2713  * @tc.type: FUNC
2714  */
2715 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_004, testing::ext::TestSize.Level0)
2716 {
2717     size_t deviceID = 1;
2718     const char* name = nullptr;
2719     const char** pName = &name;
2720     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2721     EXPECT_EQ(OH_NN_FAILED, ret);
2722 }
2723 
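/*
 * Usage sketch (illustrative only): as tested above, *name must start as
 * nullptr; on success the runtime points it at a string it owns. `deviceID`
 * is assumed to come from OH_NNDevice_GetAllDevicesID.
 *
 *     const char* name = nullptr;
 *     if (OH_NNDevice_GetName(deviceID, &name) == OH_NN_SUCCESS) {
 *         // name is now a readable, runtime-owned string.
 *     }
 */
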
2724 /*
2725  * @tc.name: device_get_type_001
2726  * @tc.desc: Verify the invalid deviceID of the OH_NNDevice_GetType function.
2727  * @tc.type: FUNC
2728  */
2729 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_001, testing::ext::TestSize.Level0)
2730 {
2731     size_t deviceID = 12345;
2732     OH_NN_DeviceType deviceType = OH_NN_CPU;
2733     OH_NN_DeviceType* pDeviceType = &deviceType;
2734     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2735     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2736 }
2737 
2738 /*
2739  * @tc.name: device_get_type_002
2740  * @tc.desc: Verify the OH_NN_DeviceType is nullptr of the OH_NNDevice_GetType function.
2741  * @tc.type: FUNC
2742  */
2743 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_002, testing::ext::TestSize.Level0)
2744 {
2745     size_t deviceID = 1;
2746     OH_NN_DeviceType* pDeviceType = nullptr;
2747     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2748     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2749 }
2750 
2751 /*
2752  * @tc.name: device_get_type_003
2753  * @tc.desc: Verify that an error occurs when getting the type of the deviceID in the OH_NNDevice_GetType function.
2754  * @tc.type: FUNC
2755  */
2756 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_003, testing::ext::TestSize.Level0)
2757 {
2758     size_t deviceID = 1;
2759     OH_NN_DeviceType deviceType = OH_NN_OTHERS;
2760     OH_NN_DeviceType* pDeviceType = &deviceType;
2761     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2762     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2763 }
2764 
2765 /*
2766  * @tc.name: device_get_type_004
2767  * @tc.desc: Verify that OH_NNDevice_GetType returns OH_NN_INVALID_PARAMETER when the deviceID does not match a device.
2768  * @tc.type: FUNC
2769  */
2770 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_004, testing::ext::TestSize.Level0)
2771 {
2772     size_t deviceID = 1;
2773     OH_NN_DeviceType deviceType = OH_NN_CPU;
2774     OH_NN_DeviceType* pDeviceType = &deviceType;
2775     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2776     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2777 }
2778 
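/*
 * Usage sketch (illustrative only): the cases above exercise unknown IDs and
 * null output pointers. Assuming `deviceID` names a real device:
 *
 *     OH_NN_DeviceType type = OH_NN_OTHERS;
 *     if (OH_NNDevice_GetType(deviceID, &type) == OH_NN_SUCCESS) {
 *         // type now holds the device category, e.g. OH_NN_CPU.
 *     }
 */
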
2779 /*
2780  * @tc.name: oh_nnquantparam_create_001
2781  * @tc.desc: Verify the success of the OH_NNQuantParam_Create function.
2782  * @tc.type: FUNC
2783  */
2784 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_create_001, testing::ext::TestSize.Level0)
2785 {
2786     LOGE("OH_NNQuantParam_Create oh_nnquantparam_create_001");
2787     NN_QuantParam* ret = OH_NNQuantParam_Create();
2788     EXPECT_NE(nullptr, ret);
2789 }
2790 
2791 /*
2792  * @tc.name: oh_nnquantparam_setscales_001
2793  * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_SetScales function.
2794  * @tc.type: FUNC
2795  */
2796 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_001, testing::ext::TestSize.Level0)
2797 {
2798     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_001");
2799     size_t quantNum = 1;
2800     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(nullptr, nullptr, quantNum);
2801     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2802 }
2803 
2804 /*
2805  * @tc.name: oh_nnquantparam_setscales_002
2806  * @tc.desc: Verify the scales is nullptr of the OH_NNQuantParam_SetScales function.
2807  * @tc.type: FUNC
2808  */
2809 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_002, testing::ext::TestSize.Level0)
2810 {
2811     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_002");
2812     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2813     size_t quantNum = 1;
2814     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, nullptr, quantNum);
2815     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2816 }
2817 
2818 /*
2819  * @tc.name: oh_nnquantparam_setscales_003
2820  * @tc.desc: Verify the quantNum is zero of the OH_NNQuantParam_SetScales function.
2821  * @tc.type: FUNC
2822  */
2823 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_003, testing::ext::TestSize.Level0)
2824 {
2825     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_003");
2826     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2827     double scale = 2;
2828     size_t quantNum = 0;
2829     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, &scale, quantNum);
2830     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2831 }
2832 
2833 /*
2834  * @tc.name: oh_nnquantparam_setscales_004
2835  * @tc.desc: Verify the success of the OH_NNQuantParam_SetScales function.
2836  * @tc.type: FUNC
2837  */
2838 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_004, testing::ext::TestSize.Level0)
2839 {
2840     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_004");
2841     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2842     double scale = 2;
2843     size_t quantNum = 2;
2844     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, &scale, quantNum);
2845     EXPECT_EQ(OH_NN_SUCCESS, ret);
2846 }
2847 
2848 /*
2849  * @tc.name: oh_nnquantparam_setzeropoints_001
2850  * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_SetZeroPoints function.
2851  * @tc.type: FUNC
2852  */
2853 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_001, testing::ext::TestSize.Level0)
2854 {
2855     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_001");
2856     size_t quantNum = 2;
2857     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(nullptr, nullptr, quantNum);
2858     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2859 }
2860 
2861 /*
2862  * @tc.name: oh_nnquantparam_setzeropoints_002
2863  * @tc.desc: Verify the zeroPoints is nullptr of the OH_NNQuantParam_SetZeroPoints function.
2864  * @tc.type: FUNC
2865  */
2866 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_002, testing::ext::TestSize.Level0)
2867 {
2868     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_002");
2869     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2870     size_t quantNum = 2;
2871     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, nullptr, quantNum);
2872     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2873 }
2874 
2875 /*
2876  * @tc.name: oh_nnquantparam_setzeropoints_003
2877  * @tc.desc: Verify the quantNum is zero of the OH_NNQuantParam_SetZeroPoints function.
2878  * @tc.type: FUNC
2879  */
2880 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_003, testing::ext::TestSize.Level0)
2881 {
2882     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_003");
2883     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2884     int32_t zeroPoints = 2;
2885     size_t quantNum = 0;
2886     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoints, quantNum);
2887     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2888 }
2889 
2890 /*
2891  * @tc.name: oh_nnquantparam_setzeropoints_004
2892  * @tc.desc: Verify the success of the OH_NNQuantParam_SetZeroPoints function.
2893  * @tc.type: FUNC
2894  */
2895 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_004, testing::ext::TestSize.Level0)
2896 {
2897     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_004");
2898     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2899     int32_t zeroPoints = 2;
2900     size_t quantNum = 2;
2901     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoints, quantNum);
2902     EXPECT_EQ(OH_NN_SUCCESS, ret);
2903 }
2904 
2905 /*
2906  * @tc.name: oh_nnquantparam_setnumbits_001
2907  * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_SetNumBits function.
2908  * @tc.type: FUNC
2909  */
2910 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_001, testing::ext::TestSize.Level0)
2911 {
2912     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_001");
2913     size_t quantNum = 2;
2914     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(nullptr, nullptr, quantNum);
2915     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2916 }
2917 
2918 /*
2919  * @tc.name: oh_nnquantparam_setnumbits_002
2920  * @tc.desc: Verify the numBits is nullptr of the OH_NNQuantParam_SetNumBits function.
2921  * @tc.type: FUNC
2922  */
2923 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_002, testing::ext::TestSize.Level0)
2924 {
2925     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_002");
2926     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2927     size_t quantNum = 2;
2928     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, nullptr, quantNum);
2929     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2930 }
2931 
2932 /*
2933  * @tc.name: oh_nnquantparam_setnumbits_003
2934  * @tc.desc: Verify the quantNum is zero of the OH_NNQuantParam_SetNumBits function.
2935  * @tc.type: FUNC
2936  */
2937 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_003, testing::ext::TestSize.Level0)
2938 {
2939     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_003");
2940     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2941     uint32_t numBits = 2;
2942     size_t quantNum = 0;
2943     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, &numBits, quantNum);
2944     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2945 }
2946 
2947 /*
2948  * @tc.name: oh_nnquantparam_setnumbits_004
2949  * @tc.desc: Verify the success of the OH_NNQuantParam_SetNumBits function.
2950  * @tc.type: FUNC
2951  */
2952 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_004, testing::ext::TestSize.Level0)
2953 {
2954     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_004");
2955     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2956     uint32_t numBits = 2;
2957     size_t quantNum = 2;
2958     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, &numBits, quantNum);
2959     EXPECT_EQ(OH_NN_SUCCESS, ret);
2960 }
2961 
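/*
 * Usage sketch (illustrative only): the three setters share one contract --
 * a non-null NN_QuantParam, a non-null value array, and quantNum > 0. The
 * arrays are assumed to hold one entry per quantized channel.
 *
 *     NN_QuantParam* qp = OH_NNQuantParam_Create();
 *     double scales[2] = {0.1, 0.2};
 *     int32_t zeroPoints[2] = {0, 0};
 *     uint32_t numBits[2] = {8, 8};
 *     OH_NNQuantParam_SetScales(qp, scales, 2);
 *     OH_NNQuantParam_SetZeroPoints(qp, zeroPoints, 2);
 *     OH_NNQuantParam_SetNumBits(qp, numBits, 2);
 */
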
2962 /*
2963  * @tc.name: oh_nnquantparam_destroy_001
2964  * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_Destroy function.
2965  * @tc.type: FUNC
2966  */
2967 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_001, testing::ext::TestSize.Level0)
2968 {
2969     LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_001");
2970     OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(nullptr);
2971     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2972 }
2973 
2974 /*
2975  * @tc.name: oh_nnquantparam_destroy_002
2976  * @tc.desc: Verify the *quantParams is nullptr of the OH_NNQuantParam_Destroy function.
2977  * @tc.type: FUNC
2978  */
2979 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_002, testing::ext::TestSize.Level0)
2980 {
2981     LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_002");
2982     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2983     NN_QuantParam** quantParamsPtr = &quantParams;
2984     *quantParamsPtr = nullptr;
2985     OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(quantParamsPtr);
2986     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2987 }
2988 
2989 /*
2990  * @tc.name: oh_nnquantparam_destroy_003
2991  * @tc.desc: Verify the success of the OH_NNQuantParam_Destroy function.
2992  * @tc.type: FUNC
2993  */
2994 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_003, testing::ext::TestSize.Level0)
2995 {
2996     LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_003");
2997     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2998     NN_QuantParam** quantParamsPtr = &quantParams;
2999     OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(quantParamsPtr);
3000     EXPECT_EQ(OH_NN_SUCCESS, ret);
3001 }
3002 
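/*
 * Usage sketch (illustrative only): OH_NNQuantParam_Destroy takes a double
 * pointer and, as tested above, rejects both nullptr and a pointer to
 * nullptr; on success it is expected to reset the pointer.
 *
 *     NN_QuantParam* qp = OH_NNQuantParam_Create();
 *     OH_NN_ReturnCode rc = OH_NNQuantParam_Destroy(&qp);
 *     // rc == OH_NN_SUCCESS; qp should be nullptr afterwards.
 */
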
3003 /*
3004  * @tc.name: oh_nnmodel_addtensortomodel_001
3005  * @tc.desc: Verify the model is nullptr of the OH_NNModel_AddTensorToModel function.
3006  * @tc.type: FUNC
3007  */
3008 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_001, testing::ext::TestSize.Level0)
3009 {
3010     LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_001");
3011     TensorDesc* tensorDescImpl = new (std::nothrow) TensorDesc();
3012     NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(tensorDescImpl);
3013     OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(nullptr, tensor);
3014     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3015 }
3016 
3017 /*
3018  * @tc.name: oh_nnmodel_addtensortomodel_002
3019  * @tc.desc: Verify the tensorDesc is nullptr of the OH_NNModel_AddTensorToModel function.
3020  * @tc.type: FUNC
3021  */
3022 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_002, testing::ext::TestSize.Level0)
3023 {
3024     LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_002");
3025     OH_NNModel* model = OH_NNModel_Construct();
3026     OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(model, nullptr);
3027     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3028 }
3029 
3030 /*
3031  * @tc.name: oh_nnmodel_addtensortomodel_003
3032  * @tc.desc: Verify the success of the OH_NNModel_AddTensorToModel function.
3033  * @tc.type: FUNC
3034  */
3035 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_003, testing::ext::TestSize.Level0)
3036 {
3037     LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_003");
3038     OH_NNModel* model = OH_NNModel_Construct();
3039     TensorDesc* tensorDescImpl = new (std::nothrow) TensorDesc();
3040     NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(tensorDescImpl);
3041     OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(model, tensor);
3042     EXPECT_EQ(OH_NN_SUCCESS, ret);
3043 }
3044 
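/*
 * Usage sketch (illustrative only), mirroring the success case above: a
 * TensorDesc is wrapped as an NN_TensorDesc and handed to the model. How
 * ownership transfers afterwards is an assumption not covered by the test.
 *
 *     OH_NNModel* model = OH_NNModel_Construct();
 *     TensorDesc* desc = new (std::nothrow) TensorDesc();
 *     NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(desc);
 *     OH_NN_ReturnCode rc = OH_NNModel_AddTensorToModel(model, tensor);
 */
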
3045 /*
3046  * @tc.name: oh_nnmodel_settensorquantparams_001
3047  * @tc.desc: Verify the model is nullptr of the OH_NNModel_SetTensorQuantParams function.
3048  * @tc.type: FUNC
3049  */
3050 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_001, testing::ext::TestSize.Level0)
3051 {
3052     LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_001");
3053     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
3054     uint32_t index = 10;
3055     OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(nullptr, index, quantParams);
3056     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3057 }
3058 
3059 /*
3060  * @tc.name: oh_nnmodel_settensorquantparams_002
3061  * @tc.desc: Verify the quantParams is nullptr of the OH_NNModel_SetTensorQuantParams function.
3062  * @tc.type: FUNC
3063  */
3064 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_002, testing::ext::TestSize.Level0)
3065 {
3066     LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_002");
3067     OH_NNModel* model = OH_NNModel_Construct();
3068     uint32_t index = 10;
3069     OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(model, index, nullptr);
3070     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3071 }
3072 
3073 /*
3074  * @tc.name: oh_nnmodel_settensorquantparams_003
3075  * @tc.desc: Verify the invalid tensor index of the OH_NNModel_SetTensorQuantParams function.
3076  * @tc.type: FUNC
3077  */
3078 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_003, testing::ext::TestSize.Level0)
3079 {
3080     LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_003");
3081     OH_NNModel* model = OH_NNModel_Construct();
3082     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
3083     uint32_t index = 10;
3084     OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(model, index, quantParams);
3085     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3086 }
3087 
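/*
 * Usage sketch (illustrative only): the index must name a tensor already
 * added to the model, which is why index 10 on a model with no tensors
 * fails above. Assuming one tensor was added at index 0:
 *
 *     NN_QuantParam* qp = OH_NNQuantParam_Create();
 *     // ... OH_NNQuantParam_Set* calls ...
 *     OH_NN_ReturnCode rc = OH_NNModel_SetTensorQuantParams(model, 0, qp);
 */
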
3088 /*
3089  * @tc.name: oh_nnmodel_settensortype_001
3090  * @tc.desc: Verify the model is nullptr of the OH_NNModel_SetTensorType function.
3091  * @tc.type: FUNC
3092  */
3093 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_001, testing::ext::TestSize.Level0)
3094 {
3095     LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_001");
3096     OH_NN_TensorType tensorType = OH_NN_REDUCE_MIN_KEEP_DIMS;
3097     uint32_t index = 10;
3098     OH_NN_ReturnCode ret = OH_NNModel_SetTensorType(nullptr, index, tensorType);
3099     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3100 }
3101 
3102 /*
3103  * @tc.name: oh_nnmodel_settensortype_002
3104  * @tc.desc: Verify the invalid tensor index of the OH_NNModel_SetTensorType function.
3105  * @tc.type: FUNC
3106  */
3107 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_002, testing::ext::TestSize.Level0)
3108 {
3109     LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_002");
3110     OH_NNModel* model = OH_NNModel_Construct();
3111     OH_NN_TensorType tensorType = OH_NN_REDUCE_MIN_COEFF;
3112     uint32_t index = 10;
3113     OH_NN_ReturnCode ret = OH_NNModel_SetTensorType(model, index, tensorType);
3114     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3115 }
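/*
 * Usage sketch (illustrative only): as with SetTensorQuantParams, the index
 * must name an existing tensor. Assuming tensor 0 exists in `model`:
 *
 *     OH_NN_ReturnCode rc = OH_NNModel_SetTensorType(model, 0, OH_NN_REDUCE_MIN_KEEP_DIMS);
 */
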
3116 } // namespace Unittest
3117 } // namespace NeuralNetworkRuntime
3118 } // namespace OHOS
3119