1 /*
2 * Copyright (c) 2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "neural_network_runtime_test.h"
17
18 #include "mindir.h"
19
20 #include "utils.h"
21 #include "compilation.h"
22 #include "hdi_device_v2_0.h"
23 #include "test/unittest/common/v2_0/mock_idevice.h"
24
25 namespace OHOS {
26 namespace NeuralNetworkRuntime {
PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,const ModelConfig & config,std::shared_ptr<PreparedModel> & preparedModel)27 OH_NN_ReturnCode HDIDeviceV2_0::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
28 const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel)
29 {
30 if (model == nullptr) {
31 return OH_NN_INVALID_PARAMETER;
32 }
33
34 if (config.enableFloat16 == false) {
35 return OH_NN_FAILED;
36 }
37
38 sptr<OHOS::HDI::Nnrt::V2_0::IPreparedModel> iPreparedModel = sptr<OHOS::HDI::Nnrt::V2_0
39 ::MockIPreparedModel>(new OHOS::HDI::Nnrt::V2_0::MockIPreparedModel());
40 if (iPreparedModel == nullptr) {
41 LOGE("HDIDeviceV2_0 mock PrepareModel failed, error happened when new sptr");
42 return OH_NN_NULL_PTR;
43 }
44
45 preparedModel = CreateSharedPtr<HDIPreparedModelV2_0>(iPreparedModel);
46 return OH_NN_SUCCESS;
47 }
48
GetDeviceType(OH_NN_DeviceType & deviceType)49 OH_NN_ReturnCode HDIDeviceV2_0::GetDeviceType(OH_NN_DeviceType& deviceType)
50 {
51 if (deviceType == OH_NN_OTHERS) {
52 return OH_NN_UNAVAILABLE_DEVICE;
53 }
54
55 return OH_NN_SUCCESS;
56 }
57
IsModelCacheSupported(bool & isSupported)58 OH_NN_ReturnCode HDIDeviceV2_0::IsModelCacheSupported(bool& isSupported)
59 {
60 isSupported = true;
61 return OH_NN_SUCCESS;
62 }
63
IsPerformanceModeSupported(bool & isSupported)64 OH_NN_ReturnCode HDIDeviceV2_0::IsPerformanceModeSupported(bool& isSupported)
65 {
66 isSupported = true;
67 return OH_NN_SUCCESS;
68 }
69
IsPrioritySupported(bool & isSupported)70 OH_NN_ReturnCode HDIDeviceV2_0::IsPrioritySupported(bool& isSupported)
71 {
72 isSupported = true;
73 return OH_NN_SUCCESS;
74 }
75
IsFloat16PrecisionSupported(bool & isSupported)76 OH_NN_ReturnCode HDIDeviceV2_0::IsFloat16PrecisionSupported(bool& isSupported)
77 {
78 isSupported = true;
79 return OH_NN_SUCCESS;
80 }
81
GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,std::vector<bool> & ops)82 OH_NN_ReturnCode HDIDeviceV2_0::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
83 std::vector<bool>& ops)
84 {
85 if (model == nullptr) {
86 LOGE("HDIDeviceV2_0 mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
87 return OH_NN_NULL_PTR;
88 }
89
90 ops.emplace_back(true);
91 return OH_NN_SUCCESS;
92 }
93
IsDynamicInputSupported(bool & isSupported)94 OH_NN_ReturnCode HDIDeviceV2_0::IsDynamicInputSupported(bool& isSupported)
95 {
96 isSupported = true;
97 return OH_NN_SUCCESS;
98 }
99 } // namespace NeuralNetworkRuntime
100 } // namespace OHOS
101
102 namespace OHOS {
103 namespace NeuralNetworkRuntime {
104 namespace Unittest {
BuildModel(InnerModel & model)105 OH_NN_ReturnCode NeuralNetworkRuntimeTest::BuildModel(InnerModel& model)
106 {
107 int32_t inputDims[2] = {3, 4};
108 OH_NN_Tensor input1 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
109 OH_NN_ReturnCode ret = model.AddTensor(input1);
110 if (ret != OH_NN_SUCCESS) {
111 return ret;
112 }
113
114 // 添加Add算子的第二个输入Tensor,类型为float32,张量形状为[3, 4]
115 OH_NN_Tensor input2 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
116 ret = model.AddTensor(input2);
117 if (ret != OH_NN_SUCCESS) {
118 return ret;
119 }
120
121 // 添加Add算子的参数Tensor,该参数Tensor用于指定激活函数的类型,Tensor的数据类型为int8。
122 int32_t activationDims = 1;
123 int8_t activationValue = OH_NN_FUSED_NONE;
124 OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
125 ret = model.AddTensor(activation);
126 if (ret != OH_NN_SUCCESS) {
127 return ret;
128 }
129
130 // 将激活函数类型设置为OH_NN_FUSED_NONE,表示该算子不添加激活函数。
131 uint32_t index = 2;
132 ret = model.SetTensorValue(index, &activationValue, sizeof(int8_t));
133 if (ret != OH_NN_SUCCESS) {
134 return ret;
135 }
136
137 // 设置Add算子的输出,类型为float32,张量形状为[3, 4]
138 OH_NN_Tensor output = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
139 ret = model.AddTensor(output);
140 if (ret != OH_NN_SUCCESS) {
141 return ret;
142 }
143
144 // 指定Add算子的输入、参数和输出索引
145 uint32_t inputIndicesValues[2] = {0, 1};
146 uint32_t paramIndicesValues = 2;
147 uint32_t outputIndicesValues = 3;
148 OH_NN_UInt32Array paramIndices = {¶mIndicesValues, 1};
149 OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
150 OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};
151
152 // 向模型实例添加Add算子
153 ret = model.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
154 if (ret != OH_NN_SUCCESS) {
155 return ret;
156 }
157
158 // 设置模型实例的输入、输出索引
159 ret = model.SpecifyInputsAndOutputs(inputIndices, outputIndices);
160 if (ret != OH_NN_SUCCESS) {
161 return ret;
162 }
163
164 // 完成模型实例的构建
165 ret = model.Build();
166 if (ret != OH_NN_SUCCESS) {
167 return ret;
168 }
169
170 return ret;
171 }
172
InitIndices()173 void NeuralNetworkRuntimeTest::InitIndices()
174 {
175 m_inputIndices.data = m_inputIndexs;
176 m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
177
178 m_outputIndices.data = m_outputIndexs;
179 m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
180
181 m_paramIndices.data = m_paramIndexs;
182 m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
183 }
184
AddModelTensor(InnerModel & innerModel)185 void NeuralNetworkRuntimeTest::AddModelTensor(InnerModel& innerModel)
186 {
187 const int dim[2] = {2, 2};
188 const OH_NN_Tensor& tensor = {OH_NN_FLOAT32, 2, dim, nullptr, OH_NN_TENSOR};
189
190 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
191 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
192 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
193
194 const OH_NN_Tensor& tensorParam = {OH_NN_INT8, 0, nullptr, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
195 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensorParam));
196 }
197
SetTensor()198 void NeuralNetworkRuntimeTest::SetTensor()
199 {
200 m_tensor.dataType = OH_NN_INT32;
201 m_tensor.dimensionCount = 0;
202 m_tensor.dimensions = nullptr;
203 m_tensor.quantParam = nullptr;
204 m_tensor.type = OH_NN_TENSOR;
205 }
206
SetInnerBuild(InnerModel & innerModel)207 void NeuralNetworkRuntimeTest::SetInnerBuild(InnerModel& innerModel)
208 {
209 uint32_t index = 3;
210 const int8_t activation = 0;
211 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
212 static_cast<const void *>(&activation), sizeof(int8_t)));
213
214 OH_NN_OperationType opType {OH_NN_OPS_ADD};
215 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
216 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
217 EXPECT_EQ(OH_NN_SUCCESS, innerModel.Build());
218 }
219
SetInputAndOutput(Executor & executor)220 void NeuralNetworkRuntimeTest::SetInputAndOutput(Executor& executor)
221 {
222 size_t input1Index = 0;
223 int32_t inputDims[2] = {3, 4};
224 size_t lengthSize = 12 * sizeof(float);
225 size_t *length = &lengthSize;
226
227 size_t minInputDims = 1;
228 size_t maxInputDims = 12;
229
230 size_t *minInputDimsAdress = &minInputDims;
231 size_t **minInputDimsAdressA = &minInputDimsAdress;
232
233 size_t *maxInputDimsAdress = &maxInputDims;
234 size_t **maxInputDimsAdressA = &maxInputDimsAdress;
235
236 m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
237 EXPECT_EQ(OH_NN_SUCCESS, executor.GetInputDimRange(input1Index, minInputDimsAdressA, maxInputDimsAdressA, length));
238 uint32_t outputIndex = 0;
239
240 int32_t shape = 3;
241 int32_t* shapeA = &shape;
242 int32_t** shapeAA = &shapeA;
243 uint32_t* shapeNum = &outputIndex;
244 EXPECT_EQ(OH_NN_SUCCESS, executor.GetOutputShape(outputIndex, shapeAA, shapeNum));
245 }
246
247 /*
248 * @tc.name: model_construct_001
249 * @tc.desc: Verify the return model of the OH_NNModel_Construct function.
250 * @tc.type: FUNC
251 */
252 HWTEST_F(NeuralNetworkRuntimeTest, model_construct_001, testing::ext::TestSize.Level0)
253 {
254 OH_NNModel* ret = OH_NNModel_Construct();
255 EXPECT_NE(nullptr, ret);
256 }
257
258 /*
259 * @tc.name: model_add_tensor_001
260 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Tensor function.
261 * @tc.type: FUNC
262 */
263 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_001, testing::ext::TestSize.Level0)
264 {
265 OH_NNModel* model = nullptr;
266 const int32_t dimInput[2] = {2, 2};
267 const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
268 OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
269 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
270 }
271
272 /*
273 * @tc.name: model_add_tensor_002
274 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNModel_AddTensor function.
275 * @tc.type: FUNC
276 */
277 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_002, testing::ext::TestSize.Level0)
278 {
279 InnerModel innerModel;
280
281 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
282 OH_NN_Tensor* tensor = nullptr;
283 OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, tensor);
284 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
285 }
286
287 /*
288 * @tc.name: model_add_tensor_003
289 * @tc.desc: Verify the success of the OH_NNModel_AddTensor function.
290 * @tc.type: FUNC
291 */
292 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_003, testing::ext::TestSize.Level0)
293 {
294 InnerModel innerModel;
295 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
296
297 const int32_t dimInput[2] = {2, 2};
298 const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
299 OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
300 EXPECT_EQ(OH_NN_SUCCESS, ret);
301 }
302
303 /*
304 * @tc.name: model_add_operation_001
305 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddOperation function.
306 * @tc.type: FUNC
307 */
308 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_001, testing::ext::TestSize.Level0)
309 {
310 InnerModel innerModel;
311 OH_NNModel* model = nullptr;
312 OH_NN_OperationType opType {OH_NN_OPS_ADD};
313
314 InitIndices();
315 AddModelTensor(innerModel);
316
317 uint32_t index = 3;
318 const int8_t activation = 0;
319 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
320 static_cast<const void *>(&activation), sizeof(int8_t)));
321
322 OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
323 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
324 }
325
326 /*
327 * @tc.name: model_add_operation_002
328 * @tc.desc: Verify the paramIndices is nullptr of the OH_NNModel_AddOperation function.
329 * @tc.type: FUNC
330 */
331 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_002, testing::ext::TestSize.Level0)
332 {
333 InnerModel innerModel;
334 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
335 OH_NN_OperationType opType {OH_NN_OPS_ADD};
336
337 m_inputIndices.data = m_inputIndexs;
338 m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
339
340 m_outputIndices.data = m_outputIndexs;
341 m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
342
343 AddModelTensor(innerModel);
344 uint32_t index = 3;
345 const int8_t activation = 0;
346 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
347 static_cast<const void *>(&activation), sizeof(int8_t)));
348
349 OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, nullptr, &m_inputIndices, &m_outputIndices);
350 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
351 }
352
353 /*
354 * @tc.name: model_add_operation_003
355 * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_AddOperation function.
356 * @tc.type: FUNC
357 */
358 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_003, testing::ext::TestSize.Level0)
359 {
360 InnerModel innerModel;
361 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
362 OH_NN_OperationType opType {OH_NN_OPS_ADD};
363
364 m_paramIndices.data = m_paramIndexs;
365 m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
366
367 m_outputIndices.data = m_outputIndexs;
368 m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
369
370 AddModelTensor(innerModel);
371 uint32_t index = 3;
372 const int8_t activation = 0;
373 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
374 static_cast<const void *>(&activation), sizeof(int8_t)));
375
376 OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, nullptr, &m_outputIndices);
377 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
378 }
379
380 /*
381 * @tc.name: model_add_operation_004
382 * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_AddOperation function.
383 * @tc.type: FUNC
384 */
385 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_004, testing::ext::TestSize.Level0)
386 {
387 InnerModel innerModel;
388 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
389 OH_NN_OperationType opType {OH_NN_OPS_ADD};
390
391 m_paramIndices.data = m_paramIndexs;
392 m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
393
394 m_inputIndices.data = m_inputIndexs;
395 m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
396
397 AddModelTensor(innerModel);
398 uint32_t index = 3;
399 const int8_t activation = 0;
400 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
401 static_cast<const void *>(&activation), sizeof(int8_t)));
402
403 OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, nullptr);
404 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
405 }
406
407 /*
408 * @tc.name: model_add_operation_005
409 * @tc.desc: Verify the success of the OH_NNModel_AddOperation function.
410 * @tc.type: FUNC
411 */
412 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_005, testing::ext::TestSize.Level0)
413 {
414 InnerModel innerModel;
415 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
416 OH_NN_OperationType opType {OH_NN_OPS_ADD};
417
418 InitIndices();
419 AddModelTensor(innerModel);
420
421 uint32_t index = 3;
422 const int8_t activation = 0;
423 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
424 static_cast<const void *>(&activation), sizeof(int8_t)));
425
426 OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
427 EXPECT_EQ(OH_NN_SUCCESS, ret);
428 }
429
430 /*
431 * @tc.name: model_set_tensor_data_001
432 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SetTensorData function.
433 * @tc.type: FUNC
434 */
435 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_001, testing::ext::TestSize.Level0)
436 {
437 InnerModel innerModel;
438 OH_NNModel* model = nullptr;
439 AddModelTensor(innerModel);
440
441 uint32_t index = 3;
442 const int8_t activation = 0;
443
444 OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
445 sizeof(int8_t));
446 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
447 }
448
449 /*
450 * @tc.name: model_set_tensor_data_002
451 * @tc.desc: Verify the data is nullptr of the OH_NNModel_SetTensorData function.
452 * @tc.type: FUNC
453 */
454 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_002, testing::ext::TestSize.Level0)
455 {
456 InnerModel innerModel;
457 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
458 AddModelTensor(innerModel);
459
460 uint32_t index = 3;
461
462 OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, nullptr, sizeof(int8_t));
463 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
464 }
465
466 /*
467 * @tc.name: model_set_tensor_data_003
468 * @tc.desc: Verify the length is 0 of the OH_NNModel_SetTensorData function.
469 * @tc.type: FUNC
470 */
471 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_003, testing::ext::TestSize.Level0)
472 {
473 InnerModel innerModel;
474 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
475 AddModelTensor(innerModel);
476
477 uint32_t index = 3;
478 const int8_t activation = 0;
479
480 OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation), 0);
481 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
482 }
483
484 /*
485 * @tc.name: model_set_tensor_data_004
486 * @tc.desc: Verify the successs of the OH_NNModel_SetTensorData function.
487 * @tc.type: FUNC
488 */
489 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_004, testing::ext::TestSize.Level0)
490 {
491 InnerModel innerModel;
492 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
493 AddModelTensor(innerModel);
494
495 uint32_t index = 3;
496 const int8_t activation = 0;
497
498 OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
499 sizeof(int8_t));
500 EXPECT_EQ(OH_NN_SUCCESS, ret);
501 }
502
503 /*
504 * @tc.name: model_specify_inputs_and_outputs_001
505 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
506 * @tc.type: FUNC
507 */
508 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_001, testing::ext::TestSize.Level0)
509 {
510 InnerModel innerModel;
511 OH_NNModel* model = nullptr;
512
513 InitIndices();
514 AddModelTensor(innerModel);
515
516 OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
517 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
518 }
519
520 /*
521 * @tc.name: model_specify_inputs_and_outputs_002
522 * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
523 * @tc.type: FUNC
524 */
525 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_002, testing::ext::TestSize.Level0)
526 {
527 InnerModel innerModel;
528 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
529
530 InitIndices();
531 AddModelTensor(innerModel);
532
533 OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, nullptr, &m_outputIndices);
534 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
535 }
536
537 /*
538 * @tc.name: model_specify_inputs_and_outputs_003
539 * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
540 * @tc.type: FUNC
541 */
542 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_003, testing::ext::TestSize.Level0)
543 {
544 InnerModel innerModel;
545 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
546
547 InitIndices();
548 AddModelTensor(innerModel);
549
550 OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, nullptr);
551 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
552 }
553
554 /*
555 * @tc.name: model_specify_inputs_and_outputs_004
556 * @tc.desc: Verify the success of the OH_NNModel_SpecifyInputsAndOutputs function.
557 * @tc.type: FUNC
558 */
559 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_004, testing::ext::TestSize.Level0)
560 {
561 InnerModel innerModel;
562 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
563
564 InitIndices();
565 AddModelTensor(innerModel);
566
567 OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
568 EXPECT_EQ(OH_NN_SUCCESS, ret);
569 }
570
571 /*
572 * @tc.name: model_finish_001
573 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Finish function.
574 * @tc.type: FUNC
575 */
576 HWTEST_F(NeuralNetworkRuntimeTest, model_finish_001, testing::ext::TestSize.Level0)
577 {
578 InnerModel innerModel;
579 OH_NNModel* model = nullptr;
580
581 OH_NN_OperationType opType {OH_NN_OPS_ADD};
582
583 InitIndices();
584 AddModelTensor(innerModel);
585
586 uint32_t index = 3;
587 const int8_t activation = 0;
588 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index, static_cast<const void *>(&activation),
589 sizeof(int8_t)));
590
591 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
592 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
593
594 OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
595 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
596 }
597
598 /*
599 * @tc.name: model_finish_002
600 * @tc.desc: Verify the success of the OH_NNModel_Finish function.
601 * @tc.type: FUNC
602 */
603 HWTEST_F(NeuralNetworkRuntimeTest, model_finish_002, testing::ext::TestSize.Level0)
604 {
605 InnerModel innerModel;
606 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
607
608 OH_NN_OperationType opType {OH_NN_OPS_ADD};
609
610 InitIndices();
611 AddModelTensor(innerModel);
612
613 const int8_t activation = 0;
614 uint32_t index = 3;
615 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
616 static_cast<const void *>(&activation), sizeof(int8_t)));
617
618 EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
619 EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
620
621 OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
622 EXPECT_EQ(OH_NN_SUCCESS, ret);
623 }
624
625 /*
626 * @tc.name: model_destroy_001
627 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Destroy function.
628 * @tc.type: FUNC
629 */
630 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_001, testing::ext::TestSize.Level0)
631 {
632 InnerModel innerModel;
633 OH_NNModel** pModel = nullptr;
634 OH_NNModel_Destroy(pModel);
635 EXPECT_EQ(nullptr, pModel);
636 }
637
638 /*
639 * @tc.name: model_destroy_003
640 * @tc.desc: Verify the normal model of the OH_NNModel_Destroy function.
641 * @tc.type: FUNC
642 */
643 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_003, testing::ext::TestSize.Level0)
644 {
645 InnerModel* innerModel = new InnerModel();
646 EXPECT_NE(nullptr, innerModel);
647 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
648 OH_NNModel_Destroy(&model);
649 EXPECT_EQ(nullptr, model);
650 }
651
652 /*
653 * @tc.name: model_get_available_operation_001
654 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_GetAvailableOperations function.
655 * @tc.type: FUNC
656 */
657 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_001, testing::ext::TestSize.Level0)
658 {
659 InnerModel innerModel;
660 OH_NNModel* model = nullptr;
661
662 uint32_t opCount = 1;
663 const bool *pIsAvailable = nullptr;
664
665 InitIndices();
666 AddModelTensor(innerModel);
667 SetInnerBuild(innerModel);
668
669 size_t deviceID = 10;
670 OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
671 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
672 }
673
674 /*
675 * @tc.name: model_get_available_operation_002
676 * @tc.desc: Verify the isAvailable is nullptr of the OH_NNModel_GetAvailableOperations function.
677 * @tc.type: FUNC
678 */
679 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_002, testing::ext::TestSize.Level0)
680 {
681 InnerModel innerModel;
682 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
683
684 uint32_t opCount = 1;
685 InitIndices();
686 AddModelTensor(innerModel);
687 SetInnerBuild(innerModel);
688
689 size_t deviceID = 10;
690 OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, nullptr, &opCount);
691 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
692 }
693
694 /*
695 * @tc.name: model_get_available_operation_003
696 * @tc.desc: Verify the *isAvailable is no nullptr of the OH_NNModel_GetAvailableOperations function.
697 * @tc.type: FUNC
698 */
699 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_003, testing::ext::TestSize.Level0)
700 {
701 InnerModel innerModel;
702 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
703
704 const bool isAvailable = true;
705 const bool *pIsAvailable = &isAvailable;
706 uint32_t opCount = 1;
707
708 InitIndices();
709 AddModelTensor(innerModel);
710 SetInnerBuild(innerModel);
711
712 size_t deviceID = 10;
713 OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
714 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
715 }
716
717 /*
718 * @tc.name: model_get_available_operation_004
719 * @tc.desc: Verify the opCount is nullptr of the OH_NNModel_GetAvailableOperations function.
720 * @tc.type: FUNC
721 */
722 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_004, testing::ext::TestSize.Level0)
723 {
724 InnerModel innerModel;
725 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
726
727 const bool *pIsAvailable = nullptr;
728 uint32_t* opCount = nullptr;
729
730 InitIndices();
731 AddModelTensor(innerModel);
732 SetInnerBuild(innerModel);
733
734 size_t deviceID = 10;
735 OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, opCount);
736 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
737 }
738
739 /*
740 * @tc.name: model_get_available_operation_005
741 * @tc.desc: Verify the success of the OH_NNModel_GetAvailableOperations function.
742 * @tc.type: FUNC
743 */
744 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_005, testing::ext::TestSize.Level0)
745 {
746 InnerModel innerModel;
747 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
748
749 const bool *pIsAvailable = nullptr;
750 uint32_t opCount = 1;
751
752 InitIndices();
753 AddModelTensor(innerModel);
754 SetInnerBuild(innerModel);
755
756 size_t deviceID = 10;
757 OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
758 EXPECT_EQ(OH_NN_FAILED, ret);
759 }
760
761 /*
762 * @tc.name: compilation_construct_001
763 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
764 * @tc.type: FUNC
765 */
766 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_001, testing::ext::TestSize.Level0)
767 {
768 InnerModel innerModel;
769 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
770 const OH_NNModel* model = nullptr;
771 OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
772 EXPECT_EQ(nullptr, ret);
773 }
774
775 /*
776 * @tc.name: compilation_construct_002
777 * @tc.desc: Verify the not OH_NNModel_Build before creating compilation of the OH_NNCompilation_Construct function.
778 * @tc.type: FUNC
779 */
780 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_002, testing::ext::TestSize.Level0)
781 {
782 InnerModel innerModel;
783 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
784 OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
785 EXPECT_NE(nullptr, ret);
786 }
787
788 /*
789 * @tc.name: compilation_construct_003
790 * @tc.desc: Verify the normal model of the OH_NNCompilation_Construct function.
791 * @tc.type: FUNC
792 */
793 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_003, testing::ext::TestSize.Level0)
794 {
795 InnerModel innerModel;
796 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
797 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
798 OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
799 EXPECT_NE(nullptr, ret);
800 }
801
802 /*
803 * @tc.name: compilation_set_device_001
804 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
805 * @tc.type: FUNC
806 */
807 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_001, testing::ext::TestSize.Level0)
808 {
809 OH_NNCompilation* compilation = nullptr;
810 size_t deviceId = 1;
811 OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(compilation, deviceId);
812 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
813 }
814
815 /*
816 * @tc.name: compilation_set_device_002
817 * @tc.desc: Verify the success of the OH_NNCompilation_SetDevice function.
818 * @tc.type: FUNC
819 */
820 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_002, testing::ext::TestSize.Level0)
821 {
822 InnerModel innerModel;
823 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
824
825 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
826 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
827 size_t deviceId = 1;
828 OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId);
829 EXPECT_EQ(OH_NN_SUCCESS, ret);
830 }
831
832 /*
833 * @tc.name: compilation_set_cache_001
834 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
835 * @tc.type: FUNC
836 */
837 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_001, testing::ext::TestSize.Level0)
838 {
839 InnerModel innerModel;
840 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
841 OH_NNCompilation* nnCompilation = nullptr;
842 const char* cacheDir = "../";
843 uint32_t version = 1;
844 OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
845 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
846 }
847
848 /*
849 * @tc.name: compilation_set_cache_002
850 * @tc.desc: Verify the cachePath is nullptr of the OH_NNCompilation_SetCache function.
851 * @tc.type: FUNC
852 */
853 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_002, testing::ext::TestSize.Level0)
854 {
855 InnerModel innerModel;
856 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
857
858 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
859 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
860 const char* cacheDir = nullptr;
861 uint32_t version = 1;
862 OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
863 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
864 }
865
866 /*
867 * @tc.name: compilation_set_cache_003
868 * @tc.desc: Verify the success of the OH_NNCompilation_SetCache function.
869 * @tc.type: FUNC
870 */
871 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_003, testing::ext::TestSize.Level0)
872 {
873 InnerModel innerModel;
874 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
875
876 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
877 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
878 const char* cacheDir = "../";
879 uint32_t version = 1;
880 OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
881 EXPECT_EQ(OH_NN_SUCCESS, ret);
882 }
883
884 /*
885 * @tc.name: compilation_set_performance_mode_001
886 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPerformanceMode function.
887 * @tc.type: FUNC
888 */
889 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_001, testing::ext::TestSize.Level0)
890 {
891 InnerModel innerModel;
892 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
893 OH_NNCompilation* nnCompilation = nullptr;
894 OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
895
896 OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
897 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
898 }
899
900 /*
901 * @tc.name: compilation_set_performance_mode_002
902 * @tc.desc: Verify the success of the OH_NNCompilation_SetPerformanceMode function.
903 * @tc.type: FUNC
904 */
905 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_002, testing::ext::TestSize.Level0)
906 {
907 InnerModel innerModel;
908 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
909
910 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
911 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
912 OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
913
914 OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
915 EXPECT_EQ(OH_NN_SUCCESS, ret);
916 }
917
918 /*
919 * @tc.name: compilation_set_priority_001
920 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPriority function.
921 * @tc.type: FUNC
922 */
923 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_001, testing::ext::TestSize.Level0)
924 {
925 InnerModel innerModel;
926 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
927 OH_NNCompilation* nnCompilation = nullptr;
928 OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
929
930 OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
931 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
932 }
933
934 /*
935 * @tc.name: compilation_set_priority_002
936 * @tc.desc: Verify the success of the OH_NNCompilation_SetPriority function.
937 * @tc.type: FUNC
938 */
939 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_002, testing::ext::TestSize.Level0)
940 {
941 InnerModel innerModel;
942 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
943
944 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
945 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
946 OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
947
948 OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
949 EXPECT_EQ(OH_NN_SUCCESS, ret);
950 }
951
952 /*
953 * @tc.name: compilation_set_enable_float16_001
954 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_EnableFloat16 function.
955 * @tc.type: FUNC
956 */
957 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_001, testing::ext::TestSize.Level0)
958 {
959 InnerModel innerModel;
960 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
961 OH_NNCompilation* nnCompilation = nullptr;
962 bool enableFloat16 = true;
963
964 OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
965 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
966 }
967
968 /*
969 * @tc.name: compilation_set_enable_float16_002
970 * @tc.desc: Verify the success of the OH_NNCompilation_EnableFloat16 function.
971 * @tc.type: FUNC
972 */
973 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_002, testing::ext::TestSize.Level0)
974 {
975 InnerModel innerModel;
976 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
977
978 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
979 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
980 bool enableFloat16 = true;
981
982 OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
983 EXPECT_EQ(OH_NN_SUCCESS, ret);
984 }
985
986 /*
987 * @tc.name: compilation_build_001
988 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Build function.
989 * @tc.type: FUNC
990 */
991 HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_001, testing::ext::TestSize.Level0)
992 {
993 InnerModel innerModel;
994 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
995 OH_NNCompilation* nnCompilation = nullptr;
996
997 OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
998 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
999 }
1000
1001 /*
1002 * @tc.name: compilation_destroy_001
1003 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
1004 * @tc.type: FUNC
1005 */
1006 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_001, testing::ext::TestSize.Level0)
1007 {
1008 OH_NNCompilation** pCompilation = nullptr;
1009 OH_NNCompilation_Destroy(pCompilation);
1010 EXPECT_EQ(nullptr, pCompilation);
1011 }
1012
1013 /*
1014 * @tc.name: compilation_destroy_003
1015 * @tc.desc: Verify the normal model of the OH_NNCompilation_Destroy function.
1016 * @tc.type: FUNC
1017 */
1018 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_003, testing::ext::TestSize.Level0)
1019 {
1020 InnerModel* innerModel = new InnerModel();
1021 EXPECT_NE(nullptr, innerModel);
1022
1023 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1024 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1025 OH_NNCompilation_Destroy(&nnCompilation);
1026 EXPECT_EQ(nullptr, nnCompilation);
1027 }
1028
1029 /**
1030 * @tc.name: excutor_construct_001
1031 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNExecutor_Construct function
1032 * @tc.type: FUNC
1033 */
1034 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_001, testing::ext::TestSize.Level0)
1035 {
1036 InnerModel innerModel;
1037 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1038
1039 OH_NNCompilation* nnCompilation = nullptr;
1040 OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
1041 EXPECT_EQ(nullptr, executor);
1042 }
1043
1044 /**
1045 * @tc.name: excutor_construct_002
1046 * @tc.desc: Verify the not OH_NNCompilation_Build before creating executor of the OH_NNExecutor_Construct function
1047 * @tc.type: FUNC
1048 */
1049 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_002, testing::ext::TestSize.Level0)
1050 {
1051 InnerModel innerModel;
1052 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1053
1054 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1055 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1056 OH_NNExecutor * executor = OH_NNExecutor_Construct(nnCompilation);
1057 EXPECT_EQ(nullptr, executor);
1058 }
1059
1060 /**
1061 * @tc.name: excutor_construct_003
1062 * @tc.desc: Verify the success of the OH_NNExecutor_Construct function
1063 * @tc.type: FUNC
1064 */
1065 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_003, testing::ext::TestSize.Level0)
1066 {
1067 InnerModel innerModel;
1068 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1069
1070 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1071 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1072 OH_NNExecutor * executor = OH_NNExecutor_Construct(nnCompilation);
1073 EXPECT_EQ(nullptr, executor);
1074 }
1075
1076 /**
1077 * @tc.name: excutor_setinput_001
1078 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInput function
1079 * @tc.type: FUNC
1080 */
1081 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_001, testing::ext::TestSize.Level0)
1082 {
1083 SetTensor();
1084
1085 float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1086 const void *buffer = input;
1087 size_t length = 2 * sizeof(float);
1088 uint32_t inputIndex = 0;
1089 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nullptr, inputIndex, &m_tensor, buffer, length));
1090 }
1091
1092 /**
1093 * @tc.name: excutor_setinput_002
1094 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
1095 * @tc.type: FUNC
1096 */
1097 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_002, testing::ext::TestSize.Level0)
1098 {
1099 InnerModel innerModel;
1100 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1101
1102 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1103 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1104 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1105
1106 uint32_t inputIndex = 0;
1107 float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1108 const void *buffer = input;
1109 size_t length = 2 * sizeof(float);
1110 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length));
1111 }
1112
1113 /**
1114 * @tc.name: excutor_setinput_003
1115 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
1116 * @tc.type: FUNC
1117 */
1118 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_003, testing::ext::TestSize.Level0)
1119 {
1120 InnerModel innerModel;
1121 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1122
1123 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1124 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1125 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1126
1127 SetTensor();
1128
1129 uint32_t inputIndex = 0;
1130 const void *buffer = nullptr;
1131 size_t length = 2 * sizeof(float);
1132 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
1133 }
1134
1135 /**
1136 * @tc.name: excutor_setinput_004
1137 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
1138 * @tc.type: FUNC
1139 */
1140 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_004, testing::ext::TestSize.Level0)
1141 {
1142 InnerModel innerModel;
1143 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1144
1145 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1146 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1147 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1148
1149 uint32_t inputIndex = 0;
1150 SetTensor();
1151
1152 size_t length = 0;
1153 float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1154 const void *buffer = input;
1155 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
1156 }
1157
1158 /**
1159 * @tc.name: excutor_setinput_005
1160 * @tc.desc: Verify the success of the OH_NNExecutor_SetInput function
1161 * @tc.type: FUNC
1162 */
1163 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_005, testing::ext::TestSize.Level0)
1164 {
1165 InnerModel innerModel;
1166 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1167
1168 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1169 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1170 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1171
1172 uint32_t inputIndex = 0;
1173 int32_t dims[2] = {3, 4};
1174 m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1175
1176 float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1177 const void *buffer = input;
1178 size_t length = 12 * sizeof(float);
1179 OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
1180 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1181 }
1182
1183 /**
1184 * @tc.name: excutor_setoutput_001
1185 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutput function
1186 * @tc.type: FUNC
1187 */
1188 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_001, testing::ext::TestSize.Level0)
1189 {
1190 uint32_t outputIndex = 0;
1191 float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1192 void *buffer = input;
1193 size_t length = 9 * sizeof(int32_t);
1194 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nullptr, outputIndex, buffer, length));
1195 }
1196
1197 /**
1198 * @tc.name: excutor_setoutput_002
1199 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetOutput function
1200 * @tc.type: FUNC
1201 */
1202 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_002, testing::ext::TestSize.Level0)
1203 {
1204 InnerModel innerModel;
1205 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1206
1207 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1208 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1209 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1210
1211 uint32_t outputIndex = 0;
1212 void *buffer = nullptr;
1213 size_t length = 9 * sizeof(int32_t);
1214 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1215 }
1216
1217 /**
1218 * @tc.name: excutor_setoutput_003
1219 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
1220 * @tc.type: FUNC
1221 */
1222 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_003, testing::ext::TestSize.Level0)
1223 {
1224 InnerModel innerModel;
1225 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1226
1227 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1228 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1229 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1230
1231 uint32_t outputIndex = 0;
1232 float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1233 void *buffer = input;
1234 size_t length = 0;
1235 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1236 }
1237
1238 /**
1239 * @tc.name: excutor_setoutput_004
1240 * @tc.desc: Verify the success of the OH_NNExecutor_SetOutput function
1241 * @tc.type: FUNC
1242 */
1243 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_004, testing::ext::TestSize.Level0)
1244 {
1245 InnerModel innerModel;
1246 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1247
1248 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1249 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1250 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1251
1252 uint32_t outputIndex = 0;
1253 float output[12];
1254 size_t length = 12 * sizeof(float);
1255 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1256 }
1257
1258 /**
1259 * @tc.name: excutor_getoutputshape_001
1260 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_GetOutputShape function
1261 * @tc.type: FUNC
1262 */
1263 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_001, testing::ext::TestSize.Level0)
1264 {
1265 InnerModel innerModel;
1266 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1267
1268 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1269 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1270 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1271
1272 int32_t* ptr = nullptr;
1273 int32_t** shape = &ptr;
1274 uint32_t length = 2;
1275 uint32_t outputIndex = 0;
1276 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1277 shape, &length));
1278 }
1279
1280 /**
1281 * @tc.name: excutor_getoutputshape_002
1282 * @tc.desc: Verify the shape is nullptr of the OH_NNExecutor_GetOutputShape function
1283 * @tc.type: FUNC
1284 */
1285 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_002, testing::ext::TestSize.Level0)
1286 {
1287 InnerModel innerModel;
1288 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1289
1290 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1291 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1292 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1293
1294 uint32_t outputIndex = 0;
1295 int32_t** shape = nullptr;
1296 uint32_t length = 2;
1297 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1298 shape, &length));
1299 }
1300
1301 /**
1302 * @tc.name: excutor_getoutputshape_003
1303 * @tc.desc: Verify the *shape is not nullptr of the OH_NNExecutor_GetOutputShape function
1304 * @tc.type: FUNC
1305 */
1306 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_003, testing::ext::TestSize.Level0)
1307 {
1308 InnerModel innerModel;
1309 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1310
1311 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1312 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1313 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1314
1315 int32_t expectDim[2] = {3, 3};
1316 int32_t* ptr = expectDim;
1317 int32_t** shape = &ptr;
1318 uint32_t length = 2;
1319 uint32_t outputIndex = 0;
1320 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1321 shape, &length));
1322 }
1323
1324 /**
1325 * @tc.name: excutor_getoutputshape_004
1326 * @tc.desc: Verify the length is nullptr of the OH_NNExecutor_GetOutputShape function
1327 * @tc.type: FUNC
1328 */
1329 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_004, testing::ext::TestSize.Level0)
1330 {
1331 InnerModel innerModel;
1332 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1333
1334 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1335 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1336 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1337
1338 int32_t* ptr = nullptr;
1339 int32_t** shape = &ptr;
1340 uint32_t outputIndex = 0;
1341 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, nullptr));
1342 }
1343
1344 /**
1345 * @tc.name: excutor_getoutputshape_005
1346 * @tc.desc: Verify the success of the OH_NNExecutor_GetOutputShape function
1347 * @tc.type: FUNC
1348 */
1349 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_005, testing::ext::TestSize.Level0)
1350 {
1351 InnerModel innerModel;
1352 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1353
1354 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1355 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1356 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1357
1358 int32_t* ptr = nullptr;
1359 int32_t** shape = &ptr;
1360 uint32_t length = 2;
1361 uint32_t outputIndex = 0;
1362 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, &length));
1363 }
1364
1365 /**
1366 * @tc.name: excutor_run_001
1367 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Run function
1368 * @tc.type: FUNC
1369 */
1370 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_001, testing::ext::TestSize.Level0)
1371 {
1372 OH_NNExecutor* nnExecutor = nullptr;
1373 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1374 }
1375
1376 /**
1377 * @tc.name: excutor_run_002
1378 * @tc.desc: Verify the success of the OH_NNExecutor_Run function
1379 * @tc.type: FUNC
1380 */
1381 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_002, testing::ext::TestSize.Level0)
1382 {
1383 InnerModel innerModel;
1384 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1385
1386 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1387 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1388 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1389
1390 int32_t inputDims[2] = {3, 4};
1391 m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
1392
1393 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1394 }
1395
1396 /*
1397 * @tc.name: executor_allocate_input_memory_001
1398 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateInputMemory function.
1399 * @tc.type: FUNC
1400 */
1401 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_001, testing::ext::TestSize.Level0)
1402 {
1403 OH_NNExecutor* nnExecutor = nullptr;
1404 uint32_t outputIndex = 0;
1405 size_t length = 9 * sizeof(float);
1406
1407 OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1408 EXPECT_EQ(nullptr, ret);
1409 }
1410
1411 /*
1412 * @tc.name: executor_allocate_input_memory_002
1413 * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateInputMemory function.
1414 * @tc.type: FUNC
1415 */
1416 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_002, testing::ext::TestSize.Level0)
1417 {
1418 InnerModel innerModel;
1419 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1420
1421 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1422 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1423 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1424
1425 uint32_t outputIndex = 0;
1426 size_t length = 0;
1427
1428 OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1429 EXPECT_EQ(nullptr, ret);
1430 }
1431
1432 /*
1433 * @tc.name: executor_allocate_input_memory_003
1434 * @tc.desc: Verify the error when creating input memory in executor of the OH_NNExecutor_AllocateInputMemory function.
1435 * @tc.type: FUNC
1436 */
1437 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_003, testing::ext::TestSize.Level0)
1438 {
1439 InnerModel innerModel;
1440 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1441
1442 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1443 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1444 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1445
1446 uint32_t outputIndex = 6;
1447 size_t length = 9 * sizeof(float);
1448
1449 OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1450 EXPECT_EQ(nullptr, ret);
1451 }
1452
1453 /*
1454 * @tc.name: executor_allocate_input_memory_004
1455 * @tc.desc: Verify the success of the OH_NNExecutor_AllocateInputMemory function.
1456 * @tc.type: FUNC
1457 */
1458 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_004, testing::ext::TestSize.Level0)
1459 {
1460 InnerModel innerModel;
1461 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1462
1463 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1464 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1465 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1466
1467 uint32_t outputIndex = 0;
1468 size_t length = 9 * sizeof(float);
1469
1470 OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1471 EXPECT_EQ(nullptr, ret);
1472 }
1473
1474 /*
1475 * @tc.name: executor_allocate_output_memory_001
1476 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateOutputMemory function.
1477 * @tc.type: FUNC
1478 */
1479 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_001, testing::ext::TestSize.Level0)
1480 {
1481 OH_NNExecutor* nnExecutor = nullptr;
1482 uint32_t outputIndex = 0;
1483 size_t length = 9 * sizeof(float);
1484
1485 OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1486 EXPECT_EQ(nullptr, ret);
1487 }
1488
1489 /*
1490 * @tc.name: executor_allocate_output_memory_002
1491 * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateOutputMemory function.
1492 * @tc.type: FUNC
1493 */
1494 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_002, testing::ext::TestSize.Level0)
1495 {
1496 InnerModel innerModel;
1497 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1498
1499 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1500 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1501 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1502
1503 uint32_t outputIndex = 0;
1504 size_t length = 0;
1505
1506 OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1507 EXPECT_EQ(nullptr, ret);
1508 }
1509
1510 /*
1511 * @tc.name: executor_allocate_output_memory_003
1512 * @tc.desc: Verify the error when create output memory in executor of the OH_NNExecutor_AllocateOutputMemory function.
1513 * @tc.type: FUNC
1514 */
1515 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_003, testing::ext::TestSize.Level0)
1516 {
1517 InnerModel innerModel;
1518 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1519
1520 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1521 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1522 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1523
1524 uint32_t outputIndex = 6;
1525 size_t length = 9 * sizeof(float);
1526
1527 OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1528 EXPECT_EQ(nullptr, ret);
1529 }
1530
1531 /*
1532 * @tc.name: executor_allocate_output_memory_004
1533 * @tc.desc: Verify the success of the OH_NNExecutor_AllocateOutputMemory function.
1534 * @tc.type: FUNC
1535 */
1536 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_004, testing::ext::TestSize.Level0)
1537 {
1538 InnerModel innerModel;
1539 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1540
1541 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1542 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1543 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1544
1545 uint32_t outputIndex = 0;
1546 size_t length = 9 * sizeof(float);
1547
1548 OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1549 EXPECT_EQ(nullptr, ret);
1550 }
1551
1552
1553 /*
1554 * @tc.name: executor_destroy_input_memory_001
1555 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyInputMemory function.
1556 * @tc.type: FUNC
1557 */
1558 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_001, testing::ext::TestSize.Level0)
1559 {
1560 InnerModel innerModel;
1561 BuildModel(innerModel);
1562 OH_NNExecutor* nnExecutor = nullptr;
1563
1564 uint32_t inputIndex = 0;
1565     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1566     void* const data = dataArray;
1567 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1568 OH_NN_Memory* pMemory = &memory;
1569 OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
1570 EXPECT_EQ(nullptr, nnExecutor);
1571 }
1572
1573 /*
1574 * @tc.name: executor_destroy_input_memory_002
1575 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
1576 * @tc.type: FUNC
1577 */
1578 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_002, testing::ext::TestSize.Level0)
1579 {
1580 InnerModel innerModel;
1581 BuildModel(innerModel);
1582
1583 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1584 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1585 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1586
1587 uint32_t inputIndex = 0;
1588 OH_NN_Memory** memory = nullptr;
1589 OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, memory);
1590 EXPECT_EQ(nullptr, memory);
1591 }
1592
1593 /*
1594 * @tc.name: executor_destroy_input_memory_003
1595 * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
1596 * @tc.type: FUNC
1597 */
1598 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_003, testing::ext::TestSize.Level0)
1599 {
1600 InnerModel innerModel;
1601 BuildModel(innerModel);
1602
1603 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1604 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1605 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1606 EXPECT_EQ(nnExecutor, nullptr);
1607
1608 uint32_t inputIndex = 0;
1609 OH_NN_Memory* memory = nullptr;
1610 OH_NN_Memory** pMemory = &memory;
1611 OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, pMemory);
1612 }
1613
1614 /*
1615 * @tc.name: executor_destroy_input_memory_004
1616  * @tc.desc: Verify the error that occurs when destroying input memory of the OH_NNExecutor_DestroyInputMemory function.
1617 * @tc.type: FUNC
1618 */
1619 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_004, testing::ext::TestSize.Level0)
1620 {
1621 InnerModel innerModel;
1622 BuildModel(innerModel);
1623
1624 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1625 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1626 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1627
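    // Destroying memory for a non-existent input index should fail, leaving pMemory non-null.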
1628 uint32_t inputIndex = 6;
1629     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1630     void* const data = dataArray;
1631 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1632 OH_NN_Memory* pMemory = &memory;
1633 OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
1634 EXPECT_NE(nullptr, pMemory);
1635 }
1636
1637 /*
1638 * @tc.name: executor_destroy_input_memory_005
1639 * @tc.desc: Verify the success of the OH_NNExecutor_DestroyInputMemory function.
1640 * @tc.type: FUNC
1641 */
1642 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_005, testing::ext::TestSize.Level0)
1643 {
1644 InnerModel innerModel;
1645 BuildModel(innerModel);
1646
1647 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1648 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1649 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1650
1651 uint32_t inputIndex = 0;
1652     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1653     void* const data = dataArray;
1654 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1655 OH_NN_Memory* pMemory = &memory;
1656 OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
1657 EXPECT_NE(nullptr, pMemory);
1658 }
1659
1660 /*
1661 * @tc.name: executor_destroy_output_memory_001
1662 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
1663 * @tc.type: FUNC
1664 */
1665 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_001, testing::ext::TestSize.Level0)
1666 {
1667 OH_NNExecutor* nnExecutor = nullptr;
1668 uint32_t outputIndex = 0;
1669     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1670     void* const data = dataArray;
1671 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1672 OH_NN_Memory* pMemory = &memory;
1673 OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
1674 EXPECT_EQ(nullptr, nnExecutor);
1675 }
1676
1677 /*
1678 * @tc.name: executor_destroy_output_memory_002
1679 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
1680 * @tc.type: FUNC
1681 */
1682 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_002, testing::ext::TestSize.Level0)
1683 {
1684 InnerModel innerModel;
1685 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1686
1687 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1688 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1689 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1690
1691 uint32_t outputIndex = 0;
1692 OH_NN_Memory** memory = nullptr;
1693 OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, memory);
1694 EXPECT_EQ(nullptr, memory);
1695 }
1696
1697 /*
1698 * @tc.name: executor_destroy_output_memory_003
1699 * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
1700 * @tc.type: FUNC
1701 */
1702 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_003, testing::ext::TestSize.Level0)
1703 {
1704 InnerModel innerModel;
1705 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1706
1707 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1708 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1709 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1710 EXPECT_EQ(nnExecutor, nullptr);
1711
1712 uint32_t outputIndex = 0;
1713 OH_NN_Memory* memory = nullptr;
1714 OH_NN_Memory** pMemory = &memory;
1715 OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, pMemory);
1716 }
1717
1718 /*
1719 * @tc.name: executor_destroy_output_memory_004
1720  * @tc.desc: Verify the error that occurs when destroying output memory of the OH_NNExecutor_DestroyOutputMemory function.
1721 * @tc.type: FUNC
1722 */
1723 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_004, testing::ext::TestSize.Level0)
1724 {
1725 InnerModel innerModel;
1726 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1727
1728 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1729 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1730 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1731
1732 uint32_t outputIndex = 6;
1733     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1734     void* const data = dataArray;
1735 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1736 OH_NN_Memory* pMemory = &memory;
1737 OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
1738 EXPECT_NE(nullptr, pMemory);
1739 }
1740
1741 /*
1742 * @tc.name: executor_destroy_output_memory_005
1743 * @tc.desc: Verify the success of the OH_NNExecutor_DestroyOutputMemory function.
1744 * @tc.type: FUNC
1745 */
1746 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_005, testing::ext::TestSize.Level0)
1747 {
1748 InnerModel innerModel;
1749 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1750
1751 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1752 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1753 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1754
1755     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1756     void* const data = dataArray;
1757 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1758 OH_NN_Memory* pMemory = &memory;
1759 uint32_t outputIndex = 0;
1760 OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
1761 EXPECT_NE(nullptr, pMemory);
1762 }
1763
1764 /*
1765 * @tc.name: executor_set_input_with_memory_001
1766 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInputWithMemory function.
1767 * @tc.type: FUNC
1768 */
1769 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_001, testing::ext::TestSize.Level0)
1770 {
1771 OH_NNExecutor* nnExecutor = nullptr;
1772
1773 SetTensor();
1774
1775 uint32_t inputIndex = 0;
1776     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1777     void* const data = dataArray;
1778 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1779
1780 OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
1781 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1782 }
1783
1784 /*
1785 * @tc.name: executor_set_input_with_memory_002
1786 * @tc.desc: Verify the operand is nullptr of the OH_NNExecutor_SetInputWithMemory function.
1787 * @tc.type: FUNC
1788 */
1789 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_002, testing::ext::TestSize.Level0)
1790 {
1791 InnerModel innerModel;
1792 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1793
1794 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1795 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1796 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1797
1798 OH_NN_Tensor* operand = nullptr;
1799
1800 uint32_t inputIndex = 0;
1801     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1802     void* const data = dataArray;
1803 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1804
1805 OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, operand, &memory);
1806 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1807 }
1808
1809 /*
1810 * @tc.name: executor_set_input_with_memory_003
1811 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetInputWithMemory function.
1812 * @tc.type: FUNC
1813 */
1814 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_003, testing::ext::TestSize.Level0)
1815 {
1816 InnerModel innerModel;
1817 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1818
1819 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1820 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1821 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1822
1823 SetTensor();
1824
1825 uint32_t inputIndex = 0;
1826 OH_NN_Memory* memory = nullptr;
1827 OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, memory);
1828 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1829 }
1830
1831 /*
1832 * @tc.name: executor_set_input_with_memory_004
1833 * @tc.desc: Verify the success of the OH_NNExecutor_SetInputWithMemory function.
1834 * @tc.type: FUNC
1835 */
1836 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_004, testing::ext::TestSize.Level0)
1837 {
1838 InnerModel innerModel;
1839 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1840
1841 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1842 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1843 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1844
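    // With the executor construction failing as above, this otherwise valid call is expected to be
    // rejected with OH_NN_INVALID_PARAMETER.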
1845 uint32_t inputIndex = 0;
1846 int32_t dims[2] = {3, 4};
1847 m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1848
1849     float dataArray[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1850     void* const data = dataArray;
1851 OH_NN_Memory memory = {data, 12 * sizeof(float)};
1852
1853 OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
1854 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1855 }
1856
1858 /*
1859 * @tc.name: executor_set_output_with_memory_001
1860 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
1861 * @tc.type: FUNC
1862 */
1863 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_001, testing::ext::TestSize.Level0)
1864 {
1865 OH_NNExecutor* nnExecutor = nullptr;
1866 uint32_t outputIndex = 0;
1867     float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1868     void* const data = dataArray;
1869 OH_NN_Memory memory = {data, 9 * sizeof(float)};
1870 OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
1871 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1872 }
1873
1874 /*
1875 * @tc.name: executor_set_output_with_memory_002
1876 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
1877 * @tc.type: FUNC
1878 */
1879 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_002, testing::ext::TestSize.Level0)
1880 {
1881 InnerModel innerModel;
1882 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1883
1884 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1885 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1886 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1887
1888 uint32_t outputIndex = 0;
1889 OH_NN_Memory* memory = nullptr;
1890 OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, memory);
1891 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1892 }
1893
1894 /*
1895 * @tc.name: executor_set_output_with_memory_003
1896 * @tc.desc: Verify the success of the OH_NNExecutor_SetOutputWithMemory function.
1897 * @tc.type: FUNC
1898 */
1899 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_003, testing::ext::TestSize.Level0)
1900 {
1901 InnerModel innerModel;
1902 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1903
1904 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1905 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1906 OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1907
1908 uint32_t outputIndex = 0;
1909     float dataArray[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1910     void* const data = dataArray;
1911 OH_NN_Memory memory = {data, 12 * sizeof(float)};
1912 OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
1913 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1914 }
1915
1916 /*
1917 * @tc.name: executor_destroy_001
1918 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Destroy function.
1919 * @tc.type: FUNC
1920 */
1921 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_001, testing::ext::TestSize.Level0)
1922 {
1923 OH_NNExecutor** pExecutor = nullptr;
1924 OH_NNExecutor_Destroy(pExecutor);
1925 EXPECT_EQ(nullptr, pExecutor);
1926 }
1927
1928 /*
1929 * @tc.name: device_get_all_devices_id_001
1930 * @tc.desc: Verify the allDevicesID is nullptr of the OH_NNDevice_GetAllDevicesID function.
1931 * @tc.type: FUNC
1932 */
1933 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_001, testing::ext::TestSize.Level0)
1934 {
1935 const size_t** allDevicesId = nullptr;
1936 uint32_t deviceCount = 1;
1937 uint32_t* pDeviceCount = &deviceCount;
1938 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(allDevicesId, pDeviceCount);
1939 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1940 }
1941
1942 /*
1943 * @tc.name: device_get_all_devices_id_002
1944 * @tc.desc: Verify the *allDevicesID is not nullptr of the OH_NNDevice_GetAllDevicesID function.
1945 * @tc.type: FUNC
1946 */
1947 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_002, testing::ext::TestSize.Level0)
1948 {
1949 const size_t devicesId = 1;
1950 const size_t* allDevicesId = &devicesId;
1951 const size_t** pAllDevicesId = &allDevicesId;
1952 uint32_t deviceCount = 1;
1953 uint32_t* pDeviceCount = &deviceCount;
1954 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
1955 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1956 }
1957
1958 /*
1959 * @tc.name: device_get_all_devices_id_003
1960 * @tc.desc: Verify the deviceCount is nullptr of the OH_NNDevice_GetAllDevicesID function.
1961 * @tc.type: FUNC
1962 */
1963 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_003, testing::ext::TestSize.Level0)
1964 {
1965 const size_t* allDevicesId = nullptr;
1966 const size_t** pAllDevicesId = &allDevicesId;
1967 uint32_t* pDeviceCount = nullptr;
1968 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
1969 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1970 }
1971
1972 /*
1973 * @tc.name: device_get_all_devices_id_004
1974  * @tc.desc: Verify the case where no device is found by the OH_NNDevice_GetAllDevicesID function.
1975 * @tc.type: FUNC
1976 */
1977 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_004, testing::ext::TestSize.Level0)
1978 {
1979 const size_t* allDevicesId = nullptr;
1980 const size_t** pAllDevicesId = &allDevicesId;
1981 uint32_t deviceCount = 1;
1982 uint32_t* pDeviceCount = &deviceCount;
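    // Configure the mock so that no device is found; OH_NNDevice_GetAllDevicesID is still expected
    // to return OH_NN_SUCCESS.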
1983 OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_FAILED;
1984 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
1985 EXPECT_EQ(OH_NN_SUCCESS, ret);
1986 }
1987
1988 /*
1989 * @tc.name: device_get_all_devices_id_005
1990 * @tc.desc: Verify the success of the OH_NNDevice_GetAllDevicesID function.
1991 * @tc.type: FUNC
1992 */
1993 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_005, testing::ext::TestSize.Level0)
1994 {
1995 const size_t* allDevicesId = nullptr;
1996 const size_t** pAllDevicesId = &allDevicesId;
1997 uint32_t deviceCount = 1;
1998 uint32_t* pDeviceCount = &deviceCount;
1999 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2000 EXPECT_EQ(OH_NN_SUCCESS, ret);
2001 }
2002
2003 /*
2004 * @tc.name: device_get_name_001
2005 * @tc.desc: Verify the name is nullptr of the OH_NNDevice_GetName function.
2006 * @tc.type: FUNC
2007 */
2008 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_001, testing::ext::TestSize.Level0)
2009 {
2010 size_t deviceID = 1;
2011 const char **name = nullptr;
2012 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, name);
2013 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2014 }
2015
2016 /*
2017 * @tc.name: device_get_name_002
2018 * @tc.desc: Verify the *name is not nullptr of the OH_NNDevice_GetName function.
2019 * @tc.type: FUNC
2020 */
2021 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_002, testing::ext::TestSize.Level0)
2022 {
2023 size_t deviceID = 1;
2024     const char* name = "deviceId";
2025 const char** pName = &name;
2026 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2027 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2028 }
2029
2030 /*
2031 * @tc.name: device_get_name_003
2032  * @tc.desc: Verify the error that occurs when getting the name of the deviceID of the OH_NNDevice_GetName function.
2033 * @tc.type: FUNC
2034 */
2035 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_003, testing::ext::TestSize.Level0)
2036 {
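    // deviceID 12345 does not correspond to any available device, so the name query is expected to fail.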
2037 size_t deviceID = 12345;
2038 const char* name = nullptr;
2039 const char** pName = &name;
2040 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2041 EXPECT_EQ(OH_NN_FAILED, ret);
2042 }
2043
2044 /*
2045 * @tc.name: device_get_name_004
2046 * @tc.desc: Verify the success of the OH_NNDevice_GetName function.
2047 * @tc.type: FUNC
2048 */
2049 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_004, testing::ext::TestSize.Level0)
2050 {
2051 size_t deviceID = 1;
2052 const char* name = nullptr;
2053 const char** pName = &name;
2054 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2055 EXPECT_EQ(OH_NN_FAILED, ret);
2056 }
2057
2058 /*
2059 * @tc.name: device_get_type_001
2060  * @tc.desc: Verify the retrieved device is nullptr (the deviceID does not match any device) of the OH_NNDevice_GetType function.
2061 * @tc.type: FUNC
2062 */
2063 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_001, testing::ext::TestSize.Level0)
2064 {
2065 size_t deviceID = 12345;
2066 OH_NN_DeviceType deviceType = OH_NN_CPU;
2067 OH_NN_DeviceType* pDeviceType = &deviceType;
2068 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2069 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2070 }
2071
2072 /*
2073 * @tc.name: device_get_type_002
2074 * @tc.desc: Verify the OH_NN_DeviceType is nullptr of the OH_NNDevice_GetType function.
2075 * @tc.type: FUNC
2076 */
2077 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_002, testing::ext::TestSize.Level0)
2078 {
2079 size_t deviceID = 1;
2080 OH_NN_DeviceType* pDeviceType = nullptr;
2081 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2082 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2083 }
2084
2085 /*
2086 * @tc.name: device_get_type_003
2087  * @tc.desc: Verify the error that occurs when getting the type of the deviceID of the OH_NNDevice_GetType function.
2088 * @tc.type: FUNC
2089 */
2090 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_003, testing::ext::TestSize.Level0)
2091 {
2092 size_t deviceID = 1;
2093 OH_NN_DeviceType deviceType = OH_NN_OTHERS;
2094 OH_NN_DeviceType* pDeviceType = &deviceType;
2095 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2096 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2097 }
2098
2099 /*
2100 * @tc.name: device_get_type_004
2101 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2102 * @tc.type: FUNC
2103 */
2104 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_004, testing::ext::TestSize.Level0)
2105 {
2106 size_t deviceID = 1;
2107 OH_NN_DeviceType deviceType = OH_NN_CPU;
2108 OH_NN_DeviceType* pDeviceType = &deviceType;
2109 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2110 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2111 }
2112 } // namespace Unittest
2113 } // namespace NeuralNetworkRuntime
2114 } // namespace OHOS
2115