/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "neural_network_runtime_test.h"

#include "mindir.h"

#include "utils.h"
#include "compilation.h"
#include "hdi_device_v1_0.h"
#include "test/unittest/common/v1_0/mock_idevice.h"
#include "nnexecutor.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
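// Mock implementations of selected HDIDeviceV1_0 methods. They return fixed
// results so the test cases below can exercise specific success and failure
// paths without a real HDI device or driver service.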
OH_NN_ReturnCode HDIDeviceV1_0::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
    const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel)
{
    if (model == nullptr) {
        return OH_NN_INVALID_PARAMETER;
    }

    if (config.enableFloat16 == false) {
        return OH_NN_FAILED;
    }

    sptr<OHOS::HDI::Nnrt::V1_0::IPreparedModel> iPreparedModel = sptr<OHOS::HDI::Nnrt::V1_0
        ::MockIPreparedModel>(new OHOS::HDI::Nnrt::V1_0::MockIPreparedModel());
    if (iPreparedModel == nullptr) {
        LOGE("HDIDeviceV1_0 mock PrepareModel failed, error happened when new sptr");
        return OH_NN_NULL_PTR;
    }

    preparedModel = CreateSharedPtr<HDIPreparedModelV1_0>(iPreparedModel);
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::GetDeviceType(OH_NN_DeviceType& deviceType)
{
    if (deviceType == OH_NN_OTHERS) {
        return OH_NN_UNAVAILABLE_DEVICE;
    }

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsModelCacheSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsPerformanceModeSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsPrioritySupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsFloat16PrecisionSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
    std::vector<bool>& ops)
{
    if (model == nullptr) {
        LOGE("HDIDeviceV1_0 mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
        return OH_NN_NULL_PTR;
    }

    ops.emplace_back(true);
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDeviceV1_0::IsDynamicInputSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Unittest {
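// Builds a minimal valid model: a single Add operation with two float32
// [3, 4] inputs, an int8 activation-type parameter, and one float32 [3, 4]
// output. Most test cases below use this helper to obtain a built model.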
OH_NN_ReturnCode NeuralNetworkRuntimeTest::BuildModel(InnerModel& model)
{
    // Add the first input tensor of the Add operation: float32, shape [3, 4].
    int32_t inputDims[2] = {3, 4};
    OH_NN_Tensor input1 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = model.AddTensor(input1);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the second input tensor of the Add operation: float32, shape [3, 4].
    OH_NN_Tensor input2 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    ret = model.AddTensor(input2);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the parameter tensor of the Add operation, which specifies the type
    // of the activation function. Its data type is int8.
    int32_t activationDims = 1;
    int8_t activationValue = OH_NN_FUSED_NONE;
    OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
    ret = model.AddTensor(activation);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Set the activation type to OH_NN_FUSED_NONE, meaning no activation
    // function is fused into the operation.
    uint32_t index = 2;
    ret = model.SetTensorValue(index, &activationValue, sizeof(int8_t));
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Set the output of the Add operation: float32, shape [3, 4].
    OH_NN_Tensor output = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    ret = model.AddTensor(output);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Specify the input, parameter, and output indices of the Add operation.
    uint32_t inputIndicesValues[2] = {0, 1};
    uint32_t paramIndicesValues = 2;
    uint32_t outputIndicesValues = 3;
    OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
    OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
    OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};

    // Add the Add operation to the model instance.
    ret = model.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Specify the input and output indices of the model instance.
    ret = model.SpecifyInputsAndOutputs(inputIndices, outputIndices);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Finish building the model instance.
    ret = model.Build();
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    return ret;
}

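// Fills the member index arrays (m_inputIndices, m_outputIndices and
// m_paramIndices) that the tests pass to OH_NNModel_AddOperation.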
void NeuralNetworkRuntimeTest::InitIndices()
{
    m_inputIndices.data = m_inputIndexs;
    m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);

    m_outputIndices.data = m_outputIndexs;
    m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);

    m_paramIndices.data = m_paramIndexs;
    m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
}

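// Adds three float32 [2, 2] tensors and an int8 activation-type parameter
// tensor to the given model.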
void NeuralNetworkRuntimeTest::AddModelTensor(InnerModel& innerModel)
{
    const int dim[2] = {2, 2};
    const OH_NN_Tensor& tensor = {OH_NN_FLOAT32, 2, dim, nullptr, OH_NN_TENSOR};

    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));

    const OH_NN_Tensor& tensorParam = {OH_NN_INT8, 0, nullptr, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensorParam));
}

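// Resets m_tensor to a scalar int32 tensor descriptor with no dimensions and
// no quantization parameters.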
void NeuralNetworkRuntimeTest::SetTensor()
{
    m_tensor.dataType = OH_NN_INT32;
    m_tensor.dimensionCount = 0;
    m_tensor.dimensions = nullptr;
    m_tensor.quantParam = nullptr;
    m_tensor.type = OH_NN_TENSOR;
}

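// Completes a model whose tensors are already added: sets the activation
// value, adds an Add operation, specifies the model inputs and outputs, and
// finishes the build.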
void NeuralNetworkRuntimeTest::SetInnerBuild(InnerModel& innerModel)
{
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_OperationType opType {OH_NN_OPS_ADD};
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.Build());
}

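// Queries the input dimension range and the output shape of a built executor,
// expecting both calls to succeed.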
void NeuralNetworkRuntimeTest::SetInputAndOutput(Executor& executor)
{
    size_t input1Index = 0;
    int32_t inputDims[2] = {3, 4};
    size_t lengthSize = 12 * sizeof(float);
    size_t *length = &lengthSize;

    size_t minInputDims = 1;
    size_t maxInputDims = 12;

    size_t *minInputDimsAddress = &minInputDims;
    size_t **minInputDimsAddressA = &minInputDimsAddress;

    size_t *maxInputDimsAddress = &maxInputDims;
    size_t **maxInputDimsAddressA = &maxInputDimsAddress;

    m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    EXPECT_EQ(OH_NN_SUCCESS, executor.GetInputDimRange(input1Index, minInputDimsAddressA, maxInputDimsAddressA,
        length));

    uint32_t outputIndex = 0;

    int32_t shape = 3;
    int32_t* shapeA = &shape;
    int32_t** shapeAA = &shapeA;
    uint32_t* shapeNum = &outputIndex;
    EXPECT_EQ(OH_NN_SUCCESS, executor.GetOutputShape(outputIndex, shapeAA, shapeNum));
}

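// GMock stub of the PreparedModel interface. Test cases program its methods
// (for example GetInputDimRanges) to return specific result codes.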
class MockIPreparedModel : public PreparedModel {
public:
    MOCK_METHOD1(ExportModelCache, OH_NN_ReturnCode(std::vector<Buffer>&));
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<IOTensor>&,
        const std::vector<IOTensor>&,
        std::vector<std::vector<int32_t>>&,
        std::vector<bool>&));
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<NN_Tensor*>&,
        const std::vector<NN_Tensor*>&,
        std::vector<std::vector<int32_t>>&,
        std::vector<bool>&));
    MOCK_CONST_METHOD1(GetModelID, OH_NN_ReturnCode(uint32_t&));
    MOCK_METHOD2(GetInputDimRanges, OH_NN_ReturnCode(std::vector<std::vector<uint32_t>>&,
        std::vector<std::vector<uint32_t>>&));
    MOCK_METHOD0(ReleaseBuiltModel, OH_NN_ReturnCode());
};

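// GMock stub of the Device interface, covering capability queries, model
// preparation, and buffer management.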
class MockIDevice : public Device {
public:
    MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
    MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
    MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        std::vector<bool>&));
    MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&));
    MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&,
        bool&));
    MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&));
    MOCK_METHOD1(AllocateBuffer, void*(size_t));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
    MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
    MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
    MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
};

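// The C API types (OH_NNModel, OH_NNCompilation, OH_NNExecutor) are opaque
// handles over the runtime's internal classes, so the tests below construct
// internal objects (InnerModel, NNExecutor) and reinterpret_cast them to the
// public handle types.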
/*
 * @tc.name: model_construct_001
 * @tc.desc: Verify the return model of the OH_NNModel_Construct function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_construct_001, testing::ext::TestSize.Level0)
{
    OH_NNModel* ret = OH_NNModel_Construct();
    EXPECT_NE(nullptr, ret);
}

/*
 * @tc.name: model_add_tensor_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddTensor function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_001, testing::ext::TestSize.Level0)
{
    OH_NNModel* model = nullptr;
    const int32_t dimInput[2] = {2, 2};
    const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_tensor_002
 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNModel_AddTensor function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_Tensor* tensor = nullptr;
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, tensor);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_tensor_003
 * @tc.desc: Verify the success of the OH_NNModel_AddTensor function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const int32_t dimInput[2] = {2, 2};
    const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_add_operation_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_002
 * @tc.desc: Verify the paramIndices is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    m_inputIndices.data = m_inputIndexs;
    m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);

    m_outputIndices.data = m_outputIndexs;
    m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);

    AddModelTensor(innerModel);
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, nullptr, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_003
 * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    m_paramIndices.data = m_paramIndexs;
    m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);

    m_outputIndices.data = m_outputIndexs;
    m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);

    AddModelTensor(innerModel);
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, nullptr, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_004
 * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    m_paramIndices.data = m_paramIndexs;
    m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);

    m_inputIndices.data = m_inputIndexs;
    m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);

    AddModelTensor(innerModel);
    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_add_operation_005
 * @tc.desc: Verify the success of the OH_NNModel_AddOperation function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_set_tensor_data_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
        sizeof(int8_t));
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_set_tensor_data_002
 * @tc.desc: Verify the data is nullptr of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    AddModelTensor(innerModel);

    uint32_t index = 3;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, nullptr, sizeof(int8_t));
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_set_tensor_data_003
 * @tc.desc: Verify the length is 0 of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation), 0);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_set_tensor_data_004
 * @tc.desc: Verify the success of the OH_NNModel_SetTensorData function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;

    OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
        sizeof(int8_t));
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_002
 * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, nullptr, &m_outputIndices);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_003
 * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_specify_inputs_and_outputs_004
 * @tc.desc: Verify the success of the OH_NNModel_SpecifyInputsAndOutputs function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    InitIndices();
    AddModelTensor(innerModel);

    OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_finish_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Finish function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_finish_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;

    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    uint32_t index = 3;
    const int8_t activation = 0;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index, static_cast<const void *>(&activation),
        sizeof(int8_t)));

    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));

    OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_finish_002
 * @tc.desc: Verify the success of the OH_NNModel_Finish function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_finish_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    OH_NN_OperationType opType {OH_NN_OPS_ADD};

    InitIndices();
    AddModelTensor(innerModel);

    const int8_t activation = 0;
    uint32_t index = 3;
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
        static_cast<const void *>(&activation), sizeof(int8_t)));

    EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
    EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));

    OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: model_destroy_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel** pModel = nullptr;
    OH_NNModel_Destroy(pModel);
    EXPECT_EQ(nullptr, pModel);
}

/*
 * @tc.name: model_destroy_003
 * @tc.desc: Verify the normal model of the OH_NNModel_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_003, testing::ext::TestSize.Level0)
{
    InnerModel* innerModel = new InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    OH_NNModel_Destroy(&model);
    EXPECT_EQ(nullptr, model);
}

/*
 * @tc.name: model_get_available_operation_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = nullptr;

    uint32_t opCount = 1;
    const bool *pIsAvailable = nullptr;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_002
 * @tc.desc: Verify the isAvailable is nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    uint32_t opCount = 1;
    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, nullptr, &opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_003
 * @tc.desc: Verify the *isAvailable is not nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const bool isAvailable = true;
    const bool *pIsAvailable = &isAvailable;
    uint32_t opCount = 1;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_004
 * @tc.desc: Verify the opCount is nullptr of the OH_NNModel_GetAvailableOperations function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const bool *pIsAvailable = nullptr;
    uint32_t* opCount = nullptr;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, opCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: model_get_available_operation_005
 * @tc.desc: Verify the OH_NNModel_GetAvailableOperations function with an unavailable deviceID.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);

    const bool *pIsAvailable = nullptr;
    uint32_t opCount = 1;

    InitIndices();
    AddModelTensor(innerModel);
    SetInnerBuild(innerModel);

    size_t deviceID = 10;
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/*
 * @tc.name: compilation_construct_001
 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    const OH_NNModel* model = nullptr;
    OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: compilation_construct_002
 * @tc.desc: Verify the OH_NNCompilation_Construct function when the model has not been built by OH_NNModel_Finish.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
    EXPECT_NE(nullptr, ret);
}

/*
 * @tc.name: compilation_construct_003
 * @tc.desc: Verify the normal model of the OH_NNCompilation_Construct function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
    EXPECT_NE(nullptr, ret);
}

/*
 * @tc.name: compilation_set_device_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_001, testing::ext::TestSize.Level0)
{
    OH_NNCompilation* compilation = nullptr;
    size_t deviceId = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(compilation, deviceId);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_device_002
 * @tc.desc: Verify the success of the OH_NNCompilation_SetDevice function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    size_t deviceId = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_cache_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    const char* cacheDir = "../";
    uint32_t version = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_cache_002
 * @tc.desc: Verify the cachePath is nullptr of the OH_NNCompilation_SetCache function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    const char* cacheDir = nullptr;
    uint32_t version = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_cache_003
 * @tc.desc: Verify the success of the OH_NNCompilation_SetCache function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    const char* cacheDir = "../";
    uint32_t version = 1;
    OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_performance_mode_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPerformanceMode function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_performance_mode_002
 * @tc.desc: Verify the success of the OH_NNCompilation_SetPerformanceMode function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_priority_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPriority function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    OH_NN_Priority priority = OH_NN_PRIORITY_LOW;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_priority_002
 * @tc.desc: Verify the success of the OH_NNCompilation_SetPriority function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NN_Priority priority = OH_NN_PRIORITY_LOW;

    OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_set_enable_float16_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_EnableFloat16 function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;
    bool enableFloat16 = true;

    OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_set_enable_float16_002
 * @tc.desc: Verify the success of the OH_NNCompilation_EnableFloat16 function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    bool enableFloat16 = true;

    OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: compilation_build_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Build function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNCompilation* nnCompilation = nullptr;

    OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: compilation_destroy_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_001, testing::ext::TestSize.Level0)
{
    OH_NNCompilation** pCompilation = nullptr;
    OH_NNCompilation_Destroy(pCompilation);
    EXPECT_EQ(nullptr, pCompilation);
}

/*
 * @tc.name: compilation_destroy_003
 * @tc.desc: Verify the normal model of the OH_NNCompilation_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_003, testing::ext::TestSize.Level0)
{
    InnerModel* innerModel = new InnerModel();
    EXPECT_NE(nullptr, innerModel);

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNCompilation_Destroy(&nnCompilation);
    EXPECT_EQ(nullptr, nnCompilation);
}

/**
 * @tc.name: excutor_construct_001
 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNExecutor_Construct function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNCompilation* nnCompilation = nullptr;
    OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
    EXPECT_EQ(nullptr, executor);
}

/**
 * @tc.name: excutor_construct_002
 * @tc.desc: Verify the OH_NNExecutor_Construct function when the compilation has not been built
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
    EXPECT_EQ(nullptr, executor);
}

/**
 * @tc.name: excutor_construct_003
 * @tc.desc: Verify the OH_NNExecutor_Construct function returns nullptr when the compilation is not built
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
    EXPECT_EQ(nullptr, executor);
}

/**
 * @tc.name: excutor_setinput_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_001, testing::ext::TestSize.Level0)
{
    SetTensor();

    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    const void *buffer = input;
    size_t length = 2 * sizeof(float);
    uint32_t inputIndex = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nullptr, inputIndex, &m_tensor, buffer, length));
}

/**
 * @tc.name: excutor_setinput_002
 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    const void *buffer = input;
    size_t length = 2 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length));
}

/**
 * @tc.name: excutor_setinput_003
 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    SetTensor();

    uint32_t inputIndex = 0;
    const void *buffer = nullptr;
    size_t length = 2 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
}

/**
 * @tc.name: excutor_setinput_004
 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    SetTensor();

    size_t length = 0;
    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    const void *buffer = input;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
}

/**
 * @tc.name: excutor_setinput_005
 * @tc.desc: Verify the OH_NNExecutor_SetInput function when the executor was not successfully constructed
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: excutor_setinput_006
 * @tc.desc: Verify the OH_NNExecutor_SetInput function when GetInputDimRanges fails
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_006, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_006");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setinput_007
 * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_007");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setinput_008
 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_008, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_008");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    size_t length = 12 * sizeof(float);
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, nullptr, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setinput_009
 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_009, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInput excutor_setinput_009");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedModel = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*mockIPreparedModel, GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedModel, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    const void *buffer = input;
    size_t length = 0;
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedModel.get());
}

/**
 * @tc.name: excutor_setoutput_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetOutput excutor_setoutput_001");
    uint32_t outputIndex = 0;
    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void *buffer = input;
    size_t length = 9 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nullptr, outputIndex, buffer, length));
}

/**
 * @tc.name: excutor_setoutput_002
 * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetOutput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 0;
    void *buffer = nullptr;
    size_t length = 9 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
}

/**
 * @tc.name: excutor_setoutput_003
 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 0;
    float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void *buffer = input;
    size_t length = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
}

/**
 * @tc.name: excutor_setoutput_004
 * @tc.desc: Verify the OH_NNExecutor_SetOutput function when the executor was not successfully constructed
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 0;
    float output[12];
    size_t length = 12 * sizeof(float);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
}

1437 /**
1438 * @tc.name: excutor_setoutput_005
1439 * @tc.desc: Verify the success of the OH_NNExecutor_SetOutput function
1440 * @tc.type: FUNC
1441 */
1442 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_005, testing::ext::TestSize.Level0)
1443 {
1444 LOGE("OH_NNExecutor_SetOutput excutor_setinput_006");
1445 size_t m_backendID {0};
1446 std::shared_ptr<Device> m_device {nullptr};
1447 std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1448 EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1449 .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1450 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1451 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1452 NNExecutor* executor = new (std::nothrow) NNExecutor(
1453 m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1454 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1455
1456 uint32_t outputIndex = 0;
1457 float output[12];
1458 size_t length = 12 * sizeof(float);
1459 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1460
1461 testing::Mock::AllowLeak(mockIPreparedMode.get());
1462 }
1463
1464 /**
1465 * @tc.name: excutor_setoutput_006
1466 * @tc.desc: Verify the success of the OH_NNExecutor_SetOutput function
1467 * @tc.type: FUNC
1468 */
1469 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_006, testing::ext::TestSize.Level0)
1470 {
1471 LOGE("OH_NNExecutor_SetOutput excutor_setinput_006");
1472 size_t m_backendID {0};
1473 std::shared_ptr<Device> m_device {nullptr};
1474 std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1475 EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1476 .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1477 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1478 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1479 NNExecutor* executor = new (std::nothrow) NNExecutor(
1480 m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1481 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1482
1483 uint32_t outputIndex = 0;
1484 size_t length = 12 * sizeof(float);
1485 EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, nullptr, length));
1486
1487 testing::Mock::AllowLeak(mockIPreparedMode.get());
1488 }

/**
 * @tc.name: excutor_setoutput_007
 * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetOutput excutor_setoutput_007");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    float output[12];
    size_t length = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/**
 * @tc.name: excutor_getoutputshape_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_GetOutputShape function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
    OH_NNExecutor* nnExecutor = nullptr;

    int32_t* ptr = nullptr;
    int32_t** shape = &ptr;
    uint32_t length = 2;
    uint32_t outputIndex = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
        shape, &length));
}

/**
 * @tc.name: excutor_getoutputshape_002
 * @tc.desc: Verify the shape is nullptr of the OH_NNExecutor_GetOutputShape function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 0;
    int32_t** shape = nullptr;
    uint32_t length = 2;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
        shape, &length));
}

/**
 * @tc.name: excutor_getoutputshape_003
 * @tc.desc: Verify the *shape is not nullptr of the OH_NNExecutor_GetOutputShape function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    int32_t** shape = &ptr;
    uint32_t length = 2;
    uint32_t outputIndex = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
        shape, &length));
}

/**
 * @tc.name: excutor_getoutputshape_004
 * @tc.desc: Verify the length is nullptr of the OH_NNExecutor_GetOutputShape function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    int32_t* ptr = nullptr;
    int32_t** shape = &ptr;
    uint32_t outputIndex = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, nullptr));
}

/**
 * @tc.name: excutor_getoutputshape_005
 * @tc.desc: Verify the invalid executor of the OH_NNExecutor_GetOutputShape function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_005, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    int32_t* ptr = nullptr;
    int32_t** shape = &ptr;
    uint32_t length = 2;
    uint32_t outputIndex = 0;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, &length));
}

/**
 * @tc.name: excutor_run_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Run function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor* nnExecutor = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
}

/**
 * @tc.name: excutor_run_002
 * @tc.desc: Verify the invalid executor of the OH_NNExecutor_Run function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    int32_t inputDims[2] = {3, 4};
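    // OH_NN_Tensor fields: {dataType, dimensionCount, dimensions, quantParam, type}.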
    m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
}

/**
 * @tc.name: excutor_run_003
 * @tc.desc: Verify the success of the OH_NNExecutor_Run function
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_Run excutor_run_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    int32_t inputDims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNExecutor_Run(nnExecutor);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_allocate_input_memory_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor* nnExecutor = nullptr;
    uint32_t inputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_input_memory_002
 * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    size_t length = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_input_memory_003
 * @tc.desc: Verify the error when creating input memory in executor of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 6;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_input_memory_004
 * @tc.desc: Verify the invalid executor of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t inputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_input_memory_005
 * @tc.desc: Verify the invalid input index of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_005");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_EQ(nullptr, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_allocate_input_memory_006
 * @tc.desc: Verify the success of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_006, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_006");
    size_t m_backendID {0};
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;

    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair1;
    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair2;
    std::shared_ptr<TensorDesc> tensorDesc = std::make_shared<TensorDesc>();
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesc->SetShape(ptr, dimensionCount);
    pair1.first = tensorDesc;
    pair2.first = tensorDesc;
    m_inputTensorDescs.emplace_back(pair1);
    m_inputTensorDescs.emplace_back(pair2);
    m_outputTensorDescs.emplace_back(pair1);
    m_outputTensorDescs.emplace_back(pair2);

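    // The 3 x 3 shape set above describes 9 float elements, matching the buffer size
    // requested below; the mock device hands back a dummy non-null address for it.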
    size_t length = 9 * sizeof(float);
    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateTensorBuffer(length, m_inputTensorDescs[0].first))
        .WillRepeatedly(::testing::Return(reinterpret_cast<void*>(0x1000)));

    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
    EXPECT_NE(nullptr, executor);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_NE(nullptr, ret);

    testing::Mock::AllowLeak(device.get());
}

/*
 * @tc.name: executor_allocate_input_memory_007
 * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_007");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    size_t length = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, inputIndex, length);
    EXPECT_EQ(nullptr, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_allocate_output_memory_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor* nnExecutor = nullptr;
    uint32_t outputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_output_memory_002
 * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_002, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 0;
    size_t length = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_output_memory_003
 * @tc.desc: Verify the error when creating output memory in executor of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_003, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 6;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_output_memory_004
 * @tc.desc: Verify the invalid executor of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_004, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));

    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
    OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
    OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);

    uint32_t outputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);
}

/*
 * @tc.name: executor_allocate_output_memory_005
 * @tc.desc: Verify the invalid output index of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateOutputMemory executor_allocate_output_memory_005");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_allocate_output_memory_006
 * @tc.desc: Verify the success of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_006, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateOutputMemory executor_allocate_output_memory_006");
    size_t m_backendID {0};
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;

    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair1;
    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair2;
    std::shared_ptr<TensorDesc> tensorDesc = std::make_shared<TensorDesc>();
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesc->SetShape(ptr, dimensionCount);
    pair1.first = tensorDesc;
    pair2.first = tensorDesc;
    m_inputTensorDescs.emplace_back(pair1);
    m_inputTensorDescs.emplace_back(pair2);
    m_outputTensorDescs.emplace_back(pair1);
    m_outputTensorDescs.emplace_back(pair2);

    size_t length = 9 * sizeof(float);
    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateTensorBuffer(length, m_outputTensorDescs[0].first))
        .WillRepeatedly(::testing::Return(reinterpret_cast<void*>(0x1000)));

    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
    EXPECT_NE(nullptr, executor);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_NE(nullptr, ret);

    testing::Mock::AllowLeak(device.get());
}

/*
 * @tc.name: executor_allocate_output_memory_007
 * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateOutputMemory executor_allocate_output_memory_007");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    size_t length = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_input_memory_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_001, testing::ext::TestSize.Level0)
{
    InnerModel innerModel;
    BuildModel(innerModel);
    OH_NNExecutor* nnExecutor = nullptr;

    uint32_t inputIndex = 0;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
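    // Wrap the stack buffer in an OH_NN_Memory aggregate: {data pointer, length in bytes}.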
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
    EXPECT_EQ(nullptr, nnExecutor);
}

/*
 * @tc.name: executor_destroy_input_memory_002
 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_002");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    OH_NN_Memory** memory = nullptr;
    OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, memory);
    EXPECT_EQ(nullptr, memory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_input_memory_003
 * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    EXPECT_NE(executor, nullptr);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_Memory** pMemory = &memory;
    OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, pMemory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_input_memory_004
 * @tc.desc: Verify the error happened when destroying input memory of the OH_NNExecutor_DestroyInputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_004");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 6;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
    EXPECT_NE(nullptr, pMemory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_output_memory_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor* nnExecutor = nullptr;
    uint32_t outputIndex = 0;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
    EXPECT_EQ(nullptr, nnExecutor);
}

/*
 * @tc.name: executor_destroy_output_memory_002
 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_002");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    OH_NN_Memory** memory = nullptr;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, memory);
    EXPECT_EQ(nullptr, memory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_output_memory_003
 * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    EXPECT_NE(executor, nullptr);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_Memory** pMemory = &memory;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, pMemory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_output_memory_004
 * @tc.desc: Verify the error happened when destroying output memory of the OH_NNExecutor_DestroyOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_004");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 6;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
    EXPECT_NE(nullptr, pMemory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_output_memory_005
 * @tc.desc: Verify the error when the memory was not created by the executor of the OH_NNExecutor_DestroyOutputMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_005");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    uint32_t outputIndex = 0;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
    EXPECT_NE(nullptr, pMemory);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_set_input_with_memory_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor* nnExecutor = nullptr;

    SetTensor();

    uint32_t inputIndex = 0;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};

    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: executor_set_input_with_memory_002
 * @tc.desc: Verify the operand is nullptr of the OH_NNExecutor_SetInputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_002");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    OH_NN_Tensor* operand = nullptr;

    uint32_t inputIndex = 0;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};

    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, operand, &memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_set_input_with_memory_003
 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetInputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    SetTensor();

    uint32_t inputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_set_input_with_memory_004
 * @tc.desc: Verify the error when GetInputDimRanges fails of the OH_NNExecutor_SetInputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_004");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 12 * sizeof(float)};

    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_set_output_with_memory_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor* nnExecutor = nullptr;
    uint32_t outputIndex = 0;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: executor_set_output_with_memory_002
 * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetOutputWithMemory executor_set_output_with_memory_002");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_set_output_with_memory_003
 * @tc.desc: Verify the invalid output index of the OH_NNExecutor_SetOutputWithMemory function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetOutputWithMemory executor_set_output_with_memory_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 12 * sizeof(float)};
    OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(mockIPreparedMode.get());
}

/*
 * @tc.name: executor_destroy_001
 * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Destroy function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_001, testing::ext::TestSize.Level0)
{
    OH_NNExecutor** pExecutor = nullptr;
    OH_NNExecutor_Destroy(pExecutor);
    EXPECT_EQ(nullptr, pExecutor);
}

/*
 * @tc.name: device_get_all_devices_id_001
 * @tc.desc: Verify the allDevicesID is nullptr of the OH_NNDevice_GetAllDevicesID function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_001, testing::ext::TestSize.Level0)
{
    const size_t** allDevicesId = nullptr;
    uint32_t deviceCount = 1;
    uint32_t* pDeviceCount = &deviceCount;
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(allDevicesId, pDeviceCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_all_devices_id_002
 * @tc.desc: Verify the *allDevicesID is not nullptr of the OH_NNDevice_GetAllDevicesID function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_002, testing::ext::TestSize.Level0)
{
    const size_t devicesId = 1;
    const size_t* allDevicesId = &devicesId;
    const size_t** pAllDevicesId = &allDevicesId;
    uint32_t deviceCount = 1;
    uint32_t* pDeviceCount = &deviceCount;
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_all_devices_id_003
 * @tc.desc: Verify the deviceCount is nullptr of the OH_NNDevice_GetAllDevicesID function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_003, testing::ext::TestSize.Level0)
{
    const size_t* allDevicesId = nullptr;
    const size_t** pAllDevicesId = &allDevicesId;
    uint32_t* pDeviceCount = nullptr;
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_all_devices_id_004
 * @tc.desc: Verify the case of getting no device of the OH_NNDevice_GetAllDevicesID function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_004, testing::ext::TestSize.Level0)
{
    const size_t* allDevicesId = nullptr;
    const size_t** pAllDevicesId = &allDevicesId;
    uint32_t deviceCount = 1;
    uint32_t* pDeviceCount = &deviceCount;
    OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_FAILED;
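    // With the mock forced to fail, no device is reported, but the query itself
    // still completes and returns OH_NN_SUCCESS.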
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: device_get_all_devices_id_005
 * @tc.desc: Verify the success of the OH_NNDevice_GetAllDevicesID function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_005, testing::ext::TestSize.Level0)
{
    const size_t* allDevicesId = nullptr;
    const size_t** pAllDevicesId = &allDevicesId;
    uint32_t deviceCount = 1;
    uint32_t* pDeviceCount = &deviceCount;
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: device_get_name_001
 * @tc.desc: Verify the name is nullptr of the OH_NNDevice_GetName function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_001, testing::ext::TestSize.Level0)
{
    size_t deviceID = 1;
    const char **name = nullptr;
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, name);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_name_002
 * @tc.desc: Verify the *name is not nullptr of the OH_NNDevice_GetName function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_002, testing::ext::TestSize.Level0)
{
    size_t deviceID = 1;
    const char* name = "deviceId";
    const char** pName = &name;
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_name_003
 * @tc.desc: Verify the error when getting the name of an unknown deviceID of the OH_NNDevice_GetName function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_003, testing::ext::TestSize.Level0)
{
    size_t deviceID = 12345;
    const char* name = nullptr;
    const char** pName = &name;
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/*
 * @tc.name: device_get_name_004
 * @tc.desc: Verify the error when the deviceID is not registered of the OH_NNDevice_GetName function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_004, testing::ext::TestSize.Level0)
{
    size_t deviceID = 1;
    const char* name = nullptr;
    const char** pName = &name;
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/*
 * @tc.name: device_get_type_001
 * @tc.desc: Verify the invalid deviceID of the OH_NNDevice_GetType function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_001, testing::ext::TestSize.Level0)
{
    size_t deviceID = 12345;
    OH_NN_DeviceType deviceType = OH_NN_CPU;
    OH_NN_DeviceType* pDeviceType = &deviceType;
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_type_002
 * @tc.desc: Verify the OH_NN_DeviceType is nullptr of the OH_NNDevice_GetType function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_002, testing::ext::TestSize.Level0)
{
    size_t deviceID = 1;
    OH_NN_DeviceType* pDeviceType = nullptr;
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_type_003
 * @tc.desc: Verify the error when getting the type of the deviceID of the OH_NNDevice_GetType function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_003, testing::ext::TestSize.Level0)
{
    size_t deviceID = 1;
    OH_NN_DeviceType deviceType = OH_NN_OTHERS;
    OH_NN_DeviceType* pDeviceType = &deviceType;
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: device_get_type_004
 * @tc.desc: Verify the error when the deviceID is not registered of the OH_NNDevice_GetType function.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_004, testing::ext::TestSize.Level0)
{
    size_t deviceID = 1;
    OH_NN_DeviceType deviceType = OH_NN_CPU;
    OH_NN_DeviceType* pDeviceType = &deviceType;
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
2658
2659 /*
2660 * @tc.name: oh_nnquantparam_create_001
2661 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2662 * @tc.type: FUNC
2663 */
2664 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_create_001, testing::ext::TestSize.Level0)
2665 {
2666 LOGE("OH_NNQuantParam_Create oh_nnquantparam_create_001");
2667 NN_QuantParam* ret = OH_NNQuantParam_Create();
2668 EXPECT_NE(nullptr, ret);
2669 }
2670
2671 /*
2672 * @tc.name: oh_nnquantparam_setscales_001
2673 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2674 * @tc.type: FUNC
2675 */
2676 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_001, testing::ext::TestSize.Level0)
2677 {
2678 LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_001");
2679 size_t quantNum = 1;
2680 OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(nullptr, nullptr, quantNum);
2681 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2682 }
2683
2684 /*
2685 * @tc.name: oh_nnquantparam_setscales_002
2686 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2687 * @tc.type: FUNC
2688 */
2689 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_002, testing::ext::TestSize.Level0)
2690 {
2691 LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_002");
2692 NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2693 size_t quantNum = 1;
2694 OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, nullptr, quantNum);
2695 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2696 }
2697
2698 /*
2699 * @tc.name: oh_nnquantparam_setscales_003
2700 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2701 * @tc.type: FUNC
2702 */
2703 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_003, testing::ext::TestSize.Level0)
2704 {
2705 LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_003");
2706 NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2707 double scale = 2;
2708 size_t quantNum = 0;
2709 OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, &scale, quantNum);
2710 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2711 }
2712
2713 /*
2714 * @tc.name: oh_nnquantparam_setscales_004
2715 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2716 * @tc.type: FUNC
2717 */
2718 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_004, testing::ext::TestSize.Level0)
2719 {
2720 LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_004");
2721 NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2722 double scale = 2;
2723 size_t quantNum = 2;
2724 OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, &scale, quantNum);
2725 EXPECT_EQ(OH_NN_SUCCESS, ret);
2726 }
2727
2728 /*
2729 * @tc.name: oh_nnquantparam_setzeropoints_001
2730 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2731 * @tc.type: FUNC
2732 */
2733 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_001, testing::ext::TestSize.Level0)
2734 {
2735 LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_001");
2736 size_t quantNum = 2;
2737 OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(nullptr, nullptr, quantNum);
2738 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2739 }
2740
2741 /*
2742 * @tc.name: oh_nnquantparam_setzeropoints_002
2743 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2744 * @tc.type: FUNC
2745 */
2746 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_002, testing::ext::TestSize.Level0)
2747 {
2748 LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_002");
2749 NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2750 size_t quantNum = 2;
2751 OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, nullptr, quantNum);
2752 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2753 }
2754
2755 /*
2756 * @tc.name: oh_nnquantparam_setzeropoints_003
2757 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2758 * @tc.type: FUNC
2759 */
2760 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_003, testing::ext::TestSize.Level0)
2761 {
2762 LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_003");
2763 NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2764 int32_t zeroPoints = 2;
2765 size_t quantNum = 0;
2766 OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoints, quantNum);
2767 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2768 }
2769
2770 /*
2771 * @tc.name: oh_nnquantparam_setzeropoints_004
2772 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2773 * @tc.type: FUNC
2774 */
2775 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_004, testing::ext::TestSize.Level0)
2776 {
2777 LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_004");
2778 NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2779 int32_t zeroPoints = 2;
2780 size_t quantNum = 2;
2781 OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoints, quantNum);
2782 EXPECT_EQ(OH_NN_SUCCESS, ret);
2783 }

/*
 * @tc.name: oh_nnquantparam_setnumbits_001
 * @tc.desc: Verify that OH_NNQuantParam_SetNumBits returns OH_NN_INVALID_PARAMETER when the quant param handle is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_001");
    size_t quantNum = 2;
    OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(nullptr, nullptr, quantNum);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnquantparam_setnumbits_002
 * @tc.desc: Verify that OH_NNQuantParam_SetNumBits returns OH_NN_INVALID_PARAMETER when numBits is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_002");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    size_t quantNum = 2;
    OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, nullptr, quantNum);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnquantparam_setnumbits_003
 * @tc.desc: Verify that OH_NNQuantParam_SetNumBits returns OH_NN_INVALID_PARAMETER when the count is zero.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_003");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    uint32_t numBits = 2;
    size_t quantNum = 0;
    OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, &numBits, quantNum);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnquantparam_setnumbits_004
 * @tc.desc: Verify that OH_NNQuantParam_SetNumBits succeeds with valid bit widths and a matching count.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_004");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    uint32_t numBits[2] = {2, 2}; // quantNum elements are read, so the array must hold quantNum values
    size_t quantNum = 2;
    OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, numBits, quantNum);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
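
/*
 * Illustrative sketch, not part of the original suite: configures scales, zero
 * points, and num bits together on one handle, which is how a quant param is
 * normally populated before being attached to a tensor. The counts must agree
 * across the three setters; the concrete values here are hypothetical.
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_fullconfig_sketch, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam oh_nnquantparam_fullconfig_sketch");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    ASSERT_NE(nullptr, quantParams);

    // All three arrays describe the same two quantization channels.
    double scales[2] = {0.5, 0.25};
    int32_t zeroPoints[2] = {0, 0};
    uint32_t numBits[2] = {8, 8};
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_SetScales(quantParams, scales, 2));
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_SetZeroPoints(quantParams, zeroPoints, 2));
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_SetNumBits(quantParams, numBits, 2));

    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_Destroy(&quantParams));
}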

/*
 * @tc.name: oh_nnquantparam_destroy_001
 * @tc.desc: Verify that OH_NNQuantParam_Destroy returns OH_NN_INVALID_PARAMETER when the argument is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_001");
    OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnquantparam_destroy_002
 * @tc.desc: Verify that OH_NNQuantParam_Destroy returns OH_NN_INVALID_PARAMETER when the pointed-to handle is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_002");
    // Pass a pointer to a null handle; no quant param needs to be created for this case.
    NN_QuantParam* quantParams = nullptr;
    OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(&quantParams);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnquantparam_destroy_003
 * @tc.desc: Verify that OH_NNQuantParam_Destroy succeeds on a valid quant param handle.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_003");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(&quantParams);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
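
/*
 * Illustrative sketch, not part of the original suite: the double-pointer
 * signature of OH_NNQuantParam_Destroy suggests the callee resets the caller's
 * handle to nullptr on success. That post-condition is an assumption, checked
 * here explicitly rather than stated as fact.
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_nulls_handle_sketch, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_nulls_handle_sketch");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    ASSERT_NE(nullptr, quantParams);
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_Destroy(&quantParams));
    // Assumed post-condition: the handle is nulled so it cannot dangle.
    EXPECT_EQ(nullptr, quantParams);
}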

/*
 * @tc.name: oh_nnmodel_addtensortomodel_001
 * @tc.desc: Verify that OH_NNModel_AddTensorToModel returns OH_NN_INVALID_PARAMETER when the model is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_001");
    TensorDesc* tensorDescImpl = new (std::nothrow) TensorDesc();
    NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(tensorDescImpl);
    OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(nullptr, tensor);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_addtensortomodel_002
 * @tc.desc: Verify that OH_NNModel_AddTensorToModel returns OH_NN_INVALID_PARAMETER when the tensor descriptor is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_002");
    OH_NNModel* model = OH_NNModel_Construct();
    OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(model, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_addtensortomodel_003
 * @tc.desc: Verify that OH_NNModel_AddTensorToModel succeeds with a valid model and tensor descriptor.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_003");
    OH_NNModel* model = OH_NNModel_Construct();
    TensorDesc* tensorDescImpl = new (std::nothrow) TensorDesc();
    NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(tensorDescImpl);
    OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(model, tensor);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
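
/*
 * Illustrative sketch, not part of the original suite: builds the tensor
 * description through the public NN_TensorDesc API instead of casting the
 * internal TensorDesc class, then releases both handles. It assumes the model
 * copies the description (the parameter is read-only), so destroying the
 * descriptor afterwards is safe; the shape and data type are hypothetical.
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_public_api_sketch, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_public_api_sketch");
    OH_NNModel* model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);

    // Describe a small float tensor via the public descriptor API.
    NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
    ASSERT_NE(nullptr, tensorDesc);
    int32_t shape[2] = {1, 4};
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNTensorDesc_SetShape(tensorDesc, shape, 2));
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNTensorDesc_SetDataType(tensorDesc, OH_NN_FLOAT32));

    EXPECT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensorToModel(model, tensorDesc));

    // Assumed safe because the model keeps its own copy of the description.
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNTensorDesc_Destroy(&tensorDesc));
    OH_NNModel_Destroy(&model);
}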

/*
 * @tc.name: oh_nnmodel_settensorquantparams_001
 * @tc.desc: Verify that OH_NNModel_SetTensorQuantParams returns OH_NN_INVALID_PARAMETER when the model is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_001");
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    uint32_t index = 10;
    OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(nullptr, index, quantParams);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_settensorquantparams_002
 * @tc.desc: Verify that OH_NNModel_SetTensorQuantParams returns OH_NN_INVALID_PARAMETER when the quant params are nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_002");
    OH_NNModel* model = OH_NNModel_Construct();
    uint32_t index = 10;
    OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(model, index, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_settensorquantparams_003
 * @tc.desc: Verify that OH_NNModel_SetTensorQuantParams returns OH_NN_INVALID_PARAMETER when the tensor index is out of range.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_003");
    OH_NNModel* model = OH_NNModel_Construct();
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    uint32_t index = 10; // the freshly constructed model has no tensor at this index
    OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(model, index, quantParams);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
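
/*
 * Illustrative sketch, not part of the original suite: the failure above is
 * driven by the out-of-range index, so the success path presumably needs a
 * tensor to exist at the index first. Whether the quant param contents are
 * validated here or later at build time is an assumption, as is the expected
 * OH_NN_SUCCESS result.
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_valid_index_sketch, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_valid_index_sketch");
    OH_NNModel* model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);

    // Add one tensor so that index 0 refers to an existing tensor.
    NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
    ASSERT_NE(nullptr, tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensorToModel(model, tensorDesc));

    // Populate the quant param before attaching it to the tensor.
    NN_QuantParam* quantParams = OH_NNQuantParam_Create();
    ASSERT_NE(nullptr, quantParams);
    double scale = 0.5;
    int32_t zeroPoint = 0;
    uint32_t numBits = 8;
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_SetScales(quantParams, &scale, 1));
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoint, 1));
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_SetNumBits(quantParams, &numBits, 1));

    // Assumed to succeed once the index is valid.
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNModel_SetTensorQuantParams(model, 0, quantParams));

    EXPECT_EQ(OH_NN_SUCCESS, OH_NNQuantParam_Destroy(&quantParams));
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNTensorDesc_Destroy(&tensorDesc));
    OH_NNModel_Destroy(&model);
}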

/*
 * @tc.name: oh_nnmodel_settensortype_001
 * @tc.desc: Verify that OH_NNModel_SetTensorType returns OH_NN_INVALID_PARAMETER when the model is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_001");
    OH_NN_TensorType tensorType = OH_NN_REDUCE_MIN_KEEP_DIMS;
    uint32_t index = 10;
    OH_NN_ReturnCode ret = OH_NNModel_SetTensorType(nullptr, index, tensorType);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_settensortype_002
 * @tc.desc: Verify that OH_NNModel_SetTensorType returns OH_NN_INVALID_PARAMETER when the tensor index is out of range.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_002");
    OH_NNModel* model = OH_NNModel_Construct();
    OH_NN_TensorType tensorType = OH_NN_REDUCE_MIN_COEFF;
    uint32_t index = 10; // the freshly constructed model has no tensor at this index
    OH_NN_ReturnCode ret = OH_NNModel_SetTensorType(model, index, tensorType);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
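
/*
 * Illustrative sketch, not part of the original suite: mirrors the negative
 * case above with a valid index, assuming OH_NNModel_SetTensorType accepts a
 * tensor that exists in the model. OH_NN_TENSOR is the generic data-tensor
 * value of OH_NN_TensorType; the expected OH_NN_SUCCESS result is an
 * assumption.
 */
HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_valid_index_sketch, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_valid_index_sketch");
    OH_NNModel* model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);

    // Add one tensor so that index 0 refers to an existing tensor.
    NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
    ASSERT_NE(nullptr, tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensorToModel(model, tensorDesc));

    // Assumed to succeed because index 0 is now valid.
    EXPECT_EQ(OH_NN_SUCCESS, OH_NNModel_SetTensorType(model, 0, OH_NN_TENSOR));

    EXPECT_EQ(OH_NN_SUCCESS, OH_NNTensorDesc_Destroy(&tensorDesc));
    OH_NNModel_Destroy(&model);
}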
} // namespace Unittest
} // namespace NeuralNetworkRuntime
} // namespace OHOS