/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "nnbackend.h"
#include "nncompiler.h"
#include "device.h"
#include "neural_network_runtime/neural_network_runtime_type.h"
#include "utils.h"
#include "inner_model.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace UnitTest {
class NNCompilerTest : public testing::Test {
public:
    NNCompilerTest() = default;
    ~NNCompilerTest() = default;
    OH_NN_ReturnCode BuildModel(InnerModel& innerModel);
};

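// gmock stub of the Device interface. The test cases script its capability queries
// (model cache, performance mode, priority, float16 support) and its PrepareModel
// result so that NNCompiler can be exercised without a real backend device.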
class MockIDevice : public Device {
public:
    MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
    MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
    MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        std::vector<bool>&));
    MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&));
    MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&,
        bool&));
    MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        const ModelConfig&,
        std::shared_ptr<PreparedModel>&));
    MOCK_METHOD1(AllocateBuffer, void*(size_t));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
    MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
    MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
    MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
};

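// gmock stub of PreparedModel. It is handed back from the mocked PrepareModel call
// so that Build() and CreateExecutor() can complete without real compilation.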
class MockIPreparedModel : public PreparedModel {
public:
    MOCK_METHOD1(ExportModelCache, OH_NN_ReturnCode(std::vector<Buffer>&));
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<IOTensor>&,
        const std::vector<IOTensor>&,
        std::vector<std::vector<int32_t>>&,
        std::vector<bool>&));
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<NN_Tensor*>&,
        const std::vector<NN_Tensor*>&,
        std::vector<std::vector<int32_t>>&,
        std::vector<bool>&));
    MOCK_CONST_METHOD1(GetModelID, OH_NN_ReturnCode(uint32_t&));
    MOCK_METHOD2(GetInputDimRanges, OH_NN_ReturnCode(std::vector<std::vector<uint32_t>>&,
        std::vector<std::vector<uint32_t>>&));
    MOCK_METHOD0(ReleaseBuiltModel, OH_NN_ReturnCode());
};

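// gmock stub of InnerModel covering its build-related interfaces. The cases below
// build a real InnerModel instead, so this mock is available but not used here.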
class MockInnerModel : public InnerModel {
public:
    MOCK_CONST_METHOD0(IsBuild, bool());
    MOCK_METHOD2(BuildFromLiteGraph, OH_NN_ReturnCode(const mindspore::lite::LiteGraph*,
        const ExtensionConfig&));
    MOCK_METHOD2(BuildFromMetaGraph, OH_NN_ReturnCode(const void*, const ExtensionConfig&));
    MOCK_METHOD1(AddTensor, OH_NN_ReturnCode(const OH_NN_Tensor&));
    MOCK_METHOD1(AddTensorDesc, OH_NN_ReturnCode(const NN_TensorDesc*));
    MOCK_METHOD2(SetTensorQuantParam, OH_NN_ReturnCode(uint32_t, const NN_QuantParam*));
    MOCK_METHOD2(SetTensorType, OH_NN_ReturnCode(uint32_t, OH_NN_TensorType));
    MOCK_METHOD3(SetTensorValue, OH_NN_ReturnCode(uint32_t, const void*, size_t));
    MOCK_METHOD4(AddOperation, OH_NN_ReturnCode(OH_NN_OperationType,
        const OH_NN_UInt32Array&,
        const OH_NN_UInt32Array&,
        const OH_NN_UInt32Array&));
    MOCK_METHOD3(GetSupportedOperations, OH_NN_ReturnCode(size_t, const bool**, uint32_t&));
    MOCK_METHOD2(SpecifyInputsAndOutputs, OH_NN_ReturnCode(const OH_NN_UInt32Array&, const OH_NN_UInt32Array&));
    MOCK_METHOD4(SetInputsAndOutputsInfo, OH_NN_ReturnCode(const OH_NN_TensorInfo*, size_t,
        const OH_NN_TensorInfo*, size_t));
    MOCK_METHOD0(Build, OH_NN_ReturnCode());
    MOCK_CONST_METHOD0(GetInputTensors, std::vector<std::shared_ptr<NNTensor>>());
    MOCK_CONST_METHOD0(GetOutputTensors, std::vector<std::shared_ptr<NNTensor>>());
    MOCK_CONST_METHOD0(GetInputTensorDescs, std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>>());
    MOCK_CONST_METHOD0(GetOutputTensorDescs, std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>>());
    MOCK_CONST_METHOD0(GetLiteGraphs, std::shared_ptr<mindspore::lite::LiteGraph>());
    MOCK_CONST_METHOD0(GetMetaGraph, void*());
    MOCK_CONST_METHOD0(GetExtensionConfig, ExtensionConfig());
};

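// Builds a minimal model containing a single Add operator with two float32 inputs of
// shape [1, 2, 2, 3], one fused-activation parameter tensor and one output tensor, so
// that NNCompiler can be constructed from a fully built InnerModel in the cases below.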
OH_NN_ReturnCode NNCompilerTest::BuildModel(InnerModel& innerModel)
{
    // Add the first input tensor of the Add operator: float32, shape [1, 2, 2, 3]
    int32_t inputDims[4] = {1, 2, 2, 3};
    OH_NN_Tensor input1 = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = innerModel.AddTensor(input1);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the second input tensor of the Add operator: float32, shape [1, 2, 2, 3]
    OH_NN_Tensor input2 = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
    ret = innerModel.AddTensor(input2);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the parameter tensor of the Add operator; it specifies the activation type and holds int8 data.
    int32_t activationDims = 1;
    int8_t activationValue = OH_NN_FUSED_NONE;
    OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
    ret = innerModel.AddTensor(activation);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Set the activation type to OH_NN_FUSED_NONE, i.e. no activation function is fused into the operator.
    uint32_t index = 2;
    ret = innerModel.SetTensorValue(index, &activationValue, sizeof(int8_t));
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Add the output tensor of the Add operator: float32, shape [1, 2, 2, 3]
    OH_NN_Tensor output = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
    ret = innerModel.AddTensor(output);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Specify the input, parameter and output indices of the Add operator
    uint32_t inputIndicesValues[2] = {0, 1};
    uint32_t paramIndicesValues = 2;
    uint32_t outputIndicesValues = 3;
    OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
    OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
    OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};

    // Add the Add operator to the model instance
    ret = innerModel.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Specify the input and output indices of the model instance
    ret = innerModel.SpecifyInputsAndOutputs(inputIndices, outputIndices);
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    // Finish building the model instance
    ret = innerModel.Build();
    if (ret != OH_NN_SUCCESS) {
        return ret;
    }

    return ret;
}

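// Test cases for NNCompiler. Each case wires a MockIDevice (and, where needed, a
// MockIPreparedModel) into the compiler and checks the return code of a single
// interface; testing::Mock::AllowLeak silences gmock's leak check for mocks whose
// ownership is handed to compiler instances that the cases deliberately do not free.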
/**
 * @tc.name: nncompilertest_construct_001
 * @tc.desc: Verify that NNCompiler can be constructed from a built model, a device and a backend ID.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_construct_001, TestSize.Level0)
{
    LOGE("NNCompiler nncompilertest_construct_001");
    size_t backendID = 1;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_construct_002
 * @tc.desc: Verify that NNCompiler can be constructed from a device and a backend ID without a model.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_construct_002, TestSize.Level0)
{
    LOGE("NNCompiler nncompilertest_construct_002");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_getbackendid_001
 * @tc.desc: Verify that GetBackendID returns the backend ID passed to the constructor.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_getbackendid_001, TestSize.Level0)
{
    LOGE("GetBackendID nncompilertest_getbackendid_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    size_t ret = nncompiler->GetBackendID();
    EXPECT_NE(0, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setcachedir_001
 * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_001, TestSize.Level0)
{
    LOGE("SetCacheDir nncompilertest_setcachedir_001");
    size_t backendID = 1;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/**
 * @tc.name: nncompilertest_setcachedir_002
 * @tc.desc: Verify that SetCacheDir propagates the error code when IsModelCacheSupported fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_002, TestSize.Level0)
{
    LOGE("SetCacheDir nncompilertest_setcachedir_002");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_OPERATION_FORBIDDEN));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setcachedir_003
 * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when the device does not support model cache.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_003, TestSize.Level0)
{
    LOGE("SetCacheDir nncompilertest_setcachedir_003");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setcachedir_004
 * @tc.desc: Verify that SetCacheDir returns OH_NN_SUCCESS when the device supports model cache.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_004, TestSize.Level0)
{
    LOGE("SetCacheDir nncompilertest_setcachedir_004");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setperformance_001
 * @tc.desc: Verify that SetPerformance accepts OH_NN_PERFORMANCE_NONE when a device is attached.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setperformance_001, TestSize.Level0)
{
    LOGE("SetPerformance nncompilertest_setperformance_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
    OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setperformance_002
 * @tc.desc: Verify that SetPerformance returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setperformance_002, TestSize.Level0)
{
    LOGE("SetPerformance nncompilertest_setperformance_002");
    size_t backendID = 1;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
    OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/**
 * @tc.name: nncompilertest_setperformance_003
 * @tc.desc: Verify that SetPerformance propagates the error when IsPerformanceModeSupported fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setperformance_003, TestSize.Level0)
{
    LOGE("SetPerformance nncompilertest_setperformance_003");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsPerformanceModeSupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
    OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setperformance_004
 * @tc.desc: Verify that SetPerformance returns OH_NN_OPERATION_FORBIDDEN when the device does not support performance mode.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setperformance_004, TestSize.Level0)
{
    LOGE("SetPerformance nncompilertest_setperformance_004");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsPerformanceModeSupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_LOW;
    OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setpriority_001
 * @tc.desc: Verify that SetPriority accepts OH_NN_PRIORITY_NONE when a device is attached.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setpriority_001, TestSize.Level0)
{
    LOGE("SetPriority nncompilertest_setpriority_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
    OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setpriority_002
 * @tc.desc: Verify that SetPriority returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setpriority_002, TestSize.Level0)
{
    LOGE("SetPriority nncompilertest_setpriority_002");
    size_t backendID = 1;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
    OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/**
 * @tc.name: nncompilertest_setpriority_003
 * @tc.desc: Verify that SetPriority propagates the error when IsPrioritySupported fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setpriority_003, TestSize.Level0)
{
    LOGE("SetPriority nncompilertest_setpriority_003");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
    OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setpriority_004
 * @tc.desc: Verify that SetPriority returns OH_NN_OPERATION_FORBIDDEN when the device does not support priority.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setpriority_004, TestSize.Level0)
{
    LOGE("SetPriority nncompilertest_setpriority_004");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
    OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setenablefp16_001
 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_001, TestSize.Level0)
{
    LOGE("SetEnableFp16 nncompilertest_setenablefp16_001");
    size_t backendID = 1;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
    EXPECT_NE(nullptr, nncompiler);

    bool isFp16 = true;
    OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/**
 * @tc.name: nncompilertest_setenablefp16_002
 * @tc.desc: Verify that SetEnableFp16 propagates the error when IsFloat16PrecisionSupported fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_002, TestSize.Level0)
{
    LOGE("SetEnableFp16 nncompilertest_setenablefp16_002");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsFloat16PrecisionSupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    bool isFp16 = true;
    OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setenablefp16_003
 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_OPERATION_FORBIDDEN when the device does not support float16 precision.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_003, TestSize.Level0)
{
    LOGE("SetEnableFp16 nncompilertest_setenablefp16_003");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsFloat16PrecisionSupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    bool isFp16 = true;
    OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setenablefp16_004
 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_SUCCESS when float16 precision is disabled.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_004, TestSize.Level0)
{
    LOGE("SetEnableFp16 nncompilertest_setenablefp16_004");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    bool isFp16 = false;
    OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_isbuild_001
 * @tc.desc: Verify that IsBuild returns false before Build has been called.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_isbuild_001, TestSize.Level0)
{
    LOGE("IsBuild nncompilertest_isbuild_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    bool ret = nncompiler->IsBuild();
    EXPECT_EQ(false, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_build_001
 * @tc.desc: Verify that Build returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_001, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_001");
    size_t backendID = 1;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/**
 * @tc.name: nncompilertest_build_002
 * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the compiler has no model.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_002, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_002");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_build_003
 * @tc.desc: Verify that Build succeeds for a built model and that a second Build call returns OH_NN_OPERATION_FORBIDDEN.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_003, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_003");
    size_t backendID = 1;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    OH_NN_ReturnCode retBuild = nncompiler->Build();
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, retBuild);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_build_004
 * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the inner model has not been built.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_004, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_004");
    size_t backendID = 1;
    InnerModel innerModel;
    void* model = &innerModel;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_build_005
 * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the cache version is invalid (UINT32_MAX).
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_005, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_005");
    size_t backendID = 1;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = UINT32_MAX;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_build_006
 * @tc.desc: Verify that Build returns OH_NN_FAILED when the configured cache directory is invalid.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_006, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_006");
    size_t backendID = 1;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_build_007
 * @tc.desc: Verify that Build succeeds for a built model without a cache configuration.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_build_007, TestSize.Level0)
{
    LOGE("Build nncompilertest_build_007");
    size_t backendID = 1;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->Build();
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_savetocachefile_001
 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_INVALID_PARAMETER when no cache directory has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_001, TestSize.Level0)
{
    LOGE("SaveToCacheFile nncompilertest_savetocachefile_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->SaveToCacheFile();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_savetocachefile_002
 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_INVALID_PARAMETER when the cache version is invalid (UINT32_MAX).
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_002, TestSize.Level0)
{
    LOGE("SaveToCacheFile nncompilertest_savetocachefile_002");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = UINT32_MAX;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, retSave);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_savetocachefile_003
 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_FAILED when the compiler holds no built model.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_003, TestSize.Level0)
{
    LOGE("SaveToCacheFile nncompilertest_savetocachefile_003");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
    EXPECT_EQ(OH_NN_FAILED, retSave);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_savetocachefile_004
 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_FAILED for a built compiler when the cache directory is invalid.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_004, TestSize.Level0)
{
    LOGE("SaveToCacheFile nncompilertest_savetocachefile_004");
    size_t backendID = 1;

    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode retBuild = nncompiler->Build();
    EXPECT_EQ(OH_NN_SUCCESS, retBuild);

    std::string cacheModelPath = "mock";
    uint32_t version = 0;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
    EXPECT_EQ(OH_NN_FAILED, retSave);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_restorefromcachefile_001
 * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when no cache directory has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_001, TestSize.Level0)
{
    LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_restorefromcachefile_002
 * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when the cache version is invalid (UINT32_MAX).
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_002, TestSize.Level0)
{
    LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_002");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::string cacheModelPath = "mock";
    uint32_t version = UINT32_MAX;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_restorefromcachefile_003
 * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER for a built compiler when the cache version is invalid (UINT32_MAX).
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_003, TestSize.Level0)
{
    LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_003");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    std::shared_ptr<MockIPreparedModel> prepared = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS;
        }));

    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    EXPECT_CALL(*((MockIDevice *) device.get()),
        PrepareModel(testing::A<std::shared_ptr<const mindspore::lite::LiteGraph>>(), ::testing::_, ::testing::_))
        .WillOnce(Invoke([&prepared](std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                     const ModelConfig& config,
                                     std::shared_ptr<PreparedModel>& preparedModel) {
            preparedModel = prepared;
            return OH_NN_SUCCESS;
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode retBuild = nncompiler->Build();
    EXPECT_EQ(OH_NN_SUCCESS, retBuild);

    std::string cacheModelPath = "/data/data";
    uint32_t version = UINT32_MAX;
    OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
    EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);

    OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(device.get());
    testing::Mock::AllowLeak(prepared.get());
}

/**
 * @tc.name: nncompilertest_savetocachebuffer_001
 * @tc.desc: Verify that SaveToCacheBuffer returns OH_NN_UNSUPPORTED, since caching to a buffer is not supported.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_savetocachebuffer_001, TestSize.Level0)
{
    LOGE("SaveToCacheBuffer nncompilertest_savetocachebuffer_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    size_t length = 10;
    size_t* modelSize = &length;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    OH_NN_ReturnCode ret = nncompiler->SaveToCacheBuffer(model, length, modelSize);
    EXPECT_EQ(OH_NN_UNSUPPORTED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_restorefromcachebuffer_001
 * @tc.desc: Verify that RestoreFromCacheBuffer returns OH_NN_UNSUPPORTED, since restoring from a buffer is not supported.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachebuffer_001, TestSize.Level0)
{
    LOGE("RestoreFromCacheBuffer nncompilertest_restorefromcachebuffer_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    size_t length = 10;
    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheBuffer(model, length);
    EXPECT_EQ(OH_NN_UNSUPPORTED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setextensionconfig_001
 * @tc.desc: Verify that SetExtensionConfig returns OH_NN_SUCCESS for an empty configuration map.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setextensionconfig_001, TestSize.Level0)
{
    LOGE("SetExtensionConfig nncompilertest_setextensionconfig_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::unordered_map<std::string, std::vector<char>> configs;
    OH_NN_ReturnCode ret = nncompiler->SetExtensionConfig(configs);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_setoptions_001
 * @tc.desc: Verify that SetOptions returns OH_NN_UNSUPPORTED, since options are not supported by NNCompiler.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_setoptions_001, TestSize.Level0)
{
    LOGE("SetOptions nncompilertest_setoptions_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    std::vector<std::shared_ptr<void>> options;
    OH_NN_ReturnCode ret = nncompiler->SetOptions(options);
    EXPECT_EQ(OH_NN_UNSUPPORTED, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_createexecutor_001
 * @tc.desc: Verify that CreateExecutor returns nullptr when the compiler has not been built.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_001, TestSize.Level0)
{
    LOGE("CreateExecutor nncompilertest_createexecutor_001");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    NNExecutor* ret = nncompiler->CreateExecutor();
    EXPECT_EQ(nullptr, ret);

    testing::Mock::AllowLeak(device.get());
}

/**
 * @tc.name: nncompilertest_createexecutor_002
 * @tc.desc: Verify that CreateExecutor returns nullptr when the compiler has no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_002, TestSize.Level0)
{
    LOGE("CreateExecutor nncompilertest_createexecutor_002");
    size_t backendID = 1;

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
    EXPECT_NE(nullptr, nncompiler);

    NNExecutor* ret = nncompiler->CreateExecutor();
    EXPECT_EQ(nullptr, ret);
}

/**
 * @tc.name: nncompilertest_createexecutor_003
 * @tc.desc: Verify that CreateExecutor returns a valid NNExecutor after Build succeeds with a mocked prepared model.
 * @tc.type: FUNC
 */
HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_003, TestSize.Level0)
{
    LOGE("CreateExecutor nncompilertest_createexecutor_003");
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    std::shared_ptr<MockIPreparedModel> prepared = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
        .WillOnce(Invoke([](bool& isSupportedCache) {
            // Report cache support through the reference parameter
            isSupportedCache = true;
            return OH_NN_SUCCESS; // report success
        }));

    InnerModel innerModel;
    BuildModel(innerModel);
    void* model = &innerModel;
    EXPECT_CALL(*((MockIDevice *) device.get()),
        PrepareModel(testing::A<std::shared_ptr<const mindspore::lite::LiteGraph>>(), ::testing::_, ::testing::_))
        .WillOnce(Invoke([&prepared](std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                     const ModelConfig& config,
                                     std::shared_ptr<PreparedModel>& preparedModel) {
            // Hand back the mocked prepared model through the reference parameter
            preparedModel = prepared;
            return OH_NN_SUCCESS; // report success
        }));

    NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
    EXPECT_NE(nullptr, nncompiler);

    OH_NN_ReturnCode retBuild = nncompiler->Build();
    EXPECT_EQ(OH_NN_SUCCESS, retBuild);

    NNExecutor* ret = nncompiler->CreateExecutor();
    EXPECT_NE(nullptr, ret);

    delete nncompiler;
    nncompiler = nullptr;

    testing::Mock::AllowLeak(device.get());
    testing::Mock::AllowLeak(prepared.get());
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS