1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include <gtest/gtest.h>
17 #include <gmock/gmock.h>
18 
19 #include "nnbackend.h"
20 #include "nncompiler.h"
21 #include "device.h"
22 #include "neural_network_runtime/neural_network_runtime_type.h"
23 #include "utils.h"
24 #include "inner_model.h"
25 
26 using namespace testing;
27 using namespace testing::ext;
28 using namespace OHOS::NeuralNetworkRuntime;
29 
30 namespace OHOS {
31 namespace NeuralNetworkRuntime {
32 namespace UnitTest {
33 class NNCompilerTest : public testing::Test {
34 public:
35     NNCompilerTest() = default;
36     ~NNCompilerTest() = default;
37     OH_NN_ReturnCode BuildModel(InnerModel& innerModel);
38 };
39 
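// Mock of the Device interface; stubs the backend capability queries (model cache, FP16, performance, priority)
// and model preparation calls exercised by the tests below.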
40 class MockIDevice : public Device {
41 public:
42     MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
43     MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
44     MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
45     MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
46     MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
47     MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
48         std::vector<bool>&));
49     MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
50     MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
51     MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
52     MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
53     MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
54     MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
55                                           const ModelConfig&,
56                                           std::shared_ptr<PreparedModel>&));
57     MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
58                                           const ModelConfig&,
59                                           std::shared_ptr<PreparedModel>&));
60     MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
61                                                         const ModelConfig&,
62                                                         std::shared_ptr<PreparedModel>&,
63                                                         bool&));
64     MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
65                                                  const ModelConfig&,
66                                                  std::shared_ptr<PreparedModel>&));
67     MOCK_METHOD1(AllocateBuffer, void*(size_t));
68     MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
69     MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
70     MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
71     MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
72     MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
73     MOCK_METHOD1(ReadOpVersion, OH_NN_ReturnCode(int&));
74 };
75 
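// Mock of the PreparedModel interface; stands in for a compiled model when PrepareModel is stubbed to succeed.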
76 class MockIPreparedModel : public PreparedModel {
77 public:
78     MOCK_METHOD1(ExportModelCache, OH_NN_ReturnCode(std::vector<Buffer>&));
79     MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<IOTensor>&,
80                                  const std::vector<IOTensor>&,
81                                  std::vector<std::vector<int32_t>>&,
82                                  std::vector<bool>&));
83     MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<NN_Tensor*>&,
84                                  const std::vector<NN_Tensor*>&,
85                                  std::vector<std::vector<int32_t>>&,
86                                  std::vector<bool>&));
87     MOCK_CONST_METHOD1(GetModelID, OH_NN_ReturnCode(uint32_t&));
88     MOCK_METHOD2(GetInputDimRanges, OH_NN_ReturnCode(std::vector<std::vector<uint32_t>>&,
89                                                std::vector<std::vector<uint32_t>>&));
90     MOCK_METHOD0(ReleaseBuiltModel, OH_NN_ReturnCode());
91 };
92 
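// Mock of InnerModel covering its build and query interfaces.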
93 class MockInnerModel : public InnerModel {
94 public:
95     MOCK_CONST_METHOD0(IsBuild, bool());
96     MOCK_METHOD2(BuildFromLiteGraph, OH_NN_ReturnCode(const mindspore::lite::LiteGraph*,
97                                         const ExtensionConfig&));
98     MOCK_METHOD2(BuildFromMetaGraph, OH_NN_ReturnCode(const void*, const ExtensionConfig&));
99     MOCK_METHOD1(AddTensor, OH_NN_ReturnCode(const OH_NN_Tensor&));
100     MOCK_METHOD1(AddTensorDesc, OH_NN_ReturnCode(const NN_TensorDesc*));
101     MOCK_METHOD2(SetTensorQuantParam, OH_NN_ReturnCode(uint32_t, const NN_QuantParam*));
102     MOCK_METHOD2(SetTensorType, OH_NN_ReturnCode(uint32_t, OH_NN_TensorType));
103     MOCK_METHOD3(SetTensorValue, OH_NN_ReturnCode(uint32_t, const void*, size_t));
104     MOCK_METHOD4(AddOperation, OH_NN_ReturnCode(OH_NN_OperationType,
105                                   const OH_NN_UInt32Array&,
106                                   const OH_NN_UInt32Array&,
107                                   const OH_NN_UInt32Array&));
108     MOCK_METHOD3(GetSupportedOperations, OH_NN_ReturnCode(size_t, const bool**, uint32_t&));
109     MOCK_METHOD2(SpecifyInputsAndOutputs, OH_NN_ReturnCode(const OH_NN_UInt32Array&, const OH_NN_UInt32Array&));
110     MOCK_METHOD4(SetInputsAndOutputsInfo, OH_NN_ReturnCode(const OH_NN_TensorInfo*, size_t,
111         const OH_NN_TensorInfo*, size_t));
112     MOCK_METHOD0(Build, OH_NN_ReturnCode());
113     MOCK_CONST_METHOD0(GetInputTensors, std::vector<std::shared_ptr<NNTensor>>());
114     MOCK_CONST_METHOD0(GetOutputTensors, std::vector<std::shared_ptr<NNTensor>>());
115     MOCK_CONST_METHOD0(GetInputTensorDescs, std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>>());
116     MOCK_CONST_METHOD0(GetOutputTensorDescs, std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>>());
117     MOCK_CONST_METHOD0(GetLiteGraphs, std::shared_ptr<mindspore::lite::LiteGraph>());
118     MOCK_CONST_METHOD0(GetMetaGraph, void*());
119     MOCK_CONST_METHOD0(GetExtensionConfig, ExtensionConfig());
120 };
121 
122 
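// Builds a minimal single-operator model in innerModel: one Add op with two float32 inputs of shape
// [1, 2, 2, 3], an int8 activation-type parameter tensor, and one float32 output.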
123 OH_NN_ReturnCode NNCompilerTest::BuildModel(InnerModel& innerModel)
124 {
125     int32_t inputDims[4] = {1, 2, 2, 3};
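    // Add the first input tensor of the Add operator: type float32, shape [1, 2, 2, 3]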
126     OH_NN_Tensor input1 = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
127     OH_NN_ReturnCode ret = innerModel.AddTensor(input1);
128     if (ret != OH_NN_SUCCESS) {
129         return ret;
130     }
131 
132     // Add the second input tensor of the Add operator: type float32, shape [1, 2, 2, 3]
133     OH_NN_Tensor input2 = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
134     ret = innerModel.AddTensor(input2);
135     if (ret != OH_NN_SUCCESS) {
136         return ret;
137     }
138 
139     // Add the parameter tensor of the Add operator; it selects the activation function type and has data type int8.
140     int32_t activationDims = 1;
141     int8_t activationValue = OH_NN_FUSED_NONE;
142     OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
143     ret = innerModel.AddTensor(activation);
144     if (ret != OH_NN_SUCCESS) {
145         return ret;
146     }
147 
148     // Set the activation function type to OH_NN_FUSED_NONE, meaning no activation function is fused into the operator.
149     uint32_t index = 2;
150     ret = innerModel.SetTensorValue(index, &activationValue, sizeof(int8_t));
151     if (ret != OH_NN_SUCCESS) {
152         return ret;
153     }
154 
155     // Set the output tensor of the Add operator: type float32, shape [1, 2, 2, 3]
156     OH_NN_Tensor output = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
157     ret = innerModel.AddTensor(output);
158     if (ret != OH_NN_SUCCESS) {
159         return ret;
160     }
161 
162     // Specify the input, parameter, and output indices of the Add operator
163     uint32_t inputIndicesValues[2] = {0, 1};
164     uint32_t paramIndicesValues = 2;
165     uint32_t outputIndicesValues = 3;
166     OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
167     OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
168     OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};
169 
170     // Add the Add operator to the model instance
171     ret = innerModel.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
172     if (ret != OH_NN_SUCCESS) {
173         return ret;
174     }
175 
176     // Specify the input and output indices of the model instance
177     ret = innerModel.SpecifyInputsAndOutputs(inputIndices, outputIndices);
178     if (ret != OH_NN_SUCCESS) {
179         return ret;
180     }
181 
182     // Finish building the model instance
183     ret = innerModel.Build();
184     if (ret != OH_NN_SUCCESS) {
185         return ret;
186     }
187 
188     return ret;
189 }
190 
191 /**
192  * @tc.name: nncompilertest_construct_001
193  * @tc.desc: Verify that constructing an NNCompiler from a model, device, and backend ID yields a non-null compiler.
194  * @tc.type: FUNC
195  */
196 HWTEST_F(NNCompilerTest, nncompilertest_construct_001, TestSize.Level0)
197 {
198     LOGE("NNCompiler nncompilertest_construct_001");
199     size_t backendID = 1;
200     InnerModel innerModel;
201     BuildModel(innerModel);
202     void* model = &innerModel;
203     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
204 
205     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
206     EXPECT_NE(nullptr, nncompiler);
207 
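    // Silence gmock's leaked-mock check: the compiler (and its reference to the mock device) is not released in this case.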
208     testing::Mock::AllowLeak(device.get());
209 }
210 
211 /**
212  * @tc.name: nncompilertest_construct_002
213  * @tc.desc: Verify that constructing an NNCompiler from a device and backend ID yields a non-null compiler.
214  * @tc.type: FUNC
215  */
216 HWTEST_F(NNCompilerTest, nncompilertest_construct_002, TestSize.Level0)
217 {
218     LOGE("NNCompiler nncompilertest_construct_002");
219     size_t backendID = 1;
220     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
221 
222     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
223     EXPECT_NE(nullptr, nncompiler);
224 
225     testing::Mock::AllowLeak(device.get());
226 }
227 
228 /**
229  * @tc.name: nncompilertest_getbackendid_001
230  * @tc.desc: Verify that GetBackendID returns the non-zero backend ID passed at construction.
231  * @tc.type: FUNC
232  */
233 HWTEST_F(NNCompilerTest, nncompilertest_getbackendid_001, TestSize.Level0)
234 {
235     LOGE("GetBackendID nncompilertest_getbackendid_001");
236     size_t backendID = 1;
237     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
238 
239     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
240     EXPECT_NE(nullptr, nncompiler);
241 
242     size_t ret = nncompiler->GetBackendID();
243     EXPECT_NE(0, ret);
244 
245     testing::Mock::AllowLeak(device.get());
246 }
247 
248 /**
249  * @tc.name: nncompilertest_setcachedir_001
250  * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
251  * @tc.type: FUNC
252  */
253 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_001, TestSize.Level0)
254 {
255     LOGE("SetCacheDir nncompilertest_setcachedir_001");
256     size_t backendID = 1;
257 
258     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
259     EXPECT_NE(nullptr, nncompiler);
260 
261     std::string cacheModelPath = "mock";
262     uint32_t version = 0;
263     OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
264     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
265 }
266 
267 /**
268  * @tc.name: nncompilertest_setcachedir_002
269  * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when querying model-cache support fails.
270  * @tc.type: FUNC
271  */
272 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_002, TestSize.Level0)
273 {
274     LOGE("SetCacheDir nncompilertest_setcachedir_002");
275     size_t backendID = 1;
276 
277     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
278     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
279         .WillRepeatedly(::testing::Return(OH_NN_OPERATION_FORBIDDEN));
280 
281     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
282     EXPECT_NE(nullptr, nncompiler);
283 
284     std::string cacheModelPath = "mock";
285     uint32_t version = 0;
286     OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
287     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
288 
289     testing::Mock::AllowLeak(device.get());
290 }
291 
292 /**
293  * @tc.name: nncompilertest_setcachedir_003
294  * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when the device reports no model-cache support.
295  * @tc.type: FUNC
296  */
297 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_003, TestSize.Level0)
298 {
299     LOGE("SetCacheDir nncompilertest_setcachedir_003");
300     size_t backendID = 1;
301 
302     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
303     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
304         .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
305 
306     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
307     EXPECT_NE(nullptr, nncompiler);
308 
309     std::string cacheModelPath = "mock";
310     uint32_t version = 0;
311     OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
312     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
313 
314     testing::Mock::AllowLeak(device.get());
315 }
316 
317 /**
318  * @tc.name: nncompilertest_setcachedir_004
319  * @tc.desc: Verify that SetCacheDir returns OH_NN_SUCCESS when the device supports model cache.
320  * @tc.type: FUNC
321  */
322 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_004, TestSize.Level0)
323 {
324     LOGE("SetCacheDir nncompilertest_setcachedir_004");
325     size_t backendID = 1;
326 
327     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
328     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
329         .WillOnce(Invoke([](bool& isSupportedCache) {
330                 // Directly modify the reference parameter passed in
331                 isSupportedCache = true;
332                 return OH_NN_SUCCESS; // assumed success status code
333             }));
334 
335     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
336     EXPECT_NE(nullptr, nncompiler);
337 
338     std::string cacheModelPath = "mock";
339     uint32_t version = 0;
340     OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
341     EXPECT_EQ(OH_NN_SUCCESS, ret);
342 
343     testing::Mock::AllowLeak(device.get());
344 }
345 
346 /**
347  * @tc.name: nncompilertest_setperformance_001
348  * @tc.desc: Verify that SetPerformance returns OH_NN_SUCCESS for OH_NN_PERFORMANCE_NONE.
349  * @tc.type: FUNC
350  */
351 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_001, TestSize.Level0)
352 {
353     LOGE("SetPerformance nncompilertest_setperformance_001");
354     size_t backendID = 1;
355     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
356 
357     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
358     EXPECT_NE(nullptr, nncompiler);
359 
360     OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
361     OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
362     EXPECT_EQ(OH_NN_SUCCESS, ret);
363 
364     testing::Mock::AllowLeak(device.get());
365 }
366 
367 /**
368  * @tc.name: nncompilertest_setperformance_002
369  * @tc.desc: Verify that SetPerformance returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
370  * @tc.type: FUNC
371  */
372 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_002, TestSize.Level0)
373 {
374     LOGE("SetPerformance nncompilertest_setperformance_002");
375     size_t backendID = 1;
376 
377     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
378     EXPECT_NE(nullptr, nncompiler);
379 
380     OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
381     OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
382     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
383 }
384 
385 /**
386  * @tc.name: nncompilertest_setperformance_003
387  * @tc.desc: Verify that SetPerformance returns OH_NN_FAILED when querying performance-mode support fails.
388  * @tc.type: FUNC
389  */
390 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_003, TestSize.Level0)
391 {
392     LOGE("SetPerformance nncompilertest_setperformance_003");
393     size_t backendID = 1;
394 
395     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
396     EXPECT_CALL(*((MockIDevice *) device.get()), IsPerformanceModeSupported(::testing::_))
397         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
398 
399     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
400     EXPECT_NE(nullptr, nncompiler);
401 
402     OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
403     OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
404     EXPECT_EQ(OH_NN_FAILED, ret);
405 
406     testing::Mock::AllowLeak(device.get());
407 }
408 
409 /**
410  * @tc.name: nncompilertest_setperformance_004
411  * @tc.desc: Verify that SetPerformance returns OH_NN_OPERATION_FORBIDDEN when the device does not support the performance mode.
412  * @tc.type: FUNC
413  */
414 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_004, TestSize.Level0)
415 {
416     LOGE("SetPerformance nncompilertest_setperformance_004");
417     size_t backendID = 1;
418 
419     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
420     EXPECT_CALL(*((MockIDevice *) device.get()), IsPerformanceModeSupported(::testing::_))
421         .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
422 
423     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
424     EXPECT_NE(nullptr, nncompiler);
425 
426     OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_LOW;
427     OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
428     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
429 
430     testing::Mock::AllowLeak(device.get());
431 }
432 
433 /**
434  * @tc.name: nncompilertest_setpriority_001
435  * @tc.desc: Verify that SetPriority returns OH_NN_SUCCESS for OH_NN_PRIORITY_NONE.
436  * @tc.type: FUNC
437  */
438 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_001, TestSize.Level0)
439 {
440     LOGE("SetPriority nncompilertest_setpriority_001");
441     size_t backendID = 1;
442     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
443 
444     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
445     EXPECT_NE(nullptr, nncompiler);
446 
447     OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
448     OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
449     EXPECT_EQ(OH_NN_SUCCESS, ret);
450 
451     testing::Mock::AllowLeak(device.get());
452 }
453 
454 /**
455  * @tc.name: nncompilertest_setpriority_002
456  * @tc.desc: Verify that SetPriority returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
457  * @tc.type: FUNC
458  */
459 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_002, TestSize.Level0)
460 {
461     LOGE("SetPriority nncompilertest_setpriority_002");
462     size_t backendID = 1;
463 
464     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
465     EXPECT_NE(nullptr, nncompiler);
466 
467     OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
468     OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
469     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
470 }
471 
472 /**
473  * @tc.name: nncompilertest_setpriority_003
474  * @tc.desc: Verify that SetPriority returns OH_NN_FAILED when querying priority support fails.
475  * @tc.type: FUNC
476  */
477 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_003, TestSize.Level0)
478 {
479     LOGE("SetPriority nncompilertest_setpriority_003");
480     size_t backendID = 1;
481     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
482     EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
483         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
484 
485     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
486     EXPECT_NE(nullptr, nncompiler);
487 
488     OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
489     OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
490     EXPECT_EQ(OH_NN_FAILED, ret);
491 
492     testing::Mock::AllowLeak(device.get());
493 }
494 
495 /**
496  * @tc.name: nncompilertest_setpriority_004
497  * @tc.desc: Verify that SetPriority returns OH_NN_OPERATION_FORBIDDEN when the device does not support the priority.
498  * @tc.type: FUNC
499  */
500 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_004, TestSize.Level0)
501 {
502     LOGE("SetPriority nncompilertest_setpriority_004");
503     size_t backendID = 1;
504     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
505     EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
506         .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
507 
508     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
509     EXPECT_NE(nullptr, nncompiler);
510 
511     OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
512     OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
513     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
514 
515     testing::Mock::AllowLeak(device.get());
516 }
517 
518 /**
519  * @tc.name: nncompilertest_setenablefp16_001
520  * @tc.desc: Verify that SetEnableFp16 returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
521  * @tc.type: FUNC
522  */
523 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_001, TestSize.Level0)
524 {
525     LOGE("SetEnableFp16 nncompilertest_setenablefp16_001");
526     size_t backendID = 1;
527 
528     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
529     EXPECT_NE(nullptr, nncompiler);
530 
531     bool isFp16 = true;
532     OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
533     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
534 }
535 
536 /**
537  * @tc.name: nncompilertest_setenablefp16_002
538  * @tc.desc: Verify that SetEnableFp16 returns OH_NN_FAILED when querying float16 support fails.
539  * @tc.type: FUNC
540  */
541 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_002, TestSize.Level0)
542 {
543     LOGE("SetEnableFp16 nncompilertest_setenablefp16_002");
544     size_t backendID = 1;
545     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
546     EXPECT_CALL(*((MockIDevice *) device.get()), IsFloat16PrecisionSupported(::testing::_))
547         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
548 
549     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
550     EXPECT_NE(nullptr, nncompiler);
551 
552     bool isFp16 = true;
553     OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
554     EXPECT_EQ(OH_NN_FAILED, ret);
555 
556     testing::Mock::AllowLeak(device.get());
557 }
558 
559 /**
560  * @tc.name: nncompilertest_setenablefp16_003
561  * @tc.desc: Verify that SetEnableFp16 returns OH_NN_OPERATION_FORBIDDEN when the device does not support float16 precision.
562  * @tc.type: FUNC
563  */
564 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_003, TestSize.Level0)
565 {
566     LOGE("SetEnableFp16 nncompilertest_setenablefp16_003");
567     size_t backendID = 1;
568     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
569     EXPECT_CALL(*((MockIDevice *) device.get()), IsFloat16PrecisionSupported(::testing::_))
570         .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
571 
572     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
573     EXPECT_NE(nullptr, nncompiler);
574 
575     bool isFp16 = true;
576     OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
577     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
578 
579     testing::Mock::AllowLeak(device.get());
580 }
581 
582 /**
583  * @tc.name: nncompilertest_setenablefp16_004
584  * @tc.desc: Verify that SetEnableFp16 returns OH_NN_SUCCESS when float16 is disabled.
585  * @tc.type: FUNC
586  */
587 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_004, TestSize.Level0)
588 {
589     LOGE("SetEnableFp16 nncompilertest_setenablefp16_004");
590     size_t backendID = 1;
591     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
592     EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
593         .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
594 
595     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
596     EXPECT_NE(nullptr, nncompiler);
597 
598     bool isFp16 = false;
599     OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
600     EXPECT_EQ(OH_NN_SUCCESS, ret);
601 
602     testing::Mock::AllowLeak(device.get());
603 }
604 
605 /**
606  * @tc.name: nncompilertest_isbuild_001
607  * @tc.desc: Verify that IsBuild returns false before Build has been called.
608  * @tc.type: FUNC
609  */
610 HWTEST_F(NNCompilerTest, nncompilertest_isbuild_001, TestSize.Level0)
611 {
612     LOGE("IsBuild nncompilertest_isbuild_001");
613     size_t backendID = 1;
614     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
615 
616     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
617     EXPECT_NE(nullptr, nncompiler);
618 
619     bool ret = nncompiler->IsBuild();
620     EXPECT_EQ(false, ret);
621 
622     testing::Mock::AllowLeak(device.get());
623 }
624 
625 /**
626  * @tc.name: nncompilertest_build_001
627  * @tc.desc: Verify that Build returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
628  * @tc.type: FUNC
629  */
630 HWTEST_F(NNCompilerTest, nncompilertest_build_001, TestSize.Level0)
631 {
632     LOGE("Build nncompilertest_build_001");
633     size_t backendID = 1;
634 
635     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
636     EXPECT_NE(nullptr, nncompiler);
637 
638     OH_NN_ReturnCode ret = nncompiler->Build();
639     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
640 }
641 
642 /**
643  * @tc.name: nncompilertest_build_002
644  * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the compiler has no model.
645  * @tc.type: FUNC
646  */
647 HWTEST_F(NNCompilerTest, nncompilertest_build_002, TestSize.Level0)
648 {
649     LOGE("Build nncompilertest_build_002");
650     size_t backendID = 1;
651     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
652 
653     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
654     EXPECT_NE(nullptr, nncompiler);
655 
656     OH_NN_ReturnCode ret = nncompiler->Build();
657     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
658 
659     testing::Mock::AllowLeak(device.get());
660 }
661 
662 /**
663  * @tc.name: nncompilertest_build_003
664  * @tc.desc: Verify that Build succeeds for a valid model and that a second Build returns OH_NN_OPERATION_FORBIDDEN.
665  * @tc.type: FUNC
666  */
667 HWTEST_F(NNCompilerTest, nncompilertest_build_003, TestSize.Level0)
668 {
669     LOGE("Build nncompilertest_build_003");
670     size_t backendID = 1;
671     InnerModel innerModel;
672     BuildModel(innerModel);
673     void* model = &innerModel;
674     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
675 
676     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
677     EXPECT_NE(nullptr, nncompiler);
678 
679     OH_NN_ReturnCode ret = nncompiler->Build();
680     EXPECT_EQ(OH_NN_SUCCESS, ret);
681 
682     OH_NN_ReturnCode retBuild = nncompiler->Build();
683     EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, retBuild);
684 
685     testing::Mock::AllowLeak(device.get());
686 }
687 
688 /**
689  * @tc.name: nncompilertest_build_004
690  * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the inner model has not been built.
691  * @tc.type: FUNC
692  */
693 HWTEST_F(NNCompilerTest, nncompilertest_build_004, TestSize.Level0)
694 {
695     LOGE("Build nncompilertest_build_004");
696     size_t backendID = 1;
697     InnerModel innerModel;
698     void* model = &innerModel;
699     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
700 
701     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
702     EXPECT_NE(nullptr, nncompiler);
703 
704     OH_NN_ReturnCode ret = nncompiler->Build();
705     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
706 
707     testing::Mock::AllowLeak(device.get());
708 }
709 
710 /**
711  * @tc.name: nncompilertest_build_005
712  * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the cache version is UINT32_MAX.
713  * @tc.type: FUNC
714  */
715 HWTEST_F(NNCompilerTest, nncompilertest_build_005, TestSize.Level0)
716 {
717     LOGE("Build nncompilertest_build_005");
718     size_t backendID = 1;
719     InnerModel innerModel;
720     BuildModel(innerModel);
721     void* model = &innerModel;
722     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
723     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
724         .WillOnce(Invoke([](bool& isSupportedCache) {
725                 // Directly modify the reference parameter passed in
726                 isSupportedCache = true;
727                 return OH_NN_SUCCESS; // assumed success status code
728             }));
729 
730     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
731     EXPECT_NE(nullptr, nncompiler);
732 
733     std::string cacheModelPath = "mock";
734     uint32_t version = UINT32_MAX;
735     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
736     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
737 
738     OH_NN_ReturnCode ret = nncompiler->Build();
739     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
740 
741     testing::Mock::AllowLeak(device.get());
742 }
743 
744 /**
745  * @tc.name: nncompilertest_build_006
746  * @tc.desc: Verify that Build returns OH_NN_FAILED when a cache directory is set but caching fails.
747  * @tc.type: FUNC
748  */
749 HWTEST_F(NNCompilerTest, nncompilertest_build_006, TestSize.Level0)
750 {
751     LOGE("Build nncompilertest_build_006");
752     size_t backendID = 1;
753     InnerModel innerModel;
754     BuildModel(innerModel);
755     void* model = &innerModel;
756     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
757     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
758         .WillOnce(Invoke([](bool& isSupportedCache) {
759                 // Directly modify the reference parameter passed in
760                 isSupportedCache = true;
761                 return OH_NN_SUCCESS; // assumed success status code
762             }));
763 
764     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
765     EXPECT_NE(nullptr, nncompiler);
766 
767     std::string cacheModelPath = "mock";
768     uint32_t version = 0;
769     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
770     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
771 
772     OH_NN_ReturnCode ret = nncompiler->Build();
773     EXPECT_EQ(OH_NN_FAILED, ret);
774 
775     testing::Mock::AllowLeak(device.get());
776 }
777 
778 /**
779  * @tc.name: nncompilertest_build_007
780  * @tc.desc: Verify that Build returns OH_NN_SUCCESS for a valid model and device.
781  * @tc.type: FUNC
782  */
783 HWTEST_F(NNCompilerTest, nncompilertest_build_007, TestSize.Level0)
784 {
785     LOGE("Build nncompilertest_build_007");
786     size_t backendID = 1;
787     InnerModel innerModel;
788     BuildModel(innerModel);
789     void* model = &innerModel;
790     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
791 
792     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
793     EXPECT_NE(nullptr, nncompiler);
794 
795     OH_NN_ReturnCode ret = nncompiler->Build();
796     EXPECT_EQ(OH_NN_SUCCESS, ret);
797 
798     testing::Mock::AllowLeak(device.get());
799 }
800 
801 /**
802  * @tc.name: nncompilertest_savetocachefile_001
803  * @tc.desc: Verify that SaveToCacheFile returns OH_NN_INVALID_PARAMETER when no cache directory has been set.
804  * @tc.type: FUNC
805  */
806 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_001, TestSize.Level0)
807 {
808     LOGE("SaveToCacheFile nncompilertest_savetocachefile_001");
809     size_t backendID = 1;
810     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
811 
812     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
813     EXPECT_NE(nullptr, nncompiler);
814 
815     OH_NN_ReturnCode ret = nncompiler->SaveToCacheFile();
816     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
817 
818     testing::Mock::AllowLeak(device.get());
819 }
820 
821 /**
822  * @tc.name: nncompilertest_savetocachefile_002
823  * @tc.desc: Verify that SaveToCacheFile returns OH_NN_INVALID_PARAMETER when the cache version is UINT32_MAX.
824  * @tc.type: FUNC
825  */
826 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_002, TestSize.Level0)
827 {
828     LOGE("SaveToCacheFile nncompilertest_savetocachefile_002");
829     size_t backendID = 1;
830 
831     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
832     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
833         .WillOnce(Invoke([](bool& isSupportedCache) {
834                 // Directly modify the reference parameter passed in
835                 isSupportedCache = true;
836                 return OH_NN_SUCCESS; // assumed success status code
837             }));
838 
839     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
840     EXPECT_NE(nullptr, nncompiler);
841 
842     std::string cacheModelPath = "mock";
843     uint32_t version = UINT32_MAX;
844     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
845     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
846 
847     OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
848     EXPECT_EQ(OH_NN_INVALID_PARAMETER, retSave);
849 
850     testing::Mock::AllowLeak(device.get());
851 }
852 
853 /**
854  * @tc.name: nncompilertest_savetocachefile_003
855  * @tc.desc: Verify that SaveToCacheFile returns OH_NN_FAILED on an unbuilt compiler with a cache directory set.
856  * @tc.type: FUNC
857  */
858 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_003, TestSize.Level0)
859 {
860     LOGE("SaveToCacheFile nncompilertest_savetocachefile_003");
861     size_t backendID = 1;
862 
863     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
864     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
865         .WillOnce(Invoke([](bool& isSupportedCache) {
866                 // Directly modify the reference parameter passed in
867                 isSupportedCache = true;
868                 return OH_NN_SUCCESS; // assumed success status code
869             }));
870 
871     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
872     EXPECT_NE(nullptr, nncompiler);
873 
874     std::string cacheModelPath = "mock";
875     uint32_t version = 0;
876     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
877     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
878 
879     OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
880     EXPECT_EQ(OH_NN_FAILED, retSave);
881 
882     testing::Mock::AllowLeak(device.get());
883 }
884 
885 /**
886  * @tc.name: nncompilertest_savetocachefile_004
887  * @tc.desc: Verify that SaveToCacheFile returns OH_NN_FAILED for a built compiler when the cache path is invalid.
888  * @tc.type: FUNC
889  */
890 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_004, TestSize.Level0)
891 {
892     LOGE("SaveToCacheFile nncompilertest_savetocachefile_004");
893     size_t backendID = 1;
894 
895     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
896     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
897         .WillOnce(Invoke([](bool& isSupportedCache) {
898                 // Directly modify the reference parameter passed in
899                 isSupportedCache = true;
900                 return OH_NN_SUCCESS; // assumed success status code
901             }));
902 
903     InnerModel innerModel;
904     BuildModel(innerModel);
905     void* model = &innerModel;
906 
907     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
908     EXPECT_NE(nullptr, nncompiler);
909 
910     OH_NN_ReturnCode retBuild = nncompiler->Build();
911     EXPECT_EQ(OH_NN_SUCCESS, retBuild);
912 
913     std::string cacheModelPath = "mock";
914     uint32_t version = 0;
915     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
916     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
917 
918     OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
919     EXPECT_EQ(OH_NN_FAILED, retSave);
920 
921     testing::Mock::AllowLeak(device.get());
922 }
923 
924 /**
925  * @tc.name: nncompilertest_restorefromcachefile_001
926  * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when no cache directory has been set.
927  * @tc.type: FUNC
928  */
929 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_001, TestSize.Level0)
930 {
931     LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_001");
932     size_t backendID = 1;
933     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
934 
935     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
936     EXPECT_NE(nullptr, nncompiler);
937 
938     OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
939     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
940 
941     testing::Mock::AllowLeak(device.get());
942 }
943 
944 /**
945  * @tc.name: nncompilertest_restorefromcachefile_002
946  * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when the cache version is UINT32_MAX.
947  * @tc.type: FUNC
948  */
949 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_002, TestSize.Level0)
950 {
951     LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_002");
952     size_t backendID = 1;
953     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
954     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
955         .WillOnce(Invoke([](bool& isSupportedCache) {
956                 // Directly modify the reference parameter passed in
957                 isSupportedCache = true;
958                 return OH_NN_SUCCESS; // assumed success status code
959             }));
960 
961     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
962     EXPECT_NE(nullptr, nncompiler);
963 
964     std::string cacheModelPath = "mock";
965     uint32_t version = UINT32_MAX;
966     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
967     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
968 
969     OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
970     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
971 
972     testing::Mock::AllowLeak(device.get());
973 }
974 
975 /**
976  * @tc.name: nncompilertest_restorefromcachefile_003
977  * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER for a built compiler when no usable cache exists.
978  * @tc.type: FUNC
979  */
980 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_003, TestSize.Level0)
981 {
982     LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_003");
983     size_t backendID = 1;
984     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
985     std::shared_ptr<MockIPreparedModel> prepared = std::make_shared<MockIPreparedModel>();
986     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
987         .WillOnce(Invoke([](bool& isSupportedCache) {
988                 // Directly modify the reference parameter passed in
989                 isSupportedCache = true;
990                 return OH_NN_SUCCESS;
991             }));
992 
993     InnerModel innerModel;
994     BuildModel(innerModel);
995     void* model = &innerModel;
996     EXPECT_CALL(*((MockIDevice *) device.get()),
997         PrepareModel(testing::A<std::shared_ptr<const mindspore::lite::LiteGraph>>(), ::testing::_, ::testing::_))
998         .WillOnce(Invoke([&prepared](std::shared_ptr<const mindspore::lite::LiteGraph> model,
999                                           const ModelConfig& config,
1000                                           std::shared_ptr<PreparedModel>& preparedModel) {
1001                 preparedModel = prepared;
1002                 return OH_NN_SUCCESS;
1003             }));
1004 
1005     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
1006     EXPECT_NE(nullptr, nncompiler);
1007 
1008     OH_NN_ReturnCode retBuild = nncompiler->Build();
1009     EXPECT_EQ(OH_NN_SUCCESS, retBuild);
1010 
1011     std::string cacheModelPath = "/data/data";
1012     uint32_t version = UINT32_MAX;
1013     OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
1014     EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
1015 
1016     OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
1017     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1018 
1019     testing::Mock::AllowLeak(device.get());
1020     testing::Mock::AllowLeak(prepared.get());
1021 }
1022 
1023 /**
1024  * @tc.name: nncompilertest_savetocachebuffer_001
1025  * @tc.desc: Verify that SaveToCacheBuffer returns OH_NN_UNSUPPORTED.
1026  * @tc.type: FUNC
1027  */
1028 HWTEST_F(NNCompilerTest, nncompilertest_savetocachebuffer_001, TestSize.Level0)
1029 {
1030     LOGE("SaveToCacheBuffer nncompilertest_savetocachebuffer_001");
1031     size_t backendID = 1;
1032     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1033 
1034     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1035     EXPECT_NE(nullptr, nncompiler);
1036 
1037     size_t length = 10;
1038     size_t* modelSize = &length;
1039     InnerModel innerModel;
1040     BuildModel(innerModel);
1041     void* model = &innerModel;
1042     OH_NN_ReturnCode ret = nncompiler->SaveToCacheBuffer(model, length, modelSize);
1043     EXPECT_EQ(OH_NN_UNSUPPORTED, ret);
1044 
1045     testing::Mock::AllowLeak(device.get());
1046 }
1047 
1048 /**
1049  * @tc.name: nncompilertest_restorefromcachebuffer_001
1050  * @tc.desc: Verify that RestoreFromCacheBuffer returns OH_NN_UNSUPPORTED.
1051  * @tc.type: FUNC
1052  */
1053 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachebuffer_001, TestSize.Level0)
1054 {
1055     LOGE("RestoreFromCacheBuffer nncompilertest_restorefromcachebuffer_001");
1056     size_t backendID = 1;
1057     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1058 
1059     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1060     EXPECT_NE(nullptr, nncompiler);
1061 
1062     size_t length = 10;
1063     InnerModel innerModel;
1064     BuildModel(innerModel);
1065     void* model = &innerModel;
1066     OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheBuffer(model, length);
1067     EXPECT_EQ(OH_NN_UNSUPPORTED, ret);
1068 
1069     testing::Mock::AllowLeak(device.get());
1070 }
1071 
1072 /**
1073  * @tc.name: nncompilertest_setextensionconfig_001
1074  * @tc.desc: Verify that SetExtensionConfig returns OH_NN_SUCCESS for an empty configuration map.
1075  * @tc.type: FUNC
1076  */
1077 HWTEST_F(NNCompilerTest, nncompilertest_setextensionconfig_001, TestSize.Level0)
1078 {
1079     LOGE("SetExtensionConfig nncompilertest_setextensionconfig_001");
1080     size_t backendID = 1;
1081     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1082 
1083     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1084     EXPECT_NE(nullptr, nncompiler);
1085 
1086     std::unordered_map<std::string, std::vector<char>> configs;
1087     OH_NN_ReturnCode ret = nncompiler->SetExtensionConfig(configs);
1088     EXPECT_EQ(OH_NN_SUCCESS, ret);
1089 
1090     testing::Mock::AllowLeak(device.get());
1091 }
1092 
1093 /**
1094  * @tc.name: nncompilertest_setoptions_001
1095  * @tc.desc: Verify that SetOptions returns OH_NN_UNSUPPORTED.
1096  * @tc.type: FUNC
1097  */
1098 HWTEST_F(NNCompilerTest, nncompilertest_setoptions_001, TestSize.Level0)
1099 {
1100     LOGE("SetOptions nncompilertest_setoptions_001");
1101     size_t backendID = 1;
1102     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1103 
1104     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1105     EXPECT_NE(nullptr, nncompiler);
1106 
1107     std::vector<std::shared_ptr<void>> options;
1108     OH_NN_ReturnCode ret = nncompiler->SetOptions(options);
1109     EXPECT_EQ(OH_NN_UNSUPPORTED, ret);
1110 
1111     testing::Mock::AllowLeak(device.get());
1112 }
1113 
1114 /**
1115  * @tc.name: nncompilertest_createexecutor_001
1116  * @tc.desc: Verify that CreateExecutor returns nullptr when the compiler has not been built.
1117  * @tc.type: FUNC
1118  */
1119 HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_001, TestSize.Level0)
1120 {
1121     LOGE("CreateExecutor nncompilertest_createexecutor_001");
1122     size_t backendID = 1;
1123     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1124 
1125     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1126     EXPECT_NE(nullptr, nncompiler);
1127 
1128     NNExecutor* ret = nncompiler->CreateExecutor();
1129     EXPECT_EQ(nullptr, ret);
1130 
1131     testing::Mock::AllowLeak(device.get());
1132 }
1133 
1134 /**
1135  * @tc.name: nncompilertest_createexecutor_002
1136  * @tc.desc: Verify that CreateExecutor returns nullptr when the compiler has no device.
1137  * @tc.type: FUNC
1138  */
1139 HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_002, TestSize.Level0)
1140 {
1141     LOGE("CreateExecutor nncompilertest_createexecutor_002");
1142     size_t backendID = 1;
1143 
1144     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
1145     EXPECT_NE(nullptr, nncompiler);
1146 
1147     NNExecutor* ret = nncompiler->CreateExecutor();
1148     EXPECT_EQ(nullptr, ret);
1149 }
1150 
1151 /**
1152  * @tc.name: nncompilertest_createexecutor_003
1153  * @tc.desc: Verify that CreateExecutor returns a non-null executor after a successful Build.
1154  * @tc.type: FUNC
1155  */
1156 HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_003, TestSize.Level0)
1157 {
1158     LOGE("CreateExecutor nncompilertest_createexecutor_003");
1159     size_t backendID = 1;
1160     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1161     std::shared_ptr<MockIPreparedModel> prepared = std::make_shared<MockIPreparedModel>();
1162     EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
1163         .WillOnce(Invoke([](bool& isSupportedCache) {
1164                 // Directly modify the reference parameter passed in
1165                 isSupportedCache = true;
1166                 return OH_NN_SUCCESS; // assumed success status code
1167             }));
1168 
1169     InnerModel innerModel;
1170     BuildModel(innerModel);
1171     void* model = &innerModel;
1172     EXPECT_CALL(*((MockIDevice *) device.get()),
1173         PrepareModel(testing::A<std::shared_ptr<const mindspore::lite::LiteGraph>>(), ::testing::_, ::testing::_))
1174         .WillOnce(Invoke([&prepared](std::shared_ptr<const mindspore::lite::LiteGraph> model,
1175                                           const ModelConfig& config,
1176                                           std::shared_ptr<PreparedModel>& preparedModel) {
1177                 // Directly modify the reference parameter passed in
1178                 preparedModel = prepared;
1179                 return OH_NN_SUCCESS; // assumed success status code
1180             }));
1181 
1182     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
1183     EXPECT_NE(nullptr, nncompiler);
1184 
1185     OH_NN_ReturnCode retBuild = nncompiler->Build();
1186     EXPECT_EQ(OH_NN_SUCCESS, retBuild);
1187 
1188     NNExecutor* ret = nncompiler->CreateExecutor();
1189     EXPECT_NE(nullptr, ret);
1190 
1191     delete nncompiler;
1192     nncompiler = nullptr;
1193 
1194     testing::Mock::AllowLeak(device.get());
1195     testing::Mock::AllowLeak(prepared.get());
1196 }
1197 } // namespace UnitTest
1198 } // namespace NeuralNetworkRuntime
1199 } // namespace OHOS