/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "nntensor.h"
#include "nnexecutor.h"
#include "nncompiler.h"
#include "nnbackend.h"
#include "backend_manager.h"
#include "device.h"
#include "prepared_model.h"
#include "neural_network_runtime/neural_network_runtime_type.h"
#include "utils.h"
#include "log.h"
#include "hdi_device_v1_0.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace V1_0 = OHOS::HDI::Nnrt::V1_0;
namespace UnitTest {
class NNTensor2Test : public testing::Test {
public:
    NNTensor2Test() = default;
    ~NNTensor2Test() = default;
};

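// Mock of the Device interface; the tests below stub its methods to steer
// NNTensor2_0 down specific success and failure paths without touching real hardware.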
class MockIDevice : public Device {
public:
    MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
    MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
    MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        std::vector<bool>&));
    MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                          const ModelConfig&,
                                          std::shared_ptr<PreparedModel>&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
                                          const ModelConfig&,
                                          std::shared_ptr<PreparedModel>&));
    MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
                                                        const ModelConfig&,
                                                        std::shared_ptr<PreparedModel>&,
                                                        bool&));
    MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                                 const ModelConfig&,
                                                 std::shared_ptr<PreparedModel>&));
    MOCK_METHOD1(AllocateBuffer, void*(size_t));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
    MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
    MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
    MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
    MOCK_METHOD1(ReadOpVersion, OH_NN_ReturnCode(int&));
};

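// Mock of TensorDesc; tests use it to make GetByteSize() fail or report a chosen size.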
class MockTensorDesc : public TensorDesc {
public:
    MOCK_METHOD1(GetDataType, OH_NN_ReturnCode(OH_NN_DataType*));
    MOCK_METHOD1(SetDataType, OH_NN_ReturnCode(OH_NN_DataType));
    MOCK_METHOD1(GetFormat, OH_NN_ReturnCode(OH_NN_Format*));
    MOCK_METHOD1(SetFormat, OH_NN_ReturnCode(OH_NN_Format));
    MOCK_METHOD2(GetShape, OH_NN_ReturnCode(int32_t**, size_t*));
    MOCK_METHOD2(SetShape, OH_NN_ReturnCode(const int32_t*, size_t));
    MOCK_METHOD1(GetElementNum, OH_NN_ReturnCode(size_t*));
    MOCK_METHOD1(GetByteSize, OH_NN_ReturnCode(size_t*));
    MOCK_METHOD1(SetName, OH_NN_ReturnCode(const char*));
    MOCK_METHOD1(GetName, OH_NN_ReturnCode(const char**));
};

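// Minimal Backend mock; GetDevice() hands out a fresh MockIDevice whose AllocateBuffer() call is stubbed to succeed.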
class MockBackend : public Backend {
public:
    MOCK_CONST_METHOD0(GetBackendID, size_t());
    MOCK_CONST_METHOD1(GetBackendName, OH_NN_ReturnCode(std::string&));
    MOCK_CONST_METHOD1(GetBackendType, OH_NN_ReturnCode(OH_NN_DeviceType&));
    MOCK_CONST_METHOD1(GetBackendStatus, OH_NN_ReturnCode(DeviceStatus&));
    MOCK_METHOD1(CreateCompiler, Compiler*(Compilation*));
    MOCK_METHOD1(DestroyCompiler, OH_NN_ReturnCode(Compiler*));
    MOCK_METHOD1(CreateExecutor, Executor*(Compilation*));
    MOCK_METHOD1(DestroyExecutor, OH_NN_ReturnCode(Executor*));
    MOCK_METHOD1(CreateTensor, Tensor*(TensorDesc*));
    MOCK_METHOD1(DestroyTensor, OH_NN_ReturnCode(Tensor*));

    std::shared_ptr<Device> GetDevice()
    {
        std::shared_ptr<Device> device = std::make_shared<MockIDevice>();
        EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
            .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
        return device;
    }
    MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                           std::vector<bool>&));
};

/**
 * @tc.name: nntensor2_0test_construct_001
 * @tc.desc: Verify that the NNTensor2_0 constructor succeeds with a valid backend id.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_construct_001, TestSize.Level0)
{
    LOGE("NNTensor2_0 nntensor2_0test_construct_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    delete nnTensor;
}

/**
 * @tc.name: nntensor2_0test_construct_002
 * @tc.desc: Verify that destruction handles a tensor holding external data with fd set to -1.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_construct_002, TestSize.Level0)
{
    LOGE("NNTensor2_0 nntensor2_0test_construct_002");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    nnTensor->SetSize(1);
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);
    nnTensor->SetFd(-1);
    delete nnTensor;
}

/**
 * @tc.name: nntensor2_0test_construct_003
 * @tc.desc: Verify that destruction handles a tensor holding external data with fd set to 0.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_construct_003, TestSize.Level0)
{
    LOGE("NNTensor2_0 nntensor2_0test_construct_003");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    nnTensor->SetSize(1);
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);
    nnTensor->SetFd(0);
    delete nnTensor;
}

/**
 * @tc.name: nntensor2_0test_settensordesc_001
 * @tc.desc: Verify that SetTensorDesc returns OH_NN_SUCCESS, including on a repeated call.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_settensordesc_001, TestSize.Level0)
{
    LOGE("SetTensorDesc nntensor2_0test_settensordesc_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    TensorDesc* tensorDesc = &desc;
    OH_NN_ReturnCode setTensorDescRet = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, setTensorDescRet);

    OH_NN_ReturnCode ret = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_001
 * @tc.desc: Verify that CreateData returns OH_NN_NULL_PTR when no tensor desc has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_001, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_NULL_PTR, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_002
 * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when GetByteSize fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_002, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_002");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
    EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode retCreateData = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, retCreateData);

    testing::Mock::AllowLeak(tensorDesc.get());
}

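// Registers a mock backend (id 1) whose device reports AVAILABLE and whose AllocateBuffer() call succeeds.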
std::shared_ptr<Backend> Creator()
{
    size_t backendID = 1;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
        .WillOnce(Invoke([](DeviceStatus& status) {
                // Directly modify the reference parameter passed in
                status = AVAILABLE;
                return OH_NN_SUCCESS; // assume a success return code
            }));

    std::string backendName = "mock";
    EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));

    std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);
    return backend;
}

/**
 * @tc.name: nntensor2_0test_createdata_003
 * @tc.desc: Verify that CreateData returns OH_NN_MEMORY_ERROR when the mock backend cannot provide a usable buffer.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_003, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_003");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    BackendManager& backendManager = BackendManager::GetInstance();

    std::string backendName = "mock";
    std::function<std::shared_ptr<Backend>()> creator = Creator;

    backendManager.RegisterBackend(backendName, creator);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_MEMORY_ERROR, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_004
 * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when the byte size exceeds ALLOCATE_BUFFER_LIMIT.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_004, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_004");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
    EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
        .WillRepeatedly(Invoke([](size_t* byteSize) {
                // Directly modify the output parameter passed in
                *byteSize = ALLOCATE_BUFFER_LIMIT + 1;
                return OH_NN_SUCCESS; // assume a success return code
            }));

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(tensorDesc.get());
}

/**
 * @tc.name: nntensor2_0test_createdata_005
 * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER with a mocked tensor desc reporting a byte size of 1.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_005, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_005");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
    EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
        .WillRepeatedly(Invoke([](size_t* byteSize) {
                // Directly modify the output parameter passed in
                *byteSize = 1;
                return OH_NN_SUCCESS; // assume a success return code
            }));

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(tensorDesc.get());
}

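// Backend id 2 is built without a device (nullptr); used below to hit the null-device path.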
std::shared_ptr<Backend> Creator2()
{
    size_t backendID = 2;

    std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(nullptr, backendID);
    return backend;
}

/**
 * @tc.name: nntensor2_0test_createdata_006
 * @tc.desc: Verify that CreateData returns OH_NN_NULL_PTR when the registered backend holds no device.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_006, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_006");
    size_t backendId = 2;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    BackendManager& backendManager = BackendManager::GetInstance();

    std::string backendName = "mock";
    std::function<std::shared_ptr<Backend>()> creator = Creator2;

    backendManager.RegisterBackend(backendName, creator);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_NULL_PTR, ret);
}

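// Backend id 3: AllocateBuffer() is stubbed to fail with OH_NN_MEMORY_ERROR.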
std::shared_ptr<Backend> Creator3()
{
    size_t backendID = 3;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
        .WillRepeatedly(Invoke([](DeviceStatus& status) {
                // Directly modify the reference parameter passed in
                status = AVAILABLE;
                return OH_NN_SUCCESS; // assume a success return code
            }));

    std::string backendName = "mock";
    EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_MEMORY_ERROR));

    std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);

    return backend;
}

/**
 * @tc.name: nntensor2_0test_createdata_007
 * @tc.desc: Verify that CreateData returns OH_NN_MEMORY_ERROR when the device fails to allocate a buffer.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_007, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_007");
    size_t backendId = 3;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    BackendManager& backendManager = BackendManager::GetInstance();

    std::string backendName = "mock";
    std::function<std::shared_ptr<Backend>()> creator = Creator3;

    backendManager.RegisterBackend(backendName, creator);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_MEMORY_ERROR, ret);
}

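// Backend id 4: AllocateBuffer() reports success but sets the output fd to -1.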
std::shared_ptr<Backend> Creator4()
{
    size_t backendID = 4;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
        .WillRepeatedly(Invoke([](DeviceStatus& status) {
                // Directly modify the reference parameter passed in
                status = AVAILABLE;
                return OH_NN_SUCCESS; // assume a success return code
            }));

    std::string backendName = "mock";
    EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));

    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
        .WillRepeatedly(Invoke([](size_t length, int& fd) {
                // Directly modify the reference parameter passed in
                fd = -1;
                return OH_NN_SUCCESS; // assume a success return code
            }));

    std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);

    return backend;
}

/**
 * @tc.name: nntensor2_0test_createdata_008
 * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when the device returns an fd of -1.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_008, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_008");
    size_t backendId = 4;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    BackendManager& backendManager = BackendManager::GetInstance();

    std::string backendName = "mock";
    std::function<std::shared_ptr<Backend>()> creator = Creator4;

    backendManager.RegisterBackend(backendName, creator);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_009
 * @tc.desc: Verify that CreateData returns OH_NN_FAILED when the tensor already holds data.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_009, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_009");
    size_t backendId = 4;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);

    OH_NN_ReturnCode ret = nnTensor->CreateData();
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_020
 * @tc.desc: Verify that CreateData(size) returns OH_NN_NULL_PTR when no tensor desc has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_020, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_020");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    size_t size = 1;
    OH_NN_ReturnCode ret = nnTensor->CreateData(size);
    EXPECT_EQ(OH_NN_NULL_PTR, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_021
 * @tc.desc: Verify that CreateData(size) returns OH_NN_INVALID_PARAMETER when size exceeds ALLOCATE_BUFFER_LIMIT.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_021, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_021");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    size_t size = ALLOCATE_BUFFER_LIMIT + 1;
    OH_NN_ReturnCode ret = nnTensor->CreateData(size);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_022
 * @tc.desc: Verify that CreateData(size) returns OH_NN_INVALID_PARAMETER when size is smaller than the tensor byte size.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_022, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_022");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    size_t size = 1;
    OH_NN_ReturnCode ret = nnTensor->CreateData(size);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_023
 * @tc.desc: Verify that CreateData(size) returns OH_NN_INVALID_PARAMETER when GetByteSize fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_023, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_023");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
    EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    size_t size = 1;
    OH_NN_ReturnCode ret = nnTensor->CreateData(size);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(tensorDesc.get());
}

/**
 * @tc.name: nntensor2_0test_createdata_024
 * @tc.desc: Verify that CreateData(size) returns OH_NN_FAILED when the tensor already holds data.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_024, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_024");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);

    size_t size = 1;
    OH_NN_ReturnCode ret = nnTensor->CreateData(size);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_029
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_FAILED when the tensor already holds data.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_029, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_029");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);

    int fd = 1;
    size_t size = 2;
    size_t offset = 3;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_030
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_NULL_PTR when no tensor desc has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_030, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_030");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    int fd = 1;
    size_t size = 2;
    size_t offset = 3;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_NULL_PTR, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_031
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when GetByteSize fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_031, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_031");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
    EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    int fd = 1;
    size_t size = 2;
    size_t offset = 3;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    testing::Mock::AllowLeak(tensorDesc.get());
}

/**
 * @tc.name: nntensor2_0test_createdata_032
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when fd is -1.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_032, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_032");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    int fd = -1;
    size_t size = 2;
    size_t offset = 3;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_033
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when size is 0.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_033, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_033");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    int fd = 0;
    size_t size = 0;
    size_t offset = 3;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_034
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when offset is larger than size.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_034, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_034");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    int fd = 0;
    size_t size = 1;
    size_t offset = 3;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_035
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when the size remaining after offset is smaller than the tensor byte size.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_035, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_035");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    int fd = 0;
    size_t size = 3;
    size_t offset = 2;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_createdata_036
 * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_MEMORY_ERROR when the fd cannot be mapped.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_036, TestSize.Level0)
{
    LOGE("CreateData nntensor2_0test_createdata_036");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    int fd = 0;
    size_t size = 200;
    size_t offset = 1;
    OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
    EXPECT_EQ(OH_NN_MEMORY_ERROR, ret);
}

/**
 * @tc.name: nntensor2_0test_gettensordesc_001
 * @tc.desc: Verify that GetTensorDesc returns nullptr when no tensor desc has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_gettensordesc_001, TestSize.Level0)
{
    LOGE("GetTensorDesc nntensor2_0test_gettensordesc_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc* ret = nnTensor->GetTensorDesc();
    EXPECT_EQ(nullptr, ret);
}

/**
 * @tc.name: nntensor2_0test_getdata_001
 * @tc.desc: Verify that GetData returns nullptr for a newly constructed tensor.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_getdata_001, TestSize.Level0)
{
    LOGE("GetData nntensor2_0test_getdata_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    void* ret = nnTensor->GetData();
    EXPECT_EQ(nullptr, ret);
}

/**
 * @tc.name: nntensor2_0test_getfd_001
 * @tc.desc: Verify that GetFd returns 0 for a newly constructed tensor.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_getfd_001, TestSize.Level0)
{
    LOGE("GetFd nntensor2_0test_getfd_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    int ret = nnTensor->GetFd();
    EXPECT_EQ(0, ret);
}

/**
 * @tc.name: nntensor2_0test_getsize_001
 * @tc.desc: Verify that GetSize returns 0 for a newly constructed tensor.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_getsize_001, TestSize.Level0)
{
    LOGE("GetSize nntensor2_0test_getsize_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    size_t ret = nnTensor->GetSize();
    EXPECT_EQ(0, ret);
}

/**
 * @tc.name: nntensor2_0test_getoffset_001
 * @tc.desc: Verify that GetOffset returns 0 for a newly constructed tensor.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_getoffset_001, TestSize.Level0)
{
    LOGE("GetOffset nntensor2_0test_getoffset_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    size_t ret = nnTensor->GetOffset();
    EXPECT_EQ(0, ret);
}

/**
 * @tc.name: nntensor2_0test_getbackendid_001
 * @tc.desc: Verify that GetBackendID returns the backend id passed to the constructor.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_getbackendid_001, TestSize.Level0)
{
    LOGE("GetBackendID nntensor2_0test_getbackendid_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    size_t ret = nnTensor->GetBackendID();
    EXPECT_EQ(1, ret);
}

/**
 * @tc.name: nntensor2_0test_checktensordata_001
 * @tc.desc: Verify that CheckTensorData returns false when no tensor desc has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_001, TestSize.Level0)
{
    LOGE("CheckTensorData nntensor2_0test_checktensordata_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    bool ret = nnTensor->CheckTensorData();
    EXPECT_EQ(false, ret);
}

/**
 * @tc.name: nntensor2_0test_checktensordata_002
 * @tc.desc: Verify that CheckTensorData returns false when GetByteSize fails.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_002, TestSize.Level0)
{
    LOGE("CheckTensorData nntensor2_0test_checktensordata_002");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
    EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    bool ret = nnTensor->CheckTensorData();
    EXPECT_EQ(false, ret);

    testing::Mock::AllowLeak(tensorDesc.get());
}

/**
 * @tc.name: nntensor2_0test_checktensordata_003
 * @tc.desc: Verify that CheckTensorData returns false when the tensor size is smaller than the byte size.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_003, TestSize.Level0)
{
    LOGE("CheckTensorData nntensor2_0test_checktensordata_003");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    bool ret = nnTensor->CheckTensorData();
    EXPECT_EQ(false, ret);
}

/**
 * @tc.name: nntensor2_0test_checktensordata_004
 * @tc.desc: Verify that CheckTensorData returns false when the tensor holds no data.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_004, TestSize.Level0)
{
    LOGE("CheckTensorData nntensor2_0test_checktensordata_004");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    nnTensor->SetSize(200);
    nnTensor->SetOffset(0);

    bool ret = nnTensor->CheckTensorData();
    EXPECT_EQ(false, ret);
}

/**
 * @tc.name: nntensor2_0test_checktensordata_005
 * @tc.desc: Verify that CheckTensorData returns false when the fd is -1.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_005, TestSize.Level0)
{
    LOGE("CheckTensorData nntensor2_0test_checktensordata_005");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    nnTensor->SetSize(200);
    nnTensor->SetOffset(0);
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);
    nnTensor->SetFd(-1);

    bool ret = nnTensor->CheckTensorData();
    EXPECT_EQ(false, ret);
}

/**
 * @tc.name: nntensor2_0test_checktensordata_006
 * @tc.desc: Verify that CheckTensorData returns true when size, data, and fd are all valid.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_006, TestSize.Level0)
{
    LOGE("CheckTensorData nntensor2_0test_checktensordata_006");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    nnTensor->SetSize(200);
    nnTensor->SetOffset(0);
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArry;
    nnTensor->SetData(buffer);

    bool ret = nnTensor->CheckTensorData();
    EXPECT_EQ(true, ret);
}

/**
 * @tc.name: nntensor2_0test_checkdimranges_001
 * @tc.desc: Verify that CheckDimRanges returns OH_NN_INVALID_PARAMETER when no tensor desc has been set.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_001, TestSize.Level0)
{
    LOGE("CheckDimRanges nntensor2_0test_checkdimranges_001");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    std::vector<uint32_t> minDimRanges;
    const std::vector<uint32_t> maxDimRanges;
    OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_checkdimranges_002
 * @tc.desc: Verify that CheckDimRanges returns OH_NN_SUCCESS when the tensor desc has no shape to check.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_002, TestSize.Level0)
{
    LOGE("CheckDimRanges nntensor2_0test_checkdimranges_002");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    std::vector<uint32_t> minDimRanges;
    const std::vector<uint32_t> maxDimRanges;
    OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/**
 * @tc.name: nntensor2_0test_checkdimranges_003
 * @tc.desc: Verify that CheckDimRanges returns OH_NN_INVALID_PARAMETER when a dimension is negative.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_003, TestSize.Level0)
{
    LOGE("CheckDimRanges nntensor2_0test_checkdimranges_003");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = -10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    std::vector<uint32_t> minDimRanges;
    const std::vector<uint32_t> maxDimRanges;
    OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: nntensor2_0test_checkdimranges_004
 * @tc.desc: Verify that CheckDimRanges returns OH_NN_INVALID_PARAMETER when a dimension falls outside the given range.
 * @tc.type: FUNC
 */
HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_004, TestSize.Level0)
{
    LOGE("CheckDimRanges nntensor2_0test_checkdimranges_004");
    size_t backendId = 1;

    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc desc;
    desc.SetDataType(OH_NN_INT64);
    size_t shapeNum = 1;
    int32_t index = 10;
    int32_t* shape = &index;
    desc.SetShape(shape, shapeNum);
    TensorDesc* tensorDesc = &desc;

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    std::vector<uint32_t> minDimRanges;
    minDimRanges.emplace_back(20);
    std::vector<uint32_t> maxDimRanges;
    maxDimRanges.emplace_back(20);
    OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS