1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include <gtest/gtest.h>
17 #include <gmock/gmock.h>
18 
19 #include "nntensor.h"
20 #include "nnexecutor.h"
21 #include "nncompiler.h"
22 #include "nnbackend.h"
23 #include "backend_manager.h"
24 #include "device.h"
25 #include "prepared_model.h"
26 #include "interfaces/kits/c/neural_network_runtime/neural_network_runtime_type.h"
27 #include "common/utils.h"
28 #include "common/log.h"
29 #include "hdi_device_v1_0.h"
30 
31 using namespace testing;
32 using namespace testing::ext;
33 using namespace OHOS::NeuralNetworkRuntime;
34 
35 namespace OHOS {
36 namespace NeuralNetworkRuntime {
37 namespace V1_0 = OHOS::HDI::Nnrt::V1_0;
38 namespace UnitTest {
39 class NNTensor2Test : public testing::Test {
40 public:
41     NNTensor2Test() = default;
42     ~NNTensor2Test() = default;
43 };
44 
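// gmock stub of the Device interface; each test installs expectations only on the calls it exercises.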
45 class MockIDevice : public Device {
46 public:
47     MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
48     MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
49     MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
50     MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
51     MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
52     MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
53         std::vector<bool>&));
54     MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
55     MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
56     MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
57     MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
58     MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
59     MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
60                                           const ModelConfig&,
61                                           std::shared_ptr<PreparedModel>&));
62     MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
63                                           const ModelConfig&,
64                                           std::shared_ptr<PreparedModel>&));
65     MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
66                                                         const ModelConfig&,
67                                                         std::shared_ptr<PreparedModel>&,
68                                                         bool&));
69     MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
70                                                  const ModelConfig&,
71                                                  std::shared_ptr<PreparedModel>&));
72     MOCK_METHOD1(AllocateBuffer, void*(size_t));
73     MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
74     MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
75     MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
76     MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
77     MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
78 };
79 
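// gmock stub of TensorDesc, used by the CreateData tests to simulate GetByteSize failures and fixed byte sizes.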
80 class MockTensorDesc : public TensorDesc {
81 public:
82     MOCK_METHOD1(GetDataType, OH_NN_ReturnCode(OH_NN_DataType*));
83     MOCK_METHOD1(SetDataType, OH_NN_ReturnCode(OH_NN_DataType));
84     MOCK_METHOD1(GetFormat, OH_NN_ReturnCode(OH_NN_Format*));
85     MOCK_METHOD1(SetFormat, OH_NN_ReturnCode(OH_NN_Format));
86     MOCK_METHOD2(GetShape, OH_NN_ReturnCode(int32_t**, size_t*));
87     MOCK_METHOD2(SetShape, OH_NN_ReturnCode(const int32_t*, size_t));
88     MOCK_METHOD1(GetElementNum, OH_NN_ReturnCode(size_t*));
89     MOCK_METHOD1(GetByteSize, OH_NN_ReturnCode(size_t*));
90     MOCK_METHOD1(SetName, OH_NN_ReturnCode(const char*));
91     MOCK_METHOD1(GetName, OH_NN_ReturnCode(const char**));
92 };
93 
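// gmock stub of Backend whose GetDevice() hands out a MockIDevice with an always-successful AllocateBuffer.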
94 class MockBackend : public Backend {
95 public:
96     MOCK_CONST_METHOD0(GetBackendID, size_t());
97     MOCK_CONST_METHOD1(GetBackendName, OH_NN_ReturnCode(std::string&));
98     MOCK_CONST_METHOD1(GetBackendType, OH_NN_ReturnCode(OH_NN_DeviceType&));
99     MOCK_CONST_METHOD1(GetBackendStatus, OH_NN_ReturnCode(DeviceStatus&));
100     MOCK_METHOD1(CreateCompiler, Compiler*(Compilation*));
101     MOCK_METHOD1(DestroyCompiler, OH_NN_ReturnCode(Compiler*));
102     MOCK_METHOD1(CreateExecutor, Executor*(Compilation*));
103     MOCK_METHOD1(DestroyExecutor, OH_NN_ReturnCode(Executor*));
104     MOCK_METHOD1(CreateTensor, Tensor*(TensorDesc*));
105     MOCK_METHOD1(DestroyTensor, OH_NN_ReturnCode(Tensor*));
106 
107     std::shared_ptr<Device> GetDevice()
108     {
109         std::shared_ptr<Device> device = std::make_shared<MockIDevice>();
110         EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
111             .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
112         return device;
113     }
114     MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
115                                            std::vector<bool>&));
116 };
117 
118 /**
119  * @tc.name: nntensor2_0test_construct_001
120  * @tc.desc: Verify that the NNTensor2_0 constructor creates a valid tensor object.
121  * @tc.type: FUNC
122  */
123 HWTEST_F(NNTensor2Test, nntensor2_0test_construct_001, TestSize.Level0)
124 {
125     LOGE("NNTensor2_0 nntensor2_0test_construct_001");
126     size_t backendId = 1;
127 
128     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
129     EXPECT_NE(nullptr, nnTensor);
130 
131     delete nnTensor;
132 }
133 
134 /**
135  * @tc.name: nntensor2_0test_construct_002
136  * @tc.desc: Verify that a tensor holding user-set data with fd -1 is destroyed safely.
137  * @tc.type: FUNC
138  */
139 HWTEST_F(NNTensor2Test, nntensor2_0test_construct_002, TestSize.Level0)
140 {
141     LOGE("NNTensor2_0 nntensor2_0test_construct_002");
142     size_t backendId = 1;
143 
144     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
145     EXPECT_NE(nullptr, nnTensor);
146 
147     nnTensor->SetSize(1);
148     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
149     void* buffer = dataArry;
150     nnTensor->SetData(buffer);
151     nnTensor->SetFd(-1);
152     delete nnTensor;
153 }
154 
155 /**
156  * @tc.name: nntensor2_0test_construct_003
157  * @tc.desc: Verify that a tensor holding user-set data with fd 0 is destroyed safely.
158  * @tc.type: FUNC
159  */
160 HWTEST_F(NNTensor2Test, nntensor2_0test_construct_003, TestSize.Level0)
161 {
162     LOGE("NNTensor2_0 nntensor2_0test_construct_003");
163     size_t backendId = 1;
164 
165     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
166     EXPECT_NE(nullptr, nnTensor);
167 
168     nnTensor->SetSize(1);
169     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
170     void* buffer = dataArry;
171     nnTensor->SetData(buffer);
172     nnTensor->SetFd(0);
173     delete nnTensor;
174 }
175 
176 /**
177  * @tc.name: nntensor2_0test_settensordesc_001
178  * @tc.desc: Verify that SetTensorDesc returns OH_NN_SUCCESS and can be called repeatedly.
179  * @tc.type: FUNC
180  */
181 HWTEST_F(NNTensor2Test, nntensor2_0test_settensordesc_001, TestSize.Level0)
182 {
183     LOGE("SetTensorDesc nntensor2_0test_settensordesc_001");
184     size_t backendId = 1;
185 
186     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
187     EXPECT_NE(nullptr, nnTensor);
188 
189     TensorDesc desc;
190     TensorDesc* tensorDesc = &desc;
191     OH_NN_ReturnCode setTensorDescRet = nnTensor->SetTensorDesc(tensorDesc);
192     EXPECT_EQ(OH_NN_SUCCESS, setTensorDescRet);
193 
194     OH_NN_ReturnCode ret = nnTensor->SetTensorDesc(tensorDesc);
195     EXPECT_EQ(OH_NN_SUCCESS, ret);
196 }
197 
198 /**
199  * @tc.name: nntensor2_0test_createdata_001
200  * @tc.desc: Verify that CreateData returns OH_NN_NULL_PTR when no tensor desc is set.
201  * @tc.type: FUNC
202  */
203 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_001, TestSize.Level0)
204 {
205     LOGE("CreateData nntensor2_0test_createdata_001");
206     size_t backendId = 1;
207 
208     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
209     EXPECT_NE(nullptr, nnTensor);
210 
211     OH_NN_ReturnCode ret = nnTensor->CreateData();
212     EXPECT_EQ(OH_NN_NULL_PTR, ret);
213 }
214 
215 /**
216  * @tc.name: nntensor2_0test_createdata_002
217  * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when GetByteSize fails.
218  * @tc.type: FUNC
219  */
220 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_002, TestSize.Level0)
221 {
222     LOGE("CreateData nntensor2_0test_createdata_002");
223     size_t backendId = 1;
224 
225     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
226     EXPECT_NE(nullptr, nnTensor);
227 
228     std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
229     EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
230         .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));
231 
232     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
233     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
234 
235     OH_NN_ReturnCode retCreateData = nnTensor->CreateData();
236     EXPECT_EQ(OH_NN_INVALID_PARAMETER, retCreateData);
237 
238     testing::Mock::AllowLeak(tensorDesc.get());
239 }
240 
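// Backend creator registered under the name "mock": wraps a MockIDevice that reports an available
// status, returns "mock" as its name/vendor/version, and succeeds on AllocateBuffer.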
241 std::shared_ptr<Backend> Creator()
242 {
243     size_t backendID = 1;
244     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
245 
246     EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
247         .WillOnce(Invoke([](DeviceStatus& status) {
248                 // Set the output parameter passed by reference
249                 status = AVAILABLE;
250                 return OH_NN_SUCCESS; // assume a success status code
251             }));
252 
253     std::string backendName = "mock";
254     EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
255         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
256 
257     EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
258         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
259 
260     EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
261         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
262 
263     EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
264         .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
265 
266     std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);
267     return backend;
268 }
269 
270 /**
271  * @tc.name: nntensor2_0test_createdata_003
272  * @tc.desc: Verify that CreateData returns OH_NN_MEMORY_ERROR when the mock device does not provide a usable buffer.
273  * @tc.type: FUNC
274  */
275 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_003, TestSize.Level0)
276 {
277     LOGE("CreateData nntensor2_0test_createdata_003");
278     size_t backendId = 1;
279 
280     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
281     EXPECT_NE(nullptr, nnTensor);
282 
283     BackendManager& backendManager = BackendManager::GetInstance();
284 
285     std::string backendName = "mock";
286     std::function<std::shared_ptr<Backend>()> creator = Creator;
287 
288     backendManager.RegisterBackend(backendName, creator);
289 
290     TensorDesc desc;
291     desc.SetDataType(OH_NN_INT64);
292     size_t shapeNum = 1;
293     int32_t index = 10;
294     int32_t* shape = &index;
295     desc.SetShape(shape, shapeNum);
296     TensorDesc* tensorDesc = &desc;
297 
298     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
299     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
300 
301     OH_NN_ReturnCode ret = nnTensor->CreateData();
302     EXPECT_EQ(OH_NN_MEMORY_ERROR, ret);
303 }
304 
305 /**
306  * @tc.name: nntensor2_0test_createdata_004
307  * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when the byte size exceeds ALLOCATE_BUFFER_LIMIT.
308  * @tc.type: FUNC
309  */
310 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_004, TestSize.Level0)
311 {
312     LOGE("CreateData nntensor2_0test_createdata_004");
313     size_t backendId = 1;
314 
315     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
316     EXPECT_NE(nullptr, nnTensor);
317 
318     std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
319     EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
320         .WillRepeatedly(Invoke([](size_t* byteSize) {
321                 // Set the output value through the pointer
322                 *byteSize = ALLOCATE_BUFFER_LIMIT + 1;
323                 return OH_NN_SUCCESS; // assume a success status code
324             }));
325 
326     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
327     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
328 
329     OH_NN_ReturnCode ret = nnTensor->CreateData();
330     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
331 
332     testing::Mock::AllowLeak(tensorDesc.get());
333 }
334 
335 /**
336  * @tc.name: nntensor2_0test_createdata_005
337  * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when the mocked byte size is 1.
338  * @tc.type: FUNC
339  */
340 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_005, TestSize.Level0)
341 {
342     LOGE("CreateData nntensor2_0test_createdata_005");
343     size_t backendId = 1;
344 
345     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
346     EXPECT_NE(nullptr, nnTensor);
347 
348     std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
349     EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
350         .WillRepeatedly(Invoke([](size_t* byteSize) {
351                 // Set the output value through the pointer
352                 *byteSize = 1;
353                 return OH_NN_SUCCESS; // assume a success status code
354             }));
355 
356     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
357     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
358 
359     OH_NN_ReturnCode ret = nnTensor->CreateData();
360     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
361 
362     testing::Mock::AllowLeak(tensorDesc.get());
363 }
364 
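// Backend creator that wraps a null device, used to drive CreateData into its null-pointer error path.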
365 std::shared_ptr<Backend> Creator2()
366 {
367     size_t backendID = 2;
368 
369     std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(nullptr, backendID);
370     return backend;
371 }
372 
373 /**
374  * @tc.name: nntensor2_0test_createdata_006
375  * @tc.desc: Verify that CreateData returns OH_NN_NULL_PTR when the backend holds a null device.
376  * @tc.type: FUNC
377  */
378 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_006, TestSize.Level0)
379 {
380     LOGE("CreateData nntensor2_0test_createdata_006");
381     size_t backendId = 2;
382 
383     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
384     EXPECT_NE(nullptr, nnTensor);
385 
386     BackendManager& backendManager = BackendManager::GetInstance();
387 
388     std::string backendName = "mock";
389     std::function<std::shared_ptr<Backend>()> creator = Creator2;
390 
391     backendManager.RegisterBackend(backendName, creator);
392 
393     TensorDesc desc;
394     desc.SetDataType(OH_NN_INT64);
395     size_t shapeNum = 1;
396     int32_t index = 10;
397     int32_t* shape = &index;
398     desc.SetShape(shape, shapeNum);
399     TensorDesc* tensorDesc = &desc;
400 
401     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
402     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
403 
404     OH_NN_ReturnCode ret = nnTensor->CreateData();
405     EXPECT_EQ(OH_NN_NULL_PTR, ret);
406 }
407 
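// Backend creator whose device fails AllocateBuffer with OH_NN_MEMORY_ERROR.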
408 std::shared_ptr<Backend> Creator3()
409 {
410     size_t backendID = 3;
411     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
412 
413     EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
414         .WillRepeatedly(Invoke([](DeviceStatus& status) {
415                 // Set the output parameter passed by reference
416                 status = AVAILABLE;
417                 return OH_NN_SUCCESS; // assume a success status code
418             }));
419 
420     std::string backendName = "mock";
421     EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
422         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
423 
424     EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
425         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
426 
427     EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
428         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
429 
430     EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
431         .WillRepeatedly(::testing::Return(OH_NN_MEMORY_ERROR));
432 
433     std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);
434 
435     return backend;
436 }
437 
438 /**
439  * @tc.name: nntensor2_0test_createdata_007
440  * @tc.desc: Verify that CreateData returns OH_NN_MEMORY_ERROR when the device fails to allocate a buffer.
441  * @tc.type: FUNC
442  */
443 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_007, TestSize.Level0)
444 {
445     LOGE("CreateData nntensor2_0test_createdata_007");
446     size_t backendId = 3;
447 
448     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
449     EXPECT_NE(nullptr, nnTensor);
450 
451     BackendManager& backendManager = BackendManager::GetInstance();
452 
453     std::string backendName = "mock";
454     std::function<std::shared_ptr<Backend>()> creator = Creator3;
455 
456     backendManager.RegisterBackend(backendName, creator);
457 
458     TensorDesc desc;
459     desc.SetDataType(OH_NN_INT64);
460     size_t shapeNum = 1;
461     int32_t index = 10;
462     int32_t* shape = &index;
463     desc.SetShape(shape, shapeNum);
464     TensorDesc* tensorDesc = &desc;
465 
466     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
467     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
468 
469     OH_NN_ReturnCode ret = nnTensor->CreateData();
470     EXPECT_EQ(OH_NN_MEMORY_ERROR, ret);
471 }
472 
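// Backend creator whose device reports success from AllocateBuffer but hands back an invalid fd of -1.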
473 std::shared_ptr<Backend> Creator4()
474 {
475     size_t backendID = 4;
476     std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
477 
478     EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
479         .WillRepeatedly(Invoke([](DeviceStatus& status) {
480                 // Set the output parameter passed by reference
481                 status = AVAILABLE;
482                 return OH_NN_SUCCESS; // assume a success status code
483             }));
484 
485     std::string backendName = "mock";
486     EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
487         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
488 
489     EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
490         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
491 
492     EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
493         .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
494 
495     EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
496         .WillRepeatedly(Invoke([](size_t length, int& fd) {
497                 // Set the output fd passed by reference
498                 fd = -1;
499                 return OH_NN_SUCCESS; // assume a success status code
500             }));
501 
502     std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);
503 
504     return backend;
505 }
506 
507 /**
508  * @tc.name: nntensor2_0test_createdata_008
509  * @tc.desc: Verify that CreateData returns OH_NN_INVALID_PARAMETER when the allocated fd is -1.
510  * @tc.type: FUNC
511  */
512 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_008, TestSize.Level0)
513 {
514     LOGE("CreateData nntensor2_0test_createdata_008");
515     size_t backendId = 4;
516 
517     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
518     EXPECT_NE(nullptr, nnTensor);
519 
520     BackendManager& backendManager = BackendManager::GetInstance();
521 
522     std::string backendName = "mock";
523     std::function<std::shared_ptr<Backend>()> creator = Creator4;
524 
525     backendManager.RegisterBackend(backendName, creator);
526 
527     TensorDesc desc;
528     desc.SetDataType(OH_NN_INT64);
529     size_t shapeNum = 1;
530     int32_t index = 10;
531     int32_t* shape = &index;
532     desc.SetShape(shape, shapeNum);
533     TensorDesc* tensorDesc = &desc;
534 
535     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
536     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
537 
538     OH_NN_ReturnCode ret = nnTensor->CreateData();
539     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
540 }
541 
542 /**
543  * @tc.name: nntensor2_0test_createdata_009
544  * @tc.desc: Verify that CreateData returns OH_NN_FAILED when the tensor already holds data.
545  * @tc.type: FUNC
546  */
547 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_009, TestSize.Level0)
548 {
549     LOGE("CreateData nntensor2_0test_createdata_009");
550     size_t backendId = 4;
551 
552     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
553     EXPECT_NE(nullptr, nnTensor);
554 
555     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
556     void* buffer = dataArry;
557     nnTensor->SetData(buffer);
558 
559     OH_NN_ReturnCode ret = nnTensor->CreateData();
560     EXPECT_EQ(OH_NN_FAILED, ret);
561 }
562 
563 /**
564  * @tc.name: nntensor2_0test_createdata_020
565  * @tc.desc: Verify that CreateData(size) returns OH_NN_NULL_PTR when no tensor desc is set.
566  * @tc.type: FUNC
567  */
568 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_020, TestSize.Level0)
569 {
570     LOGE("CreateData nntensor2_0test_createdata_020");
571     size_t backendId = 1;
572 
573     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
574     EXPECT_NE(nullptr, nnTensor);
575 
576     size_t size = 1;
577     OH_NN_ReturnCode ret = nnTensor->CreateData(size);
578     EXPECT_EQ(OH_NN_NULL_PTR, ret);
579 }
580 
581 /**
582  * @tc.name: nntensor2_0test_createdata_021
583  * @tc.desc: Verify that CreateData(size) returns OH_NN_INVALID_PARAMETER when the requested size exceeds ALLOCATE_BUFFER_LIMIT.
584  * @tc.type: FUNC
585  */
586 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_021, TestSize.Level0)
587 {
588     LOGE("CreateData nntensor2_0test_createdata_021");
589     size_t backendId = 1;
590 
591     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
592     EXPECT_NE(nullptr, nnTensor);
593 
594     TensorDesc desc;
595     desc.SetDataType(OH_NN_INT64);
596     size_t shapeNum = 1;
597     int32_t index = 10;
598     int32_t* shape = &index;
599     desc.SetShape(shape, shapeNum);
600     TensorDesc* tensorDesc = &desc;
601 
602     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
603     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
604 
605     size_t size = ALLOCATE_BUFFER_LIMIT + 1;
606     OH_NN_ReturnCode ret = nnTensor->CreateData(size);
607     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
608 }
609 
610 /**
611  * @tc.name: nntensor2_0test_createdata_022
612  * @tc.desc: Verify that CreateData(size) returns OH_NN_INVALID_PARAMETER when the requested size is smaller than the tensor byte size.
613  * @tc.type: FUNC
614  */
615 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_022, TestSize.Level0)
616 {
617     LOGE("CreateData nntensor2_0test_createdata_022");
618     size_t backendId = 1;
619 
620     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
621     EXPECT_NE(nullptr, nnTensor);
622 
623     TensorDesc desc;
624     desc.SetDataType(OH_NN_INT64);
625     size_t shapeNum = 1;
626     int32_t index = 10;
627     int32_t* shape = &index;
628     desc.SetShape(shape, shapeNum);
629     TensorDesc* tensorDesc = &desc;
630 
631     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
632     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
633 
634     size_t size = 1;
635     OH_NN_ReturnCode ret = nnTensor->CreateData(size);
636     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
637 }
638 
639 /**
640  * @tc.name: nntensor2_0test_createdata_023
641  * @tc.desc: Verify that CreateData(size) returns OH_NN_INVALID_PARAMETER when GetByteSize fails.
642  * @tc.type: FUNC
643  */
644 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_023, TestSize.Level0)
645 {
646     LOGE("CreateData nntensor2_0test_createdata_023");
647     size_t backendId = 1;
648 
649     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
650     EXPECT_NE(nullptr, nnTensor);
651 
652     std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
653     EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
654         .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));
655 
656     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
657     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
658 
659     size_t size = 1;
660     OH_NN_ReturnCode ret = nnTensor->CreateData(size);
661     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
662 
663     testing::Mock::AllowLeak(tensorDesc.get());
664 }
665 
666 /**
667  * @tc.name: nntensor2_0test_createdata_024
668  * @tc.desc: Verify that CreateData(size) returns OH_NN_FAILED when the tensor already holds data.
669  * @tc.type: FUNC
670  */
671 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_024, TestSize.Level0)
672 {
673     LOGE("CreateData nntensor2_0test_createdata_024");
674     size_t backendId = 1;
675 
676     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
677     EXPECT_NE(nullptr, nnTensor);
678 
679     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
680     void* buffer = dataArry;
681     nnTensor->SetData(buffer);
682 
683     size_t size = 1;
684     OH_NN_ReturnCode ret = nnTensor->CreateData(size);
685     EXPECT_EQ(OH_NN_FAILED, ret);
686 }
687 
688 /**
689  * @tc.name: nntensor2_0test_createdata_029
690  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_FAILED when the tensor already holds data.
691  * @tc.type: FUNC
692  */
693 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_029, TestSize.Level0)
694 {
695     LOGE("CreateData nntensor2_0test_createdata_029");
696     size_t backendId = 1;
697 
698     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
699     EXPECT_NE(nullptr, nnTensor);
700 
701     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
702     void* buffer = dataArry;
703     nnTensor->SetData(buffer);
704 
705     int fd = 1;
706     size_t size = 2;
707     size_t offset = 3;
708     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
709     EXPECT_EQ(OH_NN_FAILED, ret);
710 }
711 
712 /**
713  * @tc.name: nntensor2_0test_createdata_030
714  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_NULL_PTR when no tensor desc is set.
715  * @tc.type: FUNC
716  */
717 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_030, TestSize.Level0)
718 {
719     LOGE("CreateData nntensor2_0test_createdata_030");
720     size_t backendId = 1;
721 
722     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
723     EXPECT_NE(nullptr, nnTensor);
724 
725     int fd = 1;
726     size_t size = 2;
727     size_t offset = 3;
728     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
729     EXPECT_EQ(OH_NN_NULL_PTR, ret);
730 }
731 
732 /**
733  * @tc.name: nntensor2_0test_createdata_031
734  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when GetByteSize fails.
735  * @tc.type: FUNC
736  */
737 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_031, TestSize.Level0)
738 {
739     LOGE("CreateData nntensor2_0test_createdata_031");
740     size_t backendId = 1;
741 
742     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
743     EXPECT_NE(nullptr, nnTensor);
744 
745     std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
746     EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
747         .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));
748 
749     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
750     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
751 
752     int fd = 1;
753     size_t size = 2;
754     size_t offset = 3;
755     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
756     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
757 
758     testing::Mock::AllowLeak(tensorDesc.get());
759 }
760 
761 /**
762  * @tc.name: nntensor2_0test_createdata_032
763  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when fd is -1.
764  * @tc.type: FUNC
765  */
766 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_032, TestSize.Level0)
767 {
768     LOGE("CreateData nntensor2_0test_createdata_032");
769     size_t backendId = 1;
770 
771     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
772     EXPECT_NE(nullptr, nnTensor);
773 
774     TensorDesc desc;
775     desc.SetDataType(OH_NN_INT64);
776     size_t shapeNum = 1;
777     int32_t index = 10;
778     int32_t* shape = &index;
779     desc.SetShape(shape, shapeNum);
780     TensorDesc* tensorDesc = &desc;
781 
782     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
783     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
784 
785     int fd = -1;
786     size_t size = 2;
787     size_t offset = 3;
788     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
789     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
790 }
791 
792 /**
793  * @tc.name: nntensor2_0test_createdata_033
794  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when size is 0.
795  * @tc.type: FUNC
796  */
797 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_033, TestSize.Level0)
798 {
799     LOGE("CreateData nntensor2_0test_createdata_033");
800     size_t backendId = 1;
801 
802     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
803     EXPECT_NE(nullptr, nnTensor);
804 
805     TensorDesc desc;
806     desc.SetDataType(OH_NN_INT64);
807     size_t shapeNum = 1;
808     int32_t index = 10;
809     int32_t* shape = &index;
810     desc.SetShape(shape, shapeNum);
811     TensorDesc* tensorDesc = &desc;
812 
813     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
814     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
815 
816     int fd = 0;
817     size_t size = 0;
818     size_t offset = 3;
819     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
820     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
821 }
822 
823 /**
824  * @tc.name: nntensor2_0test_createdata_034
825  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when the offset exceeds the size.
826  * @tc.type: FUNC
827  */
828 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_034, TestSize.Level0)
829 {
830     LOGE("CreateData nntensor2_0test_createdata_034");
831     size_t backendId = 1;
832 
833     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
834     EXPECT_NE(nullptr, nnTensor);
835 
836     TensorDesc desc;
837     desc.SetDataType(OH_NN_INT64);
838     size_t shapeNum = 1;
839     int32_t index = 10;
840     int32_t* shape = &index;
841     desc.SetShape(shape, shapeNum);
842     TensorDesc* tensorDesc = &desc;
843 
844     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
845     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
846 
847     int fd = 0;
848     size_t size = 1;
849     size_t offset = 3;
850     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
851     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
852 }
853 
854 /**
855  * @tc.name: nntensor2_0test_createdata_035
856  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_INVALID_PARAMETER when the size remaining after the offset is smaller than the tensor byte size.
857  * @tc.type: FUNC
858  */
859 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_035, TestSize.Level0)
860 {
861     LOGE("CreateData nntensor2_0test_createdata_035");
862     size_t backendId = 1;
863 
864     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
865     EXPECT_NE(nullptr, nnTensor);
866 
867     TensorDesc desc;
868     desc.SetDataType(OH_NN_INT64);
869     size_t shapeNum = 1;
870     int32_t index = 10;
871     int32_t* shape = &index;
872     desc.SetShape(shape, shapeNum);
873     TensorDesc* tensorDesc = &desc;
874 
875     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
876     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
877 
878     int fd = 0;
879     size_t size = 3;
880     size_t offset = 2;
881     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
882     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
883 }
884 
885 /**
886  * @tc.name: nntensor2_0test_createdata_036
887  * @tc.desc: Verify that CreateData(fd, size, offset) returns OH_NN_MEMORY_ERROR when the given fd cannot be mapped.
888  * @tc.type: FUNC
889  */
890 HWTEST_F(NNTensor2Test, nntensor2_0test_createdata_036, TestSize.Level0)
891 {
892     LOGE("CreateData nntensor2_0test_createdata_036");
893     size_t backendId = 1;
894 
895     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
896     EXPECT_NE(nullptr, nnTensor);
897 
898     TensorDesc desc;
899     desc.SetDataType(OH_NN_INT64);
900     size_t shapeNum = 1;
901     int32_t index = 10;
902     int32_t* shape = &index;
903     desc.SetShape(shape, shapeNum);
904     TensorDesc* tensorDesc = &desc;
905 
906     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
907     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
908 
909     int fd = 0;
910     size_t size = 200;
911     size_t offset = 1;
912     OH_NN_ReturnCode ret = nnTensor->CreateData(fd, size, offset);
913     EXPECT_EQ(OH_NN_MEMORY_ERROR, ret);
914 }
915 
916 
917 /**
918  * @tc.name: nntensor2_0test_gettensordesc_001
919  * @tc.desc: Verify that GetTensorDesc returns nullptr when no tensor desc is set.
920  * @tc.type: FUNC
921  */
922 HWTEST_F(NNTensor2Test, nntensor2_0test_gettensordesc_001, TestSize.Level0)
923 {
924     LOGE("GetTensorDesc nntensor2_0test_gettensordesc_001");
925     size_t backendId = 1;
926 
927     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
928     EXPECT_NE(nullptr, nnTensor);
929 
930     TensorDesc* ret = nnTensor->GetTensorDesc();
931     EXPECT_EQ(nullptr, ret);
932 }
933 
934 /**
935  * @tc.name: nntensor2_0test_getdata_001
936  * @tc.desc: Verify that GetData returns nullptr when no data is set.
937  * @tc.type: FUNC
938  */
939 HWTEST_F(NNTensor2Test, nntensor2_0test_getdata_001, TestSize.Level0)
940 {
941     LOGE("GetData nntensor2_0test_getdata_001");
942     size_t backendId = 1;
943 
944     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
945     EXPECT_NE(nullptr, nnTensor);
946 
947     void* ret = nnTensor->GetData();
948     EXPECT_EQ(nullptr, ret);
949 }
950 
951 /**
952  * @tc.name: nntensor2_0test_getfd_001
953  * @tc.desc: Verify that GetFd returns 0 by default.
954  * @tc.type: FUNC
955  */
956 HWTEST_F(NNTensor2Test, nntensor2_0test_getfd_001, TestSize.Level0)
957 {
958     LOGE("GetFd nntensor2_0test_getfd_001");
959     size_t backendId = 1;
960 
961     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
962     EXPECT_NE(nullptr, nnTensor);
963 
964     int ret = nnTensor->GetFd();
965     EXPECT_EQ(0, ret);
966 }
967 
968 /**
969  * @tc.name: nntensor2_0test_getsize_001
970  * @tc.desc: Verify that GetSize returns 0 by default.
971  * @tc.type: FUNC
972  */
973 HWTEST_F(NNTensor2Test, nntensor2_0test_getsize_001, TestSize.Level0)
974 {
975     LOGE("GetSize nntensor2_0test_getsize_001");
976     size_t backendId = 1;
977 
978     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
979     EXPECT_NE(nullptr, nnTensor);
980 
981     size_t ret = nnTensor->GetSize();
982     EXPECT_EQ(0, ret);
983 }
984 
985 /**
986  * @tc.name: nntensor2_0test_getoffset_001
987  * @tc.desc: Verify that GetOffset returns 0 by default.
988  * @tc.type: FUNC
989  */
990 HWTEST_F(NNTensor2Test, nntensor2_0test_getoffset_001, TestSize.Level0)
991 {
992     LOGE("GetOffset nntensor2_0test_getoffset_001");
993     size_t backendId = 1;
994 
995     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
996     EXPECT_NE(nullptr, nnTensor);
997 
998     size_t ret = nnTensor->GetOffset();
999     EXPECT_EQ(0, ret);
1000 }
1001 
1002 /**
1003  * @tc.name: nntensor2_0test_getbackendid_001
1004  * @tc.desc: Verify that GetBackendID returns the id passed to the constructor.
1005  * @tc.type: FUNC
1006  */
1007 HWTEST_F(NNTensor2Test, nntensor2_0test_getbackendid_001, TestSize.Level0)
1008 {
1009     LOGE("GetBackendID nntensor2_0test_getbackendid_001");
1010     size_t backendId = 1;
1011 
1012     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1013     EXPECT_NE(nullptr, nnTensor);
1014 
1015     size_t ret = nnTensor->GetBackendID();
1016     EXPECT_EQ(1, ret);
1017 }
1018 
1019 /**
1020  * @tc.name: nntensor2_0test_checktensordata_001
1021  * @tc.desc: Verify that CheckTensorData returns false when no tensor desc is set.
1022  * @tc.type: FUNC
1023  */
1024 HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_001, TestSize.Level0)
1025 {
1026     LOGE("CheckTensorData nntensor2_0test_checktensordata_001");
1027     size_t backendId = 1;
1028 
1029     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1030     EXPECT_NE(nullptr, nnTensor);
1031 
1032     bool ret = nnTensor->CheckTensorData();
1033     EXPECT_EQ(false, ret);
1034 }
1035 
1036 /**
1037  * @tc.name: nntensor2_0test_checktensordata_002
1038  * @tc.desc: Verify that CheckTensorData returns false when GetByteSize fails.
1039  * @tc.type: FUNC
1040  */
1041 HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_002, TestSize.Level0)
1042 {
1043     LOGE("CheckTensorData nntensor2_0test_checktensordata_002");
1044     size_t backendId = 1;
1045 
1046     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1047     EXPECT_NE(nullptr, nnTensor);
1048 
1049     std::shared_ptr<MockTensorDesc> tensorDesc = std::make_shared<MockTensorDesc>();
1050     EXPECT_CALL(*((MockTensorDesc *) tensorDesc.get()), GetByteSize(::testing::_))
1051         .WillRepeatedly(::testing::Return(OH_NN_INVALID_PARAMETER));
1052 
1053     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc.get());
1054     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1055 
1056     bool ret = nnTensor->CheckTensorData();
1057     EXPECT_EQ(false, ret);
1058 
1059     testing::Mock::AllowLeak(tensorDesc.get());
1060 }
1061 
1062 /**
1063  * @tc.name: nntensor2_0test_checktensordata_003
1064  * @tc.desc: Verify that CheckTensorData returns false when the tensor size is smaller than the byte size.
1065  * @tc.type: FUNC
1066  */
1067 HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_003, TestSize.Level0)
1068 {
1069     LOGE("CheckTensorData nntensor2_0test_checktensordata_003");
1070     size_t backendId = 1;
1071 
1072     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1073     EXPECT_NE(nullptr, nnTensor);
1074 
1075     TensorDesc desc;
1076     desc.SetDataType(OH_NN_INT64);
1077     size_t shapeNum = 1;
1078     int32_t index = 10;
1079     int32_t* shape = &index;
1080     desc.SetShape(shape, shapeNum);
1081     TensorDesc* tensorDesc = &desc;
1082 
1083     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1084     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1085 
1086     bool ret = nnTensor->CheckTensorData();
1087     EXPECT_EQ(false, ret);
1088 }
1089 
1090 /**
1091  * @tc.name: nntensor2_0test_checktensordata_004
1092  * @tc.desc: Verify that CheckTensorData returns false when no data buffer has been set.
1093  * @tc.type: FUNC
1094  */
1095 HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_004, TestSize.Level0)
1096 {
1097     LOGE("CheckTensorData nntensor2_0test_checktensordata_004");
1098     size_t backendId = 1;
1099 
1100     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1101     EXPECT_NE(nullptr, nnTensor);
1102 
1103     TensorDesc desc;
1104     desc.SetDataType(OH_NN_INT64);
1105     size_t shapeNum = 1;
1106     int32_t index = 10;
1107     int32_t* shape = &index;
1108     desc.SetShape(shape, shapeNum);
1109     TensorDesc* tensorDesc = &desc;
1110 
1111     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1112     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1113 
1114     nnTensor->SetSize(200);
1115     nnTensor->SetOffset(0);
1116 
1117     bool ret = nnTensor->CheckTensorData();
1118     EXPECT_EQ(false, ret);
1119 }
1120 
1121 /**
1122  * @tc.name: nntensor2_0test_checktensordata_005
1123  * @tc.desc: Verify that CheckTensorData returns false when fd is -1.
1124  * @tc.type: FUNC
1125  */
1126 HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_005, TestSize.Level0)
1127 {
1128     LOGE("CheckTensorData nntensor2_0test_checktensordata_005");
1129     size_t backendId = 1;
1130 
1131     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1132     EXPECT_NE(nullptr, nnTensor);
1133 
1134     TensorDesc desc;
1135     desc.SetDataType(OH_NN_INT64);
1136     size_t shapeNum = 1;
1137     int32_t index = 10;
1138     int32_t* shape = &index;
1139     desc.SetShape(shape, shapeNum);
1140     TensorDesc* tensorDesc = &desc;
1141 
1142     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1143     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1144 
1145     nnTensor->SetSize(200);
1146     nnTensor->SetOffset(0);
1147     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1148     void* buffer = dataArry;
1149     nnTensor->SetData(buffer);
1150     nnTensor->SetFd(-1);
1151 
1152     bool ret = nnTensor->CheckTensorData();
1153     EXPECT_EQ(false, ret);
1154 }
1155 
1156 /**
1157  * @tc.name: nntensor2_0test_checktensordata_006
1158  * @tc.desc: Verify that CheckTensorData returns true when size, data, and fd are all valid.
1159  * @tc.type: FUNC
1160  */
1161 HWTEST_F(NNTensor2Test, nntensor2_0test_checktensordata_006, TestSize.Level0)
1162 {
1163     LOGE("CheckTensorData nntensor2_0test_checktensordata_006");
1164     size_t backendId = 1;
1165 
1166     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1167     EXPECT_NE(nullptr, nnTensor);
1168 
1169     TensorDesc desc;
1170     desc.SetDataType(OH_NN_INT64);
1171     size_t shapeNum = 1;
1172     int32_t index = 10;
1173     int32_t* shape = &index;
1174     desc.SetShape(shape, shapeNum);
1175     TensorDesc* tensorDesc = &desc;
1176 
1177     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1178     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1179 
1180     nnTensor->SetSize(200);
1181     nnTensor->SetOffset(0);
1182     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
1183     void* buffer = dataArry;
1184     nnTensor->SetData(buffer);
1185 
1186     bool ret = nnTensor->CheckTensorData();
1187     EXPECT_EQ(true, ret);
1188 }
1189 
1190 /**
1191  * @tc.name: nntensor2_0test_checkdimranges_001
1192  * @tc.desc: Verify that CheckDimRanges returns OH_NN_INVALID_PARAMETER when no tensor desc is set.
1193  * @tc.type: FUNC
1194  */
1195 HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_001, TestSize.Level0)
1196 {
1197     LOGE("CheckDimRanges nntensor2_0test_checkdimranges_001");
1198     size_t backendId = 1;
1199 
1200     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1201     EXPECT_NE(nullptr, nnTensor);
1202 
1203     std::vector<uint32_t> minDimRanges;
1204     const std::vector<uint32_t> maxDimRanges;
1205     OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
1206     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1207 }
1208 
1209 /**
1210  * @tc.name: nntensor2_0test_checkdimranges_002
1211  * @tc.desc: Verify that CheckDimRanges returns OH_NN_SUCCESS for a tensor desc without a shape.
1212  * @tc.type: FUNC
1213  */
1214 HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_002, TestSize.Level0)
1215 {
1216     LOGE("CheckDimRanges nntensor2_0test_checkdimranges_002");
1217     size_t backendId = 1;
1218 
1219     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1220     EXPECT_NE(nullptr, nnTensor);
1221 
1222     TensorDesc desc;
1223     desc.SetDataType(OH_NN_INT64);
1224     TensorDesc* tensorDesc = &desc;
1225 
1226     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1227     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1228 
1229     std::vector<uint32_t> minDimRanges;
1230     const std::vector<uint32_t> maxDimRanges;
1231     OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
1232     EXPECT_EQ(OH_NN_SUCCESS, ret);
1233 }
1234 
1235 /**
1236  * @tc.name: nntensor2_0test_checkdimranges_003
1237  * @tc.desc: Verify that CheckDimRanges returns OH_NN_INVALID_PARAMETER when the shape contains a negative dimension.
1238  * @tc.type: FUNC
1239  */
1240 HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_003, TestSize.Level0)
1241 {
1242     LOGE("CheckDimRanges nntensor2_0test_checkdimranges_003");
1243     size_t backendId = 1;
1244 
1245     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1246     EXPECT_NE(nullptr, nnTensor);
1247 
1248     TensorDesc desc;
1249     desc.SetDataType(OH_NN_INT64);
1250     size_t shapeNum = 1;
1251     int32_t index = -10;
1252     int32_t* shape = &index;
1253     desc.SetShape(shape, shapeNum);
1254     TensorDesc* tensorDesc = &desc;
1255 
1256     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1257     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1258 
1259     std::vector<uint32_t> minDimRanges;
1260     const std::vector<uint32_t> maxDimRanges;
1261     OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
1262     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1263 }
1264 
1265 /**
1266  * @tc.name: nntensor2_0test_checkdimranges_004
1267  * @tc.desc: Verify that CheckDimRanges returns OH_NN_INVALID_PARAMETER when a dimension lies outside the given range.
1268  * @tc.type: FUNC
1269  */
1270 HWTEST_F(NNTensor2Test, nntensor2_0test_checkdimranges_004, TestSize.Level0)
1271 {
1272     LOGE("CheckDimRanges nntensor2_0test_checkdimranges_004");
1273     size_t backendId = 1;
1274 
1275     NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
1276     EXPECT_NE(nullptr, nnTensor);
1277 
1278     TensorDesc desc;
1279     desc.SetDataType(OH_NN_INT64);
1280     size_t shapeNum = 1;
1281     int32_t index = 10;
1282     int32_t* shape = &index;
1283     desc.SetShape(shape, shapeNum);
1284     TensorDesc* tensorDesc = &desc;
1285 
1286     OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(tensorDesc);
1287     EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);
1288 
1289     std::vector<uint32_t> minDimRanges;
1290     minDimRanges.emplace_back(20);
1291     std::vector<uint32_t> maxDimRanges;
1292     maxDimRanges.emplace_back(20);
1293     OH_NN_ReturnCode ret = nnTensor->CheckDimRanges(minDimRanges, maxDimRanges);
1294     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1295 }
1296 } // namespace UnitTest
1297 } // namespace NeuralNetworkRuntime
1298 } // namespace OHOS