/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "common/log.h"
#include "hdi_prepared_model_v2_0.h"
#include "memory_manager.h"
#include "transform.h"
#include "test/unittest/common/v2_0/mock_idevice.h"
#include "test/unittest/common/file_utils.h"
#include "tensor.h"
#include "nntensor.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace UnitTest {
class HDIPreparedModelTest : public testing::Test {
protected:
    void GetBuffer(void*& buffer, size_t length);
    void InitTensor(std::vector<IOTensor>& inputs, void* buffer, size_t length);
    OH_NN_ReturnCode Run(std::vector<IOTensor>& inputs);
    OH_NN_ReturnCode RunFail(std::vector<IOTensor>& inputs);
};

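// gmock stub of the Tensor interface; hidpreparedmodel_run_020 feeds it to Run() without setting
// expectations, so every accessor returns its default (null/zero) value.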
class MockTensor : public Tensor {
public:
    MOCK_METHOD1(SetTensorDesc, OH_NN_ReturnCode(const TensorDesc*));
    MOCK_METHOD0(CreateData, OH_NN_ReturnCode());
    MOCK_METHOD1(CreateData, OH_NN_ReturnCode(size_t));
    MOCK_METHOD3(CreateData, OH_NN_ReturnCode(int, size_t, size_t));
    MOCK_CONST_METHOD0(GetTensorDesc, TensorDesc*());
    MOCK_CONST_METHOD0(GetData, void*());
    MOCK_CONST_METHOD0(GetFd, int());
    MOCK_CONST_METHOD0(GetSize, size_t());
    MOCK_CONST_METHOD0(GetOffset, size_t());
    MOCK_CONST_METHOD0(GetBackendID, size_t());
};

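// Writes a small file under /data/log and maps it through MemoryManager so the tests can hand
// Run() a valid, mappable buffer; callers release it with UnMapMemory().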
void HDIPreparedModelTest::GetBuffer(void*& buffer, size_t length)
{
    std::string data = "ABCD";
    const size_t dataLength = 100;
    data.resize(dataLength, '-');

    std::string filename = "/data/log/memory-001.dat";
    FileUtils fileUtils(filename);
    fileUtils.WriteFile(data);

    int fd = open(filename.c_str(), O_RDWR);
    EXPECT_NE(-1, fd);

    const auto& memoryManager = MemoryManager::GetInstance();
    buffer = memoryManager->MapMemory(fd, length);
    close(fd);
}

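// Appends one OH_NN_INT8, NCHW IOTensor that wraps the given buffer to the tensor list.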
void HDIPreparedModelTest::InitTensor(std::vector<IOTensor>& inputs, void* buffer, size_t length)
{
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT8;
    inputTensor.format = OH_NN_FORMAT_NCHW;
    inputTensor.data = buffer;
    inputTensor.length = length;
    inputs.emplace_back(std::move(inputTensor));
}

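// Success-path helper: stubs MockIPreparedModel::Run() to fill the output dimensions and return
// HDF_SUCCESS, then forwards the prepared model's result code for the given inputs.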
OH_NN_ReturnCode HDIPreparedModelTest::Run(std::vector<IOTensor>& inputs)
{
    const int vvPosition = 2;
    std::vector<IOTensor> outputs;
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(::testing::DoAll(
            ::testing::SetArgReferee<vvPosition>(outputsDims),
            ::testing::Return(HDF_SUCCESS))
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    return result;
}

/**
 * @tc.name: hidpreparedmodel_constructor_001
 * @tc.desc: Verify that the constructor creates a valid prepared model instance.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_constructor_001, TestSize.Level0)
{
    OHOS::sptr<V2_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(hdiPreparedModel);
    EXPECT_NE(preparedModel, nullptr);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_001
 * @tc.desc: Verify the ExportModelCache function returns a memory error.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_001, TestSize.Level0)
{
    std::vector<V2_0::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V2_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(hdiPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V2_0::MockIPreparedModel*)hdiPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_MEMORY_ERROR, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_002
 * @tc.desc: Verify the ExportModelCache function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_002, TestSize.Level0)
{
    std::vector<V2_0::SharedBuffer> bufferVect;
    OHOS::sptr<V2_0::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(mockPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V2_0::MockIPreparedModel*)mockPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SUCCESS, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_003
 * @tc.desc: Verify the ExportModelCache function returns success when the device exports an empty cache.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_003, TestSize.Level0)
{
    OHOS::sptr<V2_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(hdiPreparedModel);
    std::vector<Buffer> modelCache;
    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SUCCESS, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_004
 * @tc.desc: Verify the ExportModelCache function returns a save-cache exception when the device reports a failure.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_004, TestSize.Level0)
{
    std::vector<V2_0::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V2_0::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(mockPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V2_0::MockIPreparedModel*)mockPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_FAILURE)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SAVE_CACHE_EXCEPTION, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_005
 * @tc.desc: Verify the ExportModelCache function returns an invalid parameter error when modelCache is not empty.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_005, TestSize.Level0)
{
    LOGE("ExportModelCache hidpreparedmodel_exportmodelcache_005");
    std::vector<V2_0::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V2_0::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(mockPreparedModel);

    std::vector<Buffer> modelCache;
    Buffer buffer;
    modelCache.emplace_back(buffer);
    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_001
 * @tc.desc: Verify the Run function returns an invalid parameter error when the input tensor has no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_001, TestSize.Level0)
{
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT8;

    IOTensor outputTensor;
    outputTensor.dataType = OH_NN_INT8;
    std::vector<IOTensor> inputs;
    inputs.emplace_back(std::move(inputTensor));
    std::vector<IOTensor> outputs;

    std::vector<V2_0::IOTensor> iOutputTensors;
    V2_0::IOTensor iTensor;
    iOutputTensors.emplace_back(iTensor);
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    std::shared_ptr<V2_0::MockIPreparedModel> sp = std::make_shared<V2_0::MockIPreparedModel>();
    OHOS::sptr<V2_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(hdiPreparedModel);
    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_002
 * @tc.desc: Verify the Run function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_002, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);

    OH_NN_ReturnCode result = Run(inputs);
    EXPECT_EQ(OH_NN_SUCCESS, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_003
 * @tc.desc: Verify the Run function returns an unavailable-device error when the device run fails.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_003, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);

    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);

    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<2>(outputsDims),
                ::testing::Return(HDF_FAILURE)
            )
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_004
 * @tc.desc: Verify the Run function returns an invalid parameter error when the input buffer is null.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_004, TestSize.Level0)
{
    std::vector<IOTensor> inputs;
    InitTensor(inputs, nullptr, 0);
    OH_NN_ReturnCode result = Run(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_005
 * @tc.desc: Verify the Run function returns an invalid parameter error when the output tensor is invalid.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_005, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);
    InitTensor(outputs, nullptr, 0);

    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_006
 * @tc.desc: Verify the Run function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_006, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_006");
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);
    InitTensor(outputs, buffer, length);

    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<2>(outputsDims),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_SUCCESS, result);

    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

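// Failure-path helper: runs the given inputs against a MockIPreparedModel with no Run()
// expectation set; used by the invalid-input cases below.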
OH_NN_ReturnCode HDIPreparedModelTest::RunFail(std::vector<IOTensor>& inputs)
{
    std::vector<IOTensor> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    return result;
}

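// Cases run_007 to run_018 iterate over the other OH_NN data types with an input tensor that
// has no data buffer; each expects OH_NN_INVALID_PARAMETER.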
/**
 * @tc.name: hidpreparedmodel_run_007
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_BOOL input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_007, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_007");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_BOOL;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_008
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_INT16 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_008, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_008");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_009
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_INT64 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_009, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_009");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT64;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_010
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_UINT8 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_010, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_010");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT8;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_011
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_UINT16 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_011, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_011");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_012
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_UINT32 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_012, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_012");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_013
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_UINT64 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_013, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_013");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT64;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_014
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_FLOAT16 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_014, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_014");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_015
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_FLOAT32 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_015, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_015");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_016
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_FLOAT64 NHWC input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_016, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_016");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT64;
    inputTensor.format = OH_NN_FORMAT_NHWC;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_017
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_UNKNOWN input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_017, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_017");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UNKNOWN;
    inputTensor.format = OH_NN_FORMAT_NONE;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_018
 * @tc.desc: Verify the Run function returns an invalid parameter error for an OH_NN_INT32 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_018, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_018");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_019
 * @tc.desc: Verify the Run function returns a failure when an input NN_Tensor is null.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_019, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_019");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    inputs.emplace_back(nullptr);

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

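// Cases run_020 to run_022 exercise the NN_Tensor* overload of Run() with progressively more
// complete tensors: a mock with no tensor descriptor, a descriptor without data, and a fully
// populated host-memory tensor.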
/**
 * @tc.name: hidpreparedmodel_run_020
 * @tc.desc: Verify the Run function returns a failure when the input tensor provides no tensor descriptor.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_020, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_020");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    MockTensor* tensorImpl = new (std::nothrow) MockTensor();
    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(tensorImpl);
    inputs.emplace_back(tensor);

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(tensorImpl);
}

/**
 * @tc.name: hidpreparedmodel_run_021
 * @tc.desc: Verify the Run function returns a failure when the input tensor has a descriptor but no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_021, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_021");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    size_t deviceId = 1;
    NNTensor2_0* tensorImpl = new (std::nothrow) NNTensor2_0(deviceId);
    TensorDesc tensorDesc;

    tensorImpl->SetTensorDesc(&tensorDesc);
    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(tensorImpl);
    inputs.emplace_back(tensor);

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: hidpreparedmodel_run_022
 * @tc.desc: Verify the Run function returns an unavailable-device error when the device run yields no output dimensions.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_022, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_022");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    size_t backendId = 1;
    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc tensorDesc;
    char name = 'a';
    tensorDesc.SetName(&name);
    tensorDesc.SetDataType(OH_NN_UINT32);
    tensorDesc.SetFormat(OH_NN_FORMAT_NCHW);
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesc.SetShape(ptr, dimensionCount);

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(&tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    nnTensor->SetSize(200);
    nnTensor->SetOffset(0);
    float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArray;
    nnTensor->SetData(buffer);

    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(nnTensor);
    inputs.emplace_back(tensor);

    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, ret);
}

/**
 * @tc.name: hidpreparedmodel_getmodelid_001
 * @tc.desc: Verify the GetModelID function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_getmodelid_001, TestSize.Level0)
{
    LOGE("GetModelID hidpreparedmodel_getmodelid_001");
    OHOS::sptr<V2_0::MockIPreparedModel> sp =
        OHOS::sptr<V2_0::MockIPreparedModel>(new (std::nothrow) V2_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    uint32_t index = 0;
    std::unique_ptr<HDIPreparedModelV2_0> preparedModel = std::make_unique<HDIPreparedModelV2_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->GetModelID(index);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS