/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "common/log.h"
#include "hdi_prepared_model_v1_0.h"
#include "memory_manager.h"
#include "transform.h"
#include "test/unittest/common/v1_0/mock_idevice.h"
#include "test/unittest/common/file_utils.h"
#include "tensor.h"
#include "nntensor.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace UnitTest {
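// Test fixture for the HDIPreparedModelV1_0 wrapper. The protected helpers map a
// shareable buffer, build IOTensor inputs, and drive Run() against a gmock-backed
// V1_0::IPreparedModel.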
class HDIPreparedModelTest : public testing::Test {
protected:
    void GetBuffer(void*& buffer, size_t length);
    void InitTensor(std::vector<IOTensor>& inputs, void* buffer, size_t length);
    OH_NN_ReturnCode Run(std::vector<IOTensor>& inputs);
    OH_NN_ReturnCode RunFail(std::vector<IOTensor>& inputs);
};

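// gmock stand-in for the Tensor interface, used by the NN_Tensor overload of Run()
// in the cases below without touching real device memory.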
class MockTensor : public Tensor {
public:
    MOCK_METHOD1(SetTensorDesc, OH_NN_ReturnCode(const TensorDesc*));
    MOCK_METHOD0(CreateData, OH_NN_ReturnCode());
    MOCK_METHOD1(CreateData, OH_NN_ReturnCode(size_t));
    MOCK_METHOD3(CreateData, OH_NN_ReturnCode(int, size_t, size_t));
    MOCK_CONST_METHOD0(GetTensorDesc, TensorDesc*());
    MOCK_CONST_METHOD0(GetData, void*());
    MOCK_CONST_METHOD0(GetFd, int());
    MOCK_CONST_METHOD0(GetSize, size_t());
    MOCK_CONST_METHOD0(GetOffset, size_t());
    MOCK_CONST_METHOD0(GetBackendID, size_t());
};

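// Writes a small data file, opens it, and maps it through MemoryManager so the tests
// get a buffer that behaves like device-shareable memory.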
void HDIPreparedModelTest::GetBuffer(void*& buffer, size_t length)
{
    std::string data = "ABCD";
    const size_t dataLength = 100;
    data.resize(dataLength, '-');

    std::string filename = "/data/log/memory-001.dat";
    FileUtils fileUtils(filename);
    fileUtils.WriteFile(data);

    int fd = open(filename.c_str(), O_RDWR);
    EXPECT_NE(-1, fd);

    const auto& memoryManager = MemoryManager::GetInstance();
    buffer = memoryManager->MapMemory(fd, length);
    close(fd);
}

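// Appends a single OH_NN_INT8, NCHW input tensor that points at the supplied buffer.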
void HDIPreparedModelTest::InitTensor(std::vector<IOTensor>& inputs, void* buffer, size_t length)
{
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT8;
    inputTensor.format = OH_NN_FORMAT_NCHW;
    inputTensor.data = buffer;
    inputTensor.length = length;
    inputs.emplace_back(std::move(inputTensor));
}

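// Runs the prepared model against a mock whose Run() reports HDF_SUCCESS, so the
// result reflects only the runtime's own validation of the supplied inputs.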
OH_NN_ReturnCode HDIPreparedModelTest::Run(std::vector<IOTensor>& inputs)
{
    const int vvPosition = 2;
    const int vPosition = 3;
    std::vector<IOTensor> outputs;
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(::testing::DoAll(
            ::testing::SetArgReferee<vvPosition>(outputsDims),
            ::testing::SetArgReferee<vPosition>(isOutputBufferEnough),
            ::testing::Return(HDF_SUCCESS))
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    return result;
}

/**
 * @tc.name: hidpreparedmodel_constructor_001
 * @tc.desc: Verify that the constructor creates an HDIPreparedModelV1_0 instance successfully.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_constructor_001, TestSize.Level0)
{
    OHOS::sptr<V1_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(hdiPreparedModel);
    EXPECT_NE(preparedModel, nullptr);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_001
 * @tc.desc: Verify that the ExportModelCache function returns a memory error.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_001, TestSize.Level0)
{
    std::vector<V1_0::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V1_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(hdiPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V1_0::MockIPreparedModel*)hdiPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_MEMORY_ERROR, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_002
 * @tc.desc: Verify that the ExportModelCache function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_002, TestSize.Level0)
{
    std::vector<V1_0::SharedBuffer> bufferVect;
    OHOS::sptr<V1_0::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(mockPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V1_0::MockIPreparedModel*)mockPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SUCCESS, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_003
 * @tc.desc: Verify that the ExportModelCache function returns success when the device reports an empty cache.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_003, TestSize.Level0)
{
    OHOS::sptr<V1_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(hdiPreparedModel);
    std::vector<Buffer> modelCache;
    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SUCCESS, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_004
 * @tc.desc: Verify that the ExportModelCache function returns a save-cache exception when the HDI call fails.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_004, TestSize.Level0)
{
    std::vector<V1_0::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V1_0::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(mockPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V1_0::MockIPreparedModel*)mockPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_FAILURE)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SAVE_CACHE_EXCEPTION, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_005
 * @tc.desc: Verify that the ExportModelCache function returns invalid parameter when modelCache is not empty.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_005, TestSize.Level0)
{
    LOGE("ExportModelCache hidpreparedmodel_exportmodelcache_005");
    std::vector<V1_0::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V1_0::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(mockPreparedModel);

    std::vector<Buffer> modelCache;
    Buffer buffer;
    modelCache.emplace_back(buffer);
    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_001
 * @tc.desc: Verify that the Run function returns invalid parameter when the input tensor has no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_001, TestSize.Level0)
{
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT8;

    IOTensor outputTensor;
    outputTensor.dataType = OH_NN_INT8;
    std::vector<IOTensor> inputs;
    inputs.emplace_back(std::move(inputTensor));
    std::vector<IOTensor> outputs;

    std::vector<V1_0::IOTensor> iOutputTensors;
    V1_0::IOTensor iTensor;
    iOutputTensors.emplace_back(iTensor);
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    std::shared_ptr<V1_0::MockIPreparedModel> sp = std::make_shared<V1_0::MockIPreparedModel>();
    OHOS::sptr<V1_0::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(hdiPreparedModel);
    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_002
 * @tc.desc: Verify that the Run function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_002, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);

    OH_NN_ReturnCode result = Run(inputs);
    EXPECT_EQ(OH_NN_SUCCESS, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_003
 * @tc.desc: Verify that the Run function returns unavailable device when the HDI Run call fails.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_003, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);

    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);

    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<2>(outputsDims),
                ::testing::SetArgReferee<3>(isOutputBufferEnough),
                ::testing::Return(HDF_FAILURE)
            )
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_004
 * @tc.desc: Verify that the Run function returns invalid parameter when the input buffer is null.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_004, TestSize.Level0)
{
    std::vector<IOTensor> inputs;
    InitTensor(inputs, nullptr, 0);
    OH_NN_ReturnCode result = Run(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_005
 * @tc.desc: Verify that the Run function returns invalid parameter when the output tensor is invalid.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_005, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);
    InitTensor(outputs, nullptr, 0);

    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_006
 * @tc.desc: Verify that the Run function returns success when both inputs and outputs are valid.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_006, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_006");
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);
    InitTensor(outputs, buffer, length);

    const int vvPosition = 2;
    const int vPosition = 3;
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(::testing::DoAll(
            ::testing::SetArgReferee<vvPosition>(outputsDims),
            ::testing::SetArgReferee<vPosition>(isOutputBufferEnough),
            ::testing::Return(HDF_SUCCESS))
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_SUCCESS, result);

    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

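// Drives Run() without priming the mock, so the data-type cases below exercise only
// the runtime's own input validation before any HDI call is reached.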
OH_NN_ReturnCode HDIPreparedModelTest::RunFail(std::vector<IOTensor>& inputs)
{
    std::vector<IOTensor> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    return result;
}

/**
 * @tc.name: hidpreparedmodel_run_007
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_BOOL input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_007, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_007");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_BOOL;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_008
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_INT16 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_008, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_008");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_009
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_INT64 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_009, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_009");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT64;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_010
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_UINT8 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_010, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_010");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT8;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_011
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_UINT16 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_011, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_011");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_012
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_UINT32 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_012, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_012");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_013
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_UINT64 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_013, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_013");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT64;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_014
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_FLOAT16 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_014, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_014");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_015
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_FLOAT32 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_015, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_015");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_016
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_FLOAT64, NHWC input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_016, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_016");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT64;
    inputTensor.format = OH_NN_FORMAT_NHWC;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_017
 * @tc.desc: Verify that the Run function returns invalid parameter for an input tensor with unknown data type and no format.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_017, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_017");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UNKNOWN;
    inputTensor.format = OH_NN_FORMAT_NONE;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_018
 * @tc.desc: Verify that the Run function returns invalid parameter for an OH_NN_INT32 input tensor with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_018, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_018");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_019
 * @tc.desc: Verify that the Run function fails when an input NN_Tensor is null.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_019, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_019");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    inputs.emplace_back(nullptr);

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: hidpreparedmodel_run_020
 * @tc.desc: Verify that the Run function fails when the input tensor provides no tensor descriptor.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_020, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_020");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    MockTensor* tensorImpl = new (std::nothrow) MockTensor();
    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(tensorImpl);
    inputs.emplace_back(tensor);

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);

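    // The MockTensor is deliberately leaked by this test, so tell gmock's leak checker to ignore it.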
    testing::Mock::AllowLeak(tensorImpl);
}

/**
 * @tc.name: hidpreparedmodel_run_021
 * @tc.desc: Verify that the Run function fails when the input tensor has a descriptor but no data buffer.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_021, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_021");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    size_t deviceId = 1;
    NNTensor2_0* tensorImpl = new (std::nothrow) NNTensor2_0(deviceId);
    TensorDesc tensorDesc;

    tensorImpl->SetTensorDesc(&tensorDesc);
    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(tensorImpl);
    inputs.emplace_back(tensor);

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: hidpreparedmodel_run_022
 * @tc.desc: Verify that the Run function returns unavailable device when a fully initialized input tensor is run against the mock device.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_022, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_022");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    size_t backendId = 1;
    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    // Build a complete tensor description: name, data type, format and a 3 x 3 shape.
    TensorDesc tensorDesc;
    const char* name = "a";
    tensorDesc.SetName(name);
    tensorDesc.SetDataType(OH_NN_UINT32);
    tensorDesc.SetFormat(OH_NN_FORMAT_NCHW);
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesc.SetShape(ptr, dimensionCount);

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(&tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    nnTensor->SetSize(200);
    nnTensor->SetOffset(0);
    float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArray;
    nnTensor->SetData(buffer);

    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(nnTensor);
    inputs.emplace_back(tensor);

    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, ret);
}

/**
 * @tc.name: hidpreparedmodel_getmodelid_001
 * @tc.desc: Verify that the GetModelID function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_getmodelid_001, TestSize.Level0)
{
    LOGE("GetModelID hidpreparedmodel_getmodelid_001");
    OHOS::sptr<V1_0::MockIPreparedModel> sp =
        OHOS::sptr<V1_0::MockIPreparedModel>(new (std::nothrow) V1_0::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    uint32_t index = 0;
    std::unique_ptr<HDIPreparedModelV1_0> preparedModel = std::make_unique<HDIPreparedModelV1_0>(sp);
    OH_NN_ReturnCode ret = preparedModel->GetModelID(index);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS