1 /*
2 * Copyright (c) 2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include <gtest/gtest.h>
17 #include <gmock/gmock.h>
18
19 #include "nnbackend.h"
20 #include "nncompiler.h"
21 #include "device.h"
22 #include "interfaces/kits/c/neural_network_runtime/neural_network_runtime_type.h"
23 #include "common/utils.h"
24 #include "inner_model.h"
25
26 using namespace testing;
27 using namespace testing::ext;
28 using namespace OHOS::NeuralNetworkRuntime;
29
30 namespace OHOS {
31 namespace NeuralNetworkRuntime {
32 namespace UnitTest {
33 class NNCompilerTest : public testing::Test {
34 public:
35 NNCompilerTest() = default;
36 ~NNCompilerTest() = default;
37 OH_NN_ReturnCode BuildModel(InnerModel& innerModel);
38 };
39
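// Mocks the Device interface so each test can control capability queries
// (model cache, fp16, performance mode, priority) and the result of PrepareModel.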
40 class MockIDevice : public Device {
41 public:
42 MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
43 MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
44 MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
45 MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
46 MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
47 MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
48 std::vector<bool>&));
49 MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
50 MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
51 MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
52 MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
53 MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
54 MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
55 const ModelConfig&,
56 std::shared_ptr<PreparedModel>&));
57 MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
58 const ModelConfig&,
59 std::shared_ptr<PreparedModel>&));
60 MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
61 const ModelConfig&,
62 std::shared_ptr<PreparedModel>&,
63 bool&));
64 MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
65 const ModelConfig&,
66 std::shared_ptr<PreparedModel>&));
67 MOCK_METHOD1(AllocateBuffer, void*(size_t));
68 MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
69 MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
70 MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
71 MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
72 MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
73 };
74
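// Mocks PreparedModel so tests can hand NNCompiler a fake compiled model
// (e.g. from a stubbed Device::PrepareModel) without a real driver.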
75 class MockIPreparedModel : public PreparedModel {
76 public:
77 MOCK_METHOD1(ExportModelCache, OH_NN_ReturnCode(std::vector<Buffer>&));
78 MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<IOTensor>&,
79 const std::vector<IOTensor>&,
80 std::vector<std::vector<int32_t>>&,
81 std::vector<bool>&));
82 MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<NN_Tensor*>&,
83 const std::vector<NN_Tensor*>&,
84 std::vector<std::vector<int32_t>>&,
85 std::vector<bool>&));
86 MOCK_CONST_METHOD1(GetModelID, OH_NN_ReturnCode(uint32_t&));
87 MOCK_METHOD2(GetInputDimRanges, OH_NN_ReturnCode(std::vector<std::vector<uint32_t>>&,
88 std::vector<std::vector<uint32_t>>&));
89 };
90
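// Mocks the InnerModel construction and query interface.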
91 class MockInnerModel : public InnerModel {
92 public:
93 MOCK_CONST_METHOD0(IsBuild, bool());
94 MOCK_METHOD2(BuildFromLiteGraph, OH_NN_ReturnCode(const mindspore::lite::LiteGraph*,
95 const ExtensionConfig&));
96 MOCK_METHOD2(BuildFromMetaGraph, OH_NN_ReturnCode(const void*, const ExtensionConfig&));
97 MOCK_METHOD1(AddTensor, OH_NN_ReturnCode(const OH_NN_Tensor&));
98 MOCK_METHOD1(AddTensorDesc, OH_NN_ReturnCode(const NN_TensorDesc*));
99 MOCK_METHOD2(SetTensorQuantParam, OH_NN_ReturnCode(uint32_t, const NN_QuantParam*));
100 MOCK_METHOD2(SetTensorType, OH_NN_ReturnCode(uint32_t, OH_NN_TensorType));
101 MOCK_METHOD3(SetTensorValue, OH_NN_ReturnCode(uint32_t, const void*, size_t));
102 MOCK_METHOD4(AddOperation, OH_NN_ReturnCode(OH_NN_OperationType,
103 const OH_NN_UInt32Array&,
104 const OH_NN_UInt32Array&,
105 const OH_NN_UInt32Array&));
106 MOCK_METHOD3(GetSupportedOperations, OH_NN_ReturnCode(size_t, const bool**, uint32_t&));
107 MOCK_METHOD2(SpecifyInputsAndOutputs, OH_NN_ReturnCode(const OH_NN_UInt32Array&, const OH_NN_UInt32Array&));
108 MOCK_METHOD4(SetInputsAndOutputsInfo, OH_NN_ReturnCode(const OH_NN_TensorInfo*, size_t,
109 const OH_NN_TensorInfo*, size_t));
110 MOCK_METHOD0(Build, OH_NN_ReturnCode());
111 MOCK_CONST_METHOD0(GetInputTensors, std::vector<std::shared_ptr<NNTensor>>());
112 MOCK_CONST_METHOD0(GetOutputTensors, std::vector<std::shared_ptr<NNTensor>>());
113 MOCK_CONST_METHOD0(GetInputTensorDescs, std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>>());
114 MOCK_CONST_METHOD0(GetOutputTensorDescs, std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>>());
115 MOCK_CONST_METHOD0(GetLiteGraphs, std::shared_ptr<mindspore::lite::LiteGraph>());
116 MOCK_CONST_METHOD0(GetMetaGraph, void*());
117 MOCK_CONST_METHOD0(GetExtensionConfig, ExtensionConfig());
118 };
119
120
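// Helper that assembles a minimal single-operator model: an Add operator with two
// float32 [1, 2, 2, 3] inputs, an int8 activation-type parameter set to
// OH_NN_FUSED_NONE, and one float32 output, then builds the InnerModel.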
121 OH_NN_ReturnCode NNCompilerTest::BuildModel(InnerModel& innerModel)
122 {
123 int32_t inputDims[4] = {1, 2, 2, 3};
124 OH_NN_Tensor input1 = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
125 OH_NN_ReturnCode ret = innerModel.AddTensor(input1);
126 if (ret != OH_NN_SUCCESS) {
127 return ret;
128 }
129
130 // Add the second input tensor of the Add operator: float32, shape [1, 2, 2, 3]
131 OH_NN_Tensor input2 = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
132 ret = innerModel.AddTensor(input2);
133 if (ret != OH_NN_SUCCESS) {
134 return ret;
135 }
136
137 // Add the parameter tensor of the Add operator, which selects the activation type; its data type is int8.
138 int32_t activationDims = 1;
139 int8_t activationValue = OH_NN_FUSED_NONE;
140 OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
141 ret = innerModel.AddTensor(activation);
142 if (ret != OH_NN_SUCCESS) {
143 return ret;
144 }
145
146 // Set the activation type to OH_NN_FUSED_NONE, meaning no activation function is fused into the operator.
147 uint32_t index = 2;
148 ret = innerModel.SetTensorValue(index, &activationValue, sizeof(int8_t));
149 if (ret != OH_NN_SUCCESS) {
150 return ret;
151 }
152
153 // Add the output tensor of the Add operator: float32, shape [1, 2, 2, 3]
154 OH_NN_Tensor output = {OH_NN_FLOAT32, 4, inputDims, nullptr, OH_NN_TENSOR};
155 ret = innerModel.AddTensor(output);
156 if (ret != OH_NN_SUCCESS) {
157 return ret;
158 }
159
160 // Specify the input, parameter, and output indices of the Add operator
161 uint32_t inputIndicesValues[2] = {0, 1};
162 uint32_t paramIndicesValues = 2;
163 uint32_t outputIndicesValues = 3;
164 OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
165 OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
166 OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};
167
168 // Add the Add operator to the model instance
169 ret = innerModel.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
170 if (ret != OH_NN_SUCCESS) {
171 return ret;
172 }
173
174 // Specify the model's input and output indices
175 ret = innerModel.SpecifyInputsAndOutputs(inputIndices, outputIndices);
176 if (ret != OH_NN_SUCCESS) {
177 return ret;
178 }
179
180 // Finish building the model instance
181 ret = innerModel.Build();
182 if (ret != OH_NN_SUCCESS) {
183 return ret;
184 }
185
186 return ret;
187 }
188
189 /**
190 * @tc.name: nncompilertest_construct_001
191 * @tc.desc: Verify that constructing NNCompiler from a built model and a valid device returns a non-null instance.
192 * @tc.type: FUNC
193 */
194 HWTEST_F(NNCompilerTest, nncompilertest_construct_001, TestSize.Level0)
195 {
196 LOGE("NNCompiler nncompilertest_construct_001");
197 size_t backendID = 1;
198 InnerModel innerModel;
199 BuildModel(innerModel);
200 void* model = &innerModel;
201 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
202
203 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
204 EXPECT_NE(nullptr, nncompiler);
205
206 testing::Mock::AllowLeak(device.get());
207 }
208
209 /**
210 * @tc.name: nncompilertest_construct_002
211 * @tc.desc: Verify that constructing NNCompiler from a device only returns a non-null instance.
212 * @tc.type: FUNC
213 */
214 HWTEST_F(NNCompilerTest, nncompilertest_construct_002, TestSize.Level0)
215 {
216 LOGE("NNCompiler nncompilertest_construct_002");
217 size_t backendID = 1;
218 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
219
220 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
221 EXPECT_NE(nullptr, nncompiler);
222
223 testing::Mock::AllowLeak(device.get());
224 }
225
226 /**
227 * @tc.name: nncompilertest_getbackendid_001
228 * @tc.desc: Verify that GetBackendID returns the non-zero backend ID passed to the constructor.
229 * @tc.type: FUNC
230 */
231 HWTEST_F(NNCompilerTest, nncompilertest_getbackendid_001, TestSize.Level0)
232 {
233 LOGE("GetBackendID nncompilertest_getbackendid_001");
234 size_t backendID = 1;
235 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
236
237 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
238 EXPECT_NE(nullptr, nncompiler);
239
240 size_t ret = nncompiler->GetBackendID();
241 EXPECT_NE(0, ret);
242
243 testing::Mock::AllowLeak(device.get());
244 }
245
246 /**
247 * @tc.name: nncompilertest_setcachedir_001
248 * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
249 * @tc.type: FUNC
250 */
251 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_001, TestSize.Level0)
252 {
253 LOGE("SetCacheDir nncompilertest_setcachedir_001");
254 size_t backendID = 1;
255
256 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
257 EXPECT_NE(nullptr, nncompiler);
258
259 std::string cacheModelPath = "mock";
260 uint32_t version = 0;
261 OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
262 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
263 }
264
265 /**
266 * @tc.name: nncompilertest_setcachedir_002
267 * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when querying model-cache support fails.
268 * @tc.type: FUNC
269 */
270 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_002, TestSize.Level0)
271 {
272 LOGE("SetCacheDir nncompilertest_setcachedir_002");
273 size_t backendID = 1;
274
275 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
276 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
277 .WillRepeatedly(::testing::Return(OH_NN_OPERATION_FORBIDDEN));
278
279 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
280 EXPECT_NE(nullptr, nncompiler);
281
282 std::string cacheModelPath = "mock";
283 uint32_t version = 0;
284 OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
285 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
286
287 testing::Mock::AllowLeak(device.get());
288 }
289
290 /**
291 * @tc.name: nncompilertest_setcachedir_003
292 * @tc.desc: Verify that SetCacheDir returns OH_NN_OPERATION_FORBIDDEN when the device does not report model-cache support.
293 * @tc.type: FUNC
294 */
295 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_003, TestSize.Level0)
296 {
297 LOGE("SetCacheDir nncompilertest_setcachedir_003");
298 size_t backendID = 1;
299
300 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
301 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
302 .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
303
304 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
305 EXPECT_NE(nullptr, nncompiler);
306
307 std::string cacheModelPath = "mock";
308 uint32_t version = 0;
309 OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
310 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
311
312 testing::Mock::AllowLeak(device.get());
313 }
314
315 /**
316 * @tc.name: nncompilertest_setcachedir_004
317 * @tc.desc: Verify that SetCacheDir returns OH_NN_SUCCESS when the device supports model caching.
318 * @tc.type: FUNC
319 */
320 HWTEST_F(NNCompilerTest, nncompilertest_setcachedir_004, TestSize.Level0)
321 {
322 LOGE("SetCacheDir nncompilertest_setcachedir_004");
323 size_t backendID = 1;
324
325 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
326 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
327 .WillOnce(Invoke([](bool& isSupportedCache) {
328 // Set the referenced output parameter directly
329 isSupportedCache = true;
330 return OH_NN_SUCCESS; // assume a success status code
331 }));
332
333 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
334 EXPECT_NE(nullptr, nncompiler);
335
336 std::string cacheModelPath = "mock";
337 uint32_t version = 0;
338 OH_NN_ReturnCode ret = nncompiler->SetCacheDir(cacheModelPath, version);
339 EXPECT_EQ(OH_NN_SUCCESS, ret);
340
341 testing::Mock::AllowLeak(device.get());
342 }
343
344 /**
345 * @tc.name: nncompilertest_setperformance_001
346 * @tc.desc: Verify that SetPerformance returns OH_NN_SUCCESS for OH_NN_PERFORMANCE_NONE.
347 * @tc.type: FUNC
348 */
349 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_001, TestSize.Level0)
350 {
351 LOGE("SetPerformance nncompilertest_setperformance_001");
352 size_t backendID = 1;
353 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
354
355 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
356 EXPECT_NE(nullptr, nncompiler);
357
358 OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
359 OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
360 EXPECT_EQ(OH_NN_SUCCESS, ret);
361
362 testing::Mock::AllowLeak(device.get());
363 }
364
365 /**
366 * @tc.name: nncompilertest_setperformance_002
367 * @tc.desc: Verify that SetPerformance returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
368 * @tc.type: FUNC
369 */
370 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_002, TestSize.Level0)
371 {
372 LOGE("SetPerformance nncompilertest_setperformance_002");
373 size_t backendID = 1;
374
375 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
376 EXPECT_NE(nullptr, nncompiler);
377
378 OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
379 OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
380 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
381 }
382
383 /**
384 * @tc.name: nncompilertest_setperformance_003
385 * @tc.desc: Verify that SetPerformance returns OH_NN_FAILED when querying performance-mode support fails.
386 * @tc.type: FUNC
387 */
388 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_003, TestSize.Level0)
389 {
390 LOGE("SetPerformance nncompilertest_setperformance_003");
391 size_t backendID = 1;
392
393 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
394 EXPECT_CALL(*((MockIDevice *) device.get()), IsPerformanceModeSupported(::testing::_))
395 .WillRepeatedly(::testing::Return(OH_NN_FAILED));
396
397 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
398 EXPECT_NE(nullptr, nncompiler);
399
400 OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_NONE;
401 OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
402 EXPECT_EQ(OH_NN_FAILED, ret);
403
404 testing::Mock::AllowLeak(device.get());
405 }
406
407 /**
408 * @tc.name: nncompilertest_setperformance_004
409 * @tc.desc: Verify that SetPerformance returns OH_NN_OPERATION_FORBIDDEN when the device does not report performance-mode support.
410 * @tc.type: FUNC
411 */
412 HWTEST_F(NNCompilerTest, nncompilertest_setperformance_004, TestSize.Level0)
413 {
414 LOGE("SetPerformance nncompilertest_setperformance_004");
415 size_t backendID = 1;
416
417 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
418 EXPECT_CALL(*((MockIDevice *) device.get()), IsPerformanceModeSupported(::testing::_))
419 .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
420
421 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
422 EXPECT_NE(nullptr, nncompiler);
423
424 OH_NN_PerformanceMode performance = OH_NN_PERFORMANCE_LOW;
425 OH_NN_ReturnCode ret = nncompiler->SetPerformance(performance);
426 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
427
428 testing::Mock::AllowLeak(device.get());
429 }
430
431 /**
432 * @tc.name: nncompilertest_setpriority_001
433 * @tc.desc: Verify that SetPriority returns OH_NN_SUCCESS for OH_NN_PRIORITY_NONE.
434 * @tc.type: FUNC
435 */
436 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_001, TestSize.Level0)
437 {
438 LOGE("SetPriority nncompilertest_setpriority_001");
439 size_t backendID = 1;
440 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
441
442 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
443 EXPECT_NE(nullptr, nncompiler);
444
445 OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
446 OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
447 EXPECT_EQ(OH_NN_SUCCESS, ret);
448
449 testing::Mock::AllowLeak(device.get());
450 }
451
452 /**
453 * @tc.name: nncompilertest_setpriority_002
454 * @tc.desc: Verify that SetPriority returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
455 * @tc.type: FUNC
456 */
457 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_002, TestSize.Level0)
458 {
459 LOGE("SetPriority nncompilertest_setpriority_002");
460 size_t backendID = 1;
461
462 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
463 EXPECT_NE(nullptr, nncompiler);
464
465 OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
466 OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
467 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
468 }
469
470 /**
471 * @tc.name: nncompilertest_setpriority_003
472 * @tc.desc: Verify that SetPriority returns OH_NN_FAILED when querying priority support fails.
473 * @tc.type: FUNC
474 */
475 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_003, TestSize.Level0)
476 {
477 LOGE("SetPriority nncompilertest_setpriority_003");
478 size_t backendID = 1;
479 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
480 EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
481 .WillRepeatedly(::testing::Return(OH_NN_FAILED));
482
483 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
484 EXPECT_NE(nullptr, nncompiler);
485
486 OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
487 OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
488 EXPECT_EQ(OH_NN_FAILED, ret);
489
490 testing::Mock::AllowLeak(device.get());
491 }
492
493 /**
494 * @tc.name: nncompilertest_setpriority_004
495 * @tc.desc: Verify that SetPriority returns OH_NN_OPERATION_FORBIDDEN when the device does not report priority support.
496 * @tc.type: FUNC
497 */
498 HWTEST_F(NNCompilerTest, nncompilertest_setpriority_004, TestSize.Level0)
499 {
500 LOGE("SetPriority nncompilertest_setpriority_004");
501 size_t backendID = 1;
502 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
503 EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
504 .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
505
506 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
507 EXPECT_NE(nullptr, nncompiler);
508
509 OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
510 OH_NN_ReturnCode ret = nncompiler->SetPriority(priority);
511 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
512
513 testing::Mock::AllowLeak(device.get());
514 }
515
516 /**
517 * @tc.name: nncompilertest_setenablefp16_001
518 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
519 * @tc.type: FUNC
520 */
521 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_001, TestSize.Level0)
522 {
523 LOGE("SetEnableFp16 nncompilertest_setenablefp16_001");
524 size_t backendID = 1;
525
526 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
527 EXPECT_NE(nullptr, nncompiler);
528
529 bool isFp16 = true;
530 OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
531 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
532 }
533
534 /**
535 * @tc.name: nncompilertest_setenablefp16_002
536 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_FAILED when querying float16 support fails.
537 * @tc.type: FUNC
538 */
539 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_002, TestSize.Level0)
540 {
541 LOGE("SetEnableFp16 nncompilertest_setenablefp16_002");
542 size_t backendID = 1;
543 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
544 EXPECT_CALL(*((MockIDevice *) device.get()), IsFloat16PrecisionSupported(::testing::_))
545 .WillRepeatedly(::testing::Return(OH_NN_FAILED));
546
547 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
548 EXPECT_NE(nullptr, nncompiler);
549
550 bool isFp16 = true;
551 OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
552 EXPECT_EQ(OH_NN_FAILED, ret);
553
554 testing::Mock::AllowLeak(device.get());
555 }
556
557 /**
558 * @tc.name: nncompilertest_setenablefp16_003
559 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_OPERATION_FORBIDDEN when the device does not report float16 support.
560 * @tc.type: FUNC
561 */
562 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_003, TestSize.Level0)
563 {
564 LOGE("SetEnableFp16 nncompilertest_setenablefp16_003");
565 size_t backendID = 1;
566 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
567 EXPECT_CALL(*((MockIDevice *) device.get()), IsFloat16PrecisionSupported(::testing::_))
568 .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
569
570 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
571 EXPECT_NE(nullptr, nncompiler);
572
573 bool isFp16 = true;
574 OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
575 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
576
577 testing::Mock::AllowLeak(device.get());
578 }
579
580 /**
581 * @tc.name: nncompilertest_setenablefp16_004
582 * @tc.desc: Verify that SetEnableFp16 returns OH_NN_SUCCESS when fp16 inference is disabled.
583 * @tc.type: FUNC
584 */
585 HWTEST_F(NNCompilerTest, nncompilertest_setenablefp16_004, TestSize.Level0)
586 {
587 LOGE("SetEnableFp16 nncompilertest_setenablefp16_004");
588 size_t backendID = 1;
589 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
590 EXPECT_CALL(*((MockIDevice *) device.get()), IsPrioritySupported(::testing::_))
591 .WillRepeatedly(::testing::Return(OH_NN_SUCCESS));
592
593 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
594 EXPECT_NE(nullptr, nncompiler);
595
596 bool isFp16 = false;
597 OH_NN_ReturnCode ret = nncompiler->SetEnableFp16(isFp16);
598 EXPECT_EQ(OH_NN_SUCCESS, ret);
599
600 testing::Mock::AllowLeak(device.get());
601 }
602
603 /**
604 * @tc.name: nncompilertest_isbuild_001
605 * @tc.desc: Verify that IsBuild returns false before the compiler has been built.
606 * @tc.type: FUNC
607 */
608 HWTEST_F(NNCompilerTest, nncompilertest_isbuild_001, TestSize.Level0)
609 {
610 LOGE("IsBuild nncompilertest_isbuild_001");
611 size_t backendID = 1;
612 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
613
614 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
615 EXPECT_NE(nullptr, nncompiler);
616
617 bool ret = nncompiler->IsBuild();
618 EXPECT_EQ(false, ret);
619
620 testing::Mock::AllowLeak(device.get());
621 }
622
623 /**
624 * @tc.name: nncompilertest_build_001
625 * @tc.desc: Verify that Build returns OH_NN_OPERATION_FORBIDDEN when the compiler has no device.
626 * @tc.type: FUNC
627 */
628 HWTEST_F(NNCompilerTest, nncompilertest_build_001, TestSize.Level0)
629 {
630 LOGE("Build nncompilertest_build_001");
631 size_t backendID = 1;
632
633 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
634 EXPECT_NE(nullptr, nncompiler);
635
636 OH_NN_ReturnCode ret = nncompiler->Build();
637 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
638 }
639
640 /**
641 * @tc.name: nncompilertest_build_002
642 * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the compiler holds no model.
643 * @tc.type: FUNC
644 */
645 HWTEST_F(NNCompilerTest, nncompilertest_build_002, TestSize.Level0)
646 {
647 LOGE("Build nncompilertest_build_002");
648 size_t backendID = 1;
649 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
650
651 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
652 EXPECT_NE(nullptr, nncompiler);
653
654 OH_NN_ReturnCode ret = nncompiler->Build();
655 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
656
657 testing::Mock::AllowLeak(device.get());
658 }
659
660 /**
661 * @tc.name: nncompilertest_build_003
662 * @tc.desc: Verify that Build succeeds for a valid model and that a second Build returns OH_NN_OPERATION_FORBIDDEN.
663 * @tc.type: FUNC
664 */
665 HWTEST_F(NNCompilerTest, nncompilertest_build_003, TestSize.Level0)
666 {
667 LOGE("Build nncompilertest_build_003");
668 size_t backendID = 1;
669 InnerModel innerModel;
670 BuildModel(innerModel);
671 void* model = &innerModel;
672 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
673
674 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
675 EXPECT_NE(nullptr, nncompiler);
676
677 OH_NN_ReturnCode ret = nncompiler->Build();
678 EXPECT_EQ(OH_NN_SUCCESS, ret);
679
680 OH_NN_ReturnCode retBuild = nncompiler->Build();
681 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, retBuild);
682
683 testing::Mock::AllowLeak(device.get());
684 }
685
686 /**
687 * @tc.name: nncompilertest_build_004
688 * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the inner model has not been built.
689 * @tc.type: FUNC
690 */
691 HWTEST_F(NNCompilerTest, nncompilertest_build_004, TestSize.Level0)
692 {
693 LOGE("Build nncompilertest_build_004");
694 size_t backendID = 1;
695 InnerModel innerModel;
696 void* model = &innerModel;
697 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
698
699 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
700 EXPECT_NE(nullptr, nncompiler);
701
702 OH_NN_ReturnCode ret = nncompiler->Build();
703 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
704
705 testing::Mock::AllowLeak(device.get());
706 }
707
708 /**
709 * @tc.name: nncompilertest_build_005
710 * @tc.desc: Verify that Build returns OH_NN_INVALID_PARAMETER when the cache version is set to UINT32_MAX.
711 * @tc.type: FUNC
712 */
713 HWTEST_F(NNCompilerTest, nncompilertest_build_005, TestSize.Level0)
714 {
715 LOGE("Build nncompilertest_build_005");
716 size_t backendID = 1;
717 InnerModel innerModel;
718 BuildModel(innerModel);
719 void* model = &innerModel;
720 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
721 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
722 .WillOnce(Invoke([](bool& isSupportedCache) {
723 // Set the referenced output parameter directly
724 isSupportedCache = true;
725 return OH_NN_SUCCESS; // assume a success status code
726 }));
727
728 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
729 EXPECT_NE(nullptr, nncompiler);
730
731 std::string cacheModelPath = "mock";
732 uint32_t version = UINT32_MAX;
733 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
734 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
735
736 OH_NN_ReturnCode ret = nncompiler->Build();
737 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
738
739 testing::Mock::AllowLeak(device.get());
740 }
741
742 /**
743 * @tc.name: nncompilertest_build_006
744 * @tc.desc: Verify that Build returns OH_NN_FAILED when caching is configured with an invalid cache directory.
745 * @tc.type: FUNC
746 */
747 HWTEST_F(NNCompilerTest, nncompilertest_build_006, TestSize.Level0)
748 {
749 LOGE("Build nncompilertest_build_006");
750 size_t backendID = 1;
751 InnerModel innerModel;
752 BuildModel(innerModel);
753 void* model = &innerModel;
754 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
755 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
756 .WillOnce(Invoke([](bool& isSupportedCache) {
757 // Set the referenced output parameter directly
758 isSupportedCache = true;
759 return OH_NN_SUCCESS; // assume a success status code
760 }));
761
762 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
763 EXPECT_NE(nullptr, nncompiler);
764
765 std::string cacheModelPath = "mock";
766 uint32_t version = 0;
767 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
768 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
769
770 OH_NN_ReturnCode ret = nncompiler->Build();
771 EXPECT_EQ(OH_NN_FAILED, ret);
772
773 testing::Mock::AllowLeak(device.get());
774 }
775
776 /**
777 * @tc.name: nncompilertest_build_007
778 * @tc.desc: Verify that Build returns OH_NN_SUCCESS for a valid model without a cache configuration.
779 * @tc.type: FUNC
780 */
781 HWTEST_F(NNCompilerTest, nncompilertest_build_007, TestSize.Level0)
782 {
783 LOGE("Build nncompilertest_build_007");
784 size_t backendID = 1;
785 InnerModel innerModel;
786 BuildModel(innerModel);
787 void* model = &innerModel;
788 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
789
790 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
791 EXPECT_NE(nullptr, nncompiler);
792
793 OH_NN_ReturnCode ret = nncompiler->Build();
794 EXPECT_EQ(OH_NN_SUCCESS, ret);
795
796 testing::Mock::AllowLeak(device.get());
797 }
798
799 /**
800 * @tc.name: nncompilertest_savetocachefile_001
801 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_INVALID_PARAMETER when no cache directory has been set.
802 * @tc.type: FUNC
803 */
804 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_001, TestSize.Level0)
805 {
806 LOGE("SaveToCacheFile nncompilertest_savetocachefile_001");
807 size_t backendID = 1;
808 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
809
810 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
811 EXPECT_NE(nullptr, nncompiler);
812
813 OH_NN_ReturnCode ret = nncompiler->SaveToCacheFile();
814 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
815
816 testing::Mock::AllowLeak(device.get());
817 }
818
819 /**
820 * @tc.name: nncompilertest_savetocachefile_002
821 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_INVALID_PARAMETER when the cache version is set to UINT32_MAX.
822 * @tc.type: FUNC
823 */
824 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_002, TestSize.Level0)
825 {
826 LOGE("SaveToCacheFile nncompilertest_savetocachefile_002");
827 size_t backendID = 1;
828
829 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
830 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
831 .WillOnce(Invoke([](bool& isSupportedCache) {
832 // Set the referenced output parameter directly
833 isSupportedCache = true;
834 return OH_NN_SUCCESS; // assume a success status code
835 }));
836
837 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
838 EXPECT_NE(nullptr, nncompiler);
839
840 std::string cacheModelPath = "mock";
841 uint32_t version = UINT32_MAX;
842 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
843 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
844
845 OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
846 EXPECT_EQ(OH_NN_INVALID_PARAMETER, retSave);
847
848 testing::Mock::AllowLeak(device.get());
849 }
850
851 /**
852 * @tc.name: nncompilertest_savetocachefile_003
853 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_FAILED when there is no compiled model to save.
854 * @tc.type: FUNC
855 */
856 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_003, TestSize.Level0)
857 {
858 LOGE("SaveToCacheFile nncompilertest_savetocachefile_003");
859 size_t backendID = 1;
860
861 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
862 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
863 .WillOnce(Invoke([](bool& isSupportedCache) {
864 // Set the referenced output parameter directly
865 isSupportedCache = true;
866 return OH_NN_SUCCESS; // assume a success status code
867 }));
868
869 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
870 EXPECT_NE(nullptr, nncompiler);
871
872 std::string cacheModelPath = "mock";
873 uint32_t version = 0;
874 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
875 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
876
877 OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
878 EXPECT_EQ(OH_NN_FAILED, retSave);
879
880 testing::Mock::AllowLeak(device.get());
881 }
882
883 /**
884 * @tc.name: nncompilertest_savetocachefile_004
885 * @tc.desc: Verify that SaveToCacheFile returns OH_NN_FAILED when the cache directory is invalid.
886 * @tc.type: FUNC
887 */
888 HWTEST_F(NNCompilerTest, nncompilertest_savetocachefile_004, TestSize.Level0)
889 {
890 LOGE("SaveToCacheFile nncompilertest_savetocachefile_004");
891 size_t backendID = 1;
892
893 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
894 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
895 .WillOnce(Invoke([](bool& isSupportedCache) {
896 // Set the referenced output parameter directly
897 isSupportedCache = true;
898 return OH_NN_SUCCESS; // assume a success status code
899 }));
900
901 InnerModel innerModel;
902 BuildModel(innerModel);
903 void* model = &innerModel;
904
905 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
906 EXPECT_NE(nullptr, nncompiler);
907
908 OH_NN_ReturnCode retBuild = nncompiler->Build();
909 EXPECT_EQ(OH_NN_SUCCESS, retBuild);
910
911 std::string cacheModelPath = "mock";
912 uint32_t version = 0;
913 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
914 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
915
916 OH_NN_ReturnCode retSave = nncompiler->SaveToCacheFile();
917 EXPECT_EQ(OH_NN_FAILED, retSave);
918
919 testing::Mock::AllowLeak(device.get());
920 }
921
922 /**
923 * @tc.name: nncompilertest_restorefromcachefile_001
924 * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when no cache directory has been set.
925 * @tc.type: FUNC
926 */
927 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_001, TestSize.Level0)
928 {
929 LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_001");
930 size_t backendID = 1;
931 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
932
933 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
934 EXPECT_NE(nullptr, nncompiler);
935
936 OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
937 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
938
939 testing::Mock::AllowLeak(device.get());
940 }
941
942 /**
943 * @tc.name: nncompilertest_restorefromcachefile_002
944 * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when the cache version is set to UINT32_MAX.
945 * @tc.type: FUNC
946 */
947 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_002, TestSize.Level0)
948 {
949 LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_002");
950 size_t backendID = 1;
951 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
952 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
953 .WillOnce(Invoke([](bool& isSupportedCache) {
954 // Set the referenced output parameter directly
955 isSupportedCache = true;
956 return OH_NN_SUCCESS; // assume a success status code
957 }));
958
959 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
960 EXPECT_NE(nullptr, nncompiler);
961
962 std::string cacheModelPath = "mock";
963 uint32_t version = UINT32_MAX;
964 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
965 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
966
967 OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
968 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
969
970 testing::Mock::AllowLeak(device.get());
971 }
972
973 /**
974 * @tc.name: nncompilertest_restorefromcachefile_003
975 * @tc.desc: Verify that RestoreFromCacheFile returns OH_NN_INVALID_PARAMETER when no valid cache has been written for the model.
976 * @tc.type: FUNC
977 */
978 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachefile_003, TestSize.Level0)
979 {
980 LOGE("RestoreFromCacheFile nncompilertest_restorefromcachefile_003");
981 size_t backendID = 1;
982 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
983 std::shared_ptr<MockIPreparedModel> prepared = std::make_shared<MockIPreparedModel>();
984 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
985 .WillOnce(Invoke([](bool& isSupportedCache) {
986 // Set the referenced output parameter directly
987 isSupportedCache = true;
988 return OH_NN_SUCCESS;
989 }));
990
991 InnerModel innerModel;
992 BuildModel(innerModel);
993 void* model = &innerModel;
994 EXPECT_CALL(*((MockIDevice *) device.get()),
995 PrepareModel(testing::A<std::shared_ptr<const mindspore::lite::LiteGraph>>(), ::testing::_, ::testing::_))
996 .WillOnce(Invoke([&prepared](std::shared_ptr<const mindspore::lite::LiteGraph> model,
997 const ModelConfig& config,
998 std::shared_ptr<PreparedModel>& preparedModel) {
999 preparedModel = prepared;
1000 return OH_NN_SUCCESS;
1001 }));
1002
1003 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
1004 EXPECT_NE(nullptr, nncompiler);
1005
1006 OH_NN_ReturnCode retBuild = nncompiler->Build();
1007 EXPECT_EQ(OH_NN_SUCCESS, retBuild);
1008
1009 std::string cacheModelPath = "/data/data";
1010 uint32_t version = UINT32_MAX;
1011 OH_NN_ReturnCode retSetCacheDir = nncompiler->SetCacheDir(cacheModelPath, version);
1012 EXPECT_EQ(OH_NN_SUCCESS, retSetCacheDir);
1013
1014 OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheFile();
1015 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1016
1017 testing::Mock::AllowLeak(device.get());
1018 testing::Mock::AllowLeak(prepared.get());
1019 }
1020
1021 /**
1022 * @tc.name: nncompilertest_savetocachebuffer_001
1023 * @tc.desc: Verify that SaveToCacheBuffer returns OH_NN_UNSUPPORTED.
1024 * @tc.type: FUNC
1025 */
1026 HWTEST_F(NNCompilerTest, nncompilertest_savetocachebuffer_001, TestSize.Level0)
1027 {
1028 LOGE("SaveToCacheBuffer nncompilertest_savetocachebuffer_001");
1029 size_t backendID = 1;
1030 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1031
1032 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1033 EXPECT_NE(nullptr, nncompiler);
1034
1035 size_t length = 10;
1036 size_t* modelSize = &length;
1037 InnerModel innerModel;
1038 BuildModel(innerModel);
1039 void* model = &innerModel;
1040 OH_NN_ReturnCode ret = nncompiler->SaveToCacheBuffer(model, length, modelSize);
1041 EXPECT_EQ(OH_NN_UNSUPPORTED, ret);
1042
1043 testing::Mock::AllowLeak(device.get());
1044 }
1045
1046 /**
1047 * @tc.name: nncompilertest_restorefromcachebuffer_001
1048 * @tc.desc: Verify that RestoreFromCacheBuffer returns OH_NN_UNSUPPORTED.
1049 * @tc.type: FUNC
1050 */
1051 HWTEST_F(NNCompilerTest, nncompilertest_restorefromcachebuffer_001, TestSize.Level0)
1052 {
1053 LOGE("RestoreFromCacheBuffer nncompilertest_restorefromcachebuffer_001");
1054 size_t backendID = 1;
1055 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1056
1057 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1058 EXPECT_NE(nullptr, nncompiler);
1059
1060 size_t length = 10;
1061 InnerModel innerModel;
1062 BuildModel(innerModel);
1063 void* model = &innerModel;
1064 OH_NN_ReturnCode ret = nncompiler->RestoreFromCacheBuffer(model, length);
1065 EXPECT_EQ(OH_NN_UNSUPPORTED, ret);
1066
1067 testing::Mock::AllowLeak(device.get());
1068 }
1069
1070 /**
1071 * @tc.name: nncompilertest_setextensionconfig_001
1072 * @tc.desc: Verify that SetExtensionConfig returns OH_NN_SUCCESS for an empty configuration map.
1073 * @tc.type: FUNC
1074 */
1075 HWTEST_F(NNCompilerTest, nncompilertest_setextensionconfig_001, TestSize.Level0)
1076 {
1077 LOGE("SetExtensionConfig nncompilertest_setextensionconfig_001");
1078 size_t backendID = 1;
1079 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1080
1081 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1082 EXPECT_NE(nullptr, nncompiler);
1083
1084 std::unordered_map<std::string, std::vector<char>> configs;
1085 OH_NN_ReturnCode ret = nncompiler->SetExtensionConfig(configs);
1086 EXPECT_EQ(OH_NN_SUCCESS, ret);
1087
1088 testing::Mock::AllowLeak(device.get());
1089 }
1090
1091 /**
1092 * @tc.name: nncompilertest_setoptions_001
1093 * @tc.desc: Verify that SetOptions returns OH_NN_UNSUPPORTED.
1094 * @tc.type: FUNC
1095 */
1096 HWTEST_F(NNCompilerTest, nncompilertest_setoptions_001, TestSize.Level0)
1097 {
1098 LOGE("SetOptions nncompilertest_setoptions_001");
1099 size_t backendID = 1;
1100 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1101
1102 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1103 EXPECT_NE(nullptr, nncompiler);
1104
1105 std::vector<std::shared_ptr<void>> options;
1106 OH_NN_ReturnCode ret = nncompiler->SetOptions(options);
1107 EXPECT_EQ(OH_NN_UNSUPPORTED, ret);
1108
1109 testing::Mock::AllowLeak(device.get());
1110 }
1111
1112 /**
1113 * @tc.name: nncompilertest_createexecutor_001
1114 * @tc.desc: Verify that CreateExecutor returns nullptr when the compiler has not been built.
1115 * @tc.type: FUNC
1116 */
1117 HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_001, TestSize.Level0)
1118 {
1119 LOGE("CreateExecutor nncompilertest_createexecutor_001");
1120 size_t backendID = 1;
1121 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1122
1123 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
1124 EXPECT_NE(nullptr, nncompiler);
1125
1126 NNExecutor* ret = nncompiler->CreateExecutor();
1127 EXPECT_EQ(nullptr, ret);
1128
1129 testing::Mock::AllowLeak(device.get());
1130 }
1131
1132 /**
1133 * @tc.name: nncompilertest_createexecutor_002
1134 * @tc.desc: Verify that CreateExecutor returns nullptr when the compiler has no device.
1135 * @tc.type: FUNC
1136 */
1137 HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_002, TestSize.Level0)
1138 {
1139 LOGE("CreateExecutor nncompilertest_createexecutor_002");
1140 size_t backendID = 1;
1141
1142 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(nullptr, backendID);
1143 EXPECT_NE(nullptr, nncompiler);
1144
1145 NNExecutor* ret = nncompiler->CreateExecutor();
1146 EXPECT_EQ(nullptr, ret);
1147 }
1148
1149 /**
1150 * @tc.name: nncompilertest_createexecutor_003
1151 * @tc.desc: Verify that CreateExecutor returns a non-null executor after a successful Build.
1152 * @tc.type: FUNC
1153 */
1154 HWTEST_F(NNCompilerTest, nncompilertest_createexecutor_003, TestSize.Level0)
1155 {
1156 LOGE("CreateExecutor nncompilertest_createexecutor_003");
1157 size_t backendID = 1;
1158 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1159 std::shared_ptr<MockIPreparedModel> prepared = std::make_shared<MockIPreparedModel>();
1160 EXPECT_CALL(*((MockIDevice *) device.get()), IsModelCacheSupported(::testing::_))
1161 .WillOnce(Invoke([](bool& isSupportedCache) {
1162 // Set the referenced output parameter directly
1163 isSupportedCache = true;
1164 return OH_NN_SUCCESS; // assume a success status code
1165 }));
1166
1167 InnerModel innerModel;
1168 BuildModel(innerModel);
1169 void* model = &innerModel;
1170 EXPECT_CALL(*((MockIDevice *) device.get()),
1171 PrepareModel(testing::A<std::shared_ptr<const mindspore::lite::LiteGraph>>(), ::testing::_, ::testing::_))
1172 .WillOnce(Invoke([&prepared](std::shared_ptr<const mindspore::lite::LiteGraph> model,
1173 const ModelConfig& config,
1174 std::shared_ptr<PreparedModel>& preparedModel) {
1175 // Set the referenced output parameter directly
1176 preparedModel = prepared;
1177 return OH_NN_SUCCESS; // assume a success status code
1178 }));
1179
1180 NNCompiler* nncompiler = new (std::nothrow) NNCompiler(model, device, backendID);
1181 EXPECT_NE(nullptr, nncompiler);
1182
1183 OH_NN_ReturnCode retBuild = nncompiler->Build();
1184 EXPECT_EQ(OH_NN_SUCCESS, retBuild);
1185
1186 NNExecutor* ret = nncompiler->CreateExecutor();
1187 EXPECT_NE(nullptr, ret);
1188
1189 delete nncompiler;
1190 nncompiler = nullptr;
1191
1192 testing::Mock::AllowLeak(device.get());
1193 testing::Mock::AllowLeak(prepared.get());
1194 }
1195 } // namespace UnitTest
1196 } // namespace NeuralNetworkRuntime
1197 } // namespace OHOS