1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "neural_network_runtime_test.h"
17 
18 #include "mindir.h"
19 
20 #include "common/utils.h"
21 #include "compilation.h"
22 #include "hdi_device_v1_0.h"
23 #include "test/unittest/common/v1_0/mock_idevice.h"
24 #include "nnexecutor.h"
25 
26 namespace OHOS {
27 namespace NeuralNetworkRuntime {
PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,const ModelConfig & config,std::shared_ptr<PreparedModel> & preparedModel)28 OH_NN_ReturnCode HDIDeviceV1_0::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
29     const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel)
30 {
31     if (model == nullptr) {
32         return OH_NN_INVALID_PARAMETER;
33     }
34 
35     if (config.enableFloat16 == false) {
36         return OH_NN_FAILED;
37     }
38 
39     sptr<OHOS::HDI::Nnrt::V1_0::IPreparedModel> iPreparedModel = sptr<OHOS::HDI::Nnrt::V1_0
40         ::MockIPreparedModel>(new OHOS::HDI::Nnrt::V1_0::MockIPreparedModel());
41     if (iPreparedModel == nullptr) {
42         LOGE("HDIDeviceV1_0 mock PrepareModel failed, error happened when new sptr");
43         return OH_NN_NULL_PTR;
44     }
45 
46     preparedModel = CreateSharedPtr<HDIPreparedModelV1_0>(iPreparedModel);
47     return OH_NN_SUCCESS;
48 }
49 
GetDeviceType(OH_NN_DeviceType & deviceType)50 OH_NN_ReturnCode HDIDeviceV1_0::GetDeviceType(OH_NN_DeviceType& deviceType)
51 {
52     if (deviceType == OH_NN_OTHERS) {
53         return OH_NN_UNAVAILABLE_DEVICE;
54     }
55 
56     return OH_NN_SUCCESS;
57 }
58 
// Mock implementation: the test device always advertises model-cache support.
OH_NN_ReturnCode HDIDeviceV1_0::IsModelCacheSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
64 
// Mock implementation: the test device always advertises performance-mode support.
OH_NN_ReturnCode HDIDeviceV1_0::IsPerformanceModeSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
70 
// Mock implementation: the test device always advertises priority support.
OH_NN_ReturnCode HDIDeviceV1_0::IsPrioritySupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
76 
// Mock implementation: the test device always advertises float16-precision support.
OH_NN_ReturnCode HDIDeviceV1_0::IsFloat16PrecisionSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
82 
GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,std::vector<bool> & ops)83 OH_NN_ReturnCode HDIDeviceV1_0::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
84     std::vector<bool>& ops)
85 {
86     if (model == nullptr) {
87         LOGE("HDIDeviceV1_0 mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
88         return OH_NN_NULL_PTR;
89     }
90 
91     ops.emplace_back(true);
92     return OH_NN_SUCCESS;
93 }
94 
// Mock implementation: the test device always advertises dynamic-input support.
OH_NN_ReturnCode HDIDeviceV1_0::IsDynamicInputSupported(bool& isSupported)
{
    isSupported = true;
    return OH_NN_SUCCESS;
}
100 } // namespace NeuralNetworkRuntime
101 } // namespace OHOS
102 
103 namespace OHOS {
104 namespace NeuralNetworkRuntime {
105 namespace Unittest {
BuildModel(InnerModel & model)106 OH_NN_ReturnCode NeuralNetworkRuntimeTest::BuildModel(InnerModel& model)
107 {
108     int32_t inputDims[2] = {3, 4};
109     OH_NN_Tensor input1 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
110     OH_NN_ReturnCode ret = model.AddTensor(input1);
111     if (ret != OH_NN_SUCCESS) {
112         return ret;
113     }
114 
115     // 添加Add算子的第二个输入Tensor,类型为float32,张量形状为[3, 4]
116     OH_NN_Tensor input2 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
117     ret = model.AddTensor(input2);
118     if (ret != OH_NN_SUCCESS) {
119         return ret;
120     }
121 
122     // 添加Add算子的参数Tensor,该参数Tensor用于指定激活函数的类型,Tensor的数据类型为int8。
123     int32_t activationDims = 1;
124     int8_t activationValue = OH_NN_FUSED_NONE;
125     OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
126     ret = model.AddTensor(activation);
127     if (ret != OH_NN_SUCCESS) {
128         return ret;
129     }
130 
131     // 将激活函数类型设置为OH_NN_FUSED_NONE,表示该算子不添加激活函数。
132     uint32_t index = 2;
133     ret = model.SetTensorValue(index, &activationValue, sizeof(int8_t));
134     if (ret != OH_NN_SUCCESS) {
135         return ret;
136     }
137 
138     // 设置Add算子的输出,类型为float32,张量形状为[3, 4]
139     OH_NN_Tensor output = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
140     ret = model.AddTensor(output);
141     if (ret != OH_NN_SUCCESS) {
142         return ret;
143     }
144 
145     // 指定Add算子的输入、参数和输出索引
146     uint32_t inputIndicesValues[2] = {0, 1};
147     uint32_t paramIndicesValues = 2;
148     uint32_t outputIndicesValues = 3;
149     OH_NN_UInt32Array paramIndices = {&paramIndicesValues, 1};
150     OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
151     OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};
152 
153     // 向模型实例添加Add算子
154     ret = model.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
155     if (ret != OH_NN_SUCCESS) {
156         return ret;
157     }
158 
159     // 设置模型实例的输入、输出索引
160     ret = model.SpecifyInputsAndOutputs(inputIndices, outputIndices);
161     if (ret != OH_NN_SUCCESS) {
162         return ret;
163     }
164 
165     // 完成模型实例的构建
166     ret = model.Build();
167     if (ret != OH_NN_SUCCESS) {
168         return ret;
169     }
170 
171     return ret;
172 }
173 
InitIndices()174 void NeuralNetworkRuntimeTest::InitIndices()
175 {
176     m_inputIndices.data = m_inputIndexs;
177     m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
178 
179     m_outputIndices.data = m_outputIndexs;
180     m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
181 
182     m_paramIndices.data = m_paramIndexs;
183     m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
184 }
185 
AddModelTensor(InnerModel & innerModel)186 void NeuralNetworkRuntimeTest::AddModelTensor(InnerModel& innerModel)
187 {
188     const int dim[2] = {2, 2};
189     const OH_NN_Tensor& tensor = {OH_NN_FLOAT32, 2, dim, nullptr, OH_NN_TENSOR};
190 
191     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
192     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
193     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensor));
194 
195     const OH_NN_Tensor& tensorParam = {OH_NN_INT8, 0, nullptr, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
196     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddTensor(tensorParam));
197 }
198 
SetTensor()199 void NeuralNetworkRuntimeTest::SetTensor()
200 {
201     m_tensor.dataType = OH_NN_INT32;
202     m_tensor.dimensionCount = 0;
203     m_tensor.dimensions = nullptr;
204     m_tensor.quantParam = nullptr;
205     m_tensor.type = OH_NN_TENSOR;
206 }
207 
SetInnerBuild(InnerModel & innerModel)208 void NeuralNetworkRuntimeTest::SetInnerBuild(InnerModel& innerModel)
209 {
210     uint32_t index = 3;
211     const int8_t activation = 0;
212     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
213         static_cast<const void *>(&activation), sizeof(int8_t)));
214 
215     OH_NN_OperationType opType {OH_NN_OPS_ADD};
216     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
217     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
218     EXPECT_EQ(OH_NN_SUCCESS, innerModel.Build());
219 }
220 
SetInputAndOutput(Executor & executor)221 void NeuralNetworkRuntimeTest::SetInputAndOutput(Executor& executor)
222 {
223     size_t input1Index = 0;
224     int32_t inputDims[2] = {3, 4};
225     size_t lengthSize = 12 * sizeof(float);
226     size_t *length = &lengthSize;
227 
228     size_t minInputDims = 1;
229     size_t maxInputDims = 12;
230 
231     size_t *minInputDimsAdress = &minInputDims;
232     size_t **minInputDimsAdressA = &minInputDimsAdress;
233 
234     size_t *maxInputDimsAdress = &maxInputDims;
235     size_t **maxInputDimsAdressA = &maxInputDimsAdress;
236 
237     m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
238     EXPECT_EQ(OH_NN_SUCCESS, executor.GetInputDimRange(input1Index, minInputDimsAdressA, maxInputDimsAdressA, length));
239 
240     uint32_t outputIndex = 0;
241 
242     int32_t shape = 3;
243     int32_t* shapeA = &shape;
244     int32_t** shapeAA = &shapeA;
245     uint32_t* shapeNum = &outputIndex;
246     EXPECT_EQ(OH_NN_SUCCESS, executor.GetOutputShape(outputIndex, shapeAA, shapeNum));
247 }
248 
// GMock stand-in for the PreparedModel interface, letting tests stub and
// verify calls against a compiled model without a real device backend.
class MockIPreparedModel : public PreparedModel {
public:
    // Exports the compiled model's cache buffers.
    MOCK_METHOD1(ExportModelCache, OH_NN_ReturnCode(std::vector<Buffer>&));
    // Runs inference with IOTensor inputs/outputs; also yields per-output
    // dimensions and sufficiency flags.
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<IOTensor>&,
                                 const std::vector<IOTensor>&,
                                 std::vector<std::vector<int32_t>>&,
                                 std::vector<bool>&));
    // Overload of Run taking NN_Tensor* inputs/outputs.
    MOCK_METHOD4(Run, OH_NN_ReturnCode(const std::vector<NN_Tensor*>&,
                                 const std::vector<NN_Tensor*>&,
                                 std::vector<std::vector<int32_t>>&,
                                 std::vector<bool>&));
    // Retrieves the identifier of the compiled model.
    MOCK_CONST_METHOD1(GetModelID, OH_NN_ReturnCode(uint32_t&));
    // Reports the minimum and maximum supported dimensions for each input.
    MOCK_METHOD2(GetInputDimRanges, OH_NN_ReturnCode(std::vector<std::vector<uint32_t>>&,
                                               std::vector<std::vector<uint32_t>>&));
};
264 
// GMock stand-in for the Device interface, covering identity queries,
// capability queries, model preparation, and buffer management.
class MockIDevice : public Device {
public:
    // Device identity and status queries.
    MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
    MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
    MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
    // Per-operation support query for a LiteGraph model.
    MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
        std::vector<bool>&));
    // Capability flags.
    MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
    MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
    // Model preparation from a LiteGraph, a raw buffer, a cache, or an
    // offline model.
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                          const ModelConfig&,
                                          std::shared_ptr<PreparedModel>&));
    MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
                                          const ModelConfig&,
                                          std::shared_ptr<PreparedModel>&));
    MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
                                                              const ModelConfig&,
                                                              std::shared_ptr<PreparedModel>&,
                                                              bool&));
    MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
                                                 const ModelConfig&,
                                                 std::shared_ptr<PreparedModel>&));
    // Device buffer allocation/release in several flavors.
    MOCK_METHOD1(AllocateBuffer, void*(size_t));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
    MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
    MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
    MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
    MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
};
299 
300 /*
301  * @tc.name: model_construct_001
302  * @tc.desc: Verify the return model of the OH_NNModel_Construct function.
303  * @tc.type: FUNC
304  */
305 HWTEST_F(NeuralNetworkRuntimeTest, model_construct_001, testing::ext::TestSize.Level0)
306 {
307     OH_NNModel* ret = OH_NNModel_Construct();
308     EXPECT_NE(nullptr, ret);
309 }
310 
311 /*
312  * @tc.name: model_add_tensor_001
313  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Tensor function.
314  * @tc.type: FUNC
315  */
316 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_001, testing::ext::TestSize.Level0)
317 {
318     OH_NNModel* model = nullptr;
319     const int32_t dimInput[2] = {2, 2};
320     const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
321     OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
322     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
323 }
324 
325 /*
326  * @tc.name: model_add_tensor_002
327  * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNModel_AddTensor function.
328  * @tc.type: FUNC
329  */
330 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_002, testing::ext::TestSize.Level0)
331 {
332     InnerModel innerModel;
333 
334     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
335     OH_NN_Tensor* tensor = nullptr;
336     OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, tensor);
337     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
338 }
339 
340 /*
341  * @tc.name: model_add_tensor_003
342  * @tc.desc: Verify the success of the OH_NNModel_AddTensor function.
343  * @tc.type: FUNC
344  */
345 HWTEST_F(NeuralNetworkRuntimeTest, model_add_tensor_003, testing::ext::TestSize.Level0)
346 {
347     InnerModel innerModel;
348     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
349 
350     const int32_t dimInput[2] = {2, 2};
351     const OH_NN_Tensor tensor = {OH_NN_INT8, 2, dimInput, nullptr, OH_NN_TENSOR};
352     OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &tensor);
353     EXPECT_EQ(OH_NN_SUCCESS, ret);
354 }
355 
356 /*
357  * @tc.name: model_add_operation_001
358  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_AddOperation function.
359  * @tc.type: FUNC
360  */
361 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_001, testing::ext::TestSize.Level0)
362 {
363     InnerModel innerModel;
364     OH_NNModel* model = nullptr;
365     OH_NN_OperationType opType {OH_NN_OPS_ADD};
366 
367     InitIndices();
368     AddModelTensor(innerModel);
369 
370     uint32_t index = 3;
371     const int8_t activation = 0;
372     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
373         static_cast<const void *>(&activation), sizeof(int8_t)));
374 
375     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
376     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
377 }
378 
379 /*
380  * @tc.name: model_add_operation_002
381  * @tc.desc: Verify the paramIndices is nullptr of the OH_NNModel_AddOperation function.
382  * @tc.type: FUNC
383  */
384 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_002, testing::ext::TestSize.Level0)
385 {
386     InnerModel innerModel;
387     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
388     OH_NN_OperationType opType {OH_NN_OPS_ADD};
389 
390     m_inputIndices.data = m_inputIndexs;
391     m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
392 
393     m_outputIndices.data = m_outputIndexs;
394     m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
395 
396     AddModelTensor(innerModel);
397     uint32_t index = 3;
398     const int8_t activation = 0;
399     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
400         static_cast<const void *>(&activation), sizeof(int8_t)));
401 
402     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, nullptr, &m_inputIndices, &m_outputIndices);
403     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
404 }
405 
406 /*
407  * @tc.name: model_add_operation_003
408  * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_AddOperation function.
409  * @tc.type: FUNC
410  */
411 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_003, testing::ext::TestSize.Level0)
412 {
413     InnerModel innerModel;
414     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
415     OH_NN_OperationType opType {OH_NN_OPS_ADD};
416 
417     m_paramIndices.data = m_paramIndexs;
418     m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
419 
420     m_outputIndices.data = m_outputIndexs;
421     m_outputIndices.size = sizeof(m_outputIndexs) / sizeof(uint32_t);
422 
423     AddModelTensor(innerModel);
424     uint32_t index = 3;
425     const int8_t activation = 0;
426     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
427         static_cast<const void *>(&activation), sizeof(int8_t)));
428 
429     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, nullptr, &m_outputIndices);
430     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
431 }
432 
433 /*
434  * @tc.name: model_add_operation_004
435  * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_AddOperation function.
436  * @tc.type: FUNC
437  */
438 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_004, testing::ext::TestSize.Level0)
439 {
440     InnerModel innerModel;
441     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
442     OH_NN_OperationType opType {OH_NN_OPS_ADD};
443 
444     m_paramIndices.data = m_paramIndexs;
445     m_paramIndices.size = sizeof(m_paramIndexs) / sizeof(uint32_t);
446 
447     m_inputIndices.data = m_inputIndexs;
448     m_inputIndices.size = sizeof(m_inputIndexs) / sizeof(uint32_t);
449 
450     AddModelTensor(innerModel);
451     uint32_t index = 3;
452     const int8_t activation = 0;
453     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
454         static_cast<const void *>(&activation), sizeof(int8_t)));
455 
456     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, nullptr);
457     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
458 }
459 
460 /*
461  * @tc.name: model_add_operation_005
462  * @tc.desc: Verify the success of the OH_NNModel_AddOperation function.
463  * @tc.type: FUNC
464  */
465 HWTEST_F(NeuralNetworkRuntimeTest, model_add_operation_005, testing::ext::TestSize.Level0)
466 {
467     InnerModel innerModel;
468     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
469     OH_NN_OperationType opType {OH_NN_OPS_ADD};
470 
471     InitIndices();
472     AddModelTensor(innerModel);
473 
474     uint32_t index = 3;
475     const int8_t activation = 0;
476     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
477         static_cast<const void *>(&activation), sizeof(int8_t)));
478 
479     OH_NN_ReturnCode ret = OH_NNModel_AddOperation(model, opType, &m_paramIndices, &m_inputIndices, &m_outputIndices);
480     EXPECT_EQ(OH_NN_SUCCESS, ret);
481 }
482 
483 /*
484  * @tc.name: model_set_tensor_data_001
485  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SetTensorData function.
486  * @tc.type: FUNC
487  */
488 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_001, testing::ext::TestSize.Level0)
489 {
490     InnerModel innerModel;
491     OH_NNModel* model = nullptr;
492     AddModelTensor(innerModel);
493 
494     uint32_t index = 3;
495     const int8_t activation = 0;
496 
497     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
498         sizeof(int8_t));
499     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
500 }
501 
502 /*
503  * @tc.name: model_set_tensor_data_002
504  * @tc.desc: Verify the data is nullptr of the OH_NNModel_SetTensorData function.
505  * @tc.type: FUNC
506  */
507 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_002, testing::ext::TestSize.Level0)
508 {
509     InnerModel innerModel;
510     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
511     AddModelTensor(innerModel);
512 
513     uint32_t index = 3;
514 
515     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, nullptr, sizeof(int8_t));
516     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
517 }
518 
519 /*
520  * @tc.name: model_set_tensor_data_003
521  * @tc.desc: Verify the length is 0 of the OH_NNModel_SetTensorData function.
522  * @tc.type: FUNC
523  */
524 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_003, testing::ext::TestSize.Level0)
525 {
526     InnerModel innerModel;
527     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
528     AddModelTensor(innerModel);
529 
530     uint32_t index = 3;
531     const int8_t activation = 0;
532 
533     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation), 0);
534     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
535 }
536 
537 /*
538  * @tc.name: model_set_tensor_data_004
539  * @tc.desc: Verify the successs of the OH_NNModel_SetTensorData function.
540  * @tc.type: FUNC
541  */
542 HWTEST_F(NeuralNetworkRuntimeTest, model_set_tensor_data_004, testing::ext::TestSize.Level0)
543 {
544     InnerModel innerModel;
545     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
546     AddModelTensor(innerModel);
547 
548     uint32_t index = 3;
549     const int8_t activation = 0;
550 
551     OH_NN_ReturnCode ret = OH_NNModel_SetTensorData(model, index, static_cast<const void *>(&activation),
552         sizeof(int8_t));
553     EXPECT_EQ(OH_NN_SUCCESS, ret);
554 }
555 
556 /*
557  * @tc.name: model_specify_inputs_and_outputs_001
558  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
559  * @tc.type: FUNC
560  */
561 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_001, testing::ext::TestSize.Level0)
562 {
563     InnerModel innerModel;
564     OH_NNModel* model = nullptr;
565 
566     InitIndices();
567     AddModelTensor(innerModel);
568 
569     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
570     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
571 }
572 
573 /*
574  * @tc.name: model_specify_inputs_and_outputs_002
575  * @tc.desc: Verify the inputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
576  * @tc.type: FUNC
577  */
578 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_002, testing::ext::TestSize.Level0)
579 {
580     InnerModel innerModel;
581     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
582 
583     InitIndices();
584     AddModelTensor(innerModel);
585 
586     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, nullptr, &m_outputIndices);
587     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
588 }
589 
590 /*
591  * @tc.name: model_specify_inputs_and_outputs_003
592  * @tc.desc: Verify the outputIndices is nullptr of the OH_NNModel_SpecifyInputsAndOutputs function.
593  * @tc.type: FUNC
594  */
595 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_003, testing::ext::TestSize.Level0)
596 {
597     InnerModel innerModel;
598     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
599 
600     InitIndices();
601     AddModelTensor(innerModel);
602 
603     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, nullptr);
604     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
605 }
606 
607 /*
608  * @tc.name: model_specify_inputs_and_outputs_004
609  * @tc.desc: Verify the success of the OH_NNModel_SpecifyInputsAndOutputs function.
610  * @tc.type: FUNC
611  */
612 HWTEST_F(NeuralNetworkRuntimeTest, model_specify_inputs_and_outputs_004, testing::ext::TestSize.Level0)
613 {
614     InnerModel innerModel;
615     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
616 
617     InitIndices();
618     AddModelTensor(innerModel);
619 
620     OH_NN_ReturnCode ret = OH_NNModel_SpecifyInputsAndOutputs(model, &m_inputIndices, &m_outputIndices);
621     EXPECT_EQ(OH_NN_SUCCESS, ret);
622 }
623 
624 /*
625  * @tc.name: model_finish_001
626  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Finish function.
627  * @tc.type: FUNC
628  */
629 HWTEST_F(NeuralNetworkRuntimeTest, model_finish_001, testing::ext::TestSize.Level0)
630 {
631     InnerModel innerModel;
632     OH_NNModel* model = nullptr;
633 
634     OH_NN_OperationType opType {OH_NN_OPS_ADD};
635 
636     InitIndices();
637     AddModelTensor(innerModel);
638 
639     uint32_t index = 3;
640     const int8_t activation = 0;
641     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index, static_cast<const void *>(&activation),
642         sizeof(int8_t)));
643 
644     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
645     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
646 
647     OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
648     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
649 }
650 
651 /*
652  * @tc.name: model_finish_002
653  * @tc.desc: Verify the success of the OH_NNModel_Finish function.
654  * @tc.type: FUNC
655  */
656 HWTEST_F(NeuralNetworkRuntimeTest, model_finish_002, testing::ext::TestSize.Level0)
657 {
658     InnerModel innerModel;
659     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
660 
661     OH_NN_OperationType opType {OH_NN_OPS_ADD};
662 
663     InitIndices();
664     AddModelTensor(innerModel);
665 
666     const int8_t activation = 0;
667     uint32_t index = 3;
668     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SetTensorValue(index,
669         static_cast<const void *>(&activation), sizeof(int8_t)));
670 
671     EXPECT_EQ(OH_NN_SUCCESS, innerModel.AddOperation(opType, m_paramIndices, m_inputIndices, m_outputIndices));
672     EXPECT_EQ(OH_NN_SUCCESS, innerModel.SpecifyInputsAndOutputs(m_inputIndices, m_outputIndices));
673 
674     OH_NN_ReturnCode ret = OH_NNModel_Finish(model);
675     EXPECT_EQ(OH_NN_SUCCESS, ret);
676 }
677 
678 /*
679  * @tc.name: model_destroy_001
680  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_Destroy function.
681  * @tc.type: FUNC
682  */
683 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_001, testing::ext::TestSize.Level0)
684 {
685     InnerModel innerModel;
686     OH_NNModel** pModel = nullptr;
687     OH_NNModel_Destroy(pModel);
688     EXPECT_EQ(nullptr, pModel);
689 }
690 
691 /*
692  * @tc.name: model_destroy_002
693  * @tc.desc: Verify the *OH_NNModel is nullptr of the OH_NNModel_Destroy function.
694  * @tc.type: FUNC
695  */
696 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_002, testing::ext::TestSize.Level0)
697 {
698     InnerModel innerModel;
699     OH_NNModel* model = nullptr;
700     OH_NNModel** pModel = &model;
701     OH_NNModel_Destroy(pModel);
702     EXPECT_EQ(nullptr, model);
703 }
704 
705 /*
706  * @tc.name: model_destroy_003
707  * @tc.desc: Verify the normal model of the OH_NNModel_Destroy function.
708  * @tc.type: FUNC
709  */
710 HWTEST_F(NeuralNetworkRuntimeTest, model_destroy_003, testing::ext::TestSize.Level0)
711 {
712     InnerModel* innerModel = new InnerModel();
713     EXPECT_NE(nullptr, innerModel);
714     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
715     OH_NNModel_Destroy(&model);
716     EXPECT_EQ(nullptr, model);
717 }
718 
719 /*
720  * @tc.name: model_get_available_operation_001
721  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNModel_GetAvailableOperations function.
722  * @tc.type: FUNC
723  */
724 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_001, testing::ext::TestSize.Level0)
725 {
726     InnerModel innerModel;
727     OH_NNModel* model = nullptr;
728 
729     uint32_t opCount = 1;
730     const bool *pIsAvailable = nullptr;
731 
732     InitIndices();
733     AddModelTensor(innerModel);
734     SetInnerBuild(innerModel);
735 
736     size_t deviceID = 10;
737     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
738     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
739 }
740 
741 /*
742  * @tc.name: model_get_available_operation_002
743  * @tc.desc: Verify the isAvailable is nullptr of the OH_NNModel_GetAvailableOperations function.
744  * @tc.type: FUNC
745  */
746 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_002, testing::ext::TestSize.Level0)
747 {
748     InnerModel innerModel;
749     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
750 
751     uint32_t opCount = 1;
752     InitIndices();
753     AddModelTensor(innerModel);
754     SetInnerBuild(innerModel);
755 
756     size_t deviceID = 10;
757     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, nullptr, &opCount);
758     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
759 }
760 
761 /*
762  * @tc.name: model_get_available_operation_003
763  * @tc.desc: Verify the *isAvailable is no nullptr of the OH_NNModel_GetAvailableOperations function.
764  * @tc.type: FUNC
765  */
766 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_003, testing::ext::TestSize.Level0)
767 {
768     InnerModel innerModel;
769     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
770 
771     const bool isAvailable = true;
772     const bool *pIsAvailable = &isAvailable;
773     uint32_t opCount = 1;
774 
775     InitIndices();
776     AddModelTensor(innerModel);
777     SetInnerBuild(innerModel);
778 
779     size_t deviceID = 10;
780     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
781     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
782 }
783 
784 /*
785  * @tc.name: model_get_available_operation_004
786  * @tc.desc: Verify the opCount is nullptr of the OH_NNModel_GetAvailableOperations function.
787  * @tc.type: FUNC
788  */
789 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_004, testing::ext::TestSize.Level0)
790 {
791     InnerModel innerModel;
792     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
793 
794     const bool *pIsAvailable = nullptr;
795     uint32_t* opCount = nullptr;
796 
797     InitIndices();
798     AddModelTensor(innerModel);
799     SetInnerBuild(innerModel);
800 
801     size_t deviceID = 10;
802     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, opCount);
803     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
804 }
805 
806 /*
807  * @tc.name: model_get_available_operation_005
808  * @tc.desc: Verify the success of the OH_NNModel_GetAvailableOperations function.
809  * @tc.type: FUNC
810  */
811 HWTEST_F(NeuralNetworkRuntimeTest, model_get_available_operation_005, testing::ext::TestSize.Level0)
812 {
813     InnerModel innerModel;
814     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
815 
816     const bool *pIsAvailable = nullptr;
817     uint32_t opCount = 1;
818 
819     InitIndices();
820     AddModelTensor(innerModel);
821     SetInnerBuild(innerModel);
822 
823     size_t deviceID = 10;
824     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &pIsAvailable, &opCount);
825     EXPECT_EQ(OH_NN_FAILED, ret);
826 }
827 
828 /*
829  * @tc.name: compilation_construct_001
830  * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
831  * @tc.type: FUNC
832  */
833 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_001, testing::ext::TestSize.Level0)
834 {
835     InnerModel innerModel;
836     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
837     const OH_NNModel* model = nullptr;
838     OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
839     EXPECT_EQ(nullptr, ret);
840 }
841 
842 /*
843  * @tc.name: compilation_construct_002
844  * @tc.desc: Verify the not OH_NNModel_Build before creating compilation of the OH_NNCompilation_Construct function.
845  * @tc.type: FUNC
846  */
847 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_002, testing::ext::TestSize.Level0)
848 {
849     InnerModel innerModel;
850     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
851     OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
852     EXPECT_NE(nullptr, ret);
853 }
854 
855 /*
856  * @tc.name: compilation_construct_003
857  * @tc.desc: Verify the normal model of the OH_NNCompilation_Construct function.
858  * @tc.type: FUNC
859  */
860 HWTEST_F(NeuralNetworkRuntimeTest, compilation_construct_003, testing::ext::TestSize.Level0)
861 {
862     InnerModel innerModel;
863     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
864     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
865     OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
866     EXPECT_NE(nullptr, ret);
867 }
868 
869 /*
870  * @tc.name: compilation_set_device_001
871  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
872  * @tc.type: FUNC
873  */
874 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_001, testing::ext::TestSize.Level0)
875 {
876     OH_NNCompilation* compilation = nullptr;
877     size_t deviceId = 1;
878     OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(compilation, deviceId);
879     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
880 }
881 
882 /*
883  * @tc.name: compilation_set_device_002
884  * @tc.desc: Verify the success of the OH_NNCompilation_SetDevice function.
885  * @tc.type: FUNC
886  */
887 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_device_002, testing::ext::TestSize.Level0)
888 {
889     InnerModel innerModel;
890     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
891 
892     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
893     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
894     size_t deviceId = 1;
895     OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId);
896     EXPECT_EQ(OH_NN_SUCCESS, ret);
897 }
898 
899 /*
900  * @tc.name: compilation_set_cache_001
901  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
902  * @tc.type: FUNC
903  */
904 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_001, testing::ext::TestSize.Level0)
905 {
906     InnerModel innerModel;
907     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
908     OH_NNCompilation* nnCompilation = nullptr;
909     const char* cacheDir = "../";
910     uint32_t version = 1;
911     OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
912     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
913 }
914 
915 /*
916  * @tc.name: compilation_set_cache_002
917  * @tc.desc: Verify the cachePath is nullptr of the OH_NNCompilation_SetCache function.
918  * @tc.type: FUNC
919  */
920 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_002, testing::ext::TestSize.Level0)
921 {
922     InnerModel innerModel;
923     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
924 
925     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
926     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
927     const char* cacheDir = nullptr;
928     uint32_t version = 1;
929     OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
930     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
931 }
932 
933 /*
934  * @tc.name: compilation_set_cache_003
935  * @tc.desc: Verify the success of the OH_NNCompilation_SetCache function.
936  * @tc.type: FUNC
937  */
938 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_cache_003, testing::ext::TestSize.Level0)
939 {
940     InnerModel innerModel;
941     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
942 
943     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
944     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
945     const char* cacheDir = "../";
946     uint32_t version = 1;
947     OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
948     EXPECT_EQ(OH_NN_SUCCESS, ret);
949 }
950 
951 /*
952  * @tc.name: compilation_set_performance_mode_001
953  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPerformanceMode function.
954  * @tc.type: FUNC
955  */
956 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_001, testing::ext::TestSize.Level0)
957 {
958     InnerModel innerModel;
959     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
960     OH_NNCompilation* nnCompilation = nullptr;
961     OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
962 
963     OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
964     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
965 }
966 
967 /*
968  * @tc.name: compilation_set_performance_mode_002
969  * @tc.desc: Verify the success of the OH_NNCompilation_SetPerformanceMode function.
970  * @tc.type: FUNC
971  */
972 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_performance_mode_002, testing::ext::TestSize.Level0)
973 {
974     InnerModel innerModel;
975     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
976 
977     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
978     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
979     OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
980 
981     OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
982     EXPECT_EQ(OH_NN_SUCCESS, ret);
983 }
984 
985 /*
986  * @tc.name: compilation_set_priority_001
987  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetPriority function.
988  * @tc.type: FUNC
989  */
990 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_001, testing::ext::TestSize.Level0)
991 {
992     InnerModel innerModel;
993     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
994     OH_NNCompilation* nnCompilation = nullptr;
995     OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
996 
997     OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
998     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
999 }
1000 
1001 /*
1002  * @tc.name: compilation_set_priority_002
1003  * @tc.desc: Verify the success of the OH_NNCompilation_SetPriority function.
1004  * @tc.type: FUNC
1005  */
1006 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_priority_002, testing::ext::TestSize.Level0)
1007 {
1008     InnerModel innerModel;
1009     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1010 
1011     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1012     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1013     OH_NN_Priority priority = OH_NN_PRIORITY_LOW;
1014 
1015     OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
1016     EXPECT_EQ(OH_NN_SUCCESS, ret);
1017 }
1018 
1019 /*
1020  * @tc.name: compilation_set_enable_float16_001
1021  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_EnableFloat16 function.
1022  * @tc.type: FUNC
1023  */
1024 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_001, testing::ext::TestSize.Level0)
1025 {
1026     InnerModel innerModel;
1027     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1028     OH_NNCompilation* nnCompilation = nullptr;
1029     bool enableFloat16 = true;
1030 
1031     OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
1032     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1033 }
1034 
1035 /*
1036  * @tc.name: compilation_set_enable_float16_002
1037  * @tc.desc: Verify the success of the OH_NNCompilation_EnableFloat16 function.
1038  * @tc.type: FUNC
1039  */
1040 HWTEST_F(NeuralNetworkRuntimeTest, compilation_set_enable_float16_002, testing::ext::TestSize.Level0)
1041 {
1042     InnerModel innerModel;
1043     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1044 
1045     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1046     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1047     bool enableFloat16 = true;
1048 
1049     OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
1050     EXPECT_EQ(OH_NN_SUCCESS, ret);
1051 }
1052 
1053 /*
1054  * @tc.name: compilation_build_001
1055  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Build function.
1056  * @tc.type: FUNC
1057  */
1058 HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_001, testing::ext::TestSize.Level0)
1059 {
1060     InnerModel innerModel;
1061     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1062     OH_NNCompilation* nnCompilation = nullptr;
1063 
1064     OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
1065     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1066 }
1067 
1068 /*
1069  * @tc.name: compilation_build_002
 * @tc.desc: Verify that OH_NNCompilation_Build returns OH_NN_FAILED when the compilation is not fully configured.
1071  * @tc.type: FUNC
1072  */
1073 HWTEST_F(NeuralNetworkRuntimeTest, compilation_build_002, testing::ext::TestSize.Level0)
1074 {
1075     InnerModel innerModel;
1076     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1077 
1078     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1079     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1080 
1081     OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
1082     EXPECT_EQ(OH_NN_FAILED, ret);
1083 }
1084 
1085 /*
1086  * @tc.name: compilation_destroy_001
1087  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
1088  * @tc.type: FUNC
1089  */
1090 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_001, testing::ext::TestSize.Level0)
1091 {
1092     OH_NNCompilation** pCompilation = nullptr;
1093     OH_NNCompilation_Destroy(pCompilation);
1094     EXPECT_EQ(nullptr, pCompilation);
1095 }
1096 
1097 /*
1098  * @tc.name: compilation_destroy_002
1099  * @tc.desc: Verify the *OH_NNCompilation is nullptr of the OH_NNCompilation_Destroy function.
1100  * @tc.type: FUNC
1101  */
1102 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_002, testing::ext::TestSize.Level0)
1103 {
1104     OH_NNCompilation* compilation = nullptr;
1105     OH_NNCompilation** pCompilation = &compilation;
1106     OH_NNCompilation_Destroy(pCompilation);
1107     EXPECT_EQ(nullptr, compilation);
1108 }
1109 
1110 /*
1111  * @tc.name: compilation_destroy_003
1112  * @tc.desc: Verify the normal model of the OH_NNCompilation_Destroy function.
1113  * @tc.type: FUNC
1114  */
1115 HWTEST_F(NeuralNetworkRuntimeTest, compilation_destroy_003, testing::ext::TestSize.Level0)
1116 {
1117     InnerModel* innerModel = new InnerModel();
1118     EXPECT_NE(nullptr, innerModel);
1119 
1120     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1121     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1122     OH_NNCompilation_Destroy(&nnCompilation);
1123     EXPECT_EQ(nullptr, nnCompilation);
1124 }
1125 
1126 /**
1127  * @tc.name: excutor_construct_001
1128  * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNExecutor_Construct function
1129  * @tc.type: FUNC
1130  */
1131 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_001, testing::ext::TestSize.Level0)
1132 {
1133     InnerModel innerModel;
1134     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1135 
1136     OH_NNCompilation* nnCompilation = nullptr;
1137     OH_NNExecutor* executor = OH_NNExecutor_Construct(nnCompilation);
1138     EXPECT_EQ(nullptr, executor);
1139 }
1140 
1141 /**
1142  * @tc.name: excutor_construct_002
1143  * @tc.desc: Verify the not OH_NNCompilation_Build before creating executor of the OH_NNExecutor_Construct function
1144  * @tc.type: FUNC
1145  */
1146 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_002, testing::ext::TestSize.Level0)
1147 {
1148     InnerModel innerModel;
1149     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1150 
1151     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1152     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1153     OH_NNExecutor * executor = OH_NNExecutor_Construct(nnCompilation);
1154     EXPECT_EQ(nullptr, executor);
1155 }
1156 
1157 /**
1158  * @tc.name: excutor_construct_003
 * @tc.desc: Verify that OH_NNExecutor_Construct returns nullptr when the compilation has not been built.
1160  * @tc.type: FUNC
1161  */
1162 HWTEST_F(NeuralNetworkRuntimeTest, excutor_construct_003, testing::ext::TestSize.Level0)
1163 {
1164     InnerModel innerModel;
1165     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1166 
1167     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1168     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1169     OH_NNExecutor * executor = OH_NNExecutor_Construct(nnCompilation);
1170     EXPECT_EQ(nullptr, executor);
1171 }
1172 
1173 /**
1174  * @tc.name: excutor_setinput_001
1175  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInput function
1176  * @tc.type: FUNC
1177  */
1178 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_001, testing::ext::TestSize.Level0)
1179 {
1180     SetTensor();
1181 
1182     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1183     const void *buffer = input;
1184     size_t length = 2 * sizeof(float);
1185     uint32_t inputIndex = 0;
1186     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nullptr, inputIndex, &m_tensor, buffer, length));
1187 }
1188 
1189 /**
1190  * @tc.name: excutor_setinput_002
1191  * @tc.desc: Verify the OH_NN_Tensor is nullptr of the OH_NNExecutor_SetInput function
1192  * @tc.type: FUNC
1193  */
1194 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_002, testing::ext::TestSize.Level0)
1195 {
1196     InnerModel innerModel;
1197     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1198 
1199     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1200     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1201     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1202 
1203     uint32_t inputIndex = 0;
1204     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1205     const void *buffer = input;
1206     size_t length = 2 * sizeof(float);
1207     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length));
1208 }
1209 
1210 /**
1211  * @tc.name: excutor_setinput_003
1212  * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetInput function
1213  * @tc.type: FUNC
1214  */
1215 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_003, testing::ext::TestSize.Level0)
1216 {
1217     InnerModel innerModel;
1218     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1219 
1220     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1221     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1222     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1223 
1224     SetTensor();
1225 
1226     uint32_t inputIndex = 0;
1227     const void *buffer = nullptr;
1228     size_t length = 2 * sizeof(float);
1229     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
1230 }
1231 
1232 /**
1233  * @tc.name: excutor_setinput_004
1234  * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetInput function
1235  * @tc.type: FUNC
1236  */
1237 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_004, testing::ext::TestSize.Level0)
1238 {
1239     InnerModel innerModel;
1240     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1241 
1242     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1243     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1244     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1245 
1246     uint32_t inputIndex = 0;
1247     SetTensor();
1248 
1249     size_t length = 0;
1250     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1251     const void *buffer = input;
1252     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length));
1253 }
1254 
1255 /**
1256  * @tc.name: excutor_setinput_005
 * @tc.desc: Verify that OH_NNExecutor_SetInput returns OH_NN_INVALID_PARAMETER for an executor from an unbuilt compilation.
1258  * @tc.type: FUNC
1259  */
1260 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_005, testing::ext::TestSize.Level0)
1261 {
1262     InnerModel innerModel;
1263     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1264 
1265     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1266     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1267     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1268 
1269     uint32_t inputIndex = 0;
1270     int32_t dims[2] = {3, 4};
1271     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1272 
1273     float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1274     const void *buffer = input;
1275     size_t length = 12 * sizeof(float);
1276     OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
1277     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1278 }
1279 
1280 /**
1281  * @tc.name: excutor_setinput_006
 * @tc.desc: Verify that OH_NNExecutor_SetInput returns OH_NN_FAILED when the prepared model cannot supply input dim ranges.
1283  * @tc.type: FUNC
1284  */
1285 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_006, testing::ext::TestSize.Level0)
1286 {
1287     LOGE("OH_NNExecutor_SetInput excutor_setinput_006");
1288     size_t m_backendID {0};
1289     std::shared_ptr<Device> m_device {nullptr};
1290     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1291     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1292         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1293     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1294     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1295     NNExecutor* executor = new (std::nothrow) NNExecutor(
1296         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1297     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1298 
1299     uint32_t inputIndex = 0;
1300     int32_t dims[2] = {3, 4};
1301     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1302 
1303     float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1304     const void *buffer = input;
1305     size_t length = 12 * sizeof(float);
1306     OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
1307     EXPECT_EQ(OH_NN_FAILED, ret);
1308 
1309     testing::Mock::AllowLeak(mockIPreparedMode.get());
1310 }
1311 
1312 /**
1313  * @tc.name: excutor_setinput_007
 * @tc.desc: Verify that OH_NNExecutor_SetInput returns OH_NN_INVALID_PARAMETER when the tensor is nullptr.
1315  * @tc.type: FUNC
1316  */
1317 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_007, testing::ext::TestSize.Level0)
1318 {
1319     LOGE("OH_NNExecutor_SetInput excutor_setinput_007");
1320     size_t m_backendID {0};
1321     std::shared_ptr<Device> m_device {nullptr};
1322     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1323     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1324         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1325     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1326     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1327     NNExecutor* executor = new (std::nothrow) NNExecutor(
1328         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1329     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1330 
1331     uint32_t inputIndex = 0;
1332 
1333     float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1334     const void *buffer = input;
1335     size_t length = 12 * sizeof(float);
1336     OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, nullptr, buffer, length);
1337     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1338 
1339     testing::Mock::AllowLeak(mockIPreparedMode.get());
1340 }
1341 
1342 /**
1343  * @tc.name: excutor_setinput_008
 * @tc.desc: Verify that OH_NNExecutor_SetInput returns OH_NN_INVALID_PARAMETER when the data buffer is nullptr.
1345  * @tc.type: FUNC
1346  */
1347 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_008, testing::ext::TestSize.Level0)
1348 {
1349     LOGE("OH_NNExecutor_SetInput excutor_setinput_008");
1350     size_t m_backendID {0};
1351     std::shared_ptr<Device> m_device {nullptr};
1352     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1353     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1354         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1355     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1356     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1357     NNExecutor* executor = new (std::nothrow) NNExecutor(
1358         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1359     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1360 
1361     uint32_t inputIndex = 0;
1362     int32_t dims[2] = {3, 4};
1363     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1364 
1365     size_t length = 12 * sizeof(float);
1366     OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, nullptr, length);
1367     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1368 
1369     testing::Mock::AllowLeak(mockIPreparedMode.get());
1370 }
1371 
1372 /**
1373  * @tc.name: excutor_setinput_009
 * @tc.desc: Verify that OH_NNExecutor_SetInput returns OH_NN_INVALID_PARAMETER when the data length is 0.
1375  * @tc.type: FUNC
1376  */
1377 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setinput_009, testing::ext::TestSize.Level0)
1378 {
1379     LOGE("OH_NNExecutor_SetInput excutor_setinput_009");
1380     size_t m_backendID {0};
1381     std::shared_ptr<Device> m_device {nullptr};
1382     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1383     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1384         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1385     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1386     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1387     NNExecutor* executor = new (std::nothrow) NNExecutor(
1388         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1389     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1390 
1391     uint32_t inputIndex = 0;
1392     int32_t dims[2] = {3, 4};
1393     m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};
1394 
1395     float input[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1396     const void *buffer = input;
1397     size_t length = 0;
1398     OH_NN_ReturnCode ret = OH_NNExecutor_SetInput(nnExecutor, inputIndex, &m_tensor, buffer, length);
1399     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1400 
1401     testing::Mock::AllowLeak(mockIPreparedMode.get());
1402 }
1403 
1404 /**
1405  * @tc.name: excutor_setoutput_001
1406  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutput function
1407  * @tc.type: FUNC
1408  */
1409 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_001, testing::ext::TestSize.Level0)
1410 {
1411     LOGE("OH_NNExecutor_SetOutput excutor_setoutput_001");
1412     uint32_t outputIndex = 0;
1413     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1414     void *buffer = input;
1415     size_t length = 9 * sizeof(int32_t);
1416     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nullptr, outputIndex, buffer, length));
1417 }
1418 
1419 /**
1420  * @tc.name: excutor_setoutput_002
1421  * @tc.desc: Verify the data is nullptr of the OH_NNExecutor_SetOutput function
1422  * @tc.type: FUNC
1423  */
1424 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_002, testing::ext::TestSize.Level0)
1425 {
1426     InnerModel innerModel;
1427     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1428 
1429     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1430     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1431     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1432 
1433     uint32_t outputIndex = 0;
1434     void *buffer = nullptr;
1435     size_t length = 9 * sizeof(int32_t);
1436     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1437 }
1438 
1439 /**
1440  * @tc.name: excutor_setoutput_003
1441  * @tc.desc: Verify the length is 0 of the OH_NNExecutor_SetOutput function
1442  * @tc.type: FUNC
1443  */
1444 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_003, testing::ext::TestSize.Level0)
1445 {
1446     InnerModel innerModel;
1447     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1448 
1449     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1450     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1451     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1452 
1453     uint32_t outputIndex = 0;
1454     float input[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1455     void *buffer = input;
1456     size_t length = 0;
1457     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, buffer, length));
1458 }
1459 
1460 /**
1461  * @tc.name: excutor_setoutput_004
 * @tc.desc: Verify that OH_NNExecutor_SetOutput returns OH_NN_INVALID_PARAMETER for an executor from an unbuilt compilation.
1463  * @tc.type: FUNC
1464  */
1465 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_004, testing::ext::TestSize.Level0)
1466 {
1467     InnerModel innerModel;
1468     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1469 
1470     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1471     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1472     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1473 
1474     uint32_t outputIndex = 0;
1475     float output[12];
1476     size_t length = 12 * sizeof(float);
1477     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1478 }
1479 
1480 /**
1481  * @tc.name: excutor_setoutput_005
 * @tc.desc: Verify that OH_NNExecutor_SetOutput returns OH_NN_INVALID_PARAMETER when the output index is out of range.
1483  * @tc.type: FUNC
1484  */
1485 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_005, testing::ext::TestSize.Level0)
1486 {
1487     LOGE("OH_NNExecutor_SetOutput excutor_setinput_006");
1488     size_t m_backendID {0};
1489     std::shared_ptr<Device> m_device {nullptr};
1490     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1491     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1492         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1493     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1494     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1495     NNExecutor* executor = new (std::nothrow) NNExecutor(
1496         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1497     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1498 
1499     uint32_t outputIndex = 0;
1500     float output[12];
1501     size_t length = 12 * sizeof(float);
1502     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1503 
1504     testing::Mock::AllowLeak(mockIPreparedMode.get());
1505 }
1506 
1507 /**
1508  * @tc.name: excutor_setoutput_006
 * @tc.desc: Verify that OH_NNExecutor_SetOutput returns OH_NN_INVALID_PARAMETER when the data buffer is nullptr.
1510  * @tc.type: FUNC
1511  */
1512 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_006, testing::ext::TestSize.Level0)
1513 {
1514     LOGE("OH_NNExecutor_SetOutput excutor_setinput_006");
1515     size_t m_backendID {0};
1516     std::shared_ptr<Device> m_device {nullptr};
1517     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1518     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1519         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1520     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1521     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1522     NNExecutor* executor = new (std::nothrow) NNExecutor(
1523         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1524     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1525 
1526     uint32_t outputIndex = 0;
1527     size_t length = 12 * sizeof(float);
1528     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, nullptr, length));
1529 
1530     testing::Mock::AllowLeak(mockIPreparedMode.get());
1531 }
1532 
1533 /**
1534  * @tc.name: excutor_setoutput_007
 * @tc.desc: Verify that OH_NNExecutor_SetOutput returns OH_NN_INVALID_PARAMETER when the data length is 0.
1536  * @tc.type: FUNC
1537  */
1538 HWTEST_F(NeuralNetworkRuntimeTest, excutor_setoutput_007, testing::ext::TestSize.Level0)
1539 {
1540     LOGE("OH_NNExecutor_SetOutput excutor_setoutput_007");
1541     size_t m_backendID {0};
1542     std::shared_ptr<Device> m_device {nullptr};
1543     std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
1544     EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
1545         .WillRepeatedly(::testing::Return(OH_NN_FAILED));
1546     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1547     std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1548     NNExecutor* executor = new (std::nothrow) NNExecutor(
1549         m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
1550     OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);
1551 
1552     uint32_t outputIndex = 0;
1553     float output[12];
1554     size_t length = 0;
1555     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutput(nnExecutor, outputIndex, output, length));
1556 
1557     testing::Mock::AllowLeak(mockIPreparedMode.get());
1558 }
1559 
1560 /**
1561  * @tc.name: excutor_getoutputshape_001
1562  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_GetOutputShape function
1563  * @tc.type: FUNC
1564  */
1565 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_001, testing::ext::TestSize.Level0)
1566 {
1567     InnerModel innerModel;
1568     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1569     OH_NNExecutor* nnExecutor = nullptr;
1570 
1571     int32_t* ptr = nullptr;
1572     int32_t** shape = &ptr;
1573     uint32_t length = 2;
1574     uint32_t outputIndex = 0;
1575     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1576         shape, &length));
1577 }
1578 
1579 /**
1580  * @tc.name: excutor_getoutputshape_002
1581  * @tc.desc: Verify the shape is nullptr of the OH_NNExecutor_GetOutputShape function
1582  * @tc.type: FUNC
1583  */
1584 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_002, testing::ext::TestSize.Level0)
1585 {
1586     InnerModel innerModel;
1587     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1588 
1589     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1590     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1591     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1592 
1593     uint32_t outputIndex = 0;
1594     int32_t** shape = nullptr;
1595     uint32_t length = 2;
1596     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1597         shape, &length));
1598 }
1599 
1600 /**
1601  * @tc.name: excutor_getoutputshape_003
1602  * @tc.desc: Verify the *shape is not nullptr of the OH_NNExecutor_GetOutputShape function
1603  * @tc.type: FUNC
1604  */
1605 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_003, testing::ext::TestSize.Level0)
1606 {
1607     InnerModel innerModel;
1608     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1609 
1610     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1611     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1612     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1613 
1614     int32_t expectDim[2] = {3, 3};
1615     int32_t* ptr = expectDim;
1616     int32_t** shape = &ptr;
1617     uint32_t length = 2;
1618     uint32_t outputIndex = 0;
1619     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex,
1620         shape, &length));
1621 }
1622 
1623 /**
1624  * @tc.name: excutor_getoutputshape_004
1625  * @tc.desc: Verify the length is nullptr of the OH_NNExecutor_GetOutputShape function
1626  * @tc.type: FUNC
1627  */
1628 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_004, testing::ext::TestSize.Level0)
1629 {
1630     InnerModel innerModel;
1631     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1632 
1633     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1634     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1635     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1636 
1637     int32_t* ptr = nullptr;
1638     int32_t** shape = &ptr;
1639     uint32_t outputIndex = 0;
1640     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, nullptr));
1641 }
1642 
1643 /**
1644  * @tc.name: excutor_getoutputshape_005
 * @tc.desc: Verify the failure (OH_NN_INVALID_PARAMETER) of the OH_NNExecutor_GetOutputShape function
1646  * @tc.type: FUNC
1647  */
1648 HWTEST_F(NeuralNetworkRuntimeTest, excutor_getoutputshape_005, testing::ext::TestSize.Level0)
1649 {
1650     InnerModel innerModel;
1651     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1652 
1653     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1654     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1655     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1656 
1657     int32_t* ptr = nullptr;
1658     int32_t** shape = &ptr;
1659     uint32_t length = 2;
1660     uint32_t outputIndex = 0;
1661     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, shape, &length));
1662 }
1663 
1664 /**
1665  * @tc.name: excutor_run_001
1666  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Run function
1667  * @tc.type: FUNC
1668  */
1669 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_001, testing::ext::TestSize.Level0)
1670 {
1671     OH_NNExecutor* nnExecutor = nullptr;
1672     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1673 }
1674 
1675 /**
1676  * @tc.name: excutor_run_002
 * @tc.desc: Verify the failure (OH_NN_INVALID_PARAMETER) of the OH_NNExecutor_Run function
1678  * @tc.type: FUNC
1679  */
1680 HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_002, testing::ext::TestSize.Level0)
1681 {
1682     InnerModel innerModel;
1683     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1684 
1685     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1686     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1687     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1688 
1689     int32_t inputDims[2] = {3, 4};
1690     m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
1691     EXPECT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nnExecutor));
1692 }
1693 
1694 /**
1695  * @tc.name: excutor_run_003
1696  * @tc.desc: Verify the success of the OH_NNExecutor_Run function
1697  * @tc.type: FUNC
1698  */
HWTEST_F(NeuralNetworkRuntimeTest, excutor_run_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_Run excutor_run_003");
    // Build an NNExecutor directly around a mocked prepared model: null device,
    // empty tensor-desc lists, and GetInputDimRanges stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    int32_t inputDims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
    // With empty input/output descriptor lists, Run is expected to report success.
    OH_NN_ReturnCode ret = OH_NNExecutor_Run(nnExecutor);
    EXPECT_EQ(OH_NN_SUCCESS, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
1720 
1721 /*
1722  * @tc.name: executor_allocate_input_memory_001
1723  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateInputMemory function.
1724  * @tc.type: FUNC
1725  */
1726 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_001, testing::ext::TestSize.Level0)
1727 {
1728     OH_NNExecutor* nnExecutor = nullptr;
1729     uint32_t outputIndex = 0;
1730     size_t length = 9 * sizeof(float);
1731 
1732     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1733     EXPECT_EQ(nullptr, ret);
1734 }
1735 
1736 /*
1737  * @tc.name: executor_allocate_input_memory_002
1738  * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateInputMemory function.
1739  * @tc.type: FUNC
1740  */
1741 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_002, testing::ext::TestSize.Level0)
1742 {
1743     InnerModel innerModel;
1744     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1745 
1746     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1747     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1748     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1749 
1750     uint32_t outputIndex = 0;
1751     size_t length = 0;
1752 
1753     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1754     EXPECT_EQ(nullptr, ret);
1755 }
1756 
1757 /*
1758  * @tc.name: executor_allocate_input_memory_003
1759  * @tc.desc: Verify the error when creating input memory in executor of the OH_NNExecutor_AllocateInputMemory function.
1760  * @tc.type: FUNC
1761  */
1762 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_003, testing::ext::TestSize.Level0)
1763 {
1764     InnerModel innerModel;
1765     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1766 
1767     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1768     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1769     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1770 
1771     uint32_t outputIndex = 6;
1772     size_t length = 9 * sizeof(float);
1773 
1774     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1775     EXPECT_EQ(nullptr, ret);
1776 }
1777 
1778 /*
1779  * @tc.name: executor_allocate_input_memory_004
 * @tc.desc: Verify the failure (nullptr returned) of the OH_NNExecutor_AllocateInputMemory function.
1781  * @tc.type: FUNC
1782  */
1783 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_004, testing::ext::TestSize.Level0)
1784 {
1785     InnerModel innerModel;
1786     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1787 
1788     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1789     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1790     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1791 
1792     uint32_t outputIndex = 0;
1793     size_t length = 9 * sizeof(float);
1794 
1795     OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
1796     EXPECT_EQ(nullptr, ret);
1797 }
1798 
1799 /*
1800  * @tc.name: executor_allocate_input_memory_005
 * @tc.desc: Verify the failure (nullptr returned) of the OH_NNExecutor_AllocateInputMemory function.
1802  * @tc.type: FUNC
1803  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_005");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // With no device behind the executor, the allocation must fail and return nullptr.
    uint32_t outputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
1826 
1827 /*
1828  * @tc.name: executor_allocate_input_memory_006
1829  * @tc.desc: Verify the success of the OH_NNExecutor_AllocateInputMemory function.
1830  * @tc.type: FUNC
1831  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_006, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_006");
    size_t m_backendID {0};
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;

    // Register two inputs and two outputs that all share one 3x3 TensorDesc.
    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair1;
    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair2;
    std::shared_ptr<TensorDesc> tensorDesr = std::make_shared<TensorDesc>();
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesr->SetShape(ptr, dimensionCount);
    pair1.first = tensorDesr;
    pair2.first = tensorDesr;
    m_inputTensorDescs.emplace_back(pair1);
    m_inputTensorDescs.emplace_back(pair2);
    m_outputTensorDescs.emplace_back(pair1);
    m_outputTensorDescs.emplace_back(pair2);

    // Mock device hands back a fake non-null buffer (0x1000) for this size/desc pair.
    size_t length = 9 * sizeof(float);
    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateTensorBuffer(length, m_inputTensorDescs[0].first))
        .WillRepeatedly(::testing::Return(reinterpret_cast<void*>(0x1000)));

    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
    EXPECT_NE(nullptr, executor);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;

    // A non-null handle proves the allocation path succeeded through the mock device.
    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
    EXPECT_NE(nullptr, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(device.get());
}
1872 
1873 /*
1874  * @tc.name: executor_allocate_input_memory_007
 * @tc.desc: Verify the OH_NNExecutor_AllocateInputMemory function returns nullptr when the passed length is 0.
1876  * @tc.type: FUNC
1877  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_input_memory_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_input_memory_007");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // A zero-byte allocation request must fail and return nullptr.
    uint32_t outputIndex = 0;
    size_t length = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateInputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
1900 
1901 /*
1902  * @tc.name: executor_allocate_output_memory_001
1903  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_AllocateOutputMemory function.
1904  * @tc.type: FUNC
1905  */
1906 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_001, testing::ext::TestSize.Level0)
1907 {
1908     OH_NNExecutor* nnExecutor = nullptr;
1909     uint32_t outputIndex = 0;
1910     size_t length = 9 * sizeof(float);
1911 
1912     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1913     EXPECT_EQ(nullptr, ret);
1914 }
1915 
1916 /*
1917  * @tc.name: executor_allocate_output_memory_002
1918  * @tc.desc: Verify the passed length equals 0 of the OH_NNExecutor_AllocateOutputMemory function.
1919  * @tc.type: FUNC
1920  */
1921 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_002, testing::ext::TestSize.Level0)
1922 {
1923     InnerModel innerModel;
1924     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1925 
1926     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1927     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1928     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1929 
1930     uint32_t outputIndex = 0;
1931     size_t length = 0;
1932 
1933     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1934     EXPECT_EQ(nullptr, ret);
1935 }
1936 
1937 /*
1938  * @tc.name: executor_allocate_output_memory_003
1939  * @tc.desc: Verify the error when create output memory in executor of the OH_NNExecutor_AllocateOutputMemory function.
1940  * @tc.type: FUNC
1941  */
1942 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_003, testing::ext::TestSize.Level0)
1943 {
1944     InnerModel innerModel;
1945     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1946 
1947     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1948     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1949     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1950 
1951     uint32_t outputIndex = 6;
1952     size_t length = 9 * sizeof(float);
1953 
1954     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1955     EXPECT_EQ(nullptr, ret);
1956 }
1957 
1958 /*
1959  * @tc.name: executor_allocate_output_memory_004
 * @tc.desc: Verify the failure (nullptr returned) of the OH_NNExecutor_AllocateOutputMemory function.
1961  * @tc.type: FUNC
1962  */
1963 HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_004, testing::ext::TestSize.Level0)
1964 {
1965     InnerModel innerModel;
1966     EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
1967 
1968     OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
1969     OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
1970     OH_NNExecutor* nnExecutor = OH_NNExecutor_Construct(nnCompilation);
1971 
1972     uint32_t outputIndex = 0;
1973     size_t length = 9 * sizeof(float);
1974 
1975     OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
1976     EXPECT_EQ(nullptr, ret);
1977 }
1978 
1979 /*
1980  * @tc.name: executor_allocate_output_memory_005
 * @tc.desc: Verify the failure (nullptr returned) of the OH_NNExecutor_AllocateOutputMemory function.
1982  * @tc.type: FUNC
1983  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateOutputMemory executor_allocate_output_memory_005");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // With no device behind the executor, the allocation must fail and return nullptr.
    uint32_t outputIndex = 0;
    size_t length = 9 * sizeof(float);

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2006 
2007 /*
2008  * @tc.name: executor_allocate_output_memory_006
 * @tc.desc: Verify the success of the OH_NNExecutor_AllocateOutputMemory function.
2010  * @tc.type: FUNC
2011  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_006, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_output_memory_006");
    size_t m_backendID {0};
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();

    std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;

    // Register two inputs and two outputs that all share one 3x3 TensorDesc.
    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair1;
    std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType> pair2;
    std::shared_ptr<TensorDesc> tensorDesr = std::make_shared<TensorDesc>();
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesr->SetShape(ptr, dimensionCount);
    pair1.first = tensorDesr;
    pair2.first = tensorDesr;
    m_inputTensorDescs.emplace_back(pair1);
    m_inputTensorDescs.emplace_back(pair2);
    m_outputTensorDescs.emplace_back(pair1);
    m_outputTensorDescs.emplace_back(pair2);

    // Mock device hands back a fake non-null buffer (0x1000) for this size/desc pair.
    size_t length = 9 * sizeof(float);
    EXPECT_CALL(*((MockIDevice *) device.get()), AllocateTensorBuffer(length, m_outputTensorDescs[0].first))
        .WillRepeatedly(::testing::Return(reinterpret_cast<void*>(0x1000)));

    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
    EXPECT_NE(nullptr, executor);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;

    // A non-null handle proves the allocation path succeeded through the mock device.
    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_NE(nullptr, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(device.get());
}
2052 
2053 /*
2054  * @tc.name: executor_allocate_output_memory_007
 * @tc.desc: Verify the OH_NNExecutor_AllocateOutputMemory function returns nullptr when the passed length is 0.
2056  * @tc.type: FUNC
2057  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_allocate_output_memory_007, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_AllocateInputMemory executor_allocate_output_memory_007");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // A zero-byte allocation request must fail and return nullptr.
    uint32_t outputIndex = 0;
    size_t length = 0;

    OH_NN_Memory* ret = OH_NNExecutor_AllocateOutputMemory(nnExecutor, outputIndex, length);
    EXPECT_EQ(nullptr, ret);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2080 
2081 /*
2082  * @tc.name: executor_destroy_input_memory_001
2083  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyInputMemory function.
2084  * @tc.type: FUNC
2085  */
2086 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_001, testing::ext::TestSize.Level0)
2087 {
2088     InnerModel innerModel;
2089     BuildModel(innerModel);
2090     OH_NNExecutor* nnExecutor = nullptr;
2091 
2092     uint32_t inputIndex = 0;
2093     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2094     void* const data = dataArry;
2095     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2096     OH_NN_Memory* pMemory = &memory;
2097     OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
2098     EXPECT_EQ(nullptr, nnExecutor);
2099 }
2100 
2101 /*
2102  * @tc.name: executor_destroy_input_memory_002
2103  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
2104  * @tc.type: FUNC
2105  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_002, testing::ext="">::TestSize.Level0)
{
}
2127 
2128 /*
2129  * @tc.name: executor_destroy_input_memory_003
2130  * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyInputMemory function.
2131  * @tc.type: FUNC
2132  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_003");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // *memory is already null; the call must leave it null and not crash.
    uint32_t inputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_Memory** pMemory = &memory;
    OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, pMemory);
    EXPECT_EQ(nullptr, memory);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2155 
2156 /*
2157  * @tc.name: executor_destroy_input_memory_004
2158  * @tc.desc: Verify the error happened when destroying input memory of the OH_NNExecutor_DestroyInputMemory function.
2159  * @tc.type: FUNC
2160  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_input_memory_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyInputMemory executor_destroy_input_memory_004");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // Index 6 refers to no registered input: the destroy fails and the
    // caller's handle must be left untouched (still non-null).
    uint32_t inputIndex = 6;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    OH_NNExecutor_DestroyInputMemory(nnExecutor, inputIndex, &pMemory);
    EXPECT_NE(nullptr, pMemory);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2185 
2186 /*
2187  * @tc.name: executor_destroy_output_memory_001
2188  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
2189  * @tc.type: FUNC
2190  */
2191 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_001, testing::ext::TestSize.Level0)
2192 {
2193     OH_NNExecutor* nnExecutor = nullptr;
2194     uint32_t outputIndex = 0;
2195     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2196     void* const data = dataArry;
2197     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2198     OH_NN_Memory* pMemory = &memory;
2199     OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
2200     EXPECT_EQ(nullptr, nnExecutor);
2201 }
2202 
2203 /*
2204  * @tc.name: executor_destroy_output_memory_002
2205  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
2206  * @tc.type: FUNC
2207  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_002");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // A null memory pointer-to-pointer must be tolerated and left null.
    uint32_t outputIndex = 0;
    OH_NN_Memory** memory = nullptr;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, memory);
    EXPECT_EQ(nullptr, memory);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2229 
2230 /*
2231  * @tc.name: executor_destroy_output_memory_003
2232  * @tc.desc: Verify the *memory is nullptr of the OH_NNExecutor_DestroyOutputMemory function.
2233  * @tc.type: FUNC
2234  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_003");
    // Executor built around a mocked prepared model with a null device and
    // empty tensor-desc lists; GetInputDimRanges is stubbed to fail on any call.
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // *memory is already null; the call must leave it null and not crash.
    uint32_t outputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_Memory** pMemory = &memory;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, pMemory);
    EXPECT_EQ(nullptr, memory);

    // Silence gmock's leak detector; the mock (and executor) are deliberately not freed here.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2257 
2258 /*
2259  * @tc.name: executor_destroy_output_memory_004
2260  * @tc.desc: Verify the error happened when destroying output memory of the OH_NNExecutor_DestroyOutputMemory function.
2261  * @tc.type: FUNC
2262  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_004");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    // Empty output descriptors, so output index 6 below is out of range.
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 6;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    // The destroy must fail for an invalid index and leave pMemory non-null.
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
    EXPECT_NE(nullptr, pMemory);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2287 
2288 /*
2289  * @tc.name: executor_destroy_output_memory_005
2290  * @tc.desc: Verify the success of the OH_NNExecutor_DestroyOutputMemory function.
2291  * @tc.type: FUNC
2292  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_output_memory_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_DestroyOutputMemory executor_destroy_output_memory_005");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // Memory that was not created through the executor: the call must not
    // null out the caller's pointer.
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};
    OH_NN_Memory* pMemory = &memory;
    uint32_t outputIndex = 0;
    OH_NNExecutor_DestroyOutputMemory(nnExecutor, outputIndex, &pMemory);
    EXPECT_NE(nullptr, pMemory);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2317 
2318 /*
2319  * @tc.name: executor_set_input_with_memory_001
2320  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetInputWithMemory function.
2321  * @tc.type: FUNC
2322  */
2323 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_001, testing::ext::TestSize.Level0)
2324 {
2325     OH_NNExecutor* nnExecutor = nullptr;
2326 
2327     SetTensor();
2328 
2329     uint32_t inputIndex = 0;
2330     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2331     void* const data = dataArry;
2332     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2333 
2334     OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
2335     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2336 }
2337 
2338 /*
2339  * @tc.name: executor_set_input_with_memory_002
2340  * @tc.desc: Verify the operand is nullptr of the OH_NNExecutor_SetInputWithMemory function.
2341  * @tc.type: FUNC
2342  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_002");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // A null tensor operand must be rejected.
    OH_NN_Tensor* operand = nullptr;

    uint32_t inputIndex = 0;
    float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 9 * sizeof(float)};

    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, operand, &memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2369 
2370 /*
2371  * @tc.name: executor_set_input_with_memory_003
2372  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetInputWithMemory function.
2373  * @tc.type: FUNC
2374  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    SetTensor();

    // A null memory pointer must be rejected.
    uint32_t inputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2398 
2399 /*
2400  * @tc.name: executor_set_input_with_memory_004
2401  * @tc.desc: Verify the success of the OH_NNExecutor_SetInputWithMemory function.
2402  * @tc.type: FUNC
2403  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_input_with_memory_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetInputWithMemory executor_set_input_with_memory_004");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    // Empty input descriptors: setting input 0 cannot succeed, the test
    // expects OH_NN_FAILED below despite the header's "success" wording.
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t inputIndex = 0;
    int32_t dims[2] = {3, 4};
    m_tensor = {OH_NN_FLOAT32, 2, dims, nullptr, OH_NN_TENSOR};

    // 3 x 4 float tensor backed by a 12-element buffer.
    float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 12 * sizeof(float)};

    OH_NN_ReturnCode ret = OH_NNExecutor_SetInputWithMemory(nnExecutor, inputIndex, &m_tensor, &memory);
    EXPECT_EQ(OH_NN_FAILED, ret);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2431 
2432 
2433 /*
2434  * @tc.name: executor_set_output_with_memory_001
2435  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
2436  * @tc.type: FUNC
2437  */
2438 HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_001, testing::ext::TestSize.Level0)
2439 {
2440     OH_NNExecutor* nnExecutor = nullptr;
2441     uint32_t outputIndex = 0;
2442     float dataArry[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
2443     void* const data = dataArry;
2444     OH_NN_Memory memory = {data, 9 * sizeof(float)};
2445     OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
2446     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2447 }
2448 
2449 /*
2450  * @tc.name: executor_set_output_with_memory_002
2451  * @tc.desc: Verify the memory is nullptr of the OH_NNExecutor_SetOutputWithMemory function.
2452  * @tc.type: FUNC
2453  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetOutputWithMemory executor_set_output_with_memory_002");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    // A null memory pointer must be rejected.
    uint32_t outputIndex = 0;
    OH_NN_Memory* memory = nullptr;
    OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2475 
2476 /*
2477  * @tc.name: executor_set_output_with_memory_003
2478  * @tc.desc: Verify the success of the OH_NNExecutor_SetOutputWithMemory function.
2479  * @tc.type: FUNC
2480  */
HWTEST_F(NeuralNetworkRuntimeTest, executor_set_output_with_memory_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNExecutor_SetOutputWithMemory executor_set_output_with_memory_003");
    size_t m_backendID {0};
    std::shared_ptr<Device> m_device {nullptr};
    // Mock prepared model whose GetInputDimRanges always reports failure.
    std::shared_ptr<MockIPreparedModel> mockIPreparedMode = std::make_shared<MockIPreparedModel>();
    EXPECT_CALL(*((MockIPreparedModel *) mockIPreparedMode.get()), GetInputDimRanges(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::Return(OH_NN_FAILED));
    // Empty output descriptors: output index 0 is invalid, so the call below
    // is expected to return OH_NN_INVALID_PARAMETER.
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
    std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
    NNExecutor* executor = new (std::nothrow) NNExecutor(
        m_backendID, m_device, mockIPreparedMode, m_inputTensorDescs, m_outputTensorDescs);
    OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(executor);

    uint32_t outputIndex = 0;
    float dataArry[12] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
    void* const data = dataArry;
    OH_NN_Memory memory = {data, 12 * sizeof(float)};
    OH_NN_ReturnCode ret = OH_NNExecutor_SetOutputWithMemory(nnExecutor, outputIndex, &memory);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);

    // NOTE(review): executor is never deleted; AllowLeak only covers the mock.
    testing::Mock::AllowLeak(mockIPreparedMode.get());
}
2504 
2505 /*
2506  * @tc.name: executor_destroy_001
2507  * @tc.desc: Verify the OH_NNExecutor is nullptr of the OH_NNExecutor_Destroy function.
2508  * @tc.type: FUNC
2509  */
2510 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_001, testing::ext::TestSize.Level0)
2511 {
2512     OH_NNExecutor** pExecutor = nullptr;
2513     OH_NNExecutor_Destroy(pExecutor);
2514     EXPECT_EQ(nullptr, pExecutor);
2515 }
2516 
2517 /*
2518  * @tc.name: executor_destroy_002
2519  * @tc.desc: Verify the *OH_NNExecutor is nullptr of the OH_NNExecutor_Destroy function.
2520  * @tc.type: FUNC
2521  */
2522 HWTEST_F(NeuralNetworkRuntimeTest, executor_destroy_002, testing::ext::TestSize.Level0)
2523 {
2524     OH_NNExecutor* nnExecutor = nullptr;
2525     OH_NNExecutor** pExecutor = &nnExecutor;
2526     OH_NNExecutor_Destroy(pExecutor);
2527     EXPECT_EQ(nullptr, nnExecutor);
2528 }
2529 
2530 /*
2531  * @tc.name: device_get_all_devices_id_001
2532  * @tc.desc: Verify the allDevicesID is nullptr of the OH_NNDevice_GetAllDevicesID function.
2533  * @tc.type: FUNC
2534  */
2535 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_001, testing::ext::TestSize.Level0)
2536 {
2537     const size_t** allDevicesId = nullptr;
2538     uint32_t deviceCount = 1;
2539     uint32_t* pDeviceCount = &deviceCount;
2540     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(allDevicesId, pDeviceCount);
2541     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2542 }
2543 
2544 /*
2545  * @tc.name: device_get_all_devices_id_002
2546  * @tc.desc: Verify the *allDevicesID is not nullptr of the OH_NNDevice_GetAllDevicesID function.
2547  * @tc.type: FUNC
2548  */
2549 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_002, testing::ext::TestSize.Level0)
2550 {
2551     const size_t devicesId = 1;
2552     const size_t* allDevicesId = &devicesId;
2553     const size_t** pAllDevicesId = &allDevicesId;
2554     uint32_t deviceCount = 1;
2555     uint32_t* pDeviceCount = &deviceCount;
2556     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2557     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2558 }
2559 
2560 /*
2561  * @tc.name: device_get_all_devices_id_003
2562  * @tc.desc: Verify the deviceCount is nullptr of the OH_NNDevice_GetAllDevicesID function.
2563  * @tc.type: FUNC
2564  */
2565 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_003, testing::ext::TestSize.Level0)
2566 {
2567     const size_t* allDevicesId = nullptr;
2568     const size_t** pAllDevicesId = &allDevicesId;
2569     uint32_t* pDeviceCount = nullptr;
2570     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2571     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2572 }
2573 
2574 /*
2575  * @tc.name: device_get_all_devices_id_004
2576  * @tc.desc: Verify the get no device of the OH_NNDevice_GetAllDevicesID function.
2577  * @tc.type: FUNC
2578  */
2579 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_004, testing::ext::TestSize.Level0)
2580 {
2581     const size_t* allDevicesId = nullptr;
2582     const size_t** pAllDevicesId = &allDevicesId;
2583     uint32_t deviceCount = 1;
2584     uint32_t* pDeviceCount = &deviceCount;
2585     OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_FAILED;
2586     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2587     EXPECT_EQ(OH_NN_SUCCESS, ret);
2588 }
2589 
2590 /*
2591  * @tc.name: device_get_all_devices_id_005
2592  * @tc.desc: Verify the success of the OH_NNDevice_GetAllDevicesID function.
2593  * @tc.type: FUNC
2594  */
2595 HWTEST_F(NeuralNetworkRuntimeTest, device_get_all_devices_id_005, testing::ext::TestSize.Level0)
2596 {
2597     const size_t* allDevicesId = nullptr;
2598     const size_t** pAllDevicesId = &allDevicesId;
2599     uint32_t deviceCount = 1;
2600     uint32_t* pDeviceCount = &deviceCount;
2601     OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(pAllDevicesId, pDeviceCount);
2602     EXPECT_EQ(OH_NN_SUCCESS, ret);
2603 }
2604 
2605 /*
2606  * @tc.name: device_get_name_001
2607  * @tc.desc: Verify the name is nullptr of the OH_NNDevice_GetName function.
2608  * @tc.type: FUNC
2609  */
2610 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_001, testing::ext::TestSize.Level0)
2611 {
2612     size_t deviceID = 1;
2613     const char **name = nullptr;
2614     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, name);
2615     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2616 }
2617 
2618 /*
2619  * @tc.name: device_get_name_002
2620  * @tc.desc: Verify the *name is not nullptr of the OH_NNDevice_GetName function.
2621  * @tc.type: FUNC
2622  */
2623 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_002, testing::ext::TestSize.Level0)
2624 {
2625     size_t deviceID = 1;
2626     const char* name = "diviceId";
2627     const char** pName = &name;
2628     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2629     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2630 }
2631 
2632 /*
2633  * @tc.name: device_get_name_003
2634  * @tc.desc: Verify the error happened when getting name of deviceID of the OH_NNDevice_GetName function.
2635  * @tc.type: FUNC
2636  */
2637 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_003, testing::ext::TestSize.Level0)
2638 {
2639     size_t deviceID = 12345;
2640     const char* name = nullptr;
2641     const char** pName = &name;
2642     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2643     EXPECT_EQ(OH_NN_FAILED, ret);
2644 }
2645 
2646 /*
2647  * @tc.name: device_get_name_004
2648  * @tc.desc: Verify the success of the OH_NNDevice_GetName function.
2649  * @tc.type: FUNC
2650  */
2651 HWTEST_F(NeuralNetworkRuntimeTest, device_get_name_004, testing::ext::TestSize.Level0)
2652 {
2653     size_t deviceID = 1;
2654     const char* name = nullptr;
2655     const char** pName = &name;
2656     OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, pName);
2657     EXPECT_EQ(OH_NN_FAILED, ret);
2658 }
2659 
2660 /*
2661  * @tc.name: device_get_type_001
2662  * @tc.desc: Verify the device is nullptr of the OH_NNDevice_GetType function.
2663  * @tc.type: FUNC
2664  */
2665 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_001, testing::ext::TestSize.Level0)
2666 {
2667     size_t deviceID = 12345;
2668     OH_NN_DeviceType deviceType = OH_NN_CPU;
2669     OH_NN_DeviceType* pDeviceType = &deviceType;
2670     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2671     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2672 }
2673 
2674 /*
2675  * @tc.name: device_get_type_002
2676  * @tc.desc: Verify the OH_NN_DeviceType is nullptr of the OH_NNDevice_GetType function.
2677  * @tc.type: FUNC
2678  */
2679 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_002, testing::ext::TestSize.Level0)
2680 {
2681     size_t deviceID = 1;
2682     OH_NN_DeviceType* pDeviceType = nullptr;
2683     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2684     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2685 }
2686 
2687 /*
2688  * @tc.name: device_get_type_003
2689  * @tc.desc: Verify the error happened when getting name of deviceID of the OH_NNDevice_GetType function.
2690  * @tc.type: FUNC
2691  */
2692 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_003, testing::ext::TestSize.Level0)
2693 {
2694     size_t deviceID = 1;
2695     OH_NN_DeviceType deviceType = OH_NN_OTHERS;
2696     OH_NN_DeviceType* pDeviceType = &deviceType;
2697     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2698     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2699 }
2700 
2701 /*
2702  * @tc.name: device_get_type_004
2703  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2704  * @tc.type: FUNC
2705  */
2706 HWTEST_F(NeuralNetworkRuntimeTest, device_get_type_004, testing::ext::TestSize.Level0)
2707 {
2708     size_t deviceID =  1;
2709     OH_NN_DeviceType deviceType = OH_NN_CPU;
2710     OH_NN_DeviceType* pDeviceType = &deviceType;
2711     OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
2712     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2713 }
2714 
2715 /*
2716  * @tc.name: oh_nnquantparam_create_001
 * @tc.desc: Verify the success of the OH_NNQuantParam_Create function.
2718  * @tc.type: FUNC
2719  */
2720 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_create_001, testing::ext::TestSize.Level0)
2721 {
2722     LOGE("OH_NNQuantParam_Create oh_nnquantparam_create_001");
2723     NN_QuantParam* ret = OH_NNQuantParam_Create();
2724     EXPECT_NE(nullptr, ret);
2725 }
2726 
2727 /*
2728  * @tc.name: oh_nnquantparam_setscales_001
 * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_SetScales function.
2730  * @tc.type: FUNC
2731  */
2732 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_001, testing::ext::TestSize.Level0)
2733 {
2734     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_001");
2735     size_t quantNum = 1;
2736     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(nullptr, nullptr, quantNum);
2737     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2738 }
2739 
2740 /*
2741  * @tc.name: oh_nnquantparam_setscales_002
 * @tc.desc: Verify the scales is nullptr of the OH_NNQuantParam_SetScales function.
2743  * @tc.type: FUNC
2744  */
2745 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_002, testing::ext::TestSize.Level0)
2746 {
2747     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_002");
2748     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2749     size_t quantNum = 1;
2750     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, nullptr, quantNum);
2751     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2752 }
2753 
2754 /*
2755  * @tc.name: oh_nnquantparam_setscales_003
 * @tc.desc: Verify the quantCount is 0 of the OH_NNQuantParam_SetScales function.
2757  * @tc.type: FUNC
2758  */
2759 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_003, testing::ext::TestSize.Level0)
2760 {
2761     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_003");
2762     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2763     double scale = 2;
2764     size_t quantNum = 0;
2765     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, &scale, quantNum);
2766     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2767 }
2768 
2769 /*
2770  * @tc.name: oh_nnquantparam_setscales_004
 * @tc.desc: Verify the success of the OH_NNQuantParam_SetScales function.
2772  * @tc.type: FUNC
2773  */
2774 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setscales_004, testing::ext::TestSize.Level0)
2775 {
2776     LOGE("OH_NNQuantParam_SetScales oh_nnquantparam_setscales_004");
2777     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2778     double scale = 2;
2779     size_t quantNum = 2;
2780     OH_NN_ReturnCode ret = OH_NNQuantParam_SetScales(quantParams, &scale, quantNum);
2781     EXPECT_EQ(OH_NN_SUCCESS, ret);
2782 }
2783 
2784 /*
2785  * @tc.name: oh_nnquantparam_setzeropoints_001
 * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_SetZeroPoints function.
2787  * @tc.type: FUNC
2788  */
2789 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_001, testing::ext::TestSize.Level0)
2790 {
2791     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_001");
2792     size_t quantNum = 2;
2793     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(nullptr, nullptr, quantNum);
2794     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2795 }
2796 
2797 /*
2798  * @tc.name: oh_nnquantparam_setzeropoints_002
 * @tc.desc: Verify the zeroPoints is nullptr of the OH_NNQuantParam_SetZeroPoints function.
2800  * @tc.type: FUNC
2801  */
2802 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_002, testing::ext::TestSize.Level0)
2803 {
2804     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_002");
2805     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2806     size_t quantNum = 2;
2807     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, nullptr, quantNum);
2808     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2809 }
2810 
2811 /*
2812  * @tc.name: oh_nnquantparam_setzeropoints_003
 * @tc.desc: Verify the quantCount is 0 of the OH_NNQuantParam_SetZeroPoints function.
2814  * @tc.type: FUNC
2815  */
2816 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_003, testing::ext::TestSize.Level0)
2817 {
2818     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_003");
2819     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2820     int32_t zeroPoints = 2;
2821     size_t quantNum = 0;
2822     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoints, quantNum);
2823     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2824 }
2825 
2826 /*
2827  * @tc.name: oh_nnquantparam_setzeropoints_004
 * @tc.desc: Verify the success of the OH_NNQuantParam_SetZeroPoints function.
2829  * @tc.type: FUNC
2830  */
2831 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setzeropoints_004, testing::ext::TestSize.Level0)
2832 {
2833     LOGE("OH_NNQuantParam_SetZeroPoints oh_nnquantparam_setzeropoints_004");
2834     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2835     int32_t zeroPoints = 2;
2836     size_t quantNum = 2;
2837     OH_NN_ReturnCode ret = OH_NNQuantParam_SetZeroPoints(quantParams, &zeroPoints, quantNum);
2838     EXPECT_EQ(OH_NN_SUCCESS, ret);
2839 }
2840 
2841 /*
2842  * @tc.name: oh_nnquantparam_setnumbits_001
 * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_SetNumBits function.
2844  * @tc.type: FUNC
2845  */
2846 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_001, testing::ext::TestSize.Level0)
2847 {
2848     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_001");
2849     size_t quantNum = 2;
2850     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(nullptr, nullptr, quantNum);
2851     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2852 }
2853 
2854 /*
2855  * @tc.name: oh_nnquantparam_setnumbits_002
 * @tc.desc: Verify the numBits is nullptr of the OH_NNQuantParam_SetNumBits function.
2857  * @tc.type: FUNC
2858  */
2859 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_002, testing::ext::TestSize.Level0)
2860 {
2861     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_002");
2862     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2863     size_t quantNum = 2;
2864     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, nullptr, quantNum);
2865     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2866 }
2867 
2868 /*
2869  * @tc.name: oh_nnquantparam_setnumbits_003
 * @tc.desc: Verify the quantCount is 0 of the OH_NNQuantParam_SetNumBits function.
2871  * @tc.type: FUNC
2872  */
2873 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_003, testing::ext::TestSize.Level0)
2874 {
2875     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_003");
2876     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2877     uint32_t zeroPoints = 2;
2878     size_t quantNum = 0;
2879     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, &zeroPoints, quantNum);
2880     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2881 }
2882 
2883 /*
2884  * @tc.name: oh_nnquantparam_setnumbits_004
 * @tc.desc: Verify the success of the OH_NNQuantParam_SetNumBits function.
2886  * @tc.type: FUNC
2887  */
2888 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_setnumbits_004, testing::ext::TestSize.Level0)
2889 {
2890     LOGE("OH_NNQuantParam_SetNumBits oh_nnquantparam_setnumbits_004");
2891     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2892     uint32_t zeroPoints = 2;
2893     size_t quantNum = 2;
2894     OH_NN_ReturnCode ret = OH_NNQuantParam_SetNumBits(quantParams, &zeroPoints, quantNum);
2895     EXPECT_EQ(OH_NN_SUCCESS, ret);
2896 }
2897 
2898 /*
2899  * @tc.name: oh_nnquantparam_destroy_001
 * @tc.desc: Verify the quantParams is nullptr of the OH_NNQuantParam_Destroy function.
2901  * @tc.type: FUNC
2902  */
2903 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_001, testing::ext::TestSize.Level0)
2904 {
2905     LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_001");
2906     OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(nullptr);
2907     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2908 }
2909 
2910 /*
2911  * @tc.name: oh_nnquantparam_destroy_002
 * @tc.desc: Verify the *quantParams is nullptr of the OH_NNQuantParam_Destroy function.
2913  * @tc.type: FUNC
2914  */
2915 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_002, testing::ext::TestSize.Level0)
2916 {
2917     LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_002");
2918     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2919     NN_QuantParam** quantParamsDex = &quantParams;
2920     *quantParamsDex = nullptr;
2921     OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(quantParamsDex);
2922     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2923 }
2924 
2925 /*
2926  * @tc.name: oh_nnquantparam_destroy_003
 * @tc.desc: Verify the success of the OH_NNQuantParam_Destroy function.
2928  * @tc.type: FUNC
2929  */
2930 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnquantparam_destroy_003, testing::ext::TestSize.Level0)
2931 {
2932     LOGE("OH_NNQuantParam_Destroy oh_nnquantparam_destroy_003");
2933     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2934     NN_QuantParam** quantParamsDex = &quantParams;
2935     OH_NN_ReturnCode ret = OH_NNQuantParam_Destroy(quantParamsDex);
2936     EXPECT_EQ(OH_NN_SUCCESS, ret);
2937 }
2938 
2939 /*
2940  * @tc.name: oh_nnmodel_addtensortomodel_001
 * @tc.desc: Verify the model is nullptr of the OH_NNModel_AddTensorToModel function.
2942  * @tc.type: FUNC
2943  */
2944 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_001, testing::ext::TestSize.Level0)
2945 {
2946     LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_001");
2947     TensorDesc* tensorDescImpl = new (std::nothrow) TensorDesc();
2948     NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(tensorDescImpl);
2949     OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(nullptr, tensor);
2950     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2951 }
2952 
2953 /*
2954  * @tc.name: oh_nnmodel_addtensortomodel_002
 * @tc.desc: Verify the tensorDesc is nullptr of the OH_NNModel_AddTensorToModel function.
2956  * @tc.type: FUNC
2957  */
2958 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_002, testing::ext::TestSize.Level0)
2959 {
2960     LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_002");
2961     OH_NNModel* model = OH_NNModel_Construct();
2962     OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(model, nullptr);
2963     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2964 }
2965 
2966 /*
2967  * @tc.name: oh_nnmodel_addtensortomodel_003
2968  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2969  * @tc.type: FUNC
2970  */
2971 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_addtensortomodel_003, testing::ext::TestSize.Level0)
2972 {
2973     LOGE("OH_NNModel_AddTensorToModel oh_nnmodel_addtensortomodel_003");
2974     OH_NNModel* model = OH_NNModel_Construct();
2975     TensorDesc* tensorDescImpl = new (std::nothrow) TensorDesc();
2976     NN_TensorDesc* tensor = reinterpret_cast<NN_TensorDesc*>(tensorDescImpl);
2977     OH_NN_ReturnCode ret = OH_NNModel_AddTensorToModel(model, tensor);
2978     EXPECT_EQ(OH_NN_SUCCESS, ret);
2979 }
2980 
2981 /*
2982  * @tc.name: oh_nnmodel_settensorquantparams_001
2983  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2984  * @tc.type: FUNC
2985  */
2986 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_001, testing::ext::TestSize.Level0)
2987 {
2988     LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_001");
2989     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
2990     uint32_t index = 10;
2991     OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(nullptr, index, quantParams);
2992     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2993 }
2994 
2995 /*
2996  * @tc.name: oh_nnmodel_settensorquantparams_002
2997  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
2998  * @tc.type: FUNC
2999  */
3000 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_002, testing::ext::TestSize.Level0)
3001 {
3002     LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_002");
3003     OH_NNModel* model = OH_NNModel_Construct();
3004     uint32_t index = 10;
3005     OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(model, index, nullptr);
3006     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3007 }
3008 
3009 /*
3010  * @tc.name: oh_nnmodel_settensorquantparams_003
3011  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
3012  * @tc.type: FUNC
3013  */
3014 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensorquantparams_003, testing::ext::TestSize.Level0)
3015 {
3016     LOGE("OH_NNModel_SetTensorQuantParams oh_nnmodel_settensorquantparams_003");
3017     OH_NNModel* model = OH_NNModel_Construct();
3018     NN_QuantParam* quantParams = OH_NNQuantParam_Create();
3019     uint32_t index = 10;
3020     OH_NN_ReturnCode ret = OH_NNModel_SetTensorQuantParams(model, index, quantParams);
3021     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3022 }
3023 
3024 /*
3025  * @tc.name: oh_nnmodel_settensortype_001
3026  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
3027  * @tc.type: FUNC
3028  */
3029 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_001, testing::ext::TestSize.Level0)
3030 {
3031     LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_001");
3032     OH_NN_TensorType tensorType = OH_NN_REDUCE_MIN_KEEP_DIMS;
3033     uint32_t index = 10;
3034     OH_NN_ReturnCode ret = OH_NNModel_SetTensorType(nullptr, index, tensorType);
3035     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3036 }
3037 
3038 /*
3039  * @tc.name: oh_nnmodel_settensortype_002
3040  * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
3041  * @tc.type: FUNC
3042  */
3043 HWTEST_F(NeuralNetworkRuntimeTest, oh_nnmodel_settensortype_002, testing::ext::TestSize.Level0)
3044 {
3045     LOGE("OH_NNModel_SetTensorType oh_nnmodel_settensortype_002");
3046     OH_NNModel* model = OH_NNModel_Construct();
3047     OH_NN_TensorType tensorType = OH_NN_REDUCE_MIN_COEFF;
3048     uint32_t index = 10;
3049     OH_NN_ReturnCode ret = OH_NNModel_SetTensorType(model, index, tensorType);
3050     EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
3051 }
3052 } // namespace Unittest
3053 } // namespace NeuralNetworkRuntime
3054 } // namespace OHOS
3055