/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "neural_network_runtime_inner_test.h"

#include "mindir.h"
#include "inner_model.h"

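// Unit tests for the inner model-building interfaces of Neural Network Runtime:
// OH_NNModel_BuildFromLiteGraph, OH_NNModel_BuildFromMetaGraph and OH_NNModel_SetInputsAndOutputsInfo.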
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Unittest {
void NeuralNetworkRuntimeInnerTest::SetUpTestCase(void)
{
}

void NeuralNetworkRuntimeInnerTest::TearDownTestCase(void)
{
}

void NeuralNetworkRuntimeInnerTest::SetUp(void)
{
}

void NeuralNetworkRuntimeInnerTest::TearDown(void)
{
}

/*
 * @tc.name: build_from_lite_graph_001
 * @tc.desc: Verify that OH_NNModel_BuildFromLiteGraph returns OH_NN_INVALID_PARAMETER when the model is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, build_from_lite_graph_001, testing::ext::TestSize.Level0)
{
    OH_NNModel* model = nullptr;
    OH_NN_Extension* extensions = nullptr;
    size_t extensionSize = 0;
    mindspore::lite::LiteGraph* liteGraph = new (std::nothrow) mindspore::lite::LiteGraph;
    EXPECT_NE(nullptr, liteGraph);
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromLiteGraph(model, liteGraph, extensions, extensionSize);
    delete liteGraph;
    liteGraph = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: build_from_lite_graph_002
 * @tc.desc: Verify that OH_NNModel_BuildFromLiteGraph returns OH_NN_INVALID_PARAMETER when the liteGraph is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, build_from_lite_graph_002, testing::ext::TestSize.Level0)
{
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    const void* liteGraph = nullptr;
    OH_NN_Extension* extensions = nullptr;
    size_t extensionSize = 0;
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromLiteGraph(model, liteGraph, extensions, extensionSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: build_from_lite_graph_003
 * @tc.desc: Verify that OH_NNModel_BuildFromLiteGraph succeeds when given a valid model and liteGraph.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, build_from_lite_graph_003, testing::ext::TestSize.Level0)
{
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    mindspore::lite::LiteGraph* liteGraph = new (std::nothrow) mindspore::lite::LiteGraph;
    OH_NN_Extension* extensions = nullptr;
    size_t extensionSize = 0;
    EXPECT_NE(nullptr, liteGraph);
    liteGraph->name_ = "testGraph";
    liteGraph->input_indices_ = {0};
    liteGraph->output_indices_ = {1};
    const std::vector<mindspore::lite::QuantParam> quant_params {};
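    // Append one placeholder tensor per input and output index so that the graph's index lists
    // refer to valid entries in all_tensors_; the dim/data locals below are declared but not
    // attached to the created tensors.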
    for (size_t indexInput = 0; indexInput < liteGraph->input_indices_.size(); ++indexInput) {
        const std::vector<int32_t> dim = {3, 3};
        const std::vector<uint8_t> data(36, 1);

        liteGraph->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create());
    }
    for (size_t indexOutput = 0; indexOutput < liteGraph->output_indices_.size(); ++indexOutput) {
        const std::vector<int32_t> dimOut = {3, 3};
        const std::vector<uint8_t> dataOut(36, 1);
        liteGraph->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create());
    }
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromLiteGraph(model, liteGraph, extensions, extensionSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: build_from_lite_graph_004
 * @tc.desc: Verify that OH_NNModel_BuildFromLiteGraph returns OH_NN_INVALID_PARAMETER for an incomplete liteGraph.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, build_from_lite_graph_004, testing::ext::TestSize.Level0)
{
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    mindspore::lite::LiteGraph* liteGraph = new (std::nothrow) mindspore::lite::LiteGraph;
    OH_NN_Extension* extensions = nullptr;
    size_t extensionSize = 0;
    EXPECT_NE(nullptr, liteGraph);
    liteGraph->name_ = "testGraph";
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromLiteGraph(model, liteGraph, extensions, extensionSize);
    delete innerModel;
    delete liteGraph;
    innerModel = nullptr;
    liteGraph = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: build_from_lite_graph_005
 * @tc.desc: Verify that OH_NNModel_BuildFromLiteGraph returns OH_NN_INVALID_PARAMETER when invalid extensions are passed.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, build_from_lite_graph_005, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_BuildFromLiteGraph build_from_lite_graph_005");
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    mindspore::lite::LiteGraph* liteGraph = new (std::nothrow) mindspore::lite::LiteGraph;
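    // Seven named extensions ("QuantBuffer", "ModelName", profiling and layout/dimension hints);
    // each value points at a single char while declaring a length of 8 bytes, so the extension
    // data is deliberately inconsistent and the build below is expected to be rejected.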
    char a = 'a';
    OH_NN_Extension extension1 = {"QuantBuffer", &a, 8};
    OH_NN_Extension extension2 = {"ModelName", &a, 8};
    OH_NN_Extension extension3 = {"Profiling", &a, 8};
    OH_NN_Extension extension7 = {"isProfiling", &a, 8};
    OH_NN_Extension extension4 = {"opLayout", &a, 8};
    OH_NN_Extension extension5 = {"InputDims", &a, 8};
    OH_NN_Extension extension6 = {"DynamicDims", &a, 8};
    OH_NN_Extension extension[7] = {extension1, extension2, extension7, extension4, extension5, extension6, extension3};
    size_t extensionSize = 7;
    EXPECT_NE(nullptr, liteGraph);
    liteGraph->name_ = "testGraph";
    liteGraph->input_indices_ = {0};
    liteGraph->output_indices_ = {1};
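    // Create a single INT32 tensor with an explicit shape, format, data buffer and zero
    // quantization parameters; the same tensor pointer is reused for every input and output index below.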
    mindspore::lite::DataType data_type = mindspore::lite::DataType::DATA_TYPE_INT32;
    int dim = 1;
    int32_t *dims = &dim;
    uint32_t dims_size = 1;
    mindspore::lite::Format format = mindspore::lite::Format::FORMAT_HWCK;
    uint8_t datas = 0;
    uint8_t *data = &datas;
    uint32_t data_size = 2;
    mindspore::lite::QuantParam quant_params;
    uint32_t quant_params_size = 0;
    mindspore::lite::TensorPtr ptr2 = mindspore::lite::MindIR_Tensor_Create(&a, data_type, dims, dims_size,
                               format, data, data_size,
                               &quant_params, quant_params_size);

    for (size_t indexInput = 0; indexInput < liteGraph->input_indices_.size(); ++indexInput) {
        const std::vector<int32_t> dim = {3, 3};
        const std::vector<uint8_t> data(36, 1);

        liteGraph->all_tensors_.emplace_back(ptr2);
    }
    for (size_t indexOutput = 0; indexOutput < liteGraph->output_indices_.size(); ++indexOutput) {
        const std::vector<int32_t> dimOut = {3, 3};
        const std::vector<uint8_t> dataOut(36, 1);
        liteGraph->all_tensors_.emplace_back(ptr2);
    }
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromLiteGraph(model, liteGraph, extension, extensionSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_buildfrommetagraph_001
 * @tc.desc: Verify that OH_NNModel_BuildFromMetaGraph rejects the call with OH_NN_OPERATION_FORBIDDEN.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_buildfrommetagraph_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_BuildFromMetaGraph oh_nnmodel_buildfrommetagraph_001");
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    mindspore::lite::LiteGraph* liteGraph = new (std::nothrow) mindspore::lite::LiteGraph;
    char a = 'a';
    OH_NN_Extension extension1 = {"QuantBuffer", &a, 1};
    OH_NN_Extension extension2 = {"ModelName", &a, 1};
    OH_NN_Extension extension3 = {"Profiling", &a, 1};
    OH_NN_Extension extension4 = {"opLayout", &a, 1};
    OH_NN_Extension extension[4] = {extension1, extension2, extension3, extension4};

    size_t extensionSize = 4;
    EXPECT_NE(nullptr, liteGraph);
    liteGraph->name_ = "testGraph";
    liteGraph->input_indices_ = {0};
    liteGraph->output_indices_ = {1};
    const std::vector<mindspore::lite::QuantParam> quant_params {};
    for (size_t indexInput = 0; indexInput < liteGraph->input_indices_.size(); ++indexInput) {
        const std::vector<int32_t> dim = {3, 3};
        const std::vector<uint8_t> data(36, 1);

        liteGraph->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create());
    }
    for (size_t indexOutput = 0; indexOutput < liteGraph->output_indices_.size(); ++indexOutput) {
        const std::vector<int32_t> dimOut = {3, 3};
        const std::vector<uint8_t> dataOut(36, 1);
        liteGraph->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create());
    }
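    // A LiteGraph pointer is handed to OH_NNModel_BuildFromMetaGraph in place of a real meta graph;
    // the runtime is expected to refuse the request with OH_NN_OPERATION_FORBIDDEN rather than build it.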
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromMetaGraph(model, liteGraph, extension, extensionSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/*
 * @tc.name: oh_nnmodel_buildfrommetagraph_002
 * @tc.desc: Verify that OH_NNModel_BuildFromMetaGraph returns OH_NN_INVALID_PARAMETER when the model is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_buildfrommetagraph_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_BuildFromMetaGraph oh_nnmodel_buildfrommetagraph_002");
    mindspore::lite::LiteGraph* liteGraph = new (std::nothrow) mindspore::lite::LiteGraph;
    OH_NN_Extension* extensions = nullptr;

    size_t extensionSize = 0;
    EXPECT_NE(nullptr, liteGraph);
    liteGraph->name_ = "testGraph";
    liteGraph->input_indices_ = {0};
    liteGraph->output_indices_ = {1};
    const std::vector<mindspore::lite::QuantParam> quant_params {};
    for (size_t indexInput = 0; indexInput < liteGraph->input_indices_.size(); ++indexInput) {
        const std::vector<int32_t> dim = {3, 3};
        const std::vector<uint8_t> data(36, 1);

        liteGraph->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create());
    }
    for (size_t indexOutput = 0; indexOutput < liteGraph->output_indices_.size(); ++indexOutput) {
        const std::vector<int32_t> dimOut = {3, 3};
        const std::vector<uint8_t> dataOut(36, 1);
        liteGraph->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create());
    }
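    // Even with a fully populated liteGraph, a nullptr model handle is expected to be rejected.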
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromMetaGraph(nullptr, liteGraph, extensions, extensionSize);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_buildfrommetagraph_003
 * @tc.desc: Verify that OH_NNModel_BuildFromMetaGraph returns OH_NN_INVALID_PARAMETER when the metaGraph is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_buildfrommetagraph_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_BuildFromMetaGraph oh_nnmodel_buildfrommetagraph_003");
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);
    OH_NN_Extension* extensions = nullptr;
    size_t extensionSize = 0;
    OH_NN_ReturnCode ret = OH_NNModel_BuildFromMetaGraph(model, nullptr, extensions, extensionSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_setinputsandoutputsinfo_001
 * @tc.desc: Verify that OH_NNModel_SetInputsAndOutputsInfo succeeds when valid inputs and outputs information is provided.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_setinputsandoutputsinfo_001, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetInputsAndOutputsInfo oh_nnmodel_setinputsandoutputsinfo_001");
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);

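    // Descriptors for one input and one output; the test only checks that a valid model with
    // non-zero input and output counts is accepted.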
    OH_NN_TensorInfo inputsInfo;
    size_t inputSize = 1;
    OH_NN_TensorInfo outputsInfo;
    size_t outputSize = 1;
    OH_NN_ReturnCode ret = OH_NNModel_SetInputsAndOutputsInfo(model, &inputsInfo, inputSize, &outputsInfo, outputSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/*
 * @tc.name: oh_nnmodel_setinputsandoutputsinfo_002
 * @tc.desc: Verify that OH_NNModel_SetInputsAndOutputsInfo returns OH_NN_INVALID_PARAMETER when the model is nullptr.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_setinputsandoutputsinfo_002, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetInputsAndOutputsInfo oh_nnmodel_setinputsandoutputsinfo_002");
    OH_NN_TensorInfo inputsInfo;
    size_t inputSize = 1;
    OH_NN_TensorInfo outputsInfo;
    size_t outputSize = 1;
    OH_NN_ReturnCode ret = OH_NNModel_SetInputsAndOutputsInfo(nullptr,
        &inputsInfo, inputSize, &outputsInfo, outputSize);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_setinputsandoutputsinfo_003
 * @tc.desc: Verify that OH_NNModel_SetInputsAndOutputsInfo returns OH_NN_INVALID_PARAMETER when inputSize is 0.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_setinputsandoutputsinfo_003, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetInputsAndOutputsInfo oh_nnmodel_setinputsandoutputsinfo_003");
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);

    OH_NN_TensorInfo inputsInfo;
    size_t inputSize = 0;
    OH_NN_TensorInfo outputsInfo;
    size_t outputSize = 1;
    OH_NN_ReturnCode ret = OH_NNModel_SetInputsAndOutputsInfo(model, &inputsInfo, inputSize, &outputsInfo, outputSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/*
 * @tc.name: oh_nnmodel_setinputsandoutputsinfo_004
 * @tc.desc: Verify that OH_NNModel_SetInputsAndOutputsInfo returns OH_NN_INVALID_PARAMETER when outputSize is 0.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkRuntimeInnerTest, oh_nnmodel_setinputsandoutputsinfo_004, testing::ext::TestSize.Level0)
{
    LOGE("OH_NNModel_SetInputsAndOutputsInfo oh_nnmodel_setinputsandoutputsinfo_004");
    OHOS::NeuralNetworkRuntime::InnerModel* innerModel = new (std::nothrow) OHOS::NeuralNetworkRuntime::InnerModel();
    EXPECT_NE(nullptr, innerModel);
    OH_NNModel* model = reinterpret_cast<OH_NNModel*>(innerModel);

    OH_NN_TensorInfo inputsInfo;
    size_t inputSize = 1;
    OH_NN_TensorInfo outputsInfo;
    size_t outputSize = 0;
    OH_NN_ReturnCode ret = OH_NNModel_SetInputsAndOutputsInfo(model, &inputsInfo, inputSize, &outputsInfo, outputSize);
    delete innerModel;
    innerModel = nullptr;
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
} // namespace Unittest
} // namespace NeuralNetworkRuntime
} // namespace OHOS