/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ops/fullconnection_builder.h"

#include "ops_test.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime::Ops;

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace UnitTest {
class FullConnectionBuilderTest : public OpsTest {
public:
    void SetUp() override;
    void TearDown() override;

    void SetInputToAlltensor();
    void SetActivation(OH_NN_DataType dataType,
        const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type);
    void SetHasBias(OH_NN_DataType dataType,
        const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type);

public:
    FullConnectionBuilder m_builder;
    std::vector<uint32_t> m_inputs {0, 1, 2};   // indices of the input, weight and bias tensors
    std::vector<uint32_t> m_outputs {3};        // index of the output tensor
    std::vector<uint32_t> m_params {4, 5};      // indices of the activation-type and has-bias parameters
    std::vector<int32_t> m_output_dim {2, 2};
    std::vector<int32_t> m_param_dim {};
};

void FullConnectionBuilderTest::SetUp() {}

void FullConnectionBuilderTest::TearDown() {}

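// Push the three FullConnection inputs into m_allTensors: a 2x2 input tensor,
// a 2x2 weight tensor filled with 1, and a bias tensor of length 2 filled with 0.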
void FullConnectionBuilderTest::SetInputToAlltensor()
{
    std::vector<int32_t> m_input_dim{2, 2};
    std::vector<int32_t> biasDim = {2};
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(OH_NN_FLOAT32, m_input_dim, nullptr, OH_NN_TENSOR);
    m_allTensors.emplace_back(tensor);

    int32_t numWeight = 4;
    int32_t numBias = 2;
    tensor = TransToNNTensor(OH_NN_FLOAT32, m_input_dim, nullptr, OH_NN_TENSOR);
    float* valueWeight = new (std::nothrow) float[4]{1, 1, 1, 1};
    EXPECT_NE(nullptr, valueWeight);

    tensor->SetBuffer(valueWeight, numWeight * sizeof(float));
    m_allTensors.emplace_back(tensor);

    tensor = TransToNNTensor(OH_NN_FLOAT32, biasDim, nullptr, OH_NN_TENSOR);
    float* valueBias = new (std::nothrow) float[2]{0, 0};
    EXPECT_NE(nullptr, valueBias);
    tensor->SetBuffer(valueBias, numBias * sizeof(float));
    m_allTensors.emplace_back(tensor);
}

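// Append an activation-type parameter tensor (int8 buffer, value 0) to m_allTensors.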
void FullConnectionBuilderTest::SetActivation(OH_NN_DataType dataType,
    const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type)
{
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(dataType, dim, quantParam, type);
    int8_t* activationValue = new (std::nothrow) int8_t(0);
    EXPECT_NE(nullptr, activationValue);

    tensor->SetBuffer(activationValue, sizeof(int8_t));
    m_allTensors.emplace_back(tensor);
}

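// Append a has-bias parameter tensor (bool buffer, value true) to m_allTensors.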
void FullConnectionBuilderTest::SetHasBias(OH_NN_DataType dataType,
    const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type)
{
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(dataType, dim, quantParam, type);
    bool* hasBiasValue = new (std::nothrow) bool(true);
    EXPECT_NE(nullptr, hasBiasValue);

    tensor->SetBuffer(hasBiasValue, sizeof(bool));
    m_allTensors.emplace_back(tensor);
}

/**
 * @tc.name: fullconnection_build_001
 * @tc.desc: Verify the success of the build function
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_001, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);

    EXPECT_EQ(OH_NN_SUCCESS, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_002
 * @tc.desc: Verify that a second call to the build function is forbidden
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_002, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);

    EXPECT_EQ(OH_NN_SUCCESS, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_003
 * @tc.desc: Verify that the build function fails when the output is missing
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_003, TestSize.Level1)
{
    m_outputs = {};
    m_params = {3, 4};   // without the output tensor, the parameter tensors move to indices 3 and 4
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);

    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_004
 * @tc.desc: Verify that the build function fails when an input index is out of bounds
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_004, TestSize.Level1)
{
    m_inputs = {0, 1, 6};   // index 6 does not exist in m_allTensors
    m_outputs = {3};
    m_params = {4, 5};

    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);

    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_005
 * @tc.desc: Verify that the build function fails when the activation type tensor has data type int32 instead of int8
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_005, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(OH_NN_INT32, m_param_dim, nullptr,
        OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    int32_t *activationValue = new (std::nothrow) int32_t(0);
    EXPECT_NE(nullptr, activationValue);

    tensor->SetBuffer(activationValue, sizeof(int32_t));
    m_allTensors.emplace_back(tensor);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_006
 * @tc.desc: Verify that the build function fails when the hasBias tensor has data type int32 instead of bool
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_006, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(OH_NN_INT32, m_param_dim, nullptr,
        OH_NN_FULL_CONNECTION_HAS_BIAS);
    int32_t *hasBiasValue = new (std::nothrow) int32_t(1);
    EXPECT_NE(nullptr, hasBiasValue);

    tensor->SetBuffer(hasBiasValue, sizeof(int32_t));
    m_allTensors.emplace_back(tensor);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_008
 * @tc.desc: Verify that the build function fails when the activation type tensor is not a scalar
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_008, TestSize.Level1)
{
    m_param_dim = {2};   // give the parameter tensors a non-scalar shape
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(OH_NN_INT8, m_param_dim, nullptr,
        OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    int8_t *activationValue = new (std::nothrow) int8_t[2]{0, 0};
    EXPECT_NE(nullptr, activationValue);

    tensor->SetBuffer(activationValue, 2 * sizeof(int8_t));
    m_allTensors.emplace_back(tensor);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_009
 * @tc.desc: Verify that the build function fails when the activation value is invalid
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_009, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(OH_NN_INT8, m_param_dim, nullptr,
        OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    int8_t *activationValue = new (std::nothrow) int8_t(10);   // 10 is not a valid activation type
    EXPECT_NE(nullptr, activationValue);

    tensor->SetBuffer(activationValue, sizeof(int8_t));
    m_allTensors.emplace_back(tensor);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_010
 * @tc.desc: Verify that the build function fails when an invalid parameter is passed to fullconnection
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_010, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    std::shared_ptr<NNTensor> tensor = TransToNNTensor(OH_NN_INT8, m_param_dim, nullptr,
        OH_NN_DIV_ACTIVATIONTYPE);
    int8_t *activationValue = new (std::nothrow) int8_t(0);
    EXPECT_NE(nullptr, activationValue);
    tensor->SetBuffer(activationValue, sizeof(int8_t));

    m_allTensors.emplace_back(tensor);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_build_011
 * @tc.desc: Verify that the build function fails when the hasBias tensor carries an unsupported parameter type
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_build_011, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_MUL_ACTIVATION_TYPE);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
}

/**
 * @tc.name: fullconnection_getprimitive_001
 * @tc.desc: Verify the success of the GetPrimitive function
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_getprimitive_001, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);
    EXPECT_EQ(OH_NN_SUCCESS, m_builder.Build(m_paramsIndex, m_inputsIndex, m_outputsIndex, m_allTensors));
    LiteGraphTensorPtr primitive = m_builder.GetPrimitive();
    LiteGraphTensorPtr expectPrimitive = {nullptr, DestroyLiteGraphPrimitive};
    EXPECT_NE(expectPrimitive, primitive);

    int8_t activationReturn = mindspore::lite::MindIR_FullConnection_GetActivationType(primitive.get());
    EXPECT_EQ(activationReturn, 0);
    bool hasBiasReturn = mindspore::lite::MindIR_FullConnection_GetHasBias(primitive.get());
    EXPECT_EQ(hasBiasReturn, true);
}

/**
 * @tc.name: fullconnection_getprimitive_002
 * @tc.desc: Verify that the GetPrimitive function returns nullptr when the build function has not been called
 * @tc.type: FUNC
 */
HWTEST_F(FullConnectionBuilderTest, fullconnection_getprimitive_002, TestSize.Level1)
{
    m_inputsIndex = m_inputs;
    m_paramsIndex = m_params;
    SetInputToAlltensor();

    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_output_dim, nullptr);
    SetActivation(OH_NN_INT8, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_ACTIVATIONTYPE);
    SetHasBias(OH_NN_BOOL, m_param_dim, nullptr, OH_NN_FULL_CONNECTION_HAS_BIAS);
    LiteGraphTensorPtr primitive = m_builder.GetPrimitive();
    LiteGraphTensorPtr expectPrimitive = {nullptr, DestroyLiteGraphPrimitive};
    EXPECT_EQ(expectPrimitive, primitive);
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS