/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ops/layernorm_builder.h"

#include "ops_test.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime::Ops;

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace UnitTest {
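// Test fixture for LayerNormBuilder. Tensor indices follow the order used by the tests:
// inputs {0, 1, 2} (presumably input, gamma and beta, given the shapes {2, 3}, {3} and {3}),
// output {3}, and parameters {4, 5, 6} for beginNormAxis, epsilon and beginParamsAxis.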
class LayerNormBuilderTest : public OpsTest {
public:
    void SetUp() override;
    void TearDown() override;

protected:
    void SaveNormAixsTensor(OH_NN_DataType dataType,
        const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type);
    void SaveEpsilonTensor(OH_NN_DataType dataType,
        const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type);
    void SaveParamAxisTensor(OH_NN_DataType dataType,
        const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type);
    void SetInputTensor(std::shared_ptr<NNTensor> inputTensor);

public:
    LayerNormBuilder m_layerNorm;
    std::vector<uint32_t> m_inputs {0, 1, 2};
    std::vector<uint32_t> m_outputs {3};
    std::vector<uint32_t> m_params {4, 5, 6};
    std::vector<int32_t> m_inputDimNorm {2, 3};
    std::vector<int32_t> m_inputDimEpsilon {3};
    std::vector<int32_t> m_inputDimParam {3};
    std::vector<int32_t> m_outputDim {3};
    std::vector<int32_t> m_paramDim {};
    std::shared_ptr<NNTensor> m_inputTensor {};
};

void LayerNormBuilderTest::SetUp() {}

void LayerNormBuilderTest::TearDown() {}

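// The next three helpers wrap the beginNormAxis, epsilon and beginParamsAxis parameters into
// NNTensors and append them to m_allTensors. Each value is heap-allocated and handed to the
// tensor via SetBuffer, which presumably takes ownership of the buffer.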
void LayerNormBuilderTest::SaveNormAixsTensor(OH_NN_DataType dataType,
    const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type)
{
    int64_t* beginNormAxisValue = new (std::nothrow) int64_t(1);
    EXPECT_NE(nullptr, beginNormAxisValue);
    std::shared_ptr<NNTensor> normAxisTensor = TransToNNTensor(dataType, dim, quantParam, type);
    normAxisTensor->SetBuffer(beginNormAxisValue, sizeof(int64_t));
    m_allTensors.emplace_back(normAxisTensor);
}

void LayerNormBuilderTest::SaveEpsilonTensor(OH_NN_DataType dataType,
    const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type)
{
    float* epsilonValue = new (std::nothrow) float(0.0f);
    EXPECT_NE(nullptr, epsilonValue);
    std::shared_ptr<NNTensor> epsilonTensor = TransToNNTensor(dataType, dim, quantParam, type);
    epsilonTensor->SetBuffer(epsilonValue, sizeof(float));
    m_allTensors.emplace_back(epsilonTensor);
}

void LayerNormBuilderTest::SaveParamAxisTensor(OH_NN_DataType dataType,
    const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type)
{
    int64_t* beginNormParamValue = new (std::nothrow) int64_t(1);
    EXPECT_NE(nullptr, beginNormParamValue);
    std::shared_ptr<NNTensor> paramAxisTensor = TransToNNTensor(dataType, dim, quantParam, type);
    paramAxisTensor->SetBuffer(beginNormParamValue, sizeof(int64_t));
    m_allTensors.emplace_back(paramAxisTensor);
}

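// Pushes the three LayerNorm input tensors into m_allTensors. The inputTensor argument is passed
// by value and only reused as a local variable, so the member m_inputTensor passed by the tests
// is left untouched.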
void LayerNormBuilderTest::SetInputTensor(std::shared_ptr<NNTensor> inputTensor)
{
    inputTensor = TransToNNTensor(OH_NN_FLOAT32, m_inputDimNorm, nullptr, OH_NN_TENSOR);
    m_allTensors.emplace_back(inputTensor);

    inputTensor = TransToNNTensor(OH_NN_FLOAT32, m_inputDimEpsilon, nullptr, OH_NN_TENSOR);
    m_allTensors.emplace_back(inputTensor);

    inputTensor = TransToNNTensor(OH_NN_FLOAT32, m_inputDimParam, nullptr, OH_NN_TENSOR);
    m_allTensors.emplace_back(inputTensor);
}

/**
 * @tc.name: layernorm_build_001
 * @tc.desc: Verify that the build function returns a successful message.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_001, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}

/**
 * @tc.name: layernorm_build_002
 * @tc.desc: Verify that the build function returns a failed message when Build() is called twice.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_002, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    EXPECT_EQ(OH_NN_SUCCESS, m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors));
    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
}

/**
 * @tc.name: layernorm_build_003
 * @tc.desc: Verify that the build function returns a failed message with invalid inputs.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_003, TestSize.Level0)
{
    m_inputs = {0, 1, 2, 3};
    m_outputs = {4};
    m_params = {5, 6, 7};

    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_004
 * @tc.desc: Verify that the build function returns a failed message with invalid outputs.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_004, TestSize.Level0)
{
    m_outputs = {3, 4};
    m_params = {5, 6, 7};

    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_005
 * @tc.desc: Verify that the build function returns a failed message with empty allTensors.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_005, TestSize.Level0)
{
    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputs, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_006
 * @tc.desc: Verify that the build function returns a failed message with invalid allTensors.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_006, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputs, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_007
 * @tc.desc: Verify that the build function returns a failed message with an invalid data type for beginNormAxis.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_007, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);

    std::shared_ptr<NNTensor> normAxisTensor;
    normAxisTensor = TransToNNTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    float beginNormAxisValue = 1e-7f;
    normAxisTensor->SetBuffer(&beginNormAxisValue, sizeof(beginNormAxisValue));
    m_allTensors.emplace_back(normAxisTensor);

    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
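    // Detach the stack-allocated buffer before the tensor goes out of scope; this assumes the
    // tensor would otherwise try to release the buffer it was given via SetBuffer.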
    normAxisTensor->SetBuffer(nullptr, 0);
}

/**
 * @tc.name: layernorm_build_008
 * @tc.desc: Verify that the build function returns a failed message with an invalid dimension for beginNormAxis.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_008, TestSize.Level0)
{
    std::vector<int32_t> expectParamDim = {2};

    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);

    std::shared_ptr<NNTensor> normAxisTensor;
    normAxisTensor = TransToNNTensor(OH_NN_INT64, expectParamDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    int64_t beginNormAxisValue[2] = {1, 2};
    normAxisTensor->SetBuffer(beginNormAxisValue, 2 * sizeof(int64_t));
    m_allTensors.emplace_back(normAxisTensor);

    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    normAxisTensor->SetBuffer(nullptr, 0);
}

/**
 * @tc.name: layernorm_build_009
 * @tc.desc: Verify that the build function returns a failed message with an invalid data type for epsilon.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_009, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    std::shared_ptr<NNTensor> epsilonTensor;
    epsilonTensor = TransToNNTensor(OH_NN_INT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    int32_t epsilonValue = 1;
    epsilonTensor->SetBuffer(&epsilonValue, sizeof(epsilonValue));
    m_allTensors.emplace_back(epsilonTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    epsilonTensor->SetBuffer(nullptr, 0);
}

/**
 * @tc.name: layernorm_build_010
 * @tc.desc: Verify that the build function returns a failed message with an invalid dimension for epsilon.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_010, TestSize.Level0)
{
    std::vector<int32_t> expectParamDim = {2};

    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    std::shared_ptr<NNTensor> epsilonTensor;
    epsilonTensor = TransToNNTensor(OH_NN_FLOAT32, expectParamDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    float epsilonValue[2] = {1e-7f, 1e-7f};
    epsilonTensor->SetBuffer(epsilonValue, 2 * sizeof(float));
    m_allTensors.emplace_back(epsilonTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    epsilonTensor->SetBuffer(nullptr, 0);
}

/**
 * @tc.name: layernorm_build_011
 * @tc.desc: Verify that the build function returns a failed message with an invalid data type for beginParamAxis.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_011, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);

    std::shared_ptr<NNTensor> paramAxisTensor;
    paramAxisTensor = TransToNNTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);
    float beginNormParamValue = 1;
    paramAxisTensor->SetBuffer(&beginNormParamValue, sizeof(beginNormParamValue));
    m_allTensors.emplace_back(paramAxisTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    paramAxisTensor->SetBuffer(nullptr, 0);
}

/**
 * @tc.name: layernorm_build_012
 * @tc.desc: Verify that the build function returns a failed message with an invalid dimension for beginParamAxis.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_012, TestSize.Level0)
{
    std::vector<int32_t> expectParamDim = {2};

    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);

    std::shared_ptr<NNTensor> paramAxisTensor;
    paramAxisTensor = TransToNNTensor(OH_NN_INT64, expectParamDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);
    int64_t beginNormParamValue[2] = {1, 1};
    paramAxisTensor->SetBuffer(beginNormParamValue, 2 * sizeof(int64_t));
    m_allTensors.emplace_back(paramAxisTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    paramAxisTensor->SetBuffer(nullptr, 0);
}

/**
 * @tc.name: layernorm_build_013
 * @tc.desc: Verify that the build function returns a failed message with an invalid parameter type.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_013, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_BATCH_NORM_EPSILON);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_014
 * @tc.desc: Verify that the build function returns a failed message when the buffer for normAxis is not set.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_014, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    std::shared_ptr<NNTensor> normAxisTensor;
    normAxisTensor = TransToNNTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    m_allTensors.emplace_back(normAxisTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_015
 * @tc.desc: Verify that the build function returns a failed message when the buffer for epsilon is not set.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_015, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    std::shared_ptr<NNTensor> epsilonTensor;
    epsilonTensor = TransToNNTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    m_allTensors.emplace_back(epsilonTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_build_016
 * @tc.desc: Verify that the build function returns a failed message when the buffer for paramsAxis is not set.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_build_016, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);

    std::shared_ptr<NNTensor> paramAxisTensor;
    paramAxisTensor = TransToNNTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);
    m_allTensors.emplace_back(paramAxisTensor);

    OH_NN_ReturnCode ret = m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.name: layernorm_getprimitive_001
 * @tc.desc: Verify that the getPrimitive function returns a successful message.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_getprimitive_001, TestSize.Level0)
{
    SetInputTensor(m_inputTensor);
    SaveOutputTensor(m_outputs, OH_NN_FLOAT32, m_outputDim, nullptr);
    SaveNormAixsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_NORM_AXIS);
    SaveEpsilonTensor(OH_NN_FLOAT32, m_paramDim, nullptr, OH_NN_LAYER_NORM_EPSILON);
    SaveParamAxisTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS);

    int64_t beginNormAxisValue = 1;
    float epsilonValue = 0.0f;
    int64_t beginNormParamValue = 1;
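    // After a successful Build, the returned primitive should carry the parameter values stored by
    // the Save* helpers above (beginNormAxis = 1, epsilon = 0.0f, beginParamsAxis = 1).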
    EXPECT_EQ(OH_NN_SUCCESS, m_layerNorm.Build(m_params, m_inputs, m_outputsIndex, m_allTensors));
    LiteGraphPrimitvePtr primitive = m_layerNorm.GetPrimitive();
    LiteGraphPrimitvePtr expectPrimitive(nullptr, DestroyLiteGraphPrimitive);
    EXPECT_NE(expectPrimitive, primitive);
    auto beginNormAxisReturn = mindspore::lite::MindIR_LayerNormFusion_GetBeginNormAxis(primitive.get());
    EXPECT_EQ(beginNormAxisReturn, beginNormAxisValue);
    auto epsilonReturn = mindspore::lite::MindIR_LayerNormFusion_GetEpsilon(primitive.get());
    EXPECT_EQ(epsilonReturn, epsilonValue);
    auto beginParamsAxisReturn = mindspore::lite::MindIR_LayerNormFusion_GetBeginParamsAxis(primitive.get());
    EXPECT_EQ(beginParamsAxisReturn, beginNormParamValue);
}

/**
 * @tc.name: layernorm_getprimitive_002
 * @tc.desc: Verify that the getPrimitive function returns a failed message without calling Build() first.
 * @tc.type: FUNC
 */
HWTEST_F(LayerNormBuilderTest, layernorm_getprimitive_002, TestSize.Level0)
{
    LayerNormBuilder layerNorm;
    LiteGraphPrimitvePtr primitive = layerNorm.GetPrimitive();
    LiteGraphPrimitvePtr expectPrimitive(nullptr, DestroyLiteGraphPrimitive);
    EXPECT_EQ(expectPrimitive, primitive);
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS