1 /*
2 * Copyright (c) 2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ops/log_softmax_builder.h"
17
18 #include "ops_test.h"
19
20 using namespace testing;
21 using namespace testing::ext;
22 using namespace OHOS::NeuralNetworkRuntime::Ops;
23
24 namespace OHOS {
25 namespace NeuralNetworkRuntime {
26 namespace UnitTest {
// Test fixture for LogSoftmaxBuilder. Provides the default tensor layout
// (one input, one output, one axis parameter) shared by all test cases below.
class LogSoftmaxBuilderTest : public OpsTest {
public:
    void SetUp() override;
    void TearDown() override;

protected:
    // Builds the axis parameter tensor (int64 scalar, value 0) and appends it
    // to m_allTensors.
    void SaveParamsTensor(OH_NN_DataType dataType,
        const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type);

protected:
    LogSoftmaxBuilder m_builder;
    std::vector<uint32_t> m_inputs {0};   // indices of input tensors in m_allTensors
    std::vector<uint32_t> m_outputs {1};  // indices of output tensors
    std::vector<uint32_t> m_params {2};   // indices of parameter (axis) tensors
    std::vector<int32_t> m_dim {2, 2};    // shape used for input/output tensors
    std::vector<int32_t> m_paramDim {};   // axis parameter is a scalar (empty shape)
};
44
// No per-test setup is needed beyond what the OpsTest base class provides.
void LogSoftmaxBuilderTest::SetUp() {}
46
// No per-test cleanup is needed beyond what the OpsTest base class provides.
void LogSoftmaxBuilderTest::TearDown() {}
48
SaveParamsTensor(OH_NN_DataType dataType,const std::vector<int32_t> & dim,const OH_NN_QuantParam * quantParam,OH_NN_TensorType type)49 void LogSoftmaxBuilderTest::SaveParamsTensor(OH_NN_DataType dataType,
50 const std::vector<int32_t> &dim, const OH_NN_QuantParam* quantParam, OH_NN_TensorType type)
51 {
52 std::shared_ptr<NNTensor> axisTensor = TransToNNTensor(dataType, dim, quantParam, type);
53 int64_t* axisValue = new (std::nothrow) int64_t[1] {0};
54 EXPECT_NE(nullptr, axisValue);
55 axisTensor->SetBuffer(axisValue, sizeof(int64_t));
56 m_allTensors.emplace_back(axisTensor);
57 }
58
59 /**
60 * @tc.name: log_softmax_build_001
61 * @tc.desc: Verify that the build function returns a successful message.
62 * @tc.type: FUNC
63 */
64 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_001, TestSize.Level1)
65 {
66 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
67 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
68 SaveParamsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LOG_SOFTMAX_AXIS);
69
70 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
71 EXPECT_EQ(OH_NN_SUCCESS, ret);
72 }
73
74 /**
75 * @tc.name: log_softmax_build_002
76 * @tc.desc: Verify that the build function returns a failed message with true m_isBuild.
77 * @tc.type: FUNC
78 */
79 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_002, TestSize.Level1)
80 {
81 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
82 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
83 SaveParamsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LOG_SOFTMAX_AXIS);
84
85 EXPECT_EQ(OH_NN_SUCCESS, m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors));
86 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
87 EXPECT_EQ(OH_NN_OPERATION_FORBIDDEN, ret);
88 }
89
90 /**
91 * @tc.name: log_softmax_build_003
92 * @tc.desc: Verify that the build function returns a failed message with invalided input.
93 * @tc.type: FUNC
94 */
95 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_003, TestSize.Level1)
96 {
97 m_inputs = {0, 1};
98 m_outputs = {3};
99 m_params = {4};
100
101 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
102 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
103 SaveParamsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LOG_SOFTMAX_AXIS);
104
105 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
106 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
107 }
108
109 /**
110 * @tc.name: log_softmax_build_004
111 * @tc.desc: Verify that the build function returns a failed message with invalided output.
112 * @tc.type: FUNC
113 */
114 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_004, TestSize.Level1)
115 {
116 m_outputs = {1, 2};
117 m_params = {3};
118
119 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
120 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
121 SaveParamsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LOG_SOFTMAX_AXIS);
122
123 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
124 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
125 }
126
127 /**
128 * @tc.name: log_softmax_build_005
129 * @tc.desc: Verify that the build function returns a failed message with empty allTensor.
130 * @tc.type: FUNC
131 */
132 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_005, TestSize.Level1)
133 {
134 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputs, m_outputs, m_allTensors);
135 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
136 }
137
138 /**
139 * @tc.name: log_softmax_build_006
140 * @tc.desc: Verify that the build function returns a failed message without output tensor.
141 * @tc.type: FUNC
142 */
143 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_006, TestSize.Level1)
144 {
145 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
146
147 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputs, m_allTensors);
148 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
149 }
150
151 /**
152 * @tc.name: log_softmax_build_007
153 * @tc.desc: Verify that the build function returns a failed message with invalid keep_dims's dataType.
154 * @tc.type: FUNC
155 */
156 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_007, TestSize.Level1)
157 {
158 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
159 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
160
161 std::shared_ptr<NNTensor> axisTensor = TransToNNTensor(OH_NN_FLOAT32, m_paramDim,
162 nullptr, OH_NN_LOG_SOFTMAX_AXIS);
__anona5cf5d540102null163 float* axisValue = new (std::nothrow) float[1] {0.0f};
164 axisTensor->SetBuffer(axisValue, sizeof(float));
165 m_allTensors.emplace_back(axisTensor);
166
167 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
168 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
169 }
170
171 /**
172 * @tc.name: log_softmax_build_008
173 * @tc.desc: Verify that the build function returns a failed message with passing invalid keep_dims param.
174 * @tc.type: FUNC
175 */
176 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_008, TestSize.Level1)
177 {
178 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
179 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
180 SaveParamsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_MUL_ACTIVATION_TYPE);
181
182 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
183 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
184 }
185
186 /**
187 * @tc.name: log_softmax_build_009
188 * @tc.desc: Verify that the build function returns a failed message without set buffer for keep_dims.
189 * @tc.type: FUNC
190 */
191 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_build_009, TestSize.Level1)
192 {
193 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
194 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
195
196 std::shared_ptr<NNTensor> axisTensor = TransToNNTensor(OH_NN_INT64, m_paramDim,
197 nullptr, OH_NN_LOG_SOFTMAX_AXIS);
198 m_allTensors.emplace_back(axisTensor);
199
200 OH_NN_ReturnCode ret = m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors);
201 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
202 }
203
204 /**
205 * @tc.name: log_softmax_getprimitive_001
206 * @tc.desc: Verify that the getPrimitive function returns a successful message
207 * @tc.type: FUNC
208 */
209 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_getprimitive_001, TestSize.Level1)
210 {
211 SaveInputTensor(m_inputs, OH_NN_INT32, m_dim, nullptr);
212 SaveOutputTensor(m_outputs, OH_NN_INT32, m_dim, nullptr);
213 SaveParamsTensor(OH_NN_INT64, m_paramDim, nullptr, OH_NN_LOG_SOFTMAX_AXIS);
214
215 int64_t axisValue = 0;
216 EXPECT_EQ(OH_NN_SUCCESS, m_builder.Build(m_params, m_inputsIndex, m_outputsIndex, m_allTensors));
217 LiteGraphPrimitvePtr primitive = m_builder.GetPrimitive();
218 LiteGraphPrimitvePtr expectPrimitive(nullptr, DestroyLiteGraphPrimitive);
219 EXPECT_NE(expectPrimitive, primitive);
220
221 auto returnValue = mindspore::lite::MindIR_LogSoftmax_GetAxis(primitive.get());
222 EXPECT_EQ(returnValue, axisValue);
223 }
224
225 /**
226 * @tc.name: log_softmax_getprimitive_002
227 * @tc.desc: Verify that the getPrimitive function returns a failed message without build.
228 * @tc.type: FUNC
229 */
230 HWTEST_F(LogSoftmaxBuilderTest, log_softmax_getprimitive_002, TestSize.Level1)
231 {
232 LiteGraphPrimitvePtr primitive = m_builder.GetPrimitive();
233 LiteGraphPrimitvePtr expectPrimitive(nullptr, DestroyLiteGraphPrimitive);
234 EXPECT_EQ(expectPrimitive, primitive);
235 }
236 }
237 }
238 }