1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "gelu_builder.h"
17 
18 #include "mindir.h"
19 #include "ops_registry.h"
20 
21 namespace OHOS {
22 namespace NeuralNetworkRuntime {
23 namespace Ops {
static const int INPUT_NUMS = 1;      // Gelu consumes exactly one input tensor.
static const int OUTPUT_NUMS = 1;     // Gelu produces exactly one output tensor.
static const int PARAM_MAX_NUM = 1;   // At most one parameter tensor (approximate).
static const int SCALAR_LENGTH = 1;   // Element count expected of a scalar parameter.
static const std::string OP_NAME = "Gelu";  // Operator name reported via m_name.
29 
GeluBuilder()30 GeluBuilder::GeluBuilder() {}
31 
~GeluBuilder()32 GeluBuilder::~GeluBuilder() {}
33 
SetApproximate(const std::shared_ptr<NNTensor> & tensor)34 OH_NN_ReturnCode GeluBuilder::SetApproximate(const std::shared_ptr<NNTensor>& tensor)
35 {
36     if (tensor->GetDataType() != OH_NN_BOOL) {
37         LOGE("[GeLU] The approximate should be type OH_NN_BOOL.");
38         return OH_NN_INVALID_PARAMETER;
39     }
40 
41     if (tensor->GetElementCount() != SCALAR_LENGTH) {
42         LOGE("[GeLU] The approximate should be scalar.");
43         return OH_NN_INVALID_PARAMETER;
44     }
45 
46     void* buffer = tensor->GetBuffer();
47     if (buffer == nullptr) {
48         LOGE("[GeLU] Tensor buffer is nullptr.");
49         return OH_NN_INVALID_PARAMETER;
50     }
51     m_approximate = *(static_cast<bool*>(buffer));
52 
53     return OH_NN_SUCCESS;
54 }
55 
Build(const std::vector<uint32_t> & paramsIndex,const std::vector<uint32_t> & inputsIndex,const std::vector<uint32_t> & outputsIndex,const std::vector<std::shared_ptr<NNTensor>> & allTensors)56 OH_NN_ReturnCode GeluBuilder::Build(const std::vector<uint32_t>& paramsIndex,
57                                     const std::vector<uint32_t>& inputsIndex,
58                                     const std::vector<uint32_t>& outputsIndex,
59                                     const std::vector<std::shared_ptr<NNTensor>>& allTensors)
60 {
61     if (m_isBuild) {
62         LOGE("[Gelu] Build failed, operation has been build, cannot build again");
63         return OH_NN_OPERATION_FORBIDDEN;
64     }
65 
66     OH_NN_ReturnCode returnCode = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUMS, OUTPUT_NUMS);
67     if (returnCode != OH_NN_SUCCESS) {
68         LOGE("[Gelu] Build failed, passed invalid input or output indices.");
69         return returnCode;
70     }
71 
72     returnCode = CheckParamIndex(paramsIndex, allTensors, PARAM_MAX_NUM);
73     if (returnCode != OH_NN_SUCCESS) {
74         LOGE("[Gelu] Build failed, passed invalid param indices.");
75         return returnCode;
76     }
77 
78     for (int i : paramsIndex) {
79         std::shared_ptr<NNTensor> tensor = allTensors[i];
80         tensor->IdentifyOpParameter();
81         if (m_paramMap.find(tensor->GetType()) != m_paramMap.end()) {
82             returnCode = (this->*(m_paramMap[tensor->GetType()]))(tensor);
83         } else {
84             LOGE("[Gelu] Build failed, param invalid, type=%d", tensor->GetType());
85             return OH_NN_INVALID_PARAMETER;
86         }
87 
88         if (returnCode != OH_NN_SUCCESS) {
89             LOGE("[Gelu] Build failed, passed invalid param.");
90             return returnCode;
91         }
92     }
93 
94     m_inputsIndex = inputsIndex;
95     m_outputsIndex = outputsIndex;
96 
97     m_isBuild = true;
98     m_name = OP_NAME;
99     return OH_NN_SUCCESS;
100 }
101 
GetPrimitive()102 LiteGraphPrimitvePtr GeluBuilder::GetPrimitive()
103 {
104     if (!m_isBuild) {
105         LOGE("[Gelu] GetPrimitive failed, cannot get primitive before call build.");
106         return {nullptr, DestroyLiteGraphPrimitive};
107     }
108 
109     mindspore::lite::ActivationType activationType = mindspore::lite::ACTIVATION_TYPE_GELU;
110     float alpha = 0.0f;
111     float minVal = 0.0f;
112     float maxVal = 0.0f;
113     void* primitive = mindspore::lite::MindIR_Activation_CreatePrimitive(activationType,
114         alpha, minVal, maxVal, m_approximate);
115     LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
116     return graphPrimitivePtr;
117 }
118 
// Registers this builder so OH_NN_OPS_GELU resolves to GeluBuilder in the ops registry.
REGISTER_OPS(GeluBuilder, OH_NN_OPS_GELU);
120 } // namespace Ops
121 } // namespace NeuralNetworkRuntime
122 } // namespace OHOS