/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "layernorm_builder.h"

#include "mindir.h"

#include "frameworks/native/ops_registry.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
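// LayerNorm takes three inputs (the data tensor x, gamma for scale, beta for offset) and produces one output.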
static const int INPUT_NUM = 3;
static const int OUTPUT_NUM = 1;
static const int INPUT_X = 0;
static const int INPUT_GAMMA = 1;
static const int INPUT_BETA = 2;
static const std::string OP_NAME = "LayerNorm";

LayerNormBuilder::LayerNormBuilder() {}

LayerNormBuilder::~LayerNormBuilder() {}

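// Parses the beginNormAxis parameter: it must be an OH_NN_INT32 scalar with a valid buffer.
// Its value selects the first input dimension to be normalized.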
OH_NN_ReturnCode LayerNormBuilder::SetBeginNormAxis(std::shared_ptr<NNTensor> tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetDataType() != OH_NN_INT32) {
        LOGE("[LayerNormBuilder] SetBeginNormAxis failed. The beginNormAxis should be type OH_NN_INT32.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!tensor->IsScalar()) {
        LOGE("[LayerNormBuilder] SetBeginNormAxis failed. The beginNormAxis should be a scalar value.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[LayerNormBuilder] SetBeginNormAxis failed, the beginNormAxis passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_beginNormAxis = *static_cast<int32_t*>(buffer);
    return OH_NN_SUCCESS;
}

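// Parses the epsilon parameter: it must be an OH_NN_FLOAT32 scalar with a valid buffer.
// Epsilon is added to the variance to avoid division by zero during normalization.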
OH_NN_ReturnCode LayerNormBuilder::SetEpsilon(std::shared_ptr<NNTensor> tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetDataType() != OH_NN_FLOAT32) {
        LOGE("[LayerNormBuilder] SetEpsilon failed. The epsilon should be type OH_NN_FLOAT32.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!tensor->IsScalar()) {
        LOGE("[LayerNormBuilder] SetEpsilon failed. The epsilon should be a scalar value.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[LayerNormBuilder] SetEpsilon failed, the epsilon passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_epsilon = *static_cast<float*>(buffer);
    return OH_NN_SUCCESS;
}

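// Parses the beginParamsAxis parameter: it must be an OH_NN_INT32 scalar with a valid buffer.
// Its value selects the first input dimension that gamma and beta apply to.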
OH_NN_ReturnCode LayerNormBuilder::SetBeginParamsAxis(std::shared_ptr<NNTensor> tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetDataType() != OH_NN_INT32) {
        LOGE("[LayerNormBuilder] SetBeginParamsAxis failed. The beginParamsAxis should be type OH_NN_INT32.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!tensor->IsScalar()) {
        LOGE("[LayerNormBuilder] SetBeginParamsAxis failed. The beginParamsAxis should be a scalar value.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[LayerNormBuilder] SetBeginParamsAxis failed, the beginParamsAxis passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_beginParamsAxis = *static_cast<int32_t*>(buffer);
    return OH_NN_SUCCESS;
}

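// Validates the operation's inputs, outputs and parameters, then records them on the builder.
// Build() may only run once per builder instance; a second call returns OH_NN_OPERATION_FORBIDDEN.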
OH_NN_ReturnCode LayerNormBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                         const std::vector<uint32_t>& inputsIndex,
                                         const std::vector<uint32_t>& outputsIndex,
                                         const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[LayerNormBuilder] Build failed. The LayerNorm operation has already been built; it cannot be built again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    OH_NN_ReturnCode returnCode = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[LayerNormBuilder] Build failed. Passed invalid input or output index.");
        return returnCode;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

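    // Parse the optional LayerNorm parameters; each parameter tensor must match its expected type.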
    for (int i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        switch (tensor->GetType()) {
            case OH_NN_LAYER_NORM_BEGIN_NORM_AXIS:
                returnCode = SetBeginNormAxis(tensor);
                break;
            case OH_NN_LAYER_NORM_EPSILON:
                returnCode = SetEpsilon(tensor);
                break;
            case OH_NN_LAYER_NORM_BEGIN_PARAM_AXIS:
                returnCode = SetBeginParamsAxis(tensor);
                break;
            default:
                LOGE("[LayerNormBuilder] Parameter type is invalid, type=%d", tensor->GetType());
                return OH_NN_INVALID_PARAMETER;
        }

        if (returnCode != OH_NN_SUCCESS) {
            LOGE("[LayerNormBuilder] Build failed. Passed invalid param.");
            return returnCode;
        }
    }

    auto inputShape = allTensors[inputsIndex[INPUT_X]]->GetDimensions();
    int inputShapeSize = static_cast<int>(inputShape.size());
    // beginNormAxis must be at least 1, because the normalized shape cannot equal the full input shape.
    if (m_beginNormAxis >= inputShapeSize || m_beginNormAxis < 1) {
        LOGE("[LayerNormBuilder] Build failed, invalid beginNormAxis value, it should be in [1, rank(input)).");
        return OH_NN_INVALID_PARAMETER;
    }
    // Validate that the gamma and beta shapes match the normalized shape.
    returnCode = ValidateGammaAndBetaShape(inputsIndex, m_beginNormAxis, allTensors);
    if (returnCode != OH_NN_SUCCESS) {
        return returnCode;
    }

    // The quantization type of the first output determines that of the operator.
    SetQuantType(outputsIndex, allTensors);

    m_name = OP_NAME;
    m_isBuild = true;
    return OH_NN_SUCCESS;
}

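// Creates the MindIR LayerNormFusion primitive and wraps it in a smart pointer with a custom deleter.
// GetPrimitive() must be called after Build(); otherwise a null primitive is returned.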
LiteGraphPrimitvePtr LayerNormBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[LayerNormBuilder] GetPrimitive failed, cannot get primitive before calling Build.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    void* primitive = mindspore::lite::MindIR_LayerNormFusion_CreatePrimitive(m_beginNormAxis,
        m_epsilon, m_elementwiseAffine, m_beginParamsAxis);
    LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

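// Checks that gamma and beta each have exactly the normalized shape, i.e. that their dimensions
// match the input dimensions from beginAxis up to rank(input) - 1.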
OH_NN_ReturnCode LayerNormBuilder::ValidateGammaAndBetaShape(const std::vector<uint32_t>& inputsIndex,
    int beginAxis, const std::vector<std::shared_ptr<NNTensor>>& allTensors) const
{
    auto inputShape = allTensors[inputsIndex[INPUT_X]]->GetDimensions();
    auto gammaShape = allTensors[inputsIndex[INPUT_GAMMA]]->GetDimensions();
    auto betaShape = allTensors[inputsIndex[INPUT_BETA]]->GetDimensions();
    int inputShapeSize = static_cast<int>(inputShape.size());

    for (auto i = beginAxis; i < inputShapeSize; i++) {
        if (gammaShape[i - beginAxis] != inputShape[i]) {
            LOGE("[LayerNormBuilder] Invalid gamma shape, gamma shape should equal the normalized shape.");
            return OH_NN_INVALID_PARAMETER;
        }
        if (betaShape[i - beginAxis] != inputShape[i]) {
            LOGE("[LayerNormBuilder] Invalid beta shape, beta shape should equal the normalized shape.");
            return OH_NN_INVALID_PARAMETER;
        }
    }

    return OH_NN_SUCCESS;
}

REGISTER_OPS(LayerNormBuilder, OH_NN_OPS_LAYER_NORM);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS