/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "softmax_builder.h"

#include "mindir.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
static const int INPUT_NUM = 1;
static const int OUTPUT_NUM = 1;
static const std::string OP_NAME = "Softmax";

SoftmaxBuilder::SoftmaxBuilder() {}

SoftmaxBuilder::~SoftmaxBuilder() {}

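// Validates the axis parameter tensor and records its value: the tensor must be an
// OH_NN_INT64 scalar with a non-null buffer, and its single value is appended to m_axis.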
OH_NN_ReturnCode SoftmaxBuilder::SetAxis(std::shared_ptr<NNTensor> tensor)
{
    // Set axis
    if (tensor->GetDataType() != OH_NN_INT64) {
        LOGE("[SoftmaxBuilder] The 2nd input axis should be type OH_NN_INT64.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetElementCount() != 1) {
        LOGE("[SoftmaxBuilder] The 2nd input axis should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_axis.clear();

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[SoftmaxBuilder] Tensor buffer is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }
    m_axis.emplace_back(*(static_cast<const int64_t*>(buffer)));

    return OH_NN_SUCCESS;
}

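// Builds the Softmax operation: checks the input/output indices, parses the axis
// parameter from paramsIndex, and takes the quantization type from the first output.
// Build() may only be called once per builder instance.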
OH_NN_ReturnCode SoftmaxBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                       const std::vector<uint32_t>& inputsIndex,
                                       const std::vector<uint32_t>& outputsIndex,
                                       const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[SoftmaxBuilder] Softmax operation has been built; it cannot be built again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    OH_NN_ReturnCode returnCode = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[SoftmaxBuilder] Passed invalid input or output index.");
        return returnCode;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

    for (uint32_t i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        tensor->IdentifyOpParameter();
        switch (tensor->GetType()) {
            case OH_NN_SOFTMAX_AXIS:
                returnCode = SetAxis(tensor);
                break;
            default:
                LOGE("[SoftmaxBuilder] Parameter type is invalid. type=%d", tensor->GetType());
                return OH_NN_INVALID_PARAMETER;
        }

        if (returnCode != OH_NN_SUCCESS) {
            LOGE("[SoftmaxBuilder] Passed invalid param.");
            return returnCode;
        }
    }

    // The quantization type of the first output determines that of the operator.
    SetQuantType(outputsIndex, allTensors);

    m_isBuild = true;
    m_name = OP_NAME;
    return OH_NN_SUCCESS;
}

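// Exports the built operation as a MindIR Softmax primitive wrapped in a
// LiteGraphTensorPtr. Returns a null primitive if Build() has not been called
// or if primitive creation fails.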
LiteGraphTensorPtr SoftmaxBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[SoftmaxBuilder] Cannot get primitive before calling Build.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    auto primitive = mindspore::lite::MindIR_Softmax_CreatePrimitive(m_axis);
    if (primitive == nullptr) {
        LOGE("[SoftmaxBuilder] Create primitive of Softmax failed.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    LiteGraphTensorPtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

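// Registers this builder in the operator registry so the runtime can construct it
// for OH_NN_OPS_SOFTMAX nodes.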
REGISTER_OPS(SoftmaxBuilder, OH_NN_OPS_SOFTMAX);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS