/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "add_builder.h"

#include "frameworks/native/transform.h"
#include "frameworks/native/validation.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
static const int INPUT_NUM = 2;
static const int OUTPUT_NUM = 1;
static const std::string OP_NAME = "Add";

AddBuilder::AddBuilder() {}

AddBuilder::~AddBuilder() {}

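// Parses the activation-type parameter tensor: the tensor must hold an int8
// fuse type, which is validated and then converted to the MindSpore Lite
// activation type stored in m_activationType.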
OH_NN_ReturnCode AddBuilder::SetActivation(std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();

    if (tensor->GetDataType() != OH_NN_INT8) {
        LOGE("[Add] SetActivation failed, the activationType should be type OH_NN_INT8.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Add] SetActivation failed, GetBuffer returned nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }
    int8_t* fuseData = static_cast<int8_t*>(buffer);
    if (!Validation::ValidateFuseType(static_cast<OH_NN_FuseType>(*fuseData))) {
        LOGE("[Add] SetActivation failed, fuse activation type is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }
    m_activationType = NNToMS::TransfromFusionType(static_cast<OH_NN_FuseType>(*fuseData));

    return OH_NN_SUCCESS;
}

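// Assembles the Add operation: validates the input/output indices, parses the
// activation-type parameter, and records the quantization type taken from the
// first output tensor. Build may only succeed once per builder instance.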
OH_NN_ReturnCode AddBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                   const std::vector<uint32_t>& inputsIndex,
                                   const std::vector<uint32_t>& outputsIndex,
                                   const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[Add] Build failed, the operation has already been built, cannot build again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    auto ret = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[Add] Build failed, the input or output index of the Add operation is invalid.");
        return ret;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

    for (uint32_t i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        switch (tensor->GetType()) {
            case OH_NN_ADD_ACTIVATIONTYPE:
                ret = SetActivation(tensor);
                break;
            default:
                LOGE("[Add] Build failed, param invalid, type = %d.", tensor->GetType());
                return OH_NN_INVALID_PARAMETER;
        }

        if (ret != OH_NN_SUCCESS) {
            LOGE("[Add] Build failed, passed invalid param.");
            return ret;
        }
    }

    // The quantization type of the first output determines that of the operator.
    SetQuantType(outputsIndex, allTensors);

    m_name = OP_NAME;
    m_isBuild = true;
    return OH_NN_SUCCESS;
}

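// Creates the MindSpore Lite AddFusion primitive carrying the parsed activation
// type. Returns a null primitive wrapped in the smart pointer if Build has not
// been called yet.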
LiteGraphPrimitvePtr AddBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[Add] GetPrimitive failed, cannot get primitive before calling Build.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    void* primitive = mindspore::lite::MindIR_AddFusion_CreatePrimitive(m_activationType);
    LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

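// Registers AddBuilder for the OH_NN_OPS_ADD operation type.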
REGISTER_OPS(AddBuilder, OH_NN_OPS_ADD);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS