/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "matmul_builder.h"

#include "frameworks/native/transform.h"
#include "frameworks/native/validation.h"
#include "frameworks/native/ops_registry.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
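// Matmul takes two input tensors and produces one output tensor; each operator
// parameter (transposeA, transposeB, activation type) is a scalar tensor.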
static const int INPUT_NUM = 2;
static const int OUTPUT_NUM = 1;
static const int SCALE_LENGTH = 1;
static const std::string OP_NAME = "Matmul";

MatmulBuilder::MatmulBuilder() {}

MatmulBuilder::~MatmulBuilder() {}

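// Reads the transposeA flag from its parameter tensor. The tensor must be a
// boolean scalar with a non-empty data buffer.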
OH_NN_ReturnCode MatmulBuilder::SetTransposeA(std::shared_ptr<NNTensor> tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetElementCount() != SCALE_LENGTH) {
        LOGE("[Matmul] Matmul SetTransposeA failed. The transposeA should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetDataType() != OH_NN_BOOL) {
        LOGE("[Matmul] Matmul SetTransposeA failed. The transposeA should have type OH_NN_BOOL.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Matmul] SetTransposeA failed, the transposeA passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_transposeA = *static_cast<bool*>(buffer);
    return OH_NN_SUCCESS;
}

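// Reads the transposeB flag from its parameter tensor, applying the same
// scalar, boolean, and non-empty-buffer checks as SetTransposeA.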
OH_NN_ReturnCode MatmulBuilder::SetTransposeB(std::shared_ptr<NNTensor> tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetElementCount() != SCALE_LENGTH) {
        LOGE("[Matmul] Matmul SetTransposeB failed. The transposeB should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetDataType() != OH_NN_BOOL) {
        LOGE("[Matmul] Matmul SetTransposeB failed. The transposeB should have type OH_NN_BOOL.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Matmul] SetTransposeB failed, the transposeB passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_transposeB = *static_cast<bool*>(buffer);
    return OH_NN_SUCCESS;
}

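// Reads the fused activation type from its parameter tensor. The tensor must
// be an int8 scalar whose value is a valid OH_NN_FuseType; the value is then
// converted to the corresponding MindSpore Lite activation type.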
OH_NN_ReturnCode MatmulBuilder::SetActivationType(std::shared_ptr<NNTensor> tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetElementCount() != SCALE_LENGTH) {
        LOGE("[Matmul] Matmul SetActivationType failed. The activation should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetDataType() != OH_NN_INT8) {
        LOGE("[Matmul] Matmul SetActivationType failed. The activation should have type OH_NN_INT8.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Matmul] SetActivationType failed, the activationType passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    int8_t* fuseData = static_cast<int8_t*>(buffer);
    if (!OHOS::NeuralNetworkRuntime::Validation::ValidateFuseType(static_cast<OH_NN_FuseType>(*fuseData))) {
        LOGE("[Matmul] Matmul SetActivationType failed. Fuse activation type is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto fuseType = static_cast<OH_NN_FuseType>(*fuseData);
    m_activationType = NNToMS::TransfromFusionType(fuseType);
    return OH_NN_SUCCESS;
}

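// Validates the input/output indices, dispatches each parameter tensor to the
// matching setter, and marks the builder as built on success.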
OH_NN_ReturnCode MatmulBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                      const std::vector<uint32_t>& inputsIndex,
                                      const std::vector<uint32_t>& outputsIndex,
                                      const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[Matmul] Matmul Build failed. The operation has already been built; it cannot be built again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    OH_NN_ReturnCode returnCode = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[Matmul] Matmul Build failed. Passed invalid input or output indices.");
        return returnCode;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

    for (uint32_t i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        switch (tensor->GetType()) {
            case OH_NN_MATMUL_TRANSPOSE_A:
                returnCode = SetTransposeA(tensor);
                break;
            case OH_NN_MATMUL_TRANSPOSE_B:
                returnCode = SetTransposeB(tensor);
                break;
            case OH_NN_MATMUL_ACTIVATION_TYPE:
                returnCode = SetActivationType(tensor);
                break;
            default:
                LOGE("[Matmul] Parameter type is invalid, type=%d", tensor->GetType());
                return OH_NN_INVALID_PARAMETER;
        }

        if (returnCode != OH_NN_SUCCESS) {
            LOGE("[Matmul] Matmul Build failed. Passed invalid param.");
            return returnCode;
        }
    }

    // The quantization type of the first output determines that of the operator.
    SetQuantType(outputsIndex, allTensors);

    m_isBuild = true;
    m_name = OP_NAME;
    return OH_NN_SUCCESS;
}

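// Creates the MindSpore Lite MatMulFusion primitive from the collected
// attributes. Build() must have succeeded before this is called.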
LiteGraphPrimitvePtr MatmulBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[Matmul] Matmul GetPrimitive failed. Cannot get primitive before calling Build.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    auto primitive =
        mindspore::lite::MindIR_MatMulFusion_CreatePrimitive(m_transposeA, m_transposeB, m_activationType);
    LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

REGISTER_OPS(MatmulBuilder, OH_NN_OPS_MATMUL);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS