/**
 * Copyright 2024 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CCSRC_BACKEND_COMMON_GRAPH_KERNEL_TRANSPOSE_MATMUL_FUSION_H_
#define MINDSPORE_CCSRC_BACKEND_COMMON_GRAPH_KERNEL_TRANSPOSE_MATMUL_FUSION_H_
#include "include/backend/optimizer/pass.h"

namespace mindspore::graphkernel {
/**
 * @brief Fuse Transpose + MatMul into a single MatMul with the trans_a/trans_b attributes set.
 * @example
 *   %1 = Transpose(B, (1, 0))
 *   %2 = MatMul(A, %1, trans_a=false, trans_b=false)
 *   ---------->
 *   %1 = MatMul(A, B, trans_a=false, trans_b=true)
 * @example
 *   %1 = Transpose(A, (0, 1, 3, 2))
 *   %2 = BatchMatMul(%1, B, trans_a=false, trans_b=false)
 *   ---------->
 *   %1 = BatchMatMul(A, B, trans_a=true, trans_b=false)
 */
class TransposeMatmulFusion : public opt::Pass {
 public:
  TransposeMatmulFusion() : Pass("transpose_matmul_fusion") {}
  ~TransposeMatmulFusion() override = default;
  bool Run(const FuncGraphPtr &func_graph) override;
};
}  // namespace mindspore::graphkernel
#endif  // MINDSPORE_CCSRC_BACKEND_COMMON_GRAPH_KERNEL_TRANSPOSE_MATMUL_FUSION_H_
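
// Usage sketch (illustrative only): the call below relies solely on the Run
// interface declared in this header; how the pass is wired into the wider
// graph kernel optimization pipeline is an assumption, not part of this file.
//
//   auto pass = std::make_shared<mindspore::graphkernel::TransposeMatmulFusion>();
//   bool changed = pass->Run(func_graph);  // true if any Transpose was folded into a
//                                          // MatMul/BatchMatMul trans_a/trans_b attribute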