/**
 * Copyright 2023 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_BACKEND_COMMON_GRAPH_KERNEL_RECOGNIZE_SOFTMAX_GRAD_EXT_H_
#define MINDSPORE_CCSRC_BACKEND_COMMON_GRAPH_KERNEL_RECOGNIZE_SOFTMAX_GRAD_EXT_H_

#include <memory>
#include "include/backend/optimizer/optimizer.h"

namespace mindspore {
namespace graphkernel {
// Pattern pass that recognizes the Mul/Sub/ReduceSum subgraph produced by the softmax gradient
// so that it can be processed as a single SoftmaxGradExt expression.
class RecognizeSoftmaxGradExt : public opt::PatternProcessPass {
 public:
  explicit RecognizeSoftmaxGradExt(bool multigraph = true)
      : PatternProcessPass("recognize_softmax_grad_ext", multigraph) {
    mul1_ = std::make_shared<Var>(std::make_shared<Primitive>("Mul"));
    mul2_ = std::make_shared<Var>(std::make_shared<Primitive>("Mul"));
    sub_ = std::make_shared<Var>(std::make_shared<Primitive>("Sub"));
    reduce_sum_ = std::make_shared<Var>(std::make_shared<Primitive>("ReduceSum"));
  }
  ~RecognizeSoftmaxGradExt() override = default;
  const BaseRef DefinePattern() const override;
  const AnfNodePtr Process(const FuncGraphPtr &, const AnfNodePtr &, const EquivPtr &) const override;

 protected:
  // Pattern variables bound to the primitive nodes matched by DefinePattern().
  VarPtr mul1_;
  VarPtr mul2_;
  VarPtr sub_;
  VarPtr reduce_sum_;
};
}  // namespace graphkernel
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_BACKEND_COMMON_GRAPH_KERNEL_RECOGNIZE_SOFTMAX_GRAD_EXT_H_