/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CCSRC_BACKEND_OPTIMIZER_GPU_ADAM_WEIGHT_DECAY_FUSION_H_
#define MINDSPORE_CCSRC_BACKEND_OPTIMIZER_GPU_ADAM_WEIGHT_DECAY_FUSION_H_

#include <memory>
#include "backend/optimizer/common/optimizer.h"

namespace mindspore {
namespace opt {
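// Fuses the chain of small operators that implements the AdamWeightDecay
// update into a single GPU kernel. In sketch form, the matched computation is
// the standard AdamW update (the exact operator pattern is given by
// DefinePattern()):
//   m      <- beta1 * m + (1 - beta1) * gradient
//   v      <- beta2 * v + (1 - beta2) * gradient * gradient
//   update <- m / (sqrt(v) + eps) + weight_decay * param
//   param  <- param - lr * update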
class AdamWeightDecayFusion : public PatternProcessPass {
 public:
  explicit AdamWeightDecayFusion(bool multigraph = true) : PatternProcessPass("adam_weight_decay_fusion", multigraph) {
    beta1_ = std::make_shared<Var>();
    one_sub_beta1_ = std::make_shared<Var>();
    beta2_ = std::make_shared<Var>();
    one_sub_beta2_ = std::make_shared<Var>();
    eps_ = std::make_shared<Var>();
    lr_ = std::make_shared<Var>();
    weight_decay_ = std::make_shared<Var>();
    param_ = std::make_shared<Var>();
    m_ = std::make_shared<Var>();
    v_ = std::make_shared<Var>();
    gradient_ = std::make_shared<Var>();
    u_ = std::make_shared<Var>();
  }
  ~AdamWeightDecayFusion() override = default;
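  // Returns the BaseRef pattern describing the unfused AdamWeightDecay
  // subgraph to be matched.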
  const BaseRef DefinePattern() const override;
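  // Replaces a matched subgraph with a single fused AdamWeightDecay node and
  // returns it, or nullptr if the match cannot be fused.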
  const AnfNodePtr Process(const FuncGraphPtr &, const AnfNodePtr &, const EquivPtr &) const override;

 private:
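  // Pattern variables; each binds to one input of the matched subgraph. Note
  // that (1 - beta1) and (1 - beta2) appear as separate inputs rather than
  // being derived from beta1_/beta2_. u_ presumably binds the monad input
  // that orders the in-place state updates (assumption; see the pattern
  // definition in the corresponding .cc file).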
  VarPtr beta1_;
  VarPtr one_sub_beta1_;
  VarPtr beta2_;
  VarPtr one_sub_beta2_;
  VarPtr eps_;
  VarPtr lr_;
  VarPtr weight_decay_;
  VarPtr param_;
  VarPtr m_;
  VarPtr v_;
  VarPtr gradient_;
  VarPtr u_;
};
}  // namespace opt
}  // namespace mindspore
#endif  // MINDSPORE_CCSRC_BACKEND_OPTIMIZER_GPU_ADAM_WEIGHT_DECAY_FUSION_H_
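// Usage sketch (illustrative, not part of this header): a GPU backend
// optimization pipeline would typically register this pass with a pass
// manager along the lines of
//   auto pm = std::make_shared<opt::PassManager>();
//   pm->AddPass(std::make_shared<opt::AdamWeightDecayFusion>());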