/**
 * Copyright 2022 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_FUSED_ADA_FACTOR_H_
#define MINDSPORE_CORE_OPS_FUSED_ADA_FACTOR_H_

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "mindapi/base/types.h"
#include "ops/base_operator.h"

namespace mindspore {
namespace ops {
constexpr auto kNameFusedAdaFactor = "FusedAdaFactor";
constexpr auto kNameFusedAdaFactorWithGlobalNorm = "FusedAdaFactorWithGlobalNorm";
/// \brief FusedAdaFactor operation. Refer to Python API @ref mindspore.ops.FusedAdaFactor for more details.
class MIND_API FusedAdaFactor : public BaseOperator {
 public:
  MIND_API_BASE_MEMBER(FusedAdaFactor);
  /// \brief Constructor.
  FusedAdaFactor() : BaseOperator(kNameFusedAdaFactor) {}

  /// \brief Constructor with op name.
  explicit FusedAdaFactor(const std::string &name) : BaseOperator(name) {}

  /// \brief Set enable_scale_parameter.
  void set_enable_scale_parameter(bool flag);
  /// \brief Get enable_scale_parameter.
  ///
  /// \return enable_scale_parameter.
  bool get_enable_scale_parameter() const;

  /// \brief Set enable_first_moment.
  void set_enable_first_moment(bool flag);
  /// \brief Get enable_first_moment.
  ///
  /// \return enable_first_moment.
  bool get_enable_first_moment() const;

  /// \brief Set enable_weight_decay.
  void set_enable_weight_decay(bool flag);
  /// \brief Get enable_weight_decay.
  ///
  /// \return enable_weight_decay.
  bool get_enable_weight_decay() const;
};
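
// A minimal usage sketch of the flag setters/getters declared above. It only
// exercises this header's API; the flag semantics noted in the trailing
// comments are assumptions drawn from the AdaFactor optimizer, not guarantees
// of this operator.
//
//   mindspore::ops::FusedAdaFactor ada_factor;
//   ada_factor.set_enable_scale_parameter(true);   // assumed: scale updates by parameter RMS
//   ada_factor.set_enable_first_moment(false);     // assumed: keep only the factored second moment
//   ada_factor.set_enable_weight_decay(true);      // assumed: apply weight decay in the fused kernel
//   bool use_first_moment = ada_factor.get_enable_first_moment();  // reads back the stored flag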

/// \brief FusedAdaFactorWithGlobalNorm operation. Refer to Python API @ref mindspore.ops.FusedAdaFactorWithGlobalNorm
/// for more details.
class MIND_API FusedAdaFactorWithGlobalNorm : public FusedAdaFactor {
 public:
  MIND_API_BASE_MEMBER(FusedAdaFactorWithGlobalNorm);
  /// \brief Constructor.
  FusedAdaFactorWithGlobalNorm() : FusedAdaFactor(kNameFusedAdaFactorWithGlobalNorm) {}
};

MIND_API abstract::AbstractBasePtr FusedAdaFactorInfer(const abstract::AnalysisEnginePtr &,
                                                       const PrimitivePtr &primitive,
                                                       const std::vector<abstract::AbstractBasePtr> &input_args);
}  // namespace ops
}  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_FUSED_ADA_FACTOR_H_