/**
 * Copyright 2020-2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_ACTIVATION_H_
#define MINDSPORE_CORE_OPS_ACTIVATION_H_
#include "mindapi/base/types.h"
#include "ops/base_operator.h"

namespace mindspore {
namespace ops {
constexpr auto kNameActivation = "Activation";
/// \brief Activation defines the Activation operator prototype of lite.
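///
/// A minimal usage sketch (illustrative only, not part of this header; RELU is assumed to be a
/// value of ActivationType declared in mindapi/base/types.h):
///
/// \code
///   ops::Activation act;
///   act.Init(/*alpha=*/0.2, /*min_val=*/-1.0, /*max_val=*/1.0, RELU, /*approximate=*/false);
///   ActivationType type = act.get_activation_type();  // RELU
/// \endcode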
class MIND_API Activation : public BaseOperator {
 public:
  MIND_API_BASE_MEMBER(Activation);
  /// \brief Constructor.
  Activation() : BaseOperator(kNameActivation) {}

  /// \brief Method to init the op's attributes.
  ///
  /// \param[in] alpha Define a size factor.
  /// \param[in] min_val Define a lower bound.
  /// \param[in] max_val Define an upper bound.
  /// \param[in] activation_type Define the activation type.
  /// \param[in] approximate Define a boolean value to decide whether to use an approximate algorithm, only useful for
  ///            GELU.
  void Init(const float alpha = 0.2, const float min_val = -1.0, const float max_val = 1.0,
            const ActivationType &activation_type = NO_ACTIVATION, bool approximate = false);

  /// \brief Method to set alpha attribute.
  ///
  /// \param[in] alpha Define a size factor.
  void set_alpha(const float alpha);

  /// \brief Method to set min_val attribute.
  ///
  /// \param[in] min_val Define a lower bound.
  void set_min_val(const float min_val);

  /// \brief Method to set max_val attribute.
  ///
  /// \param[in] max_val Define an upper bound.
  void set_max_val(const float max_val);

  /// \brief Method to set activation type.
  ///
  /// \param[in] activation_type Define the activation type.
  void set_activation_type(const ActivationType &activation_type);

  /// \brief Method to get alpha attribute.
  ///
  /// \return alpha attribute.
  float get_alpha() const;

  /// \brief Method to get min_val attribute.
  ///
  /// \return min_val attribute.
  float get_min_val() const;

  /// \brief Method to get max_val attribute.
  ///
  /// \return max_val attribute.
  float get_max_val() const;

  /// \brief Method to get activation type.
  ///
  /// \return activation type.
  ActivationType get_activation_type() const;

  /// \brief Method to set approximate attribute.
  ///
  /// \param[in] approximate Define a boolean value to decide whether to use an approximate algorithm, only useful for
  ///            GELU.
  void set_approximate(bool approximate);

  /// \brief Method to get approximate attribute.
  ///
  /// \return approximate attribute.
  bool get_approximate() const;
};
}  // namespace ops
}  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_ACTIVATION_H_