/**
 * Copyright 2020-2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_LAYER_NORM_FUSION_H_
#define MINDSPORE_CORE_OPS_LAYER_NORM_FUSION_H_
#include <memory>
#include <vector>

#include "mindapi/base/types.h"
#include "ops/base_operator.h"
#include "abstract/abstract_value.h"

namespace mindspore {
namespace ops {
constexpr auto kNameLayerNormFusion = "LayerNormFusion";
/// \brief LayerNormFusion defines the LayerNorm operator prototype for MindSpore Lite.
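///
/// A minimal usage sketch (illustrative, not part of the original file; the include path is an
/// assumption based on the header guard):
/// \code
/// #include "ops/layer_norm_fusion.h"  // assumed include path
///
/// mindspore::ops::LayerNormFusion layer_norm;
/// // Normalize starting at axis 1; the affine parameters also start at axis 1.
/// layer_norm.Init(/* begin_norm_axis */ 1, /* begin_params_axis */ 1,
///                 /* epsilon */ 1e-7f, /* elementwise_affine */ true);
/// int64_t begin_norm_axis = layer_norm.get_begin_norm_axis();  // 1
/// float epsilon = layer_norm.get_epsilon();                    // 1e-7f
/// bool affine = layer_norm.get_elementwise_affine();           // true
/// \endcode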
class MIND_API LayerNormFusion : public BaseOperator {
 public:
  MIND_API_BASE_MEMBER(LayerNormFusion);
  /// \brief Constructor.
  LayerNormFusion() : BaseOperator(kNameLayerNormFusion) {}

  /// \brief Method to init the op's attributes.
  ///
  /// \param[in] begin_norm_axis Define the first normalization dimension of input.
  /// \param[in] begin_params_axis Define the first parameter dimension.
  /// \param[in] epsilon Define a value added to the denominator for numerical stability.
  /// \param[in] elementwise_affine Define a boolean value that indicates whether the operation is element-wise.
  void Init(const int64_t begin_norm_axis = 1, const int64_t begin_params_axis = 1, const float epsilon = 1e-7,
            const bool elementwise_affine = false);

  /// \brief Set begin_norm_axis.
  void set_begin_norm_axis(const int64_t begin_norm_axis);
  /// \brief Set begin_params_axis.
  void set_begin_params_axis(const int64_t begin_params_axis);
  /// \brief Set epsilon.
  void set_epsilon(const float epsilon);
  /// \brief Get begin_norm_axis.
  ///
  /// \return begin_norm_axis.
  int64_t get_begin_norm_axis() const;
  /// \brief Get begin_params_axis.
  ///
  /// \return begin_params_axis.
  int64_t get_begin_params_axis() const;
  /// \brief Get epsilon.
  ///
  /// \return epsilon.
  float get_epsilon() const;

  /// \brief Method to set elementwise_affine attribute.
  ///
  /// \param[in] elementwise_affine Define a boolean value that indicates whether the operation is element-wise.
  void set_elementwise_affine(const bool elementwise_affine);

  /// \brief Method to get elementwise_affine attribute.
  ///
  /// \return elementwise_affine.
  bool get_elementwise_affine() const;
};

abstract::AbstractBasePtr LayerNormFusionInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                                               const std::vector<abstract::AbstractBasePtr> &input_args);
}  // namespace ops
}  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_LAYER_NORM_FUSION_H_