/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_FRONTEND_PARALLEL_OPS_INFO_LAYER_NORM_INFO_H_
#define MINDSPORE_CCSRC_FRONTEND_PARALLEL_OPS_INFO_LAYER_NORM_INFO_H_

#include <string>
#include <memory>
#include <unordered_map>
#include <vector>
#include "ir/value.h"
#include "frontend/parallel/auto_parallel/operator_costmodel.h"
#include "frontend/parallel/ops_info/operator_info.h"
#include "frontend/parallel/strategy.h"

namespace mindspore {
namespace parallel {
constexpr size_t LAYER_NORM_INPUT_SIZE = 3;
constexpr size_t LAYER_NORM_INPUT_INDEX = 0;
constexpr size_t LAYER_NORM_GAMMA_INDEX = 1;
constexpr size_t LAYER_NORM_BETA_INDEX = 2;
constexpr char BEGIN_NORM_AXIS[] = "begin_norm_axis";

// The dimensions of the input tensor starting from begin_norm_axis cannot be split; the preceding dimensions can
// be split arbitrarily. The gamma and beta strategies must match the input strategy on the normalized dimensions
// to satisfy the broadcast requirements of the internal mul and add operations.
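// Illustrative example (an assumption for clarity, not from the original source): for an input of shape
// [N, S, H] with begin_norm_axis = 2, a strategy such as ((2, 4, 1), (1), (1)) on 8 devices is valid,
// since only N and S are split and gamma/beta (both of shape [H]) stay unsplit; a strategy like
// ((2, 2, 2), (2), (2)) would be rejected for splitting the normalized dimension H.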
class LayerNormInfo : public OperatorInfo {
 public:
  LayerNormInfo(const std::string &operator_name, const Shapes &inputs_shape, const Shapes &outputs_shape,
                const PrimitiveAttrs &attrs)
      : OperatorInfo(operator_name, inputs_shape, outputs_shape, attrs, std::make_shared<LayerNormCost>()),
        begin_norm_axis_(0) {}
  ~LayerNormInfo() override = default;

  Status Init(const StrategyPtr &strategy) override;
  Status InitForCostModel(const StrategyPtr &strategy) override;
  std::vector<StrategyPtr> GenerateOpStrategies(int64_t) override;
  Status SetCostUnderStrategy(const StrategyPtr &) override;

 protected:
  Status GetAttrs() override;
  Status CheckStrategy(const StrategyPtr &strategy) override;
  // A valid strategy never splits the normalized dimensions, so the forward pass needs no communication.
  Status InferForwardCommunication() override { return SUCCESS; }
  Status InferDevMatrixShape() override;
  Status InferTensorMap() override;
  Status InferAsLossDivisor() override;
  Status CreateInputTensorMap(size_t input_index);
  // Derive matching gamma and beta strategies from each generated input strategy.
  Status GenerateGammaAndBetaStrategies(const std::vector<StrategyPtr> &sp_vector);
  Status InitShapes();

 private:
  // First normalized dimension, parsed from the begin_norm_axis attribute.
  size_t begin_norm_axis_;
  Shape input_shape_;
  Shape gamma_shape_;
  Shape beta_shape_;
};
}  // namespace parallel
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_FRONTEND_PARALLEL_OPS_INFO_LAYER_NORM_INFO_H_