• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/**
 * Copyright 2019 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_FRONTEND_PARALLEL_OPS_INFO_BATCH_PARALLEL_INFO_H_
#define MINDSPORE_CCSRC_FRONTEND_PARALLEL_OPS_INFO_BATCH_PARALLEL_INFO_H_

#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include "ir/value.h"
#include "frontend/parallel/ops_info/operator_info.h"
#include "frontend/parallel/strategy.h"

namespace mindspore {
namespace parallel {
30 class BatchParallelInfo : public OperatorInfo {
31  public:
BatchParallelInfo(const std::string & name,const Shapes & inputs_shape,const Shapes & outputs_shape,const PrimitiveAttrs & attrs,OperatorCostPtr cost)32   BatchParallelInfo(const std::string &name, const Shapes &inputs_shape, const Shapes &outputs_shape,
33                     const PrimitiveAttrs &attrs, OperatorCostPtr cost)
34       : OperatorInfo(name, inputs_shape, outputs_shape, attrs, cost), dev_num_(1) {}
BatchParallelInfo(const std::string & name,const Shapes & inputs_shape,const Shapes & outputs_shape,const PrimitiveAttrs & attrs)35   BatchParallelInfo(const std::string &name, const Shapes &inputs_shape, const Shapes &outputs_shape,
36                     const PrimitiveAttrs &attrs)
37       : OperatorInfo(name, inputs_shape, outputs_shape, attrs, std::make_shared<BatchParallelCost>()), dev_num_(1) {}
38 
39   ~BatchParallelInfo() override = default;
40   Status Init(const StrategyPtr &strategy) override;
41   Status InitForCostModel(const StrategyPtr &strategy) override;
42   std::vector<StrategyPtr> GenerateOpStrategies(int64_t stage_id) override;
43   Status SetCostUnderStrategy(const StrategyPtr &strategy) override;
44   void ReplaceNodeInputOrAttrs() override;
45 
46  protected:
47   Status CheckStrategy(const StrategyPtr &strategy) override;
48   Status InferForwardCommunication() override;
49   Status InferDevMatrixShape() override;
50   Status InferTensorMap() override;
51   Status GetAttrs() override;
52   Status InferAsLossDivisor() override;
53 
54  private:
55   int64_t dev_num_ = 1;
56   bool need_replace_input_ = false;
57   Shape replace_shape_;
58 };
59 
60 class SparseSoftmaxCrossEntropyWithLogitsInfo : public BatchParallelInfo {
61  public:
SparseSoftmaxCrossEntropyWithLogitsInfo(const std::string & name,const Shapes & inputs_shape,const Shapes & outputs_shape,const PrimitiveAttrs & attrs)62   SparseSoftmaxCrossEntropyWithLogitsInfo(const std::string &name, const Shapes &inputs_shape,
63                                           const Shapes &outputs_shape, const PrimitiveAttrs &attrs)
64       : BatchParallelInfo(name, inputs_shape, outputs_shape, attrs,
65                           std::make_shared<SparseSoftmaxCrossEntropyWithLogitsCost>()) {}
66   ~SparseSoftmaxCrossEntropyWithLogitsInfo() override = default;
67   void ReComputeBatchSplitFlagList() override;
68 };
}  // namespace parallel
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_FRONTEND_PARALLEL_OPS_INFO_BATCH_PARALLEL_INFO_H_
73