/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define USE_DEPRECATED_API
#include "tools/optimizer/fisson/multi_conv_split_pass.h"
#include <string>
#include <memory>
#include "include/common/utils/utils.h"
#include "base/base.h"
#include "ops/fusion/conv2d_fusion.h"
#include "tools/optimizer/common/gllo_utils.h"
#include "tools/optimizer/parallel/split_strategy.h"
#include "nnacl/op_base.h"
#include "ops/op_utils.h"

using mindspore::schema::PrimitiveType_Conv2dTransposeFusion;
namespace mindspore {
namespace opt {
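// Return the registered parallel-op name for a node whose primitive type matches one of the
// supported parallel convolution ops in kParallelOpNames; return an empty string otherwise.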
std::string MultiConvSplitPass::IsMultiParallelConvNode(const AnfNodePtr &node) const {
  MS_ASSERT(node != nullptr);
  for (const auto &parallel_prim : kParallelOpNames) {
    if (CheckPrimitiveType(node, parallel_prim.first.first)) {
      return parallel_prim.second;
    }
  }
  return {};
}

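// Build a nested pattern of num_ splittable convolution nodes: each iteration wraps the
// previous pattern as an input of another CondVar matched by IsParallelSplitConvNode.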
const BaseRef MultiConvSplitPass::DefinePattern() const {
  auto conv1_var = std::make_shared<CondVar>(IsParallelSplitConvNode);
  MS_CHECK_TRUE_MSG(conv1_var != nullptr, nullptr, "create CondVar return nullptr");
  auto conv1_other_var = std::make_shared<SeqVar>();
  MS_CHECK_TRUE_MSG(conv1_other_var != nullptr, nullptr, "create SeqVar return nullptr");
  VectorRef res = VectorRef({conv1_var, conv1_other_var});
  int32_t idx = 1;
  while (idx < num_) {
    auto tmp_var = std::make_shared<CondVar>(IsParallelSplitConvNode);
    MS_CHECK_TRUE_MSG(tmp_var != nullptr, nullptr, "create CondVar return nullptr");
    auto tmp_other_var = std::make_shared<SeqVar>();
    MS_CHECK_TRUE_MSG(tmp_other_var != nullptr, nullptr, "create SeqVar return nullptr");
    res = VectorRef({tmp_var, res, tmp_other_var});
    idx++;
  }
  return res;
}

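// Split a matched chain of convolution nodes according to the configured split strategy.
// Nodes that already carry a device type, are not parallel conv ops, or have multiple users
// are returned unchanged.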
const AnfNodePtr MultiConvSplitPass::Process(const FuncGraphPtr &func_graph, const AnfNodePtr &node,
                                             const EquivPtr &) const {
  MS_ASSERT(func_graph != nullptr && node != nullptr);
  auto cnode = node->cast<CNodePtr>();
  MS_CHECK_TRUE_MSG(cnode != nullptr, nullptr, "input node is not a cnode");
  auto device_type_attr = cnode->GetAttr(mindspore::ops::kDeviceType);
  auto device_type = (device_type_attr != nullptr) ? GetValue<int32_t>(device_type_attr) : kDeviceTypeNone;
  if (device_type != kDeviceTypeNone) {
    return node;
  }
  auto parallel_name = IsMultiParallelConvNode(node);
  if (parallel_name.empty()) {
    return node;
  }
  // If the current node's output has more than kDefaultBatch users, we do not split it.
  auto manager = func_graph->manager();
  MS_CHECK_TRUE_MSG(manager != nullptr, nullptr, "manager of func_graph is nullptr");
  auto node_users_iter = manager->node_users().find(node);
  if (node_users_iter == manager->node_users().end()) {
    return node;
  }
  auto output_info_list = node_users_iter->second;
  if (output_info_list.size() > kDefaultBatch) {
    return node;
  }

  if (strategys_.find(parallel_name) == strategys_.end()) {
    MS_LOG(ERROR) << "Find " << parallel_name << " strategy failed";
    return nullptr;
  }
  auto multi_node_split_proxy =
    std::make_shared<MultiNodeSplitProxy>(strategys_.at(parallel_name), primitive_type_, fmk_type_, num_);
  MS_CHECK_TRUE_MSG(multi_node_split_proxy != nullptr, nullptr, "create MultiNodeSplitProxy return nullptr");
  return multi_node_split_proxy->DoSplit(func_graph, node);
}

}  // namespace opt
}  // namespace mindspore