1 /**
2 * Copyright 2021 Huawei Technologies Co., Ltd
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "tools/optimizer/fisson/multi_conv_split_pass.h"
18 #include <string>
19 #include <memory>
20 #include "utils/utils.h"
21 #include "base/base.h"
22 #include "ops/fusion/conv2d_fusion.h"
23 #include "tools/optimizer/common/gllo_utils.h"
24 #include "tools/optimizer/parallel/split_strategy.h"
25 #include "nnacl/op_base.h"
26
27 using mindspore::converter::FmkType;
28 using mindspore::schema::PrimitiveType_Conv2dTransposeFusion;
29 namespace mindspore {
30 namespace opt {
IsMultiParallelConvNode(const AnfNodePtr & node) const31 std::string MultiConvSplitPass::IsMultiParallelConvNode(const AnfNodePtr &node) const {
32 MS_ASSERT(node != nullptr);
33 for (const auto ¶llel_prim : kParallelOpNames) {
34 if (CheckPrimitiveType(node, parallel_prim.first.first)) {
35 return parallel_prim.second;
36 }
37 }
38 return {};
39 }
40
DefinePattern() const41 const BaseRef MultiConvSplitPass::DefinePattern() const {
42 auto conv1_var = std::make_shared<CondVar>(IsParallelSplitConvNode);
43 MS_CHECK_TRUE_MSG(conv1_var != nullptr, nullptr, "create CondVar return nullptr");
44 auto conv1_other_var = std::make_shared<SeqVar>();
45 MS_CHECK_TRUE_MSG(conv1_other_var != nullptr, nullptr, "create SeqVar return nullptr");
46 VectorRef res = VectorRef({conv1_var, conv1_other_var});
47 int32_t idx = 1;
48 while (idx < num_) {
49 auto tmp_var = std::make_shared<CondVar>(IsParallelSplitConvNode);
50 MS_CHECK_TRUE_MSG(tmp_var != nullptr, nullptr, "create CondVar return nullptr");
51 auto tmp_other_var = std::make_shared<SeqVar>();
52 MS_CHECK_TRUE_MSG(tmp_other_var != nullptr, nullptr, "create SeqVar return nullptr");
53 res = VectorRef({tmp_var, res, tmp_other_var});
54 idx++;
55 }
56 return res;
57 }
58
// Entry point of the pass for one matched node: decides whether the multi-conv
// chain rooted at `node` should be split across devices and, if so, delegates
// the actual graph rewrite to MultiNodeSplitProxy.
// Returns: the original `node` when splitting is skipped, nullptr on error,
// or the replacement node produced by DoSplit.
const AnfNodePtr MultiConvSplitPass::Process(const FuncGraphPtr &func_graph, const AnfNodePtr &node,
                                             const EquivPtr &) const {
  MS_ASSERT(func_graph != nullptr && node != nullptr);
  auto cnode = node->cast<CNodePtr>();
  MS_CHECK_TRUE_MSG(cnode != nullptr, nullptr, "input node is not a cnode");
  // A node already pinned to a concrete device must not be split again.
  auto device_type_attr = cnode->GetAttr(mindspore::ops::kDeviceType);
  auto device_type = (device_type_attr != nullptr) ? GetValue<int32_t>(device_type_attr) : kDeviceTypeNone;
  if (device_type != kDeviceTypeNone) {
    return node;
  }
  // Only nodes registered as multi-parallel conv ops are candidates.
  auto parallel_name = IsMultiParallelConvNode(node);
  if (parallel_name.empty()) {
    return node;
  }
  // if current node has more than two outputs node, we do not split it.
  auto manager = func_graph->manager();
  MS_CHECK_TRUE_MSG(manager != nullptr, nullptr, "manager of func_graph is nullptr");
  auto node_users_iter = manager->node_users().find(node);
  if (node_users_iter == manager->node_users().end()) {
    // No consumers recorded for this node; nothing to split.
    return node;
  }
  auto output_info_list = node_users_iter->second;
  // NOTE(review): kDefaultBatch is the user-count threshold here; the comment
  // above says "more than two outputs" — confirm the constant's value matches.
  if (output_info_list.size() > kDefaultBatch) {
    return node;
  }

  // A registered op without a configured split strategy is a setup error.
  if (strategys_.find(parallel_name) == strategys_.end()) {
    MS_LOG(ERROR) << "Find " << parallel_name << " strategy failed";
    return nullptr;
  }
  // Delegate the rewrite of the whole num_-deep conv chain to the proxy.
  auto multi_node_split_proxy =
    std::make_shared<MultiNodeSplitProxy>(strategys_.at(parallel_name), primitive_type_, fmk_type_, num_);
  MS_CHECK_TRUE_MSG(multi_node_split_proxy != nullptr, nullptr, "create MultiNodeSplitProxy return nullptr");
  return multi_node_split_proxy->DoSplit(func_graph, node);
}
94
95 } // namespace opt
96 } // namespace mindspore
97