/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "backend/optimizer/common/common_backend_optimization.h"
#include <memory>
#include <string>
#include "backend/optimizer/common/optimizer.h"
#include "backend/optimizer/pass/convert_const_input_to_attr.h"
#include "backend/optimizer/pass/convert_tuple_output_to_maketuple.h"
#include "backend/optimizer/pass/convert_const_input_to_tensor_input.h"
#include "backend/optimizer/pass/convert_tuple_input_to_dynamic_input.h"
#include "backend/optimizer/pass/const_to_attr_strided_slice_grad.h"
#include "backend/optimizer/pass/convert_const_scalar_to_tensor.h"
#include "backend/optimizer/pass/convert_attr_to_unify_mindir.h"
#include "backend/optimizer/pass/add_training_attr.h"
#include "backend/optimizer/pass/optimize_updatestate.h"
#include "backend/optimizer/pass/conv_transpose_to_conv_bp.h"
#include "utils/ms_context.h"
#include "debug/anf_ir_dump.h"

namespace mindspore {
namespace opt {
void BackendCommonOptimization(const std::shared_ptr<session::KernelGraph> &kernel_graph) {
  MS_EXCEPTION_IF_NULL(kernel_graph);
  MS_LOG(INFO) << "start common opt graph:" << kernel_graph->graph_id();
#ifdef ENABLE_DUMP_IR
  auto context_ptr = MsContext::GetInstance();
  MS_EXCEPTION_IF_NULL(context_ptr);
  bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
  if (save_graphs) {
    std::string file_name = "hwopt_common_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
    DumpIR(file_name, kernel_graph);
  }
#endif
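  // Build the common optimization pipeline; passes run in the order they are added.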
  auto optimizer = std::make_shared<GraphOptimizer>();
  auto common_pm = std::make_shared<PassManager>("common_pm");
  common_pm->AddPass(std::make_shared<ConvertConstInputToAttr>());
  common_pm->AddPass(std::make_shared<ConvertAttrToUnifyMindIR>());
  common_pm->AddPass(std::make_shared<ConstToAttrStridedSliceGradPass>());
  common_pm->AddPass(std::make_shared<ConvertConstInputToTensorInput>());
  common_pm->AddPass(std::make_shared<ConvertTupleOutputToMaketuple>());
  common_pm->AddPass(std::make_shared<ConvertConstScalarToTensor>());
  common_pm->AddPass(std::make_shared<ConvertTupleInputToDynamicInput>());
  common_pm->AddPass(std::make_shared<AddTrainingAttr>());
  optimizer->AddPassManager(common_pm);
  (void)optimizer->Optimize(kernel_graph);
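  // The passes may rewrite the graph, so recompute the default execution order.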
  kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
  if (save_graphs) {
    std::string file_name = "hwopt_common_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
    DumpIR(file_name, kernel_graph);
  }
#endif
}

void CommonFinalOptimization(const std::shared_ptr<session::KernelGraph> &kernel_graph) {
  MS_EXCEPTION_IF_NULL(kernel_graph);
  // Run optimizer passes.
  auto optimizer = std::make_shared<GraphOptimizer>();
  auto pm = std::make_shared<PassManager>("final_opt");
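  // Only the UpdateState optimization runs in the final stage.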
  pm->AddPass(std::make_shared<OptimizeUpdateState>());
  optimizer->AddPassManager(pm);
  (void)optimizer->Optimize(kernel_graph);
  kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
  // Dump IR if save_graphs is set.
  auto context = MsContext::GetInstance();
  MS_EXCEPTION_IF_NULL(context);
  const bool save_graphs = context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
  if (save_graphs) {
    std::string filename = "hwopt_common_final_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
    DumpIR(filename, kernel_graph);
  }
#endif
}

void CommonUnifyMindIROptimization(const std::shared_ptr<session::KernelGraph> &kernel_graph) {
  MS_EXCEPTION_IF_NULL(kernel_graph);
  MS_LOG(INFO) << "start common unify mindir opt graph:" << kernel_graph->graph_id();
#ifdef ENABLE_DUMP_IR
  auto context_ptr = MsContext::GetInstance();
  MS_EXCEPTION_IF_NULL(context_ptr);
  bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
  if (save_graphs) {
    std::string file_name =
      "hwopt_common_unify_mindir_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
    DumpIR(file_name, kernel_graph);
  }
#endif
  auto opt = std::make_shared<GraphOptimizer>();
  auto pm = std::make_shared<PassManager>("common_unify_mindir_pm");
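  // Rewrite ConvTranspose nodes as ConvBackpropInput to unify the MindIR representation.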
  pm->AddPass(std::make_shared<ConvTransposeToConvBackpropInputPass>());
  opt->AddPassManager(pm);
  (void)opt->Optimize(kernel_graph);
  kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
  if (save_graphs) {
    std::string file_name = "hwopt_common_unify_mindir_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
    DumpIR(file_name, kernel_graph);
  }
#endif
}
}  // namespace opt
}  // namespace mindspore