/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CCSRC_BACKEND_OPTIMIZER_GRAPH_KERNEL_TRANSFORM_OP_OPTIMIZER_H_
#define MINDSPORE_CCSRC_BACKEND_OPTIMIZER_GRAPH_KERNEL_TRANSFORM_OP_OPTIMIZER_H_

#include <string>
#include <vector>
#include "backend/optimizer/common/pass.h"
#include "ir/func_graph.h"
#include "backend/optimizer/graph_kernel/model/lite_graph.h"

namespace mindspore {
namespace opt {
/**
 * @brief Eliminate unnecessary format transformation ops (e.g. Transpose)
 *        when the surrounding operators are format-flexible.
 * @example
 *   %1 = Transpose(p0) // NCHW to NHWC
 *   %2 = Transpose(p1) // NCHW to NHWC
 *   %3 = Add(%1, %2)
 *   return %3
 *  -->
 *   %1 = Add(p0, p1)
 *   %2 = Transpose(%1) // NCHW to NHWC
 *   return %2
 * @example
 *   %1 = Transpose(p0) // NCHW to NHWC
 *   %2 = Transpose(p1) // NCHW to NHWC
 *   %3 = Add(%1, %2)
 *   %4 = Transpose(%3) // NHWC to NCHW
 *   return %4
 *  -->
 *   %1 = Add(p0, p1)
 *   return %1
 */
class TransformOpOptimizer : public Pass {
 public:
  TransformOpOptimizer() : Pass("transform_op_optimizer") {}
  ~TransformOpOptimizer() = default;
  bool Run(const FuncGraphPtr &func_graph) override;

 private:
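  // Try to reduce the number of "trans_op_name" ops (Transpose by default) in the lite graph;
  // presumably returns true when the graph was changed.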
  bool Process(const graphkernel::LiteGraphPtr &litegraph, const std::string &trans_op_name = "Transpose");
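  // Check whether the node is format-flexible, i.e. a transform op can be moved across it.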
  bool IsFlexibleOp(const graphkernel::NodePtr &node);
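  // Number of transform ops before the rewrite, presumably used to decide whether the
  // rewrite actually reduced the op count.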
  size_t ori_trans_op_num_{0};
};
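
// Usage sketch (illustrative only, not part of the original header): like other backend
// optimizer passes, this pass is expected to be registered with an opt::PassManager, e.g.
//   auto pm = std::make_shared<PassManager>("graph_kernel_pm");
//   pm->AddPass(std::make_shared<TransformOpOptimizer>());
//   pm->Run(func_graph);
// The exact registration point in the graph kernel pipeline is an assumption here.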
}  // namespace opt
}  // namespace mindspore
#endif  // MINDSPORE_CCSRC_BACKEND_OPTIMIZER_GRAPH_KERNEL_TRANSFORM_OP_OPTIMIZER_H_