• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright 2020-2021 Huawei Technologies Co., Ltd
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <set>
18 #include <string>
19 #include "ir/func_graph.h"
20 #include "mindspore/core/ops/array_ops.h"
21 #include "mindspore/core/ops/framework_ops.h"
22 #include "mindspore/core/ops/other_ops.h"
23 #include "mindspore/core/ops/sequence_ops.h"
24 #include "mindspore/core/ops/nn_ops.h"
25 #include "utils/parallel_node_check.h"
26 
27 namespace mindspore {
28 // clang-format off
29 #ifndef ENABLE_SECURITY
30 static const std::set<std::string> PARALLEL_BLACK_LIST_ = {mindspore::kTupleGetItemOpName, "J", "list_getitem",
31   "array_getitem", "tuple_setitem", "Depend", "list_setitem", "array_setitem", "dict_getitem",
32   "list_append", "list_map", "list_reduce", "tuple_reversed", "tile_shape", "tuple_div", "tuple_to_array",
33   "make_dict", "make_slice", "string_eq", "VirtualLoss", "Return", "env_getitem", "TensorShape", "ScalarToTensor",
34   "partial", "env_setitem", "env_getitem", "env_add", "Shape", "InsertGradientOf",
35   "dot", "im2col", "col2im", "im2col_v1", "state_setitem", "TensorDump", "_MirrorSilentCheck",
36   "ImageSummary", "TensorSummary", "Debug", "HistogramSummary", "col2im_v1", "resolve", "BroadcastGradientArgs",
37   "InvertPermutation", "DropoutGenMask", "StatelessDropOutGenMask", "embed", "create_instance", "RefToEmbed",
38   "StopGradient", "UpdateState", "Load", "Switch", "Print", "call_instance", "TensorMove", "DType",
39   "ScalarAdd", "ScalarSub", "ScalarMul", "ScalarDiv", "ScalarFloorDiv", "ScalarPow", "ScalarSummary", "ScalarCast",
40   "ScalarMod", "ScalarGt", "ScalarGe", "ScalarLt", "ScalarLe", "ScalarEq", "TensorToTuple", "TupleToTensor"};
41 #else
42 static const std::set<std::string> PARALLEL_BLACK_LIST_ = {mindspore::kTupleGetItemOpName, "J", "list_getitem",
43   "array_getitem", "tuple_setitem", "Depend", "list_setitem", "array_setitem", "dict_getitem",
44   "list_append", "list_map", "list_reduce", "tuple_reversed", "tile_shape", "tuple_div", "tuple_to_array",
45   "make_dict", "make_slice", "string_eq", "VirtualLoss", "Return", "env_getitem", "TensorShape", "ScalarToTensor",
46   "identity", "partial", "env_setitem", "env_getitem", "env_add", "Shape", "FillV2", "_MirrorSilentCheck",
47   "dot", "im2col", "col2im", "im2col_v1", "state_setitem", "Debug", "col2im_v1", "resolve", "BroadcastGradientArgs",
48   "InvertPermutation", "DropoutGenMask", "StatelessDropOutGenMask", "embed", "create_instance", "RefToEmbed",
49   "StopGradient", "UpdateState", "Load", "Switch", "Print", "call_instance", "TensorMove", "DType",
50   "ScalarAdd", "ScalarSub", "ScalarMul", "ScalarDiv", "ScalarFloorDiv", "ScalarPow", "ScalarSummary", "ScalarCast",
51   "ScalarMod", "ScalarGt", "ScalarGe", "ScalarLt", "ScalarLe", "ScalarEq", "InsertGradientOf", "TensorToTuple",
52   "TupleToTensor"};
53 #endif
// The AllGather primitive variants recognized by IsInAllGatherNodeList.
static const std::set<PrimitivePtr> ALLGATHER_NODE_LIST_ = {prim::kPrimAllGather, prim::kPrimMiniStepAllGather,
                                                            prim::kPrimMicroStepAllGather};
// Primitives treated as "trivial" (pass-through-like) by IsInTrivialNodeList.
// "AscendAntiQuant" has no exported kPrim constant, so a Primitive is built here
// by name; matching in IsPrimitiveCNode is presumably by primitive name rather
// than pointer identity -- TODO confirm.
static const std::set<PrimitivePtr> TRIVIAL_NODE_LIST_ = {prim::kPrimCast, prim::kPrimDepend, prim::kPrimQuant,
                                                          prim::kPrimMirrorSilentCheck, prim::kPrimInsertGradientOf,
                                                          std::make_shared<Primitive>("AscendAntiQuant")};
// clang-format on
60 
IsInParallelBlackList(const PrimitivePtr & prim)61 bool IsInParallelBlackList(const PrimitivePtr &prim) {
62   MS_EXCEPTION_IF_NULL(prim);
63   return (PARALLEL_BLACK_LIST_.find(prim->name()) != PARALLEL_BLACK_LIST_.end());
64 }
65 
IsInAllGatherNodeList(const CNodePtr & cnode)66 bool IsInAllGatherNodeList(const CNodePtr &cnode) {
67   for (auto &value : ALLGATHER_NODE_LIST_) {
68     if (IsPrimitiveCNode(cnode, value)) {
69       return true;
70     }
71   }
72   return false;
73 }
74 
IsInTrivialNodeList(const CNodePtr & cnode)75 bool IsInTrivialNodeList(const CNodePtr &cnode) {
76   for (auto &value : TRIVIAL_NODE_LIST_) {
77     if (IsPrimitiveCNode(cnode, value)) {
78       return true;
79     }
80   }
81   return false;
82 }
83 
84 // Return true if cnode is ReShape and match pattern DropoutGenMask -> ReShape -> FlashAttentionScore
IsReshapeBetweenDropoutGenMaskAndFlashAttentionScore(const CNodePtr & cnode)85 bool IsReshapeBetweenDropoutGenMaskAndFlashAttentionScore(const CNodePtr &cnode) {
86   if (!IsPrimitiveCNode(cnode, prim::kPrimReshape)) {
87     return false;
88   }
89   auto input1 = cnode->input(kIndex1);
90   if (!IsPrimitiveCNode(input1, prim::kPrimDropoutGenMask)) {
91     return false;
92   }
93   auto func_graph = cnode->func_graph();
94   auto manager = func_graph->manager();
95   auto node_users = manager->node_users()[cnode];
96   if (node_users.size() != 1 || !IsPrimitiveCNode(node_users.begin()->first, prim::kPrimFlashAttentionScore)) {
97     return false;
98   }
99   return true;
100 }
101 
IsParallelConsiderCNode(const CNodePtr & cnode)102 bool IsParallelConsiderCNode(const CNodePtr &cnode) {
103   if (cnode == nullptr || cnode->size() == 0) {
104     return false;
105   }
106   const auto &prim_node = cnode->input(0)->cast<ValueNodePtr>();
107   if (prim_node == nullptr) {
108     return false;
109   }
110   const auto &prim = prim_node->value()->cast<PrimitivePtr>();
111   if (prim == nullptr) {
112     return false;
113   }
114   // If match pattern DropoutGenMask -> ReShape -> FlashAttentionScore, skip ReShape
115   if (IsReshapeBetweenDropoutGenMaskAndFlashAttentionScore(cnode)) {
116     return false;
117   }
118   return !IsInParallelBlackList(prim);
119 }
120 }  // namespace mindspore
121