/**
 * Copyright 2019-2023 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PARALLEL_AUTO_PARALLEL_REC_PARTITION_H_
#define PARALLEL_AUTO_PARALLEL_REC_PARTITION_H_

#include <cmath>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "frontend/parallel/auto_parallel/rec_core/rec_cost.h"
#include "frontend/parallel/auto_parallel/rec_core/rec_graph.h"
#include "frontend/parallel/auto_parallel/rec_core/rec_strategy.h"
#include "frontend/parallel/status.h"
#include "ir/func_graph.h"

namespace mindspore {
namespace parallel {
constexpr bool ENABLE_PIPE_ALGO = false;

enum PartitionOrder { TopologyOrder, WeightOrder };

constexpr PartitionOrder PARTITION_ORDER = PartitionOrder::TopologyOrder;

// Returns the indices of the graph's nodes ordered by their partition weight.
std::vector<size_t> SortByWeight(const std::shared_ptr<Graph> &graph);

// Computes the weight used to order a node during partitioning.
double GetWeights(const Graph::NodeType &node);

// Selects a partition strategy for one node, given the strategies already assigned to named nodes.
StrategyRec PartitionNode(Graph::NodeType node,
                          const std::vector<std::pair<std::string, StrategyRec>> &node_name_to_strategy,
                          const std::shared_ptr<Graph> &graph, const bool isTraining);

// Partitions the whole graph across num_device devices under the given per-device memory budget.
Status PartitionForAllDevices(size_t num_device, double device_memory, const std::shared_ptr<Graph> &graph,
                              bool isTraining, const FuncGraphPtr &root);

// Applies the strategy chosen for a node to its tensors.
Graph::NodeType ApplyStrToTensor(Graph::NodeType Node);

// Checks the estimated memory usage of the partitioned graph against the per-device memory limit.
void DevicesMemoryControl(const size_t num_device, const double device_memory, const std::shared_ptr<Graph> &graph);

StrategyRec GetOneLoopStrategy(size_t op_inputs_num, const StrategyRec &old_str, StrategyRec new_str);

Graph::NodeType ChangeStrategy(Graph::NodeType Node, size_t n_cut);

// Returns the size in bytes of the given tensor data type.
size_t GetDataTypeSize(const TensorType &type);
}  // namespace parallel
}  // namespace mindspore

#endif  // PARALLEL_AUTO_PARALLEL_REC_PARTITION_H_