/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PARALLEL_AUTO_PARALLEL_REC_PARTITION_H_
#define PARALLEL_AUTO_PARALLEL_REC_PARTITION_H_

#include <cmath>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "frontend/parallel/auto_parallel/rec_core/rec_cost.h"
#include "frontend/parallel/auto_parallel/rec_core/rec_graph.h"
#include "frontend/parallel/auto_parallel/rec_core/rec_strategy.h"
#include "frontend/parallel/status.h"

namespace mindspore {
namespace parallel {
std::vector<size_t> SortByWeight(const std::shared_ptr<Graph> &graph);

double GetWeights(const Graph::NodeType &node);

StrategyRec PartitionNode(const Graph::NodeType &node,
                          const std::vector<std::pair<std::string, StrategyRec>> &node_name_to_strategy,
                          const std::shared_ptr<Graph> &graph);

Status PartitionForAllDevices(const size_t num_device, const double device_memory, const std::shared_ptr<Graph> &graph);

Graph::NodeType ApplyStrToTensor(Graph::NodeType Node);

Status DevicesMemoryControl(const size_t num_device, const double device_memory, const std::shared_ptr<Graph> &graph);

size_t GetDataTypeSize(const TensorType &type);
}  // namespace parallel
}  // namespace mindspore

#endif  // PARALLEL_AUTO_PARALLEL_REC_PARTITION_H_