/**
 * Copyright 2022 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_RUNTIME_HARDWARE_CPU_ALLREDUCE_IMPL_H_
#define MINDSPORE_CCSRC_RUNTIME_HARDWARE_CPU_ALLREDUCE_IMPL_H_

#include <string>
#include <memory>
#include "include/backend/distributed/cluster/cluster_context.h"
#include "plugin/device/cpu/hal/hardware/ms_collective_node.h"

namespace mindspore {
namespace device {
namespace cpu {
// Launches the AllReduce collective communication operation on CPU.
class AllReduceLauncher {
 public:
  AllReduceLauncher(const AllReduceLauncher &) = delete;
  AllReduceLauncher &operator=(const AllReduceLauncher &) = delete;
  AllReduceLauncher() = default;
  ~AllReduceLauncher() = default;

  // Initializes the launcher and its underlying collective node.
  bool Initialize();
  // Finalizes the launcher and releases the collective node.
  bool Finalize();

  // Reduces input_data across all ranks and writes the result to output_data.
  bool Execute(const void *input_data, void *const output_data, size_t data_size) const;

  // Returns the collective communication node used by this launcher.
  const std::shared_ptr<ps::core::CollectiveNode> &collective_node() const;

 private:
  size_t rank_id_{0};
  size_t rank_size_{0};
  std::string node_role_{distributed::kEnvRoleOfWorker};
  std::shared_ptr<ps::core::CollectiveNode> abs_node_{nullptr};

  // Ring algorithm implementation of AllReduce.
  bool RingAllReduce(const void *input_data, void *const output_data, size_t data_size) const;
  // Reduce-then-broadcast algorithm implementation of AllReduce.
  bool ReduceBroadcastAllReduce(const void *input_data, void *const output_data, size_t data_size) const;
};
}  // namespace cpu
}  // namespace device
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_RUNTIME_HARDWARE_CPU_ALLREDUCE_IMPL_H_