/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_RUNTIME_FRAMEWORK_ACTOR_MEMORY_AWARE_ACTOR_H_
#define MINDSPORE_CCSRC_RUNTIME_FRAMEWORK_ACTOR_MEMORY_AWARE_ACTOR_H_

#include <string>
#include <memory>
#include <mutex>
#include "runtime/graph_scheduler/actor/abstract_actor.h"
#include "runtime/graph_scheduler/device_tensor_store.h"

namespace mindspore {
namespace runtime {
// The actor class that provides the common memory-related operations shared by actors.
class MemoryAwareActor : public AbstractActor {
 public:
  // name/type/recorder_aid are forwarded to AbstractActor; memory_manager_aid identifies the
  // memory manager actor that services this actor's alloc/free requests (see memory_manager_aid_).
  explicit MemoryAwareActor(const std::string &name, KernelTransformType type, const AID *recorder_aid,
                            const AID &memory_manager_aid)
      : AbstractActor(name, type, recorder_aid), memory_manager_aid_(memory_manager_aid) {}
  ~MemoryAwareActor() override = default;

  // Memory lifecycle hooks overridden by derived actors. The defaults are intentionally no-ops,
  // so actors that manage no device memory can inherit without overriding anything.
  virtual void SendMemoryAllocReq(OpContext<DeviceTensor> *const context) {}
  virtual void SendMemoryFreeReq(OpContext<DeviceTensor> *const context) {}
  // Callback invoked after a memory allocation request completes.
  virtual void OnMemoryAllocFinish(OpContext<DeviceTensor> *const context) {}

  // Returns the id of the memory manager actor this actor sends alloc/free requests to.
  const AID &memory_manager_aid() const { return memory_manager_aid_; }

 protected:
  friend class GraphScheduler;

  // The processing after actor run: 1.erase input, 2.free memory, 3.send output.
  void PostRun(OpContext<DeviceTensor> *const context) {
    // The input is invalid and needs to be erased when finish run.
    EraseInput(context);

    // Note that SendMemoryFreeReq must be in front of SendOutput, because SendOutput will trigger SendMemoryAllocReq of
    // the next actor and the actor is asynchronous execution. So it is necessary to ensure that SendMemoryFreeReq of
    // the current actor is in front of SendMemoryAllocReq of the next actor. One is to reuse the memory more fully,
    // the other is to ensure the execution order and avoid the illegal memory timing problem.
    SendMemoryFreeReq(context);

    SendOutput(context);
  }

  // The id of memory manager actor. Send message to it for alloc and free memory.
  const AID memory_manager_aid_;
};

// Shared-ownership handle used by the scheduler to keep actors alive across async execution.
using MemoryAwareActorPtr = std::shared_ptr<MemoryAwareActor>;
}  // namespace runtime
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_RUNTIME_FRAMEWORK_ACTOR_MEMORY_AWARE_ACTOR_H_