• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright 2022 Huawei Technologies Co., Ltd
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef MINDSPORE_CCSRC_C_API_SRC_RESOURCE_MANAGER_H_
18 #define MINDSPORE_CCSRC_C_API_SRC_RESOURCE_MANAGER_H_
19 
20 #include <utility>
21 #include <unordered_map>
22 #include <string>
23 #include <vector>
24 #include <memory>
25 #include "base/base.h"
26 #include "include/c_api/ms/base/handle_types.h"
27 #include "c_api/src/common.h"
28 #include "pipeline/jit/ps/resource.h"
29 #include "utils/ms_context.h"
30 #include "backend/graph_compiler/backend_base.h"
31 #include "c_api/src/dynamic_op_info.h"
32 
// Upper bound on cached dynamic-op entries; constexpr so it is a true
// compile-time constant and does not emit a per-TU copy from this header.
constexpr size_t maxOpPoolSize = 500;
34 class ResourceManager {
35  public:
ResourceManager()36   ResourceManager() {
37     context_ = mindspore::MsContext::GetInstance();
38     (void)context_->set_backend_policy("ms");
39     context_->set_param<int>(mindspore::MS_CTX_EXECUTION_MODE, mindspore::kGraphMode);
40   }
41 
~ResourceManager()42   ~ResourceManager() {
43     for (auto iter : backends_) {
44       const auto &backend = iter.second;
45       if (backend != nullptr) {
46         backend->ClearOpExecutorResource();
47       }
48     }
49     backends_.clear();
50     ptr_res_pool_.clear();
51     dynamic_op_pool_.clear();
52     results_.clear();
53   }
54 
SetResult(const std::string & key,const mindspore::Any & value)55   void SetResult(const std::string &key, const mindspore::Any &value) { results_[key] = value; }
56 
GetResult(const std::string & key)57   mindspore::Any GetResult(const std::string &key) const {
58     auto iter = results_.find(key);
59     if (iter == results_.end()) {
60       MS_LOG(EXCEPTION) << "this key is not in resource list:" << key;
61     }
62     return iter->second;
63   }
64 
CacheBackend(const std::string & device_target,const MindRTBackendPtr & backend)65   void CacheBackend(const std::string &device_target, const MindRTBackendPtr &backend) {
66     backends_[device_target] = backend;
67   }
68 
GetBackendFromCache(const std::string & device_target)69   MindRTBackendPtr GetBackendFromCache(const std::string &device_target) {
70     auto iter = backends_.find(device_target);
71     if (iter == backends_.end()) {
72       MS_LOG(INFO) << "Current backend has not been cached in backends pool.";
73       return nullptr;
74     }
75     return iter->second;
76   }
77 
CacheOpRunInfo(std::shared_ptr<InnerOpInfo> inner_info,FrontendOpRunInfoPtr run_info)78   void CacheOpRunInfo(std::shared_ptr<InnerOpInfo> inner_info, FrontendOpRunInfoPtr run_info) {
79     if (dynamic_op_pool_.size() > maxOpPoolSize) {
80       dynamic_op_pool_.erase(dynamic_op_pool_.begin());
81     }
82     dynamic_op_pool_[*inner_info] = run_info;
83   }
84 
GetOpRunInfoFromCache(std::shared_ptr<InnerOpInfo> inner_info)85   FrontendOpRunInfoPtr GetOpRunInfoFromCache(std::shared_ptr<InnerOpInfo> inner_info) {
86     auto iter = dynamic_op_pool_.find(*inner_info);
87     if (iter == dynamic_op_pool_.end()) {
88       MS_LOG(INFO) << "The OpInfo has not been cached in dynamic operator pool.";
89       return nullptr;
90     }
91     return iter->second;
92   }
93 
GetCachedOpNum()94   size_t GetCachedOpNum() const { return dynamic_op_pool_.size(); }
95 
SetInfer(bool infer)96   void SetInfer(bool infer) { auto_infer_ = infer; }
97 
GetInfer()98   bool GetInfer() const { return auto_infer_; }
99 
StoreSrcPtr(const BasePtr & src_ptr)100   void StoreSrcPtr(const BasePtr &src_ptr) {
101     (void)ptr_res_pool_.insert(std::make_pair(reinterpret_cast<Handle>(src_ptr.get()), src_ptr));
102   }
103 
GetSrcPtr(ConstHandle ptr)104   BasePtr GetSrcPtr(ConstHandle ptr) {
105     auto iter = ptr_res_pool_.find(ptr);
106     if (iter == ptr_res_pool_.end()) {
107       MS_LOG(ERROR) << "The key handle " << ptr << " is not exist in resource pool.";
108       return nullptr;
109     }
110     return iter->second;
111   }
112 
ReleaseSrcPtr(ConstHandle ptr)113   void ReleaseSrcPtr(ConstHandle ptr) {
114     auto iter = ptr_res_pool_.find(ptr);
115     if (iter != ptr_res_pool_.end()) {
116       (void)ptr_res_pool_.erase(iter);
117     }
118   }
119 
120  private:
121   std::unordered_map<ConstHandle, BasePtr> ptr_res_pool_{};
122   std::unordered_map<InnerOpInfo, FrontendOpRunInfoPtr> dynamic_op_pool_{};
123   std::unordered_map<std::string, MindRTBackendPtr> backends_{};
124   mindspore::HashMap<std::string, mindspore::Any> results_{};
125   std::shared_ptr<mindspore::MsContext> context_ = nullptr;
126   bool auto_infer_ = true;
127 };
128 #endif  // MINDSPORE_CCSRC_C_API_SRC_RESOURCE_MANAGER_H_
129