• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright 2020-2021 Huawei Technologies Co., Ltd
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "src/delegate/npu/npu_manager.h"
18 #include <sys/system_properties.h>
19 #include <sys/fcntl.h>
20 #include <unistd.h>
21 #include "include/hiai_ir_build.h"
22 #include "include/HiAiModelManagerService.h"
23 #include "src/common/file_utils.h"
24 
25 namespace mindspore {
26 #define MAX_MODEL_NUM 20
CompareVersion(const string & version1,const string & version2)27 int NPUManager::CompareVersion(const string &version1, const string &version2) {
28   std::istringstream iss1(version1);
29   std::istringstream iss2(version2);
30   string string1;
31   string string2;
32   while (!iss1.eof() || !iss2.eof()) {
33     getline(iss1, string1, '.');
34     getline(iss2, string2, '.');
35     if (stoi(string1) > stoi(string2)) return 1;
36     if (stoi(string1) < stoi(string2)) return -1;
37     string1 = string2 = "0";
38   }
39   return 0;
40 }
41 
CheckEMUIVersion()42 bool NPUManager::CheckEMUIVersion() {
43   char emui[128] = {0x00};
44   __system_property_get("ro.build.version.emui", emui);
45   std::string emui_str = emui;
46   size_t pos = emui_str.find('_');
47   if (pos != std::string::npos) {
48     auto version = emui_str.substr(pos + 1);
49     int ret = CompareVersion(version, "10.0.0");
50     if (ret < 0) {
51       MS_LOG(WARNING) << "EMUI version " << version << " less than 10.0.0";
52       return false;
53     }
54   }
55   return true;
56 }
57 
// Releases all NPU clients and cached subgraph models, returning the
// manager to its initial state so models can be rebuilt and reloaded.
void NPUManager::Reset() {
  // Unload every model-manager client before dropping our references.
  for (auto client : clients_) {
    client->UnLoadModel();
    client.reset();
  }
  clients_.clear();
  subgraph_index_ = 0;
  domi::HiaiIrBuild ir_build;
  for (const auto &model_map : models_) {
    auto model = model_map.second;
    // Free the compiled IR buffer exactly once per model; is_freed_ guards
    // against double-release when Reset() is called repeatedly.
    if (!model->is_freed_) {
      ir_build.ReleaseModelBuff(*model->model_buffer_data_);
      model->is_freed_ = true;
    }
    model->model_buffer_data_.reset();
    model->desc_.reset();
    model->client_.reset();
  }
  models_.clear();
}
78 
CheckDDKVersion()79 bool NPUManager::CheckDDKVersion() {
80   auto client = std::make_shared<hiai::AiModelMngerClient>();
81   if (client->GetVersion() != nullptr) {
82     std::string version = client->GetVersion();
83     int ret = CompareVersion(version, "100.320.011.018");
84     if (ret <= 0) {
85       MS_LOG(WARNING) << "DDK Version " << version << " less than 100.320.011.018";
86       return false;
87     }
88   } else {
89     MS_LOG(WARNING) << "Get DDK Version failed!";
90     return false;
91   }
92   return true;
93 }
94 
IsSupportNPU()95 bool NPUManager::IsSupportNPU() {
96   // Avoid multiple checks
97   if (!is_check_version_) {
98     is_check_version_ = true;
99     if (IsKirinChip() && CheckDDKVersion()) {
100       is_support_ = true;
101       MS_LOG(INFO) << "The current device support NPU.";
102     } else {
103       is_support_ = false;
104       MS_LOG(WARNING) << "The current device NOT SUPPORT NPU.";
105     }
106     return is_support_;
107   } else {
108     return is_support_;
109   }
110 }
111 
IsKirinChip()112 bool NPUManager::IsKirinChip() {
113   std::ifstream cpu_info("/proc/cpuinfo");
114   if (!(cpu_info.good() && cpu_info.is_open())) {
115     return false;
116   }
117   std::string line;
118   while (!cpu_info.eof()) {
119     getline(cpu_info, line);
120     if (line.find("Hardware") == string::npos) {
121       continue;
122     }
123     auto index = line.find("Kirin");
124     if (index == string::npos) {
125       continue;
126     }
127     // support Kirin 990 5G\990E\9000E
128     if (line.find("990") != string::npos || line.find("9000") != string::npos) {
129       cpu_info.close();
130       return true;
131     }
132     auto kirin_number_str = line.substr(index + 5);
133     auto kirin_number = atoi(kirin_number_str.c_str());
134     if (kirin_number >= 985 || kirin_number == 810 || kirin_number == 820) {
135       cpu_info.close();
136       return true;
137     } else {
138       MS_LOG(WARNING) << "Unsupported KirinChip " << kirin_number;
139       cpu_info.close();
140       return false;
141     }
142   }
143   return false;
144 }
145 
AddModel(std::shared_ptr<domi::ModelBufferData> model_buffer_data,const std::string & model_name,int frequency)146 int NPUManager::AddModel(std::shared_ptr<domi::ModelBufferData> model_buffer_data, const std::string &model_name,
147                          int frequency) {
148   auto model = std::make_shared<SubGraphModel>(subgraph_index_, model_name, model_buffer_data);
149   auto desc = std::make_shared<hiai::AiModelDescription>(model_name, frequency, 0, 0, 0);
150   model->desc_ = desc;
151   models_.insert({model_name, model});
152   subgraph_index_++;
153   return RET_OK;
154 }
155 
CreateAiModelMngerClient()156 std::shared_ptr<hiai::AiModelMngerClient> NPUManager::CreateAiModelMngerClient() {
157   auto client = std::make_shared<hiai::AiModelMngerClient>();
158   if (client == nullptr) {
159     MS_LOG(ERROR) << "NPU client is nullptr.";
160     return nullptr;
161   }
162   int ret = client->Init(nullptr);
163   if (ret != hiai::AI_SUCCESS) {
164     MS_LOG(ERROR) << "NPU client init failed. code is " << ret;
165     return nullptr;
166   }
167   return client;
168 }
169 
LoadOMModel()170 int NPUManager::LoadOMModel() {
171   std::vector<std::shared_ptr<hiai::AiModelDescription>> models_desc;
172   std::shared_ptr<hiai::AiModelMngerClient> client = nullptr;
173   std::shared_ptr<hiai::AiModelBuilder> mc_builder = nullptr;
174   std::unordered_map<std::shared_ptr<hiai::AiModelBuilder>, hiai::MemBuffer *> builder_buffer_map;
175   int total = 0;
176   for (const auto &model_map : models_) {
177     if (total % MAX_MODEL_NUM == 0) {
178       client = CreateAiModelMngerClient();
179       if (client == nullptr) {
180         MS_LOG(ERROR) << "Create Client failed.";
181         return RET_ERROR;
182       }
183       mc_builder = std::make_shared<hiai::AiModelBuilder>(client);
184       if (mc_builder == nullptr) {
185         MS_LOG(ERROR) << "Create AiModelBuilder failed.";
186         return RET_ERROR;
187       }
188     }
189     total++;
190     auto model = model_map.second;
191     if (model->is_loaded_ && model->is_freed_) {
192       continue;
193     }
194     models_desc.push_back(model->desc_);
195 
196     auto buffer = mc_builder->InputMemBufferCreate(model->model_buffer_data_->data, model->model_buffer_data_->length);
197     if (buffer == nullptr) {
198       MS_LOG(ERROR) << "NPU input memory buffer create failed.";
199       return RET_ERROR;
200     }
201     builder_buffer_map.insert({mc_builder, buffer});
202     model->desc_->SetModelBuffer(buffer->GetMemBufferData(), buffer->GetMemBufferSize());
203     if (models_desc.size() == MAX_MODEL_NUM) {
204       auto ret = LoadModel(client, models_desc);
205       if (ret != RET_ERROR) {
206         MS_LOG(ERROR) << "Client load model failed.";
207         return RET_ERROR;
208       }
209       models_desc.clear();
210     }
211   }
212 
213   if (!models_desc.empty()) {
214     auto ret = LoadModel(client, models_desc);
215     if (ret != RET_OK) {
216       MS_LOG(ERROR) << "Client load model failed.";
217       return RET_ERROR;
218     }
219     models_desc.clear();
220   }
221 
222   for (auto it : builder_buffer_map) {
223     it.first->MemBufferDestroy(it.second);
224   }
225   builder_buffer_map.clear();
226   return RET_OK;
227 }
228 
GetClient(const std::string & model_name)229 std::shared_ptr<hiai::AiModelMngerClient> NPUManager::GetClient(const std::string &model_name) {
230   if (models_.find(model_name) == models_.end() || models_[model_name] == nullptr) {
231     return nullptr;
232   }
233   return models_[model_name]->client_;
234 }
235 
LoadModel(const std::shared_ptr<hiai::AiModelMngerClient> & client,std::vector<std::shared_ptr<hiai::AiModelDescription>> desc_list)236 int NPUManager::LoadModel(const std::shared_ptr<hiai::AiModelMngerClient> &client,
237                           std::vector<std::shared_ptr<hiai::AiModelDescription>> desc_list) {
238   auto ret = client->Load(desc_list);
239   if (ret != hiai::AI_SUCCESS) {
240     MS_LOG(ERROR) << "Client load model failed." << ret;
241     return RET_ERROR;
242   }
243 
244   for (const auto &desc : desc_list) {
245     auto it = models_.find(desc->GetName());
246     it->second->is_loaded_ = true;
247     it->second->client_ = client;
248   }
249 
250   this->clients_.push_back(client);
251   return RET_OK;
252 }
253 }  // namespace mindspore
254