/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "nncompiled_cache.h"

#include <unistd.h>
#include <climits>    // PATH_MAX
#include <fstream>    // std::ifstream / std::ofstream
#include <functional>
#include <memory>
#include <limits>

#include "common/utils.h"
#include "backend_manager.h"
#include "nnbackend.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
constexpr int32_t MAX_MODEL_SIZE = 200 * 1024 * 1024; // 200MB
constexpr int32_t NULL_PTR_LENGTH = 0;
constexpr int32_t NUMBER_CACHE_INFO_MEMBERS = 3;
constexpr int32_t HEX_UNIT = 16;
constexpr char ROOT_DIR_STR = '/';
constexpr char DOUBLE_SLASH_STR[] = "//";
constexpr int OPVERSION_SUBSTR_NUM = 2;
const std::string CURRENT_VERSION = "0x00000000";
const std::string HIAI_VERSION_PATH = "/data/data/hiai/version";

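// Save writes every compiled-model buffer in `caches` to cacheDir as
// "<modelName><index>.nncache", then records the file count, version, backend ID,
// per-file checksums and the device opVersion in "<modelName>cache_info.nncache".
// A typical call sequence (sketch only; error handling omitted, the cache directory
// below is just an illustrative value):
//   NNCompiledCache compiledCache;
//   compiledCache.SetBackend(backendID);   // must precede Save/Restore
//   compiledCache.SetModelName("model");
//   compiledCache.Save(caches, "/data/local/tmp/cache", version);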
OH_NN_ReturnCode NNCompiledCache::Save(const std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches,
                                       const std::string& cacheDir,
                                       uint32_t version)
{
    if (caches.empty()) {
        LOGE("[NNCompiledCache] Save failed, caches is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (m_device == nullptr) {
        LOGE("[NNCompiledCache] Save failed, m_device is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    OH_NN_ReturnCode ret = GenerateCacheFiles(caches, cacheDir, version);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] Save failed, error happened when calling GenerateCacheFiles.");
        return ret;
    }

    LOGI("[NNCompiledCache] Save success. %zu caches are saved.", caches.size());
    return OH_NN_SUCCESS;
}

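// Restore reloads the cache files written by Save. It validates
// "<modelName>cache_info.nncache" via CheckCacheInfo, compares the requested
// version with the cached one, then reads each "<modelName><index>.nncache"
// into device memory and verifies its checksum before appending it to `caches`.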
OH_NN_ReturnCode NNCompiledCache::Restore(const std::string& cacheDir,
                                          uint32_t version,
                                          std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches)
{
    if (cacheDir.empty()) {
        LOGE("[NNCompiledCache] Restore failed, cacheDir is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!caches.empty()) {
        LOGE("[NNCompiledCache] Restore failed, caches is not empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (m_device == nullptr) {
        LOGE("[NNCompiledCache] Restore failed, m_device is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    std::string cacheInfoPath = cacheDir + "/" + m_modelName + "cache_info.nncache";
    char path[PATH_MAX];
    if (realpath(cacheInfoPath.c_str(), path) == nullptr) {
        LOGE("[NNCompiledCache] Restore failed, fail to get the real path of cacheInfoPath.");
        return OH_NN_INVALID_PARAMETER;
    }
    if (access(cacheInfoPath.c_str(), F_OK) != 0) {
        LOGE("[NNCompiledCache] Restore failed, cacheInfoPath does not exist.");
        return OH_NN_INVALID_PARAMETER;
    }

    NNCompiledCacheInfo cacheInfo;
    OH_NN_ReturnCode ret = CheckCacheInfo(cacheInfo, cacheInfoPath);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] Restore failed, error happened when calling CheckCacheInfo.");
        return ret;
    }

    if (static_cast<int64_t>(version) > cacheInfo.version) {
        LOGE("[NNCompiledCache] Restore failed, the version does not match the cache files.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (static_cast<int64_t>(version) < cacheInfo.version) {
        LOGE("[NNCompiledCache] Restore failed, the current version is lower than the cache files, "
             "please set a higher version.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    for (uint32_t i = 0; i < cacheInfo.fileNumber; ++i) {
        std::string cacheModelPath = cacheDir + "/" + m_modelName + std::to_string(i) + ".nncache";
        if (access(cacheModelPath.c_str(), F_OK) != 0) {
            LOGE("[NNCompiledCache] Restore failed, %{public}s does not exist.", cacheModelPath.c_str());
            return OH_NN_INVALID_PARAMETER;
        }

        OHOS::NeuralNetworkRuntime::Buffer modelBuffer;
        ret = ReadCacheModelFile(cacheModelPath, modelBuffer);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNCompiledCache] Restore failed, error happened when calling ReadCacheModelFile.");
            return ret;
        }

        if (GetCrc16(static_cast<char*>(modelBuffer.data), modelBuffer.length) !=
            cacheInfo.modelCheckSum[i]) {
            LOGE("[NNCompiledCache] Restore failed, the cache model file %{public}s has been changed.",
                 cacheModelPath.c_str());
            return OH_NN_INVALID_FILE;
        }

        caches.emplace_back(std::move(modelBuffer));
    }

    return ret;
}

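// SetBackend resolves the backend registered under backendID and keeps its device
// handle in m_device. Restore uses the device to allocate model buffers, and both
// Save and Restore require m_device to be set, so this must be called first.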
OH_NN_ReturnCode NNCompiledCache::SetBackend(size_t backendID)
{
    BackendManager& backendManager = BackendManager::GetInstance();
    std::shared_ptr<Backend> backend = backendManager.GetBackend(backendID);
    if (backend == nullptr) {
        LOGE("[NNCompiledCache] SetBackend failed, backend with backendID %{public}zu does not exist.", backendID);
        return OH_NN_INVALID_PARAMETER;
    }

    std::shared_ptr<NNBackend> nnBackend = std::reinterpret_pointer_cast<NNBackend>(backend);
    m_device = nnBackend->GetDevice();
    if (m_device == nullptr) {
        LOGE("[NNCompiledCache] SetBackend failed, device with backendID %{public}zu does not exist.", backendID);
        return OH_NN_FAILED;
    }

    m_backendID = backendID;
    return OH_NN_SUCCESS;
}

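// SetModelName records the prefix used for every cache file name
// ("<modelName><index>.nncache" and "<modelName>cache_info.nncache").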
void NNCompiledCache::SetModelName(const std::string& modelName)
{
    m_modelName = modelName;
}

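// GenerateCacheFiles builds the cache-info table and drives the whole save:
// the table holds cacheNumber, version and backendID, followed by one checksum
// per cache file and the device opVersion, each entry stored as an int64_t.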
OH_NN_ReturnCode NNCompiledCache::GenerateCacheFiles(const std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches,
                                                     const std::string& cacheDir,
                                                     uint32_t version) const
{
    const size_t cacheNumber = caches.size();
    uint32_t cacheSize = NUMBER_CACHE_INFO_MEMBERS + cacheNumber + 1; // +1 for the opVersion entry
    std::unique_ptr<int64_t[]> cacheInfo = CreateUniquePtr<int64_t[]>(cacheSize);
    if (cacheInfo == nullptr) {
        LOGE("[NNCompiledCache] GenerateCacheFiles failed, fail to create cacheInfo instance.");
        return OH_NN_MEMORY_ERROR;
    }

    OH_NN_ReturnCode ret = GenerateCacheModel(caches, cacheInfo, cacheDir, version);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] GenerateCacheFiles failed, error happened when calling GenerateCacheModel.");
        return ret;
    }

    uint32_t infoCharNumber = cacheSize * sizeof(uint64_t);
    ret = WriteCacheInfo(infoCharNumber, cacheInfo, cacheDir);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] GenerateCacheFiles failed, error happened when calling WriteCacheInfo.");
        return ret;
    }

    return OH_NN_SUCCESS;
}

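// GenerateCacheModel writes each compiled-model buffer to
// "<cacheDir>/<modelName><index>.nncache" and fills the cache-info table with the
// header fields, the per-file checksums, and the opVersion read from
// HIAI_VERSION_PATH (falling back to CURRENT_VERSION when that file is absent).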
OH_NN_ReturnCode NNCompiledCache::GenerateCacheModel(const std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches,
                                                     std::unique_ptr<int64_t[]>& cacheInfo,
                                                     const std::string& cacheDir,
                                                     uint32_t version) const
{
    size_t cacheNumber = caches.size();

    auto cacheInfoPtr = cacheInfo.get();
    *cacheInfoPtr++ = static_cast<int64_t>(cacheNumber);
    *cacheInfoPtr++ = static_cast<int64_t>(version);
    *cacheInfoPtr++ = static_cast<int64_t>(m_backendID); // Should call SetBackend first.

    // standardize the input dir
    OH_NN_ReturnCode ret = OH_NN_SUCCESS;
    char path[PATH_MAX];
    if (realpath(cacheDir.c_str(), path) == nullptr) {
        LOGE("[NNCompiledCache] GenerateCacheModel failed, fail to get the real path of cacheDir.");
        return OH_NN_INVALID_PARAMETER;
    }

    // verify that the standardized path is valid
    ret = VerifyCachePath(path);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] GenerateCacheModel failed, fail to verify the file path of cacheDir.");
        return ret;
    }

    std::string cachePath = path;
    for (size_t i = 0; i < cacheNumber; ++i) {
        std::string cacheModelFile = cachePath + "/" + m_modelName + std::to_string(i) + ".nncache";
        std::ofstream cacheModelStream(cacheModelFile, std::ios::binary | std::ios::out | std::ios::trunc);
        if (cacheModelStream.fail()) {
            LOGE("[NNCompiledCache] GenerateCacheModel failed, model cache file is invalid.");
            return OH_NN_INVALID_PARAMETER;
        }

        uint64_t checkSum =
            static_cast<uint64_t>(GetCrc16(static_cast<char*>(caches[i].data), caches[i].length));
        *cacheInfoPtr++ = checkSum;
        if (!cacheModelStream.write(static_cast<const char*>(caches[i].data), caches[i].length)) {
            LOGE("[NNCompiledCache] GenerateCacheModel failed, fail to write cache model.");
            cacheModelStream.close();
            return OH_NN_SAVE_CACHE_EXCEPTION;
        }

        cacheModelStream.close();
    }

    std::string currentVersion = CURRENT_VERSION;
    char versionPath[PATH_MAX];
    if (realpath(HIAI_VERSION_PATH.c_str(), versionPath) != nullptr) {
        std::ifstream inf(versionPath);
        if (inf.is_open()) {
            getline(inf, currentVersion);
        }
        inf.close();
    }

    // skip the "0x" prefix before converting the version string
    int currentOpVersion = std::stoi(currentVersion.substr(OPVERSION_SUBSTR_NUM));
    *cacheInfoPtr++ = currentOpVersion;

    return OH_NN_SUCCESS;
}

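// WriteCacheInfo dumps the cache-info table built by GenerateCacheModel into
// "<cacheDir>/<modelName>cache_info.nncache"; cacheSize is the table size in bytes.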
OH_NN_ReturnCode NNCompiledCache::WriteCacheInfo(uint32_t cacheSize,
                                                 std::unique_ptr<int64_t[]>& cacheInfo,
                                                 const std::string& cacheDir) const
{
    // standardize the input dir
    char path[PATH_MAX];
    if (realpath(cacheDir.c_str(), path) == nullptr) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, fail to get the real path of cacheDir.");
        return OH_NN_INVALID_PARAMETER;
    }

    // verify that the standardized path is valid
    OH_NN_ReturnCode ret = VerifyCachePath(path);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, fail to verify the file path of cacheDir.");
        return ret;
    }

    std::string cachePath = path;
    std::string cacheInfoPath = cachePath + "/" + m_modelName + "cache_info.nncache";
    std::ofstream cacheInfoStream(cacheInfoPath, std::ios::binary | std::ios::out | std::ios::trunc);
    if (cacheInfoStream.fail()) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, model cache info file is invalid.");
        return OH_NN_INVALID_FILE;
    }

    if (!cacheInfoStream.write(reinterpret_cast<const char*>(cacheInfo.get()), cacheSize)) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, fail to write cache info.");
        cacheInfoStream.close();
        return OH_NN_SAVE_CACHE_EXCEPTION;
    }

    cacheInfoStream.close();
    return OH_NN_SUCCESS;
}

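// CheckCacheInfo parses "<modelName>cache_info.nncache": the leading header
// (fileNumber, version, deviceId), one checksum per cache file, and the trailing
// opVersion. It rejects the cache when the stored deviceId differs from m_backendID.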
OH_NN_ReturnCode NNCompiledCache::CheckCacheInfo(NNCompiledCacheInfo& modelCacheInfo,
                                                 const std::string& cacheInfoPath) const
{
    // cacheInfoPath is validated outside.
    std::ifstream infoCacheFile(cacheInfoPath.c_str(), std::ios::in | std::ios::binary);
    if (!infoCacheFile) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed, error happened when opening cache info file.");
        return OH_NN_INVALID_FILE;
    }

    int charNumber = NUMBER_CACHE_INFO_MEMBERS * sizeof(uint64_t);
    if (!infoCacheFile.read(reinterpret_cast<char*>(&(modelCacheInfo)), charNumber)) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed, error happened when reading cache info file.");
        infoCacheFile.close();
        return OH_NN_INVALID_FILE;
    }

    // modelCacheInfo.deviceId type is int64_t,
    // it is transformed from a size_t value, so the transform here will not truncate the value.
    size_t deviceId = static_cast<size_t>(modelCacheInfo.deviceId);
    if (deviceId != m_backendID) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed. The deviceId in the cache files "
             "is different from the current deviceId, "
             "please change the cache directory or the current deviceId.");
        infoCacheFile.close();
        return OH_NN_INVALID_PARAMETER;
    }

    std::vector<int64_t> modelCheckSum;
    modelCheckSum.resize(modelCacheInfo.fileNumber);
    modelCacheInfo.modelCheckSum.resize(modelCacheInfo.fileNumber);
    if (!infoCacheFile.read(reinterpret_cast<char*>(&modelCheckSum[0]),
                            modelCacheInfo.fileNumber * sizeof(uint64_t))) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed. The info cache file has been changed.");
        infoCacheFile.close();
        return OH_NN_INVALID_FILE;
    }

    for (uint32_t i = 0; i < modelCacheInfo.fileNumber; ++i) {
        modelCacheInfo.modelCheckSum[i] = static_cast<unsigned short>(modelCheckSum[i]);
    }

    if (!infoCacheFile.read(reinterpret_cast<char*>(&(modelCacheInfo.opVersion)), sizeof(uint64_t))) {
        LOGW("[NNCompiledCache] CheckCacheInfo failed to read opVersion from the cache info file.");
    }

    infoCacheFile.close();
    return OH_NN_SUCCESS;
}

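// ReadCacheModelFile loads one cache model file into memory allocated via
// m_device->AllocateBuffer and returns it through `cache`.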
OH_NN_ReturnCode NNCompiledCache::ReadCacheModelFile(const std::string& filePath,
                                                     OHOS::NeuralNetworkRuntime::Buffer& cache) const
{
    // filePath is validated in NNCompiledCache::Restore, no need to check again.
    std::ifstream ifs(filePath.c_str(), std::ios::in | std::ios::binary);
    if (!ifs) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, file is invalid.");
        return OH_NN_INVALID_FILE;
    }

    int fsize{-1};
    OH_NN_ReturnCode ret = GetCacheFileLength(ifs, fsize);
    if (ret != OH_NN_SUCCESS) {
        ifs.close();
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, failed to get the length of file %{public}s.",
             filePath.c_str());
        return ret;
    }

    ifs.seekg(0, std::ios::beg);
    if (!ifs.good()) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, file is invalid.");
        ifs.close();
        return OH_NN_INVALID_FILE;
    }

    char* ptr = static_cast<char*>(m_device->AllocateBuffer(fsize));
    if (ptr == nullptr) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, failed to allocate memory.");
        ifs.close();
        return OH_NN_MEMORY_ERROR;
    }

    ifs.read(ptr, fsize);
    if (!ifs.good()) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, failed to read file.");
        ifs.close();
        m_device->ReleaseBuffer(ptr);
        ptr = nullptr;
        return OH_NN_INVALID_FILE;
    }

    ifs.close();
    cache.data = ptr;
    cache.length = static_cast<size_t>(fsize); // fsize should be non-negative, safe to cast.
    return OH_NN_SUCCESS;
}

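// GetCrc16 computes a 16-bit checksum over `buffer`: it sums the data as 16-bit
// words (plus a trailing byte if the length is odd), folds the carries back into
// the low 16 bits, and returns the one's complement of the result. Despite the
// name, this is an Internet-style ones'-complement checksum rather than a polynomial CRC.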
unsigned short NNCompiledCache::GetCrc16(char* buffer, size_t length) const
{
    unsigned int sum = 0;
    while (length > 1) {
        sum += *(reinterpret_cast<unsigned short*>(buffer));
        length -= sizeof(unsigned short);
        buffer += sizeof(unsigned short);
    }

    if (length > 0) {
        sum += *(reinterpret_cast<unsigned char*>(buffer));
    }

    while (sum >> HEX_UNIT) {
        sum = (sum >> HEX_UNIT) + (sum & 0xffff);
    }

    return static_cast<unsigned short>(~sum);
}

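// GetCacheFileLength seeks to the end of the stream to measure the file size and
// rejects empty files as well as files larger than MAX_MODEL_SIZE (200 MB).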
OH_NN_ReturnCode NNCompiledCache::GetCacheFileLength(std::ifstream& ifs, int& fileSize) const
{
    ifs.seekg(0, std::ios::end);
    if (!ifs.good()) {
        LOGE("[NNCompiledCache] GetCacheFileLength failed, fail to set the position of the next character "
             "to be extracted from the input stream.");
        return OH_NN_FAILED;
    }

    int handleValue = ifs.tellg();
    if (handleValue == -1) {
        LOGE("[NNCompiledCache] GetCacheFileLength failed, fail to get position of the input stream.");
        return OH_NN_INVALID_FILE;
    }

    if ((handleValue > MAX_MODEL_SIZE) || (handleValue == NULL_PTR_LENGTH)) {
        LOGE("[NNCompiledCache] GetCacheFileLength failed, unable to read huge or empty input stream, "
             "get cache file size=%{public}d",
             handleValue);
        return OH_NN_INVALID_FILE;
    }

    fileSize = handleValue;
    return OH_NN_SUCCESS;
}

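// VerifyCachePath checks that the standardized cache path is absolute (starts
// with '/') and does not contain consecutive slashes.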
OH_NN_ReturnCode NNCompiledCache::VerifyCachePath(const std::string& cachePath) const
{
    // exception: the input path does not start with '/'.
    if (cachePath.find(ROOT_DIR_STR) != size_t(0)) {
        LOGE("[NNCompiledCache] VerifyCachePath failed, input file dir=%{public}s is invalid, "
             "should start with '/'.",
             cachePath.c_str());
        return OH_NN_INVALID_FILE;
    }

    // exception: the input path contains consecutive double '/'.
    if (cachePath.find(DOUBLE_SLASH_STR) != std::string::npos) {
        LOGE("[NNCompiledCache] VerifyCachePath failed, input file dir=%{public}s is invalid, "
             "containing double '/'.",
             cachePath.c_str());
        return OH_NN_INVALID_FILE;
    }

    return OH_NN_SUCCESS;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS