• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

16 #ifndef NEURAL_NETWORK_BACKEND_NNCOMPILED_CACHE_H
17 #define NEURAL_NETWORK_BACKEND_NNCOMPILED_CACHE_H
18 
#include <cstdint>
#include <fstream>
#include <memory>
#include <string>
#include <vector>

#include "device.h"
#include "interfaces/kits/c/neural_network_runtime/neural_network_runtime.h"
#include "tensor_desc.h"
26 
27 namespace OHOS {
28 namespace NeuralNetworkRuntime {
// Sentinel marking a cache version as invalid: UINT32_MAX is reserved and must never
// be used as a real version number. `constexpr` (rather than plain `const`) makes the
// compile-time nature of the constant explicit.
// NOTE(review): "CAHCE" is a typo for "CACHE"; the name is kept as-is for source
// compatibility with existing callers — renaming would break them.
constexpr uint32_t INVALID_CAHCE_VERSION = UINT32_MAX;
30 
// Metadata parsed from / written to the on-disk cache-info file that accompanies a set
// of compiled model cache files. Field meanings below are inferred from the names and
// the CheckCacheInfo/WriteCacheInfo API — confirm against nncompiled_cache.cpp.
struct NNCompiledCacheInfo {
    int64_t fileNumber{0};                       // presumably the count of cache model files — TODO confirm
    int64_t version{0};                          // cache format/model version; compared against the caller-supplied version
    int64_t deviceId{0};                         // backend/device the cache was generated for — verify semantics in .cpp
    std::vector<unsigned short> modelCheckSum;   // CRC16 checksums (see GetCrc16), presumably one per cache file
    int64_t opVersion{0};                        // operator-set version — TODO confirm against writer side
};
38 
// Saves and restores compiled-model cache buffers for a specific backend/device.
// Declarations only — behavior notes below are derived from the signatures and should
// be verified against nncompiled_cache.cpp.
class NNCompiledCache {
public:
    NNCompiledCache() = default;
    ~NNCompiledCache() = default;

    // Writes the given cache buffers into cacheDir, tagged with `version`.
    // Returns an OH_NN_ReturnCode indicating success or the failure cause.
    OH_NN_ReturnCode Save(const std::vector<Buffer>& caches,
                          const std::string& cacheDir,
                          uint32_t version);
    // Loads cache buffers from cacheDir into `caches`, presumably validating them
    // against `version` and the stored checksums — confirm in the implementation.
    OH_NN_ReturnCode Restore(const std::string& cacheDir,
                             uint32_t version,
                             std::vector<Buffer>& caches);

    // Binds this cache object to a backend; likely resolves m_device from backendID.
    OH_NN_ReturnCode SetBackend(size_t backendID);
    // Sets the model name used when composing cache file names — TODO confirm usage.
    void SetModelName(const std::string& modelName);
    // Writes the cache-info metadata block (cacheSize entries from cacheInfo) to cacheDir.
    OH_NN_ReturnCode WriteCacheInfo(uint32_t cacheSize,
                                    std::unique_ptr<int64_t[]>& cacheInfo,
                                    const std::string& cacheDir) const;
    // Parses the cache-info file at cacheInfoPath into modelCacheInfo and validates it.
    OH_NN_ReturnCode CheckCacheInfo(NNCompiledCacheInfo& modelCacheInfo, const std::string& cacheInfoPath) const;

private:
    // Emits all cache artifacts (model files + info file) for Save().
    OH_NN_ReturnCode GenerateCacheFiles(const std::vector<Buffer>& caches,
                                        const std::string& cacheDir,
                                        uint32_t version) const;
    // Writes the cache model files and fills cacheInfo with their metadata.
    OH_NN_ReturnCode GenerateCacheModel(const std::vector<Buffer>& caches,
                                        std::unique_ptr<int64_t[]>& cacheInfo,
                                        const std::string& cacheDir,
                                        uint32_t version) const;
    // Reads a single cache model file into `cache` (ownership semantics of the
    // buffer are defined by the implementation — check who frees cache.data).
    OH_NN_ReturnCode ReadCacheModelFile(const std::string& file, Buffer& cache) const;
    // Computes a CRC16 checksum over `length` bytes of `buffer` (matches
    // NNCompiledCacheInfo::modelCheckSum's element type).
    unsigned short GetCrc16(char* buffer, size_t length) const;
    // Determines the byte length of the open cache file stream, returned via fileSize.
    OH_NN_ReturnCode GetCacheFileLength(std::ifstream& ifs, int& fileSize) const;
    // Validates cachePath (presumably existence/permissions/canonical form — TODO confirm).
    OH_NN_ReturnCode VerifyCachePath(const std::string& cachePath) const;

private:
    size_t m_backendID {0};                    // backend bound via SetBackend()
    std::string m_modelName;                   // model name set via SetModelName()
    std::shared_ptr<Device> m_device {nullptr}; // device resolved for m_backendID
};
76 
77 } // namespace NeuralNetworkRuntime
78 } // namespace OHOS
79 
80 #endif // NEURAL_NETWORK_BACKEND_NNCOMPILED_CACHE_H
81