/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef NEURAL_NETWORK_RUNTIME_DEVICE_H
#define NEURAL_NETWORK_RUNTIME_DEVICE_H

#include <string>
#include <vector>
#include <memory>

#include "interfaces/kits/c/neural_network_runtime_type.h"
#include "cpp_type.h"
#include "prepared_model.h"
#include "mindir.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
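// Abstract base class for a Neural Network Runtime backend device. A concrete device reports
// its identity, type, status, and optional capabilities, tells the runtime which operations of
// a model it supports, prepares models for execution, and manages device buffers.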
class Device {
public:
    Device() = default;
    virtual ~Device() = default;

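    // Device identity, type, status, and per-operation support queries.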
    virtual OH_NN_ReturnCode GetDeviceName(std::string& name) = 0;
    virtual OH_NN_ReturnCode GetVendorName(std::string& name) = 0;
    virtual OH_NN_ReturnCode GetDeviceType(OH_NN_DeviceType& deviceType) = 0;
    virtual OH_NN_ReturnCode GetDeviceStatus(DeviceStatus& status) = 0;
    virtual OH_NN_ReturnCode GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                                   std::vector<bool>& ops) = 0;

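    // Optional feature queries: float16 precision, performance mode, priority, dynamic input
    // shapes, and model caching.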
    virtual OH_NN_ReturnCode IsFloat16PrecisionSupported(bool& isSupported) = 0;
    virtual OH_NN_ReturnCode IsPerformanceModeSupported(bool& isSupported) = 0;
    virtual OH_NN_ReturnCode IsPrioritySupported(bool& isSupported) = 0;
    virtual OH_NN_ReturnCode IsDynamicInputSupported(bool& isSupported) = 0;
    virtual OH_NN_ReturnCode IsModelCacheSupported(bool& isSupported) = 0;

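    // Model preparation, either from a LiteGraph or from a previously generated model cache.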
    virtual OH_NN_ReturnCode PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                          const ModelConfig& config,
                                          std::shared_ptr<PreparedModel>& preparedModel) = 0;
    virtual OH_NN_ReturnCode PrepareModelFromModelCache(const std::vector<ModelBuffer>& modelCache,
                                                        const ModelConfig& config,
                                                        std::shared_ptr<PreparedModel>& preparedModel) = 0;

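    // Device buffer allocation and release.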
    virtual void* AllocateBuffer(size_t length) = 0;
    virtual OH_NN_ReturnCode ReleaseBuffer(const void* buffer) = 0;
};
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // NEURAL_NETWORK_RUNTIME_DEVICE_H
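
/*
 * Illustrative sketch only (not part of this interface): a minimal backend might implement
 * Device along the following lines. The class name ExampleCpuDevice, the reported device and
 * vendor names, the DeviceStatus::AVAILABLE enumerator, the LiteGraph::all_nodes_ member, and
 * the malloc/free-backed buffers are assumptions made for illustration; a real backend would
 * delegate to its driver or HDI service instead of returning constants.
 *
 *     #include <cstdlib>
 *
 *     class ExampleCpuDevice : public Device {
 *     public:
 *         OH_NN_ReturnCode GetDeviceName(std::string& name) override
 *         {
 *             name = "example_cpu";    // placeholder identity for the sketch
 *             return OH_NN_SUCCESS;
 *         }
 *         OH_NN_ReturnCode GetVendorName(std::string& name) override
 *         {
 *             name = "example_vendor";
 *             return OH_NN_SUCCESS;
 *         }
 *         OH_NN_ReturnCode GetDeviceType(OH_NN_DeviceType& deviceType) override
 *         {
 *             deviceType = OH_NN_CPU;
 *             return OH_NN_SUCCESS;
 *         }
 *         OH_NN_ReturnCode GetDeviceStatus(DeviceStatus& status) override
 *         {
 *             status = DeviceStatus::AVAILABLE;    // assumed enumerator from cpp_type.h
 *             return OH_NN_SUCCESS;
 *         }
 *         OH_NN_ReturnCode GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
 *                                                std::vector<bool>& ops) override
 *         {
 *             // Claim support for every node; a real backend inspects each node's op type.
 *             ops.assign(model->all_nodes_.size(), true);    // all_nodes_ assumed from mindir's LiteGraph
 *             return OH_NN_SUCCESS;
 *         }
 *
 *         // Conservative capability report: every optional feature is unsupported.
 *         OH_NN_ReturnCode IsFloat16PrecisionSupported(bool& v) override { v = false; return OH_NN_SUCCESS; }
 *         OH_NN_ReturnCode IsPerformanceModeSupported(bool& v) override { v = false; return OH_NN_SUCCESS; }
 *         OH_NN_ReturnCode IsPrioritySupported(bool& v) override { v = false; return OH_NN_SUCCESS; }
 *         OH_NN_ReturnCode IsDynamicInputSupported(bool& v) override { v = false; return OH_NN_SUCCESS; }
 *         OH_NN_ReturnCode IsModelCacheSupported(bool& v) override { v = false; return OH_NN_SUCCESS; }
 *
 *         OH_NN_ReturnCode PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
 *                                       const ModelConfig& config,
 *                                       std::shared_ptr<PreparedModel>& preparedModel) override
 *         {
 *             // A real backend compiles the graph here and wraps the result in its own
 *             // PreparedModel subclass; this stub simply reports failure.
 *             return OH_NN_FAILED;
 *         }
 *         OH_NN_ReturnCode PrepareModelFromModelCache(const std::vector<ModelBuffer>& modelCache,
 *                                                     const ModelConfig& config,
 *                                                     std::shared_ptr<PreparedModel>& preparedModel) override
 *         {
 *             return OH_NN_FAILED;    // model caching not supported by this stub
 *         }
 *
 *         void* AllocateBuffer(size_t length) override
 *         {
 *             return std::malloc(length);
 *         }
 *         OH_NN_ReturnCode ReleaseBuffer(const void* buffer) override
 *         {
 *             std::free(const_cast<void*>(buffer));
 *             return OH_NN_SUCCESS;
 *         }
 *     };
 */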