/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @addtogroup MindSpore
 * @{
 *
 * @brief Provides model-inference related interfaces of MindSpore Lite.
 *
 * @Syscap SystemCapability.Ai.MindSpore
 * @since 9
 */

/**
 * @file types.h
 *
 * @brief Provides the model file types and device types supported by MindSpore Lite.
 *
 * @library libmindspore_lite_ndk.so
 * @since 9
 */
35 #ifndef MINDSPORE_INCLUDE_C_API_TYPES_C_H
36 #define MINDSPORE_INCLUDE_C_API_TYPES_C_H
37 
38 #ifdef __cplusplus
39 extern "C" {
40 #endif
41 
42 #ifndef OH_AI_API
43 #ifdef _WIN32
44 #define OH_AI_API __declspec(dllexport)
45 #else
46 #define OH_AI_API __attribute__((visibility("default")))
47 #endif
48 #endif
49 
/**
 * @brief Model file types supported by MindSpore Lite.
 *
 * @since 9
 */
typedef enum OH_AI_ModelType {
  /** MindIR model file */
  OH_AI_MODELTYPE_MINDIR = 0,
  // insert new data type here
  /** Invalid model type */
  OH_AI_MODELTYPE_INVALID = 0xFFFFFFFF
} OH_AI_ModelType;

/**
 * @brief Device types on which a model can be deployed.
 *
 * @since 9
 */
typedef enum OH_AI_DeviceType {
  /** CPU device */
  OH_AI_DEVICETYPE_CPU = 0,
  /** GPU device */
  OH_AI_DEVICETYPE_GPU,
  /** Kirin NPU device */
  OH_AI_DEVICETYPE_KIRIN_NPU,
  // add new type here
  // ohos-only device range: [60, 80)
  /** NNRT device (OpenHarmony-only range) */
  OH_AI_DEVICETYPE_NNRT = 60,
  /** Invalid device type */
  OH_AI_DEVICETYPE_INVALID = 100,
} OH_AI_DeviceType;

/**
 * @brief Hardware categories an NNRT backend device may belong to.
 *
 * @since 9
 */
typedef enum OH_AI_NNRTDeviceType {
  /** Devices that are not CPU, GPU, or dedicated accelerator */
  OH_AI_NNRTDEVICE_OTHERS = 0,
  /** CPU device */
  OH_AI_NNRTDEVICE_CPU = 1,
  /** GPU device */
  OH_AI_NNRTDEVICE_GPU = 2,
  /** Dedicated hardware accelerator */
  OH_AI_NNRTDEVICE_ACCELERATOR = 3,
} OH_AI_NNRTDeviceType;

/**
 * @brief Performance-mode preferences for model execution.
 *
 * @since 9
 */
typedef enum OH_AI_PerformanceMode {
  /** No performance mode preference */
  OH_AI_PERFORMANCE_NONE = 0,
  /** Low power consumption mode */
  OH_AI_PERFORMANCE_LOW = 1,
  /** Medium performance mode */
  OH_AI_PERFORMANCE_MEDIUM = 2,
  /** High performance mode */
  OH_AI_PERFORMANCE_HIGH = 3,
  /** Ultimate performance mode */
  OH_AI_PERFORMANCE_EXTREME = 4
} OH_AI_PerformanceMode;

/**
 * @brief Scheduling-priority preferences for model execution.
 *
 * @since 9
 */
typedef enum OH_AI_Priority {
  /** No priority preference */
  OH_AI_PRIORITY_NONE = 0,
  /** Low priority */
  OH_AI_PRIORITY_LOW = 1,
  /** Medium priority */
  OH_AI_PRIORITY_MEDIUM = 2,
  /** High priority */
  OH_AI_PRIORITY_HIGH = 3
} OH_AI_Priority;

/**
 * @brief Training-graph optimization levels.
 *
 * Note: value 1 is intentionally skipped; levels mirror compiler-style
 * O0/O2/O3 naming.
 *
 * @since 9
 */
typedef enum OH_AI_OptimizationLevel {
  /** Do not change */
  OH_AI_KO0 = 0,
  /** Cast network to float16, keep batchnorm and loss in float32 */
  OH_AI_KO2 = 2,
  /** Cast network to float16, including batchnorm */
  OH_AI_KO3 = 3,
  /** Choose optimization based on device */
  OH_AI_KAUTO = 4,
  /** Invalid optimization level */
  OH_AI_KOPTIMIZATIONTYPE = 0xFFFFFFFF
} OH_AI_OptimizationLevel;

/**
 * @brief Quantization types.
 *
 * @since 9
 */
typedef enum OH_AI_QuantizationType {
  /** No quantization */
  OH_AI_NO_QUANT = 0,
  /** Weight quantization */
  OH_AI_WEIGHT_QUANT = 1,
  /** Full quantization */
  OH_AI_FULL_QUANT = 2,
  /** Unknown quantization type */
  OH_AI_UNKNOWN_QUANT_TYPE = 0xFFFFFFFF
} OH_AI_QuantizationType;

120 typedef struct NNRTDeviceDesc NNRTDeviceDesc;
121 #ifdef __cplusplus
122 }
123 #endif
124 #endif  // MINDSPORE_INCLUDE_C_API_TYPES_C_H
125