• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @addtogroup MindSpore
 * @{
 *
 * @brief Provides APIs related to MindSpore Lite model inference.
 *
 * @Syscap SystemCapability.Ai.MindSpore
 * @since 9
 */

/**
 * @file types.h
 * @kit MindSporeLiteKit
 * @brief Provides the model file types and device types supported by MindSpore Lite.
 *
 * @library libmindspore_lite_ndk.so
 * @since 9
 */
35 #ifndef MINDSPORE_INCLUDE_C_API_TYPES_C_H
36 #define MINDSPORE_INCLUDE_C_API_TYPES_C_H
37 
38 #ifdef __cplusplus
39 extern "C" {
40 #endif
41 
/* Symbol-export macro for the public API: DLL export on Windows, default
 * ELF visibility elsewhere. Guarded so a build system may predefine it. */
#ifndef OH_AI_API
#ifdef _WIN32
#define OH_AI_API __declspec(dllexport)
#else
#define OH_AI_API __attribute__((visibility("default")))
#endif
#endif
49 
/**
 * @brief Model file type.
 *
 * @since 9
 */
typedef enum OH_AI_ModelType {
    /** The model type is MindIR, and the corresponding model file extension is .ms. */
    OH_AI_MODELTYPE_MINDIR = 0,
    /** Invalid model type */
    OH_AI_MODELTYPE_INVALID = 0xFFFFFFFF
} OH_AI_ModelType;
61 
/**
 * @brief Device type information.
 *
 * @since 9
 */
typedef enum OH_AI_DeviceType {
    /** CPU device */
    OH_AI_DEVICETYPE_CPU = 0,
    /** GPU device */
    OH_AI_DEVICETYPE_GPU,
    /** Kirin NPU device */
    OH_AI_DEVICETYPE_KIRIN_NPU,
    /** NNRT device; OHOS-only device range: [60, 80) */
    OH_AI_DEVICETYPE_NNRT = 60,
    /** Invalid device type */
    OH_AI_DEVICETYPE_INVALID = 100,
} OH_AI_DeviceType;
79 
/**
 * @brief The hardware device type managed by NNRT.
 *
 * @since 10
 */
typedef enum OH_AI_NNRTDeviceType {
    /** Devices that are not CPU, GPU, or dedicated accelerator */
    OH_AI_NNRTDEVICE_OTHERS = 0,
    /** CPU device */
    OH_AI_NNRTDEVICE_CPU = 1,
    /** GPU device */
    OH_AI_NNRTDEVICE_GPU = 2,
    /** Dedicated hardware accelerator */
    OH_AI_NNRTDEVICE_ACCELERATOR = 3,
} OH_AI_NNRTDeviceType;
95 
/**
 * @brief Performance mode of the NNRT hardware device.
 *
 * @since 10
 */
typedef enum OH_AI_PerformanceMode {
    /** No performance mode preference */
    OH_AI_PERFORMANCE_NONE = 0,
    /** Low power consumption mode */
    OH_AI_PERFORMANCE_LOW = 1,
    /** Medium performance mode */
    OH_AI_PERFORMANCE_MEDIUM = 2,
    /** High performance mode */
    OH_AI_PERFORMANCE_HIGH = 3,
    /** Ultimate performance mode */
    OH_AI_PERFORMANCE_EXTREME = 4
} OH_AI_PerformanceMode;
113 
/**
 * @brief Priority of an NNRT inference task.
 *
 * @since 10
 */
typedef enum OH_AI_Priority {
    /** No priority preference */
    OH_AI_PRIORITY_NONE = 0,
    /** Low priority */
    OH_AI_PRIORITY_LOW = 1,
    /** Medium priority */
    OH_AI_PRIORITY_MEDIUM = 2,
    /** High priority */
    OH_AI_PRIORITY_HIGH = 3
} OH_AI_Priority;
129 
/**
 * @brief Optimization level for a training model.
 *
 * @since 11
 */
typedef enum OH_AI_OptimizationLevel {
    /** Do not change */
    OH_AI_KO0 = 0,
    /** Cast network to float16, keep batchnorm and loss in float32 */
    OH_AI_KO2 = 2,
    /** Cast network to float16, including batchnorm */
    OH_AI_KO3 = 3,
    /** Choose optimization based on device */
    OH_AI_KAUTO = 4,
    /** Invalid optimization level */
    OH_AI_KOPTIMIZATIONTYPE = 0xFFFFFFFF
} OH_AI_OptimizationLevel;
147 
/**
 * @brief Quantization type.
 *
 * @since 11
 */
typedef enum OH_AI_QuantizationType {
    /** No quantization (do not change the model) */
    OH_AI_NO_QUANT = 0,
    /** Weight quantization */
    OH_AI_WEIGHT_QUANT = 1,
    /** Full quantization */
    OH_AI_FULL_QUANT = 2,
    /** Invalid quantization type */
    OH_AI_UNKNOWN_QUANT_TYPE = 0xFFFFFFFF
} OH_AI_QuantizationType;
163 
/** Forward declaration of the NNRT device description; the struct is opaque to API users. */
typedef struct NNRTDeviceDesc NNRTDeviceDesc;
165 #ifdef __cplusplus
166 }
167 #endif
168 #endif  // MINDSPORE_INCLUDE_C_API_TYPES_C_H
169