/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 #ifndef MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
17 #define MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
18 
19 #include <stddef.h>
20 #include <stdint.h>
21 #include <stdbool.h>
22 #include "include/c_api/status_c.h"
23 #include "include/c_api/types_c.h"
24 
25 #ifdef __cplusplus
26 extern "C" {
27 #endif
28 
29 typedef void *OH_AI_ContextHandle;
30 typedef void *OH_AI_DeviceInfoHandle;
31 
32 /// \brief Create a context object.
33 ///
34 /// \return Context object handle.
35 OH_AI_API OH_AI_ContextHandle OH_AI_ContextCreate();
36 
37 /// \brief Destroy the context object.
38 ///
39 /// \param[in] context Context object handle address.
40 OH_AI_API void OH_AI_ContextDestroy(OH_AI_ContextHandle *context);
41 
42 /// \brief Set the number of threads at runtime.
43 ///
44 /// \param[in] context Context object handle.
45 /// \param[in] thread_num the number of threads at runtime.
46 OH_AI_API void OH_AI_ContextSetThreadNum(OH_AI_ContextHandle context, int32_t thread_num);
47 
48 /// \brief Obtain the current thread number setting.
49 ///
50 /// \param[in] context Context object handle.
51 ///
52 /// \return The current thread number setting.
53 OH_AI_API int32_t OH_AI_ContextGetThreadNum(const OH_AI_ContextHandle context);
54 
55 /// \brief Set the thread affinity to CPU cores.
56 ///
57 /// \param[in] context Context object handle.
58 /// \param[in] mode: 0: no affinities, 1: big cores first, 2: little cores first
59 OH_AI_API void OH_AI_ContextSetThreadAffinityMode(OH_AI_ContextHandle context, int mode);
60 
61 /// \brief Obtain the thread affinity of CPU cores.
62 ///
63 /// \param[in] context Context object handle.
64 ///
65 /// \return Thread affinity to CPU cores. 0: no affinities, 1: big cores first, 2: little cores first
66 OH_AI_API int OH_AI_ContextGetThreadAffinityMode(const OH_AI_ContextHandle context);
67 
68 /// \brief Set the thread lists to CPU cores.
69 ///
70 /// \note If core_list and mode are set by OH_AI_ContextSetThreadAffinityMode at the same time,
71 /// the core_list is effective, but the mode is not effective.
72 ///
73 /// \param[in] context Context object handle.
74 /// \param[in] core_list: a array of thread core lists.
75 /// \param[in] core_num The number of core.
76 OH_AI_API void OH_AI_ContextSetThreadAffinityCoreList(OH_AI_ContextHandle context, const int32_t *core_list, size_t core_num);
77 
78 /// \brief Obtain the thread lists of CPU cores.
79 ///
80 /// \param[in] context Context object handle.
81 /// \param[out] core_num The number of core.
82 ///
83 /// \return a array of thread core lists.
84 OH_AI_API const int32_t *OH_AI_ContextGetThreadAffinityCoreList(const OH_AI_ContextHandle context, size_t *core_num);
85 
86 /// \brief Set the status whether to perform model inference or training in parallel.
87 ///
88 /// \param[in] context Context object handle.
89 /// \param[in] is_parallel: true, parallel; false, not in parallel.
90 OH_AI_API void OH_AI_ContextSetEnableParallel(OH_AI_ContextHandle context, bool is_parallel);
91 
92 /// \brief Obtain the status whether to perform model inference or training in parallel.
93 ///
94 /// \param[in] context Context object handle.
95 ///
96 /// \return Bool value that indicates whether in parallel.
97 OH_AI_API bool OH_AI_ContextGetEnableParallel(const OH_AI_ContextHandle context);
98 
99 /// \brief Add device info to context object.
100 ///
101 /// \param[in] context Context object handle.
102 /// \param[in] device_info Device info object handle.
103 OH_AI_API void OH_AI_ContextAddDeviceInfo(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle device_info);
104 
105 /// \brief Create a device info object.
106 ///
107 /// \param[in] device_info Device info object handle.
108 ///
109 /// \return Device info object handle.
110 OH_AI_API OH_AI_DeviceInfoHandle OH_AI_DeviceInfoCreate(OH_AI_DeviceType device_type);
111 
112 /// \brief Destroy the device info object.
113 ///
114 /// \param[in] device_info Device info object handle address.
115 OH_AI_API void OH_AI_DeviceInfoDestroy(OH_AI_DeviceInfoHandle *device_info);
116 
117 /// \brief Set provider's name.
118 ///
119 /// \param[in] device_info Device info object handle.
120 /// \param[in] provider define the provider's name.
121 OH_AI_API void OH_AI_DeviceInfoSetProvider(OH_AI_DeviceInfoHandle device_info, const char *provider);
122 
123 /// \brief Obtain provider's name
124 ///
125 /// \param[in] device_info Device info object handle.
126 ///
127 /// \return provider's name.
128 OH_AI_API const char *OH_AI_DeviceInfoGetProvider(const OH_AI_DeviceInfoHandle device_info);
129 
130 /// \brief Set provider's device type.
131 ///
132 /// \param[in] device_info Device info object handle.
133 /// \param[in] device define the provider's device type. EG: CPU.
134 OH_AI_API void OH_AI_DeviceInfoSetProviderDevice(OH_AI_DeviceInfoHandle device_info, const char *device);
135 
136 /// \brief Obtain provider's device type.
137 ///
138 /// \param[in] device_info Device info object handle.
139 ///
140 /// \return provider's device type.
141 OH_AI_API const char *OH_AI_DeviceInfoGetProviderDevice(const OH_AI_DeviceInfoHandle device_info);
142 
143 /// \brief Obtain the device type of the device info.
144 ///
145 /// \param[in] device_info Device info object handle.
146 ///
147 /// \return Device Type of the device info.
148 OH_AI_API OH_AI_DeviceType OH_AI_DeviceInfoGetDeviceType(const OH_AI_DeviceInfoHandle device_info);
149 
150 /// \brief Set enables to perform the float16 inference, Only valid for CPU/GPU.
151 ///
152 /// \param[in] device_info Device info object handle.
153 /// \param[in] is_fp16 Enable float16 inference or not.
154 OH_AI_API void OH_AI_DeviceInfoSetEnableFP16(OH_AI_DeviceInfoHandle device_info, bool is_fp16);
155 
156 /// \brief Obtain enables to perform the float16 inference, Only valid for CPU/GPU.
157 ///
158 /// \param[in] device_info Device info object handle.
159 ///
160 /// \return Whether enable float16 inference.
161 OH_AI_API bool OH_AI_DeviceInfoGetEnableFP16(const OH_AI_DeviceInfoHandle device_info);
162 
163 /// \brief Set the NPU frequency, Only valid for NPU.
164 ///
165 /// \param[in] device_info Device info object handle.
166 /// \param[in] frequency Can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme
167 /// performance), default as 3.
168 OH_AI_API void OH_AI_DeviceInfoSetFrequency(OH_AI_DeviceInfoHandle device_info, int frequency);
169 
170 /// \brief Obtain the NPU frequency, Only valid for NPU.
171 ///
172 /// \param[in] device_info Device info object handle.
173 ///
174 /// \return NPU frequency
175 OH_AI_API int OH_AI_DeviceInfoGetFrequency(const OH_AI_DeviceInfoHandle device_info);
176 
177 /// \brief Obtain the all device descriptions in NNRT.
178 ///
179 /// \param[out] num Number of NNRT device description.
180 ///
181 /// \return NNRT device description array.
182 OH_AI_API NNRTDeviceDesc *OH_AI_GetAllNNRTDeviceDescs(size_t *num);
183 
184 /// \brief Obtain the specified element in NNRt device description array.
185 ///
186 /// \param[in] descs NNRT device description array.
187 /// \param[in] index Element index.
188 ///
189 /// \return NNRT device description.
190 OH_AI_API NNRTDeviceDesc *OH_AI_GetElementOfNNRTDeviceDescs(NNRTDeviceDesc *descs, size_t index);
191 
192 /// \brief Obtain the all device descriptions in NNRT.
193 ///
194 /// \param[out] num Number of NNRT device description.
195 ///
196 /// \return NNRT device description array.
197 OH_AI_API NNRTDeviceDesc *OH_AI_GetAllNNRTDeviceDescs(size_t *num);
198 
199 /// \brief Destroy the NNRT device descriptions returned by OH_AI_GetAllNNRTDeviceDescs().
200 ///
201 /// \param[in] desc NNRT device description array.
202 OH_AI_API void OH_AI_DestroyAllNNRTDeviceDescs(NNRTDeviceDesc **desc);
203 
204 /// \brief Obtain the device id in NNRT device description.
205 ///
206 /// \param[in] desc pointer to the NNRT device description instance.
207 ///
208 /// \return NNRT device id.
209 OH_AI_API size_t OH_AI_GetDeviceIdFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);
210 
211 /// \brief Obtain the device name in NNRT device description.
212 ///
213 /// \param[in] desc pointer to the NNRT device description instance.
214 ///
215 /// \return NNRT device name.
216 OH_AI_API const char *OH_AI_GetNameFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);
217 
218 /// \brief Obtain the device type in NNRT device description.
219 ///
220 /// \param[in] desc pointer to the NNRT device description instance.
221 ///
222 /// \return NNRT device type.
223 OH_AI_API OH_AI_NNRTDeviceType OH_AI_GetTypeFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);
224 
225 /// \brief Create the NNRT device info by exactly matching the specific device name.
226 ///
227 /// \param[in] name NNRt device name.
228 ///
229 /// \return Device info object handle.
230 OH_AI_API OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByName(const char *name);
231 
232 /// \brief Create the NNRT device info by finding the first device with the specific device type.
233 ///
234 /// \param[in] name NNRt device type.
235 ///
236 /// \return Device info object handle.
237 OH_AI_API OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByType(OH_AI_NNRTDeviceType type);
238 
239 /// \brief Set the NNRT device id, Only valid for NNRT.
240 ///
241 /// \param[in] device_info Device info object handle.
242 /// \param[in] device_id NNRT device id.
243 OH_AI_API void OH_AI_DeviceInfoSetDeviceId(OH_AI_DeviceInfoHandle device_info, size_t device_id);
244 
245 /// \brief Obtain the NNRT device id, Only valid for NNRT.
246 ///
247 /// \param[in] device_info Device info object handle.
248 ///
249 /// \return NNRT device id.
250 OH_AI_API size_t OH_AI_DeviceInfoGetDeviceId(const OH_AI_DeviceInfoHandle device_info);
251 
252 /// \brief Set the NNRT performance mode, Only valid for NNRT.
253 ///
254 /// \param[in] device_info Device info object handle.
255 /// \param[in] device_id NNRT performance mode.
256 OH_AI_API void OH_AI_DeviceInfoSetPerformanceMode(OH_AI_DeviceInfoHandle device_info, OH_AI_PerformanceMode mode);
257 
258 /// \brief Obtain the NNRT performance mode, Only valid for NNRT.
259 ///
260 /// \param[in] device_info Device info object handle.
261 ///
262 /// \return NNRT performance mode.
263 OH_AI_API OH_AI_PerformanceMode OH_AI_DeviceInfoGetPerformanceMode(const OH_AI_DeviceInfoHandle device_info);
264 
265 /// \brief Set the NNRT priority, Only valid for NNRT.
266 ///
267 /// \param[in] device_info Device info object handle.
268 /// \param[in] device_id NNRT priority.
269 OH_AI_API void OH_AI_DeviceInfoSetPriority(OH_AI_DeviceInfoHandle device_info, OH_AI_Priority priority);
270 
271 /// \brief Obtain the NNRT priority, Only valid for NNRT.
272 ///
273 /// \param[in] device_info Device info object handle.
274 ///
275 /// \return NNRT priority.
276 OH_AI_API OH_AI_Priority OH_AI_DeviceInfoGetPriority(const OH_AI_DeviceInfoHandle device_info);
277 
278 /// \brief Add extension of key/value format to device info, Only valid for NNRT.
279 ///
280 /// \param[in] device_info Device info object handle.
281 /// \param[in] name The content of key as a C string.
282 /// \param[in] value The pointer to the value, which is a byte array.
283 /// \param[in] value_size The size of the value, which is a byte array.
284 ///
285 /// \return OH_AI_STATUS_SUCCESS if success, or detail error code if failed.
286 OH_AI_API OH_AI_Status OH_AI_DeviceInfoAddExtension(OH_AI_DeviceInfoHandle device_info, const char *name, const char *value, size_t value_size);
287 #ifdef __cplusplus
288 }
289 #endif
290 #endif  // MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
291