• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 #ifndef MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
17 #define MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
18 
19 #include <stddef.h>
20 #include <stdint.h>
21 #include <stdbool.h>
22 #include "mindspore/types.h"
23 
24 #ifdef __cplusplus
25 extern "C" {
26 #endif
27 
28 typedef void *OH_AI_ContextHandle;
29 typedef void *OH_AI_DeviceInfoHandle;
30 
31 /// \brief Create a context object.
32 ///
33 /// \return Context object handle.
34 OH_AI_API OH_AI_ContextHandle OH_AI_ContextCreate();
35 
36 /// \brief Destroy the context object.
37 ///
38 /// \param[in] context Context object handle address.
39 OH_AI_API void OH_AI_ContextDestroy(OH_AI_ContextHandle *context);
40 
41 /// \brief Set the number of threads at runtime.
42 ///
43 /// \param[in] context Context object handle.
44 /// \param[in] thread_num the number of threads at runtime.
45 OH_AI_API void OH_AI_ContextSetThreadNum(OH_AI_ContextHandle context, int32_t thread_num);
46 
47 /// \brief Obtain the current thread number setting.
48 ///
49 /// \param[in] context Context object handle.
50 ///
51 /// \return The current thread number setting.
52 OH_AI_API int32_t OH_AI_ContextGetThreadNum(const OH_AI_ContextHandle context);
53 
54 /// \brief Set the thread affinity to CPU cores.
55 ///
56 /// \param[in] context Context object handle.
57 /// \param[in] mode: 0: no affinities, 1: big cores first, 2: little cores first
58 OH_AI_API void OH_AI_ContextSetThreadAffinityMode(OH_AI_ContextHandle context, int mode);
59 
60 /// \brief Obtain the thread affinity of CPU cores.
61 ///
62 /// \param[in] context Context object handle.
63 ///
64 /// \return Thread affinity to CPU cores. 0: no affinities, 1: big cores first, 2: little cores first
65 OH_AI_API int OH_AI_ContextGetThreadAffinityMode(const OH_AI_ContextHandle context);
66 
67 /// \brief Set the thread lists to CPU cores.
68 ///
69 /// \note If core_list and mode are set by OH_AI_ContextSetThreadAffinityMode at the same time,
70 /// the core_list is effective, but the mode is not effective.
71 ///
72 /// \param[in] context Context object handle.
73 /// \param[in] core_list: a array of thread core lists.
74 /// \param[in] core_num The number of core.
75 OH_AI_API void OH_AI_ContextSetThreadAffinityCoreList(OH_AI_ContextHandle context, const int32_t *core_list,
76                                                       size_t core_num);
77 
78 /// \brief Obtain the thread lists of CPU cores.
79 ///
80 /// \param[in] context Context object handle.
81 /// \param[out] core_num The number of core.
82 ///
83 /// \return a array of thread core lists.
84 OH_AI_API const int32_t *OH_AI_ContextGetThreadAffinityCoreList(const OH_AI_ContextHandle context, size_t *core_num);
85 
86 /// \brief Set the status whether to perform model inference or training in parallel.
87 ///
88 /// \param[in] context Context object handle.
89 /// \param[in] is_parallel: true, parallel; false, not in parallel.
90 OH_AI_API void OH_AI_ContextSetEnableParallel(OH_AI_ContextHandle context, bool is_parallel);
91 
92 /// \brief Obtain the status whether to perform model inference or training in parallel.
93 ///
94 /// \param[in] context Context object handle.
95 ///
96 /// \return Bool value that indicates whether in parallel.
97 OH_AI_API bool OH_AI_ContextGetEnableParallel(const OH_AI_ContextHandle context);
98 
99 /// \brief Add device info to context object.
100 ///
101 /// \param[in] context Context object handle.
102 /// \param[in] device_info Device info object handle.
103 OH_AI_API void OH_AI_ContextAddDeviceInfo(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle device_info);
104 
105 /// \brief Create a device info object.
106 ///
107 /// \param[in] device_info Device info object handle.
108 ///
109 /// \return Device info object handle.
110 OH_AI_API OH_AI_DeviceInfoHandle OH_AI_DeviceInfoCreate(OH_AI_DeviceType device_type);
111 
112 /// \brief Destroy the device info object.
113 ///
114 /// \param[in] device_info Device info object handle address.
115 OH_AI_API void OH_AI_DeviceInfoDestroy(OH_AI_DeviceInfoHandle *device_info);
116 
117 /// \brief Set provider's name.
118 ///
119 /// \param[in] device_info Device info object handle.
120 /// \param[in] provider define the provider's name.
121 OH_AI_API void OH_AI_DeviceInfoSetProvider(OH_AI_DeviceInfoHandle device_info, const char *provider);
122 
123 /// \brief Obtain provider's name
124 ///
125 /// \param[in] device_info Device info object handle.
126 ///
127 /// \return provider's name.
128 OH_AI_API const char *OH_AI_DeviceInfoGetProvider(const OH_AI_DeviceInfoHandle device_info);
129 
130 /// \brief Set provider's device type.
131 ///
132 /// \param[in] device_info Device info object handle.
133 /// \param[in] device define the provider's device type. EG: CPU.
134 OH_AI_API void OH_AI_DeviceInfoSetProviderDevice(OH_AI_DeviceInfoHandle device_info, const char *device);
135 
136 /// \brief Obtain provider's device type.
137 ///
138 /// \param[in] device_info Device info object handle.
139 ///
140 /// \return provider's device type.
141 OH_AI_API const char *OH_AI_DeviceInfoGetProviderDevice(const OH_AI_DeviceInfoHandle device_info);
142 
143 /// \brief Obtain the device type of the device info.
144 ///
145 /// \param[in] device_info Device info object handle.
146 ///
147 /// \return Device Type of the device info.
148 OH_AI_API OH_AI_DeviceType OH_AI_DeviceInfoGetDeviceType(const OH_AI_DeviceInfoHandle device_info);
149 
150 /// \brief Set enables to perform the float16 inference, Only valid for CPU/GPU.
151 ///
152 /// \param[in] device_info Device info object handle.
153 /// \param[in] is_fp16 Enable float16 inference or not.
154 OH_AI_API void OH_AI_DeviceInfoSetEnableFP16(OH_AI_DeviceInfoHandle device_info, bool is_fp16);
155 
156 /// \brief Obtain enables to perform the float16 inference, Only valid for CPU/GPU.
157 ///
158 /// \param[in] device_info Device info object handle.
159 ///
160 /// \return Whether enable float16 inference.
161 OH_AI_API bool OH_AI_DeviceInfoGetEnableFP16(const OH_AI_DeviceInfoHandle device_info);
162 
163 /// \brief Set the NPU frequency, Only valid for NPU.
164 ///
165 /// \param[in] device_info Device info object handle.
166 /// \param[in] frequency Can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme
167 /// performance), default as 3.
168 OH_AI_API void OH_AI_DeviceInfoSetFrequency(OH_AI_DeviceInfoHandle device_info, int frequency);
169 
170 /// \brief Obtain the NPU frequency, Only valid for NPU.
171 ///
172 /// \param[in] device_info Device info object handle.
173 ///
174 /// \return NPU frequency
175 OH_AI_API int OH_AI_DeviceInfoGetFrequency(const OH_AI_DeviceInfoHandle device_info);
176 
177 /// \brief Obtain the all device descriptions in NNRT.
178 ///
179 /// \param[out] num Number of NNRT device description.
180 ///
181 /// \return NNRT device description array.
182 OH_AI_API NNRTDeviceDesc *OH_AI_GetAllNNRTDeviceDescs(size_t *num);
183 
184 /// \brief Obtain the specified element in NNRt device description array.
185 ///
186 /// \param[in] descs NNRT device description array.
187 /// \param[in] index Element index.
188 ///
189 /// \return NNRT device description.
190 OH_AI_API NNRTDeviceDesc *OH_AI_GetElementOfNNRTDeviceDescs(NNRTDeviceDesc *descs, size_t index);
191 
192 /// \brief Destroy the NNRT device descriptions returned by OH_AI_NNRTGetAllDeviceDescs().
193 ///
194 /// \param[in] desc NNRT device description array.
195 OH_AI_API void OH_AI_DestroyAllNNRTDeviceDescs(NNRTDeviceDesc **desc);
196 
197 /// \brief Obtain the device id in NNRT device description.
198 ///
199 /// \param[in] desc pointer to the NNRT device description instance.
200 ///
201 /// \return NNRT device id.
202 OH_AI_API size_t OH_AI_GetDeviceIdFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);
203 
204 /// \brief Obtain the device name in NNRT device description.
205 ///
206 /// \param[in] desc pointer to the NNRT device description instance.
207 ///
208 /// \return NNRT device name.
209 OH_AI_API const char *OH_AI_GetNameFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);
210 
211 /// \brief Obtain the device type in NNRT device description.
212 ///
213 /// \param[in] desc pointer to the NNRT device description instance.
214 ///
215 /// \return NNRT device type.
216 OH_AI_API OH_AI_NNRTDeviceType OH_AI_GetTypeFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);
217 
218 /// \brief Create the NNRT device info by exactly matching the specific device name.
219 ///
220 /// \param[in] name NNRt device name.
221 ///
222 /// \return Device info object handle.
223 OH_AI_API OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByName(const char *name);
224 
225 /// \brief Create the NNRT device info by finding the first device with the specific device type.
226 ///
227 /// \param[in] name NNRt device type.
228 ///
229 /// \return Device info object handle.
230 OH_AI_API OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByType(OH_AI_NNRTDeviceType type);
231 
232 /// \brief Set the NNRT device id, Only valid for NNRT.
233 ///
234 /// \param[in] device_info Device info object handle.
235 /// \param[in] device_id NNRT device id.
236 OH_AI_API void OH_AI_DeviceInfoSetDeviceId(OH_AI_DeviceInfoHandle device_info, size_t device_id);
237 
238 /// \brief Obtain the NNRT device id, Only valid for NNRT.
239 ///
240 /// \param[in] device_info Device info object handle.
241 ///
242 /// \return NNRT device id.
243 OH_AI_API size_t OH_AI_DeviceInfoGetDeviceId(const OH_AI_DeviceInfoHandle device_info);
244 
245 /// \brief Set the NNRT performance mode, Only valid for NNRT.
246 ///
247 /// \param[in] device_info Device info object handle.
248 /// \param[in] device_id NNRT performance mode.
249 OH_AI_API void OH_AI_DeviceInfoSetPerformanceMode(OH_AI_DeviceInfoHandle device_info, OH_AI_PerformanceMode mode);
250 
251 /// \brief Obtain the NNRT performance mode, Only valid for NNRT.
252 ///
253 /// \param[in] device_info Device info object handle.
254 ///
255 /// \return NNRT performance mode.
256 OH_AI_API OH_AI_PerformanceMode OH_AI_DeviceInfoGetPerformanceMode(const OH_AI_DeviceInfoHandle device_info);
257 
258 /// \brief Set the NNRT priority, Only valid for NNRT.
259 ///
260 /// \param[in] device_info Device info object handle.
261 /// \param[in] device_id NNRT priority.
262 OH_AI_API void OH_AI_DeviceInfoSetPriority(OH_AI_DeviceInfoHandle device_info, OH_AI_Priority priority);
263 
264 /// \brief Obtain the NNRT priority, Only valid for NNRT.
265 ///
266 /// \param[in] device_info Device info object handle.
267 ///
268 /// \return NNRT priority.
269 OH_AI_API OH_AI_Priority OH_AI_DeviceInfoGetPriority(const OH_AI_DeviceInfoHandle device_info);
270 
271 /// \brief Add extension of key/value format to device info, Only valid for NNRT.
272 ///
273 /// \param[in] device_info Device info object handle.
274 /// \param[in] name The content of key as a C string.
275 /// \param[in] value The pointer to the value, which is a byte array.
276 /// \param[in] value_size The size of the value, which is a byte array.
277 ///
278 /// \return OH_AI_STATUS_SUCCESS if success, or detail error code if failed.
279 OH_AI_API OH_AI_Status OH_AI_DeviceInfoAddExtension(OH_AI_DeviceInfoHandle device_info, const char *name, const char *value, size_t value_size);
280 #ifdef __cplusplus
281 }
282 #endif
283 #endif  // MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
284