/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @addtogroup MindSpore
 * @{
 *
 * @brief Provides APIs related to MindSpore Lite model inference.
 *
 * @Syscap SystemCapability.Ai.MindSpore
 * @since 9
 */

/**
 * @file context.h
 *
 * @brief Provides **Context** APIs for configuring runtime information.
 *
 * @since 9
 */
#ifndef MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
#define MINDSPORE_INCLUDE_C_API_CONTEXT_C_H

#include <stddef.h>
#include <stdint.h>
#include <stdbool.h>
#include "mindspore/types.h"

#ifdef __cplusplus
extern "C"
{
#endif
/**
 * @brief Defines the pointer to the MindSpore context.
 *
 * @since 9
 */
typedef void *OH_AI_ContextHandle;

/**
 * @brief Defines the pointer to the MindSpore device information.
 *
 * @since 9
 */
typedef void *OH_AI_DeviceInfoHandle;

/**
 * @brief Creates a context object.
 *
 * @return {@link OH_AI_ContextHandle} that points to the context.
 * @since 9
 */
OH_AI_API OH_AI_ContextHandle OH_AI_ContextCreate();

/**
 * @brief Destroys a context object.
 *
 * @param context Double pointer to the {@link OH_AI_ContextHandle}. After the context is destroyed, the pointer is set to null.
 * @since 9
 */
OH_AI_API void OH_AI_ContextDestroy(OH_AI_ContextHandle *context);
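
/*
 * Usage sketch (illustrative, not part of the API): the basic lifecycle of a context.
 * Assumes this header has been included and the application links against the
 * MindSpore Lite library.
 *
 *     OH_AI_ContextHandle context = OH_AI_ContextCreate();
 *     if (context != NULL) {
 *         // ... configure the context, build a model, run inference ...
 *         OH_AI_ContextDestroy(&context);  // context is set to NULL afterwards
 *     }
 */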

/**
 * @brief Sets the number of runtime threads.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param thread_num Number of runtime threads.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetThreadNum(OH_AI_ContextHandle context, int32_t thread_num);

/**
 * @brief Obtains the number of threads.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @return Number of threads.
 * @since 9
 */
OH_AI_API int32_t OH_AI_ContextGetThreadNum(const OH_AI_ContextHandle context);

/**
 * @brief Sets the affinity mode for binding runtime threads to CPU cores, which are categorized into middle cores and big cores depending on the CPU frequency.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param mode Affinity mode. **0**: no affinity; **1**: big cores first; **2**: middle cores first
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetThreadAffinityMode(OH_AI_ContextHandle context, int mode);

/**
 * @brief Obtains the affinity mode for binding runtime threads to CPU cores.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @return Affinity mode. **0**: no affinity; **1**: big cores first; **2**: middle cores first
 * @since 9
 */
OH_AI_API int OH_AI_ContextGetThreadAffinityMode(const OH_AI_ContextHandle context);

/**
 * @brief Sets the list of CPU cores bound to a runtime thread.
 *
 * For example, if **core_list** is set to **[2,6,8]**, threads run on the 2nd, 6th, and 8th CPU cores.
 * If {@link OH_AI_ContextSetThreadAffinityMode} and {@link OH_AI_ContextSetThreadAffinityCoreList} are called for the same context object,
 * the **core_list** parameter of {@link OH_AI_ContextSetThreadAffinityCoreList} takes effect, but the **mode** parameter of {@link OH_AI_ContextSetThreadAffinityMode} does not.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param core_list List of bound CPU cores.
 * @param core_num Number of cores, which indicates the length of **core_list**.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetThreadAffinityCoreList(OH_AI_ContextHandle context, const int32_t *core_list,
                                                      size_t core_num);

/**
 * @brief Obtains the list of bound CPU cores.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param core_num Number of CPU cores.
 * @return List of bound CPU cores.
 * @since 9
 */
OH_AI_API const int32_t *OH_AI_ContextGetThreadAffinityCoreList(const OH_AI_ContextHandle context, size_t *core_num);

/**
 * @brief Sets whether to enable parallelism between operators.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param is_parallel Whether to enable parallelism between operators. The value **true** means to enable parallelism between operators, and the value **false** means the opposite.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetEnableParallel(OH_AI_ContextHandle context, bool is_parallel);

/**
 * @brief Checks whether parallelism between operators is supported.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @return Whether parallelism between operators is supported. The value **true** means that parallelism between operators is supported, and the value **false** means the opposite.
 * @since 9
 */
OH_AI_API bool OH_AI_ContextGetEnableParallel(const OH_AI_ContextHandle context);
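
/*
 * Usage sketch (illustrative, not part of the API): configuring threading for a
 * context. The core IDs used here are arbitrary examples.
 *
 *     OH_AI_ContextHandle context = OH_AI_ContextCreate();
 *     OH_AI_ContextSetThreadNum(context, 2);
 *
 *     // Bind runtime threads to specific CPU cores; this overrides any affinity
 *     // mode previously set with OH_AI_ContextSetThreadAffinityMode.
 *     const int32_t core_list[] = {2, 6, 8};
 *     OH_AI_ContextSetThreadAffinityCoreList(context, core_list, sizeof(core_list) / sizeof(core_list[0]));
 *
 *     // Optionally allow operators to run in parallel with each other.
 *     OH_AI_ContextSetEnableParallel(context, true);
 *
 *     size_t core_num = 0;
 *     const int32_t *bound = OH_AI_ContextGetThreadAffinityCoreList(context, &core_num);
 *     // bound now points to core_num bound core IDs.
 */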

/**
 * @brief Adds information about a running device.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @since 9
 */
OH_AI_API void OH_AI_ContextAddDeviceInfo(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Creates a device information object.
 *
 * @param device_type Device type. For details, see {@link OH_AI_DeviceType}.
 *
 * @return {@link OH_AI_DeviceInfoHandle} that points to the device information instance.
 * @since 9
 */
OH_AI_API OH_AI_DeviceInfoHandle OH_AI_DeviceInfoCreate(OH_AI_DeviceType device_type);

/**
 * @brief Destroys a device information instance.
 *
 * @param device_info Double pointer to the {@link OH_AI_DeviceInfoHandle}.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoDestroy(OH_AI_DeviceInfoHandle *device_info);
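
/*
 * Usage sketch (illustrative, not part of the API): creating CPU device information
 * and attaching it to a context. OH_AI_DEVICETYPE_CPU is assumed to be declared in
 * mindspore/types.h.
 *
 *     OH_AI_ContextHandle context = OH_AI_ContextCreate();
 *     OH_AI_DeviceInfoHandle cpu_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *     if (context != NULL && cpu_info != NULL) {
 *         // The context schedules inference on the devices added to it; a device
 *         // information instance added here is typically managed by the context
 *         // from this point on.
 *         OH_AI_ContextAddDeviceInfo(context, cpu_info);
 *     }
 */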

/**
 * @brief Sets the name of a provider.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param provider Provider name.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetProvider(OH_AI_DeviceInfoHandle device_info, const char *provider);

/**
 * @brief Obtains the provider name.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 *
 * @return Provider name.
 * @since 9
 */
OH_AI_API const char *OH_AI_DeviceInfoGetProvider(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Sets the name of a provider device.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param device Name of the provider device, for example, **CPU**.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetProviderDevice(OH_AI_DeviceInfoHandle device_info, const char *device);

/**
 * @brief Obtains the name of a provider device.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 *
 * @return Name of the provider device.
 * @since 9
 */
OH_AI_API const char *OH_AI_DeviceInfoGetProviderDevice(const OH_AI_DeviceInfoHandle device_info);
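
/*
 * Usage sketch (illustrative, not part of the API): tagging device information with a
 * provider. The strings "vendor_name" and "cpu" are hypothetical placeholders; valid
 * values depend on the runtime delegates available on the device. OH_AI_DEVICETYPE_CPU
 * is assumed to be declared in mindspore/types.h.
 *
 *     OH_AI_DeviceInfoHandle info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *     OH_AI_DeviceInfoSetProvider(info, "vendor_name");
 *     OH_AI_DeviceInfoSetProviderDevice(info, "cpu");
 *
 *     const char *provider = OH_AI_DeviceInfoGetProvider(info);     // "vendor_name"
 *     const char *device = OH_AI_DeviceInfoGetProviderDevice(info); // "cpu"
 */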

/**
 * @brief Obtains the device type.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Device type. For details, see {@link OH_AI_DeviceType}.
 * @since 9
 */
OH_AI_API OH_AI_DeviceType OH_AI_DeviceInfoGetDeviceType(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Sets whether to enable float16 inference. This function is available only for CPU and GPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param is_fp16 Whether to enable float16 inference.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetEnableFP16(OH_AI_DeviceInfoHandle device_info, bool is_fp16);

/**
 * @brief Checks whether float16 inference is enabled. This function is available only for CPU and GPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Whether float16 inference is enabled.
 * @since 9
 */
OH_AI_API bool OH_AI_DeviceInfoGetEnableFP16(const OH_AI_DeviceInfoHandle device_info);
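
/*
 * Usage sketch (illustrative, not part of the API): enabling float16 inference on a
 * CPU device. OH_AI_DEVICETYPE_CPU is assumed to be declared in mindspore/types.h.
 *
 *     OH_AI_DeviceInfoHandle cpu_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *     OH_AI_DeviceInfoSetEnableFP16(cpu_info, true);               // trade precision for speed
 *     bool fp16_enabled = OH_AI_DeviceInfoGetEnableFP16(cpu_info); // true
 */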

/**
 * @brief Sets the NPU frequency type. This function is available only for NPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param frequency NPU frequency type. The value ranges from **0** to **4**. The default value is **3**. **1**: low power consumption; **2**: balanced; **3**: high performance; **4**: ultra-high performance
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetFrequency(OH_AI_DeviceInfoHandle device_info, int frequency);

/**
 * @brief Obtains the NPU frequency type. This function is available only for NPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 *
 * @return NPU frequency type. The value ranges from **0** to **4**. **1**: low power consumption; **2**: balanced; **3**: high performance; **4**: ultra-high performance
 * @since 9
 */
OH_AI_API int OH_AI_DeviceInfoGetFrequency(const OH_AI_DeviceInfoHandle device_info);
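
/*
 * Usage sketch (illustrative, not part of the API): configuring an NPU device for high
 * performance with a CPU fallback. OH_AI_DEVICETYPE_KIRIN_NPU and OH_AI_DEVICETYPE_CPU
 * are assumed to be declared in mindspore/types.h.
 *
 *     OH_AI_ContextHandle context = OH_AI_ContextCreate();
 *
 *     OH_AI_DeviceInfoHandle npu_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_KIRIN_NPU);
 *     OH_AI_DeviceInfoSetFrequency(npu_info, 3);  // 3: high performance (the default)
 *     OH_AI_ContextAddDeviceInfo(context, npu_info);
 *
 *     // Devices are tried in the order they were added, so a CPU device added after
 *     // the NPU serves as a fallback for operators the NPU cannot run.
 *     OH_AI_DeviceInfoHandle cpu_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *     OH_AI_ContextAddDeviceInfo(context, cpu_info);
 */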

#ifdef __cplusplus
}
#endif

/** @} */
#endif // MINDSPORE_INCLUDE_C_API_CONTEXT_C_H