/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
#define MINDSPORE_INCLUDE_C_API_CONTEXT_C_H

#include <stddef.h>
#include <stdint.h>
#include <stdbool.h>
#include "include/c_api/types_c.h"

#ifdef __cplusplus
extern "C" {
#endif

typedef void *OH_AI_ContextHandle;
typedef void *OH_AI_DeviceInfoHandle;

/// \brief Create a context object.
///
/// \return Context object handle.
OH_AI_API OH_AI_ContextHandle OH_AI_ContextCreate();

/// \brief Destroy the context object.
///
/// \param[in] context Context object handle address.
OH_AI_API void OH_AI_ContextDestroy(OH_AI_ContextHandle *context);

/// \brief Set the number of threads at runtime.
///
/// \param[in] context Context object handle.
/// \param[in] thread_num The number of threads at runtime.
OH_AI_API void OH_AI_ContextSetThreadNum(OH_AI_ContextHandle context, int32_t thread_num);

/// \brief Obtain the current thread number setting.
///
/// \param[in] context Context object handle.
///
/// \return The current thread number setting.
OH_AI_API int32_t OH_AI_ContextGetThreadNum(const OH_AI_ContextHandle context);

/// \brief Set the thread affinity mode for binding runtime threads to CPU cores.
///
/// \param[in] context Context object handle.
/// \param[in] mode Affinity mode. 0: no affinity, 1: big cores first, 2: middle cores first.
OH_AI_API void OH_AI_ContextSetThreadAffinityMode(OH_AI_ContextHandle context, int mode);

/// \brief Obtain the thread affinity mode.
///
/// \param[in] context Context object handle.
///
/// \return Thread affinity mode. 0: no affinity, 1: big cores first, 2: middle cores first.
OH_AI_API int OH_AI_ContextGetThreadAffinityMode(const OH_AI_ContextHandle context);

/// \brief Set the list of CPU cores that runtime threads are bound to.
///
/// \note If both core_list and the mode set by OH_AI_ContextSetThreadAffinityMode are configured,
/// core_list takes effect and the mode is ignored.
///
/// \param[in] context Context object handle.
/// \param[in] core_list An array of CPU core IDs.
/// \param[in] core_num The number of cores in core_list.
OH_AI_API void OH_AI_ContextSetThreadAffinityCoreList(OH_AI_ContextHandle context, const int32_t *core_list,
                                                      size_t core_num);

/// \brief Obtain the list of CPU cores that runtime threads are bound to.
///
/// \param[in] context Context object handle.
/// \param[out] core_num The number of cores in the returned list.
///
/// \return An array of CPU core IDs.
OH_AI_API const int32_t *OH_AI_ContextGetThreadAffinityCoreList(const OH_AI_ContextHandle context, size_t *core_num);
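
/* Usage sketch (illustrative only, not part of the API): create a context,
 * set a thread count, and bind the runtime threads to specific cores. The
 * core IDs below are hypothetical and depend on the target SoC topology.
 *
 *   OH_AI_ContextHandle ctx = OH_AI_ContextCreate();
 *   if (ctx != NULL) {
 *     OH_AI_ContextSetThreadNum(ctx, 2);
 *     const int32_t cores[] = {4, 5};  // hypothetical big-core IDs
 *     OH_AI_ContextSetThreadAffinityCoreList(ctx, cores, 2);
 *     // ... use the context ...
 *     OH_AI_ContextDestroy(&ctx);  // note: takes the handle's address
 *   }
 */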

/// \brief Set whether to perform model inference or training in parallel.
///
/// \param[in] context Context object handle.
/// \param[in] is_parallel true: run in parallel; false: do not run in parallel.
OH_AI_API void OH_AI_ContextSetEnableParallel(OH_AI_ContextHandle context, bool is_parallel);

/// \brief Obtain whether model inference or training is performed in parallel.
///
/// \param[in] context Context object handle.
///
/// \return Bool value that indicates whether parallel execution is enabled.
OH_AI_API bool OH_AI_ContextGetEnableParallel(const OH_AI_ContextHandle context);

/// \brief Add device info to the context object.
///
/// \param[in] context Context object handle.
/// \param[in] device_info Device info object handle.
OH_AI_API void OH_AI_ContextAddDeviceInfo(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle device_info);

/// \brief Create a device info object.
///
/// \param[in] device_type Device type of the device info to create.
///
/// \return Device info object handle.
OH_AI_API OH_AI_DeviceInfoHandle OH_AI_DeviceInfoCreate(OH_AI_DeviceType device_type);

/// \brief Destroy the device info object.
///
/// \param[in] device_info Device info object handle address.
OH_AI_API void OH_AI_DeviceInfoDestroy(OH_AI_DeviceInfoHandle *device_info);

/// \brief Set the provider's name.
///
/// \param[in] device_info Device info object handle.
/// \param[in] provider The provider's name.
OH_AI_API void OH_AI_DeviceInfoSetProvider(OH_AI_DeviceInfoHandle device_info, const char *provider);

/// \brief Obtain the provider's name.
///
/// \param[in] device_info Device info object handle.
///
/// \return The provider's name.
OH_AI_API const char *OH_AI_DeviceInfoGetProvider(const OH_AI_DeviceInfoHandle device_info);

/// \brief Set the provider's device type.
///
/// \param[in] device_info Device info object handle.
/// \param[in] device The provider's device type, e.g. "CPU".
OH_AI_API void OH_AI_DeviceInfoSetProviderDevice(OH_AI_DeviceInfoHandle device_info, const char *device);

/// \brief Obtain the provider's device type.
///
/// \param[in] device_info Device info object handle.
///
/// \return The provider's device type.
OH_AI_API const char *OH_AI_DeviceInfoGetProviderDevice(const OH_AI_DeviceInfoHandle device_info);

/// \brief Obtain the device type of the device info.
///
/// \param[in] device_info Device info object handle.
///
/// \return Device type of the device info.
OH_AI_API OH_AI_DeviceType OH_AI_DeviceInfoGetDeviceType(const OH_AI_DeviceInfoHandle device_info);

/// \brief Set whether to enable float16 inference. Only valid for CPU/GPU.
///
/// \param[in] device_info Device info object handle.
/// \param[in] is_fp16 Whether to enable float16 inference.
OH_AI_API void OH_AI_DeviceInfoSetEnableFP16(OH_AI_DeviceInfoHandle device_info, bool is_fp16);

/// \brief Obtain whether float16 inference is enabled. Only valid for CPU/GPU.
///
/// \param[in] device_info Device info object handle.
///
/// \return Whether float16 inference is enabled.
OH_AI_API bool OH_AI_DeviceInfoGetEnableFP16(const OH_AI_DeviceInfoHandle device_info);
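
/* Usage sketch (illustrative only): create a CPU device info object, enable
 * float16 inference, and attach it to a previously created context.
 * OH_AI_DEVICETYPE_CPU is assumed to be one of the OH_AI_DeviceType values
 * declared in types_c.h.
 *
 *   OH_AI_DeviceInfoHandle cpu = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *   if (cpu != NULL) {
 *     OH_AI_DeviceInfoSetEnableFP16(cpu, true);  // only valid for CPU/GPU
 *     OH_AI_ContextAddDeviceInfo(ctx, cpu);      // ctx: a context from OH_AI_ContextCreate()
 *   }
 */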

/// \brief Set the NPU frequency. Only valid for NPU.
///
/// \param[in] device_info Device info object handle.
/// \param[in] frequency Can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), or
/// 4 (extreme performance). Defaults to 3.
OH_AI_API void OH_AI_DeviceInfoSetFrequency(OH_AI_DeviceInfoHandle device_info, int frequency);

/// \brief Obtain the NPU frequency. Only valid for NPU.
///
/// \param[in] device_info Device info object handle.
///
/// \return NPU frequency.
OH_AI_API int OH_AI_DeviceInfoGetFrequency(const OH_AI_DeviceInfoHandle device_info);
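
/* Usage sketch (illustrative only): select an NPU frequency level on a device
 * info object before attaching it to a context. OH_AI_DEVICETYPE_KIRIN_NPU is
 * assumed to be one of the OH_AI_DeviceType values declared in types_c.h.
 *
 *   OH_AI_DeviceInfoHandle npu = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_KIRIN_NPU);
 *   if (npu != NULL) {
 *     OH_AI_DeviceInfoSetFrequency(npu, 3);  // 3: high performance (the documented default)
 *   }
 */

#ifdef __cplusplus
}
#endif
#endif // MINDSPORE_INCLUDE_C_API_CONTEXT_C_H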