/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "include/api/context.h"
#include "include/c_api/context_c.h"
#include <string.h>
#include "src/litert/c_api/type_c_private.h"
#include "src/litert/c_api/context_c.h"
#include "src/common/log_adapter.h"
#ifdef SUPPORT_NNRT_METAGRAPH
#include "src/litert/delegate/nnrt/hiai_foundation_wrapper.h"
#endif
#ifdef SUPPORT_NNRT
#include "neural_network_runtime/neural_network_runtime.h"
#endif

// ================ Context ================
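// Creates a context wrapper (mindspore::ContextC) that owns a mindspore::Context.
// The caller releases it with OH_AI_ContextDestroy unless ownership has been taken over by a model.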
OH_AI_ContextHandle OH_AI_ContextCreate() {
  auto impl = new (std::nothrow) mindspore::ContextC();
  if (impl == nullptr) {
    MS_LOG(ERROR) << "memory allocation failed.";
    return nullptr;
  }
  impl->context_ = new (std::nothrow) mindspore::Context();
  if (impl->context_ == nullptr) {
    MS_LOG(ERROR) << "memory allocation failed.";
    delete impl;
    return nullptr;
  }
  impl->owned_by_model_ = false;
  return static_cast<OH_AI_ContextHandle>(impl);
}

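// Destroys the context wrapper. If the underlying mindspore::Context is already owned by a model,
// only the wrapper is freed and the inner pointer is left untouched.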
void OH_AI_ContextDestroy(OH_AI_ContextHandle *context) {
  if (context == nullptr || *context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::ContextC *>(*context);
  if (impl->owned_by_model_) {
    impl->context_ = nullptr;
  }
  delete impl;
  *context = nullptr;
}

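// Internal helper exposed with C linkage; as implemented here it always reports that the
// context handle still needs to be destroyed by the caller.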
extern "C" {
#define TRUE 1

int32_t OH_AI_Inner_ContextNeedDestroy() {
  return TRUE;
}
}

void OH_AI_ContextSetThreadNum(OH_AI_ContextHandle context, int32_t thread_num) {
  if (context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  impl->context_->SetThreadNum(thread_num);
}

int32_t OH_AI_ContextGetThreadNum(const OH_AI_ContextHandle context) {
  if (context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return 0;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  return impl->context_->GetThreadNum();
}

void OH_AI_ContextSetThreadAffinityMode(OH_AI_ContextHandle context, int mode) {
  if (context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  impl->context_->SetThreadAffinity(mode);
}

int OH_AI_ContextGetThreadAffinityMode(const OH_AI_ContextHandle context) {
  if (context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return 0;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  return impl->context_->GetThreadAffinityMode();
}

void OH_AI_ContextSetThreadAffinityCoreList(OH_AI_ContextHandle context, const int32_t *core_list, size_t core_num) {
  if (context == nullptr || core_list == nullptr) {
    MS_LOG(ERROR) << "context or core_list is nullptr.";
    return;
  }
  const std::vector<int32_t> vec_core_list(core_list, core_list + core_num);
  auto impl = static_cast<mindspore::ContextC *>(context);
  impl->context_->SetThreadAffinity(vec_core_list);
}

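// Returns a heap-allocated copy of the bound core list; the caller is responsible for
// releasing it with free(). *core_num receives the number of entries.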
const int32_t *OH_AI_ContextGetThreadAffinityCoreList(const OH_AI_ContextHandle context, size_t *core_num) {
  if (context == nullptr || core_num == nullptr) {
    MS_LOG(ERROR) << "context or core_num is nullptr.";
    return nullptr;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  auto affinity_core_list = impl->context_->GetThreadAffinityCoreList();
  *core_num = affinity_core_list.size();
  int32_t *core_list = static_cast<int32_t *>(malloc((*core_num) * sizeof(int32_t)));
  if (core_list == nullptr) {
    MS_LOG(ERROR) << "malloc core_list failed.";
    return nullptr;
  }
  for (size_t i = 0; i < affinity_core_list.size(); i++) {
    core_list[i] = affinity_core_list[i];
  }
  return core_list;
}

void OH_AI_ContextSetEnableParallel(OH_AI_ContextHandle context, bool is_parallel) {
  if (context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  impl->context_->SetEnableParallel(is_parallel);
}

bool OH_AI_ContextGetEnableParallel(const OH_AI_ContextHandle context) {
  if (context == nullptr) {
    MS_LOG(ERROR) << "context is nullptr.";
    return false;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  return impl->context_->GetEnableParallel();
}

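// Appends a device descriptor to the context. The handle is wrapped in a shared_ptr, so the
// context takes ownership; the caller must not destroy device_info afterwards.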
void OH_AI_ContextAddDeviceInfo(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle device_info) {
  if (context == nullptr || device_info == nullptr) {
    MS_LOG(ERROR) << "context or device_info is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::ContextC *>(context);
  std::shared_ptr<mindspore::DeviceInfoContext> device(static_cast<mindspore::DeviceInfoContext *>(device_info));
  impl->context_->MutableDeviceInfo().push_back(device);
}

// ================ DeviceInfo ================
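// Allocates the concrete DeviceInfoContext subclass that matches the requested device type
// (CPU, GPU, Kirin NPU, or NNRT). Returns nullptr for unknown types or on allocation failure.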
OH_AI_DeviceInfoHandle OH_AI_DeviceInfoCreate(OH_AI_DeviceType device_type) {
  mindspore::DeviceInfoContext *impl;
  if (OH_AI_DEVICETYPE_CPU == device_type) {
    impl = new (std::nothrow) mindspore::CPUDeviceInfo();
  } else if (OH_AI_DEVICETYPE_GPU == device_type) {
    impl = new (std::nothrow) mindspore::GPUDeviceInfo();
  } else if (OH_AI_DEVICETYPE_KIRIN_NPU == device_type) {
    impl = new (std::nothrow) mindspore::KirinNPUDeviceInfo();
  } else if (OH_AI_DEVICETYPE_NNRT == device_type) {
    impl = new (std::nothrow) mindspore::NNRTDeviceInfo();
  } else {
    MS_LOG(ERROR) << "device_type is invalid.";
    impl = nullptr;
  }
  if (impl == nullptr) {
    MS_LOG(ERROR) << "memory allocation failed.";
    return nullptr;
  }
  return static_cast<OH_AI_DeviceInfoHandle>(impl);
}

void OH_AI_DeviceInfoDestroy(OH_AI_DeviceInfoHandle *device_info) {
  if (device_info == nullptr || *device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::DeviceInfoContext *>(*device_info);
  delete impl;
  *device_info = nullptr;
}

void OH_AI_DeviceInfoSetProvider(OH_AI_DeviceInfoHandle device_info, const char *provider) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return;
  }
  if (provider == nullptr) {
    MS_LOG(ERROR) << "provider is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::DeviceInfoContext *>(device_info);
  impl->SetProvider(provider);
}

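// Returns the provider name as a heap-allocated, null-terminated C string; the caller must
// release it with free().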
const char *OH_AI_DeviceInfoGetProvider(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return nullptr;
  }
  auto impl = static_cast<mindspore::DeviceInfoContext *>(device_info);
  auto provider_str = impl->GetProvider();
  char *provider = static_cast<char *>(malloc(provider_str.size() + 1));
  if (provider == nullptr) {
    MS_LOG(ERROR) << "malloc provider failed.";
    return nullptr;
  }
  for (size_t i = 0; i < provider_str.size(); i++) {
    provider[i] = provider_str[i];
  }
  provider[provider_str.size()] = '\0';
  return provider;
}

void OH_AI_DeviceInfoSetProviderDevice(OH_AI_DeviceInfoHandle device_info, const char *device) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return;
  }
  if (device == nullptr) {
    MS_LOG(ERROR) << "device is nullptr.";
    return;
  }
  auto impl = static_cast<mindspore::DeviceInfoContext *>(device_info);
  impl->SetProviderDevice(device);
}

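// Returns the provider device name as a heap-allocated, null-terminated C string; the caller
// must release it with free().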
const char *OH_AI_DeviceInfoGetProviderDevice(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return nullptr;
  }
  auto impl = static_cast<mindspore::DeviceInfoContext *>(device_info);
  auto device_str = impl->GetProviderDevice();
  char *provider_device = static_cast<char *>(malloc(device_str.size() + 1));
  if (provider_device == nullptr) {
    MS_LOG(ERROR) << "malloc provider_device failed.";
    return nullptr;
  }
  for (size_t i = 0; i < device_str.size(); i++) {
    provider_device[i] = device_str[i];
  }
  provider_device[device_str.size()] = '\0';
  return provider_device;
}

OH_AI_DeviceType OH_AI_DeviceInfoGetDeviceType(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return OH_AI_DEVICETYPE_INVALID;
  }
  auto impl = static_cast<mindspore::DeviceInfoContext *>(device_info);
  return static_cast<OH_AI_DeviceType>(impl->GetDeviceType());
}

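// FP16 inference can only be toggled on CPU, GPU, and NNRT device descriptors; other device
// types are rejected with an error log.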
void OH_AI_DeviceInfoSetEnableFP16(OH_AI_DeviceInfoHandle device_info, bool is_fp16) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return;
  }
  auto impl_device = static_cast<mindspore::DeviceInfoContext *>(device_info);
  auto device_type = static_cast<OH_AI_DeviceType>(impl_device->GetDeviceType());
  if (device_type == OH_AI_DEVICETYPE_CPU) {
    auto impl = static_cast<mindspore::CPUDeviceInfo *>(device_info);
    impl->SetEnableFP16(is_fp16);
  } else if (device_type == OH_AI_DEVICETYPE_GPU) {
    auto impl = static_cast<mindspore::GPUDeviceInfo *>(device_info);
    impl->SetEnableFP16(is_fp16);
  } else if (device_type == OH_AI_DEVICETYPE_NNRT) {
    auto impl = static_cast<mindspore::NNRTDeviceInfo *>(device_info);
    impl->SetEnableFP16(is_fp16);
  } else {
    MS_LOG(ERROR) << "Unsupported Feature.";
  }
}

bool OH_AI_DeviceInfoGetEnableFP16(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return false;
  }
  auto impl_device = static_cast<mindspore::DeviceInfoContext *>(device_info);
  auto device_type = static_cast<OH_AI_DeviceType>(impl_device->GetDeviceType());
  if (device_type == OH_AI_DEVICETYPE_CPU) {
    auto impl = static_cast<mindspore::CPUDeviceInfo *>(device_info);
    return impl->GetEnableFP16();
  } else if (device_type == OH_AI_DEVICETYPE_GPU) {
    auto impl = static_cast<mindspore::GPUDeviceInfo *>(device_info);
    return impl->GetEnableFP16();
  } else if (device_type == OH_AI_DEVICETYPE_NNRT) {
    auto impl = static_cast<mindspore::NNRTDeviceInfo *>(device_info);
    return impl->GetEnableFP16();
  } else {
    MS_LOG(ERROR) << "Unsupported Feature. device_type: " << impl_device->GetDeviceType();
    return false;
  }
}

void OH_AI_DeviceInfoSetFrequency(OH_AI_DeviceInfoHandle device_info, int frequency) {  // only for KirinNPU
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return;
  }
  auto impl_device = static_cast<mindspore::DeviceInfoContext *>(device_info);
  if (static_cast<OH_AI_DeviceType>(impl_device->GetDeviceType()) == OH_AI_DEVICETYPE_KIRIN_NPU) {
    auto impl = static_cast<mindspore::KirinNPUDeviceInfo *>(device_info);
    impl->SetFrequency(frequency);
  } else {
    MS_LOG(ERROR) << "Unsupported Feature.";
  }
}

int OH_AI_DeviceInfoGetFrequency(const OH_AI_DeviceInfoHandle device_info) {  // only for KirinNPU
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device_info is nullptr.";
    return -1;
  }
  auto impl_device = static_cast<mindspore::DeviceInfoContext *>(device_info);
  if (static_cast<OH_AI_DeviceType>(impl_device->GetDeviceType()) == OH_AI_DEVICETYPE_KIRIN_NPU) {
    auto impl = static_cast<mindspore::KirinNPUDeviceInfo *>(device_info);
    return impl->GetFrequency();
  } else {
    MS_LOG(ERROR) << "Unsupported Feature.";
    return -1;
  }
}

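// Queries NNRT for all available devices and returns a malloc-allocated array of NNRTDeviceDesc
// entries; the caller releases it with OH_AI_DestroyAllNNRTDeviceDescs. When the build does not
// enable SUPPORT_NNRT, the function returns nullptr and sets *num to 0.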
NNRTDeviceDesc *OH_AI_GetAllNNRTDeviceDescs(size_t *num) {
  if (num == nullptr) {
    MS_LOG(ERROR) << "Input num is null";
    return nullptr;
  }
  *num = 0;
#ifdef SUPPORT_NNRT
#ifdef SUPPORT_NNRT_METAGRAPH
  void *hiai_handle_{nullptr};
  auto ret_load = mindspore::lite::LoadHiaiFLibraryFromPath(&hiai_handle_);
  if (!ret_load || hiai_handle_ == nullptr) {
    MS_LOG(ERROR) << "Load HiAI_Foundation so failed.";
  }
#endif
  const size_t *all_device_ids;
  uint32_t device_count;
  auto ret = OH_NNDevice_GetAllDevicesID(&all_device_ids, &device_count);
  if ((ret != OH_NN_SUCCESS) || (device_count == 0)) {
    MS_LOG(ERROR) << "NNRT get all device id failed, ret: " << ret;
    return nullptr;
  }

  NNRTDeviceDesc *desc = (NNRTDeviceDesc *)malloc(sizeof(NNRTDeviceDesc) * device_count);
  if (desc == nullptr) {
    MS_LOG(ERROR) << "NNRT allocate desc failed";
    return nullptr;
  }

  for (uint32_t i = 0; i < device_count; i++) {
    desc[i].device_id = all_device_ids[i];
    OH_NN_DeviceType type;
    (void)OH_NNDevice_GetType(all_device_ids[i], &type);
    desc[i].device_type = static_cast<OH_AI_NNRTDeviceType>(type);

    const char *name = nullptr;
    (void)OH_NNDevice_GetName(all_device_ids[i], &name);
    if (name == nullptr) {
      MS_LOG(ERROR) << "OH_NNDevice_GetName error.";
      free(desc);  // avoid leaking the partially filled array on error
      return nullptr;
    }
    desc[i].device_name[127] = '\0';
    strncpy(desc[i].device_name, name, 127);
  }
  *num = device_count;
  return desc;
#else
  return nullptr;
#endif
}

NNRTDeviceDesc *OH_AI_GetElementOfNNRTDeviceDescs(NNRTDeviceDesc *descs, size_t index) {
  if (descs == nullptr) {
    MS_LOG(ERROR) << "descs is null";
    return nullptr;
  }
  return descs + index;
}

void OH_AI_DestroyAllNNRTDeviceDescs(NNRTDeviceDesc **desc) {
  if (desc == nullptr) {
    MS_LOG(WARNING) << "desc is null";
    return;
  }
  free(*desc);
  *desc = nullptr;
}

size_t OH_AI_GetDeviceIdFromNNRTDeviceDesc(const NNRTDeviceDesc *desc) {
  if (desc == nullptr) {
    MS_LOG(ERROR) << "NNRT desc is null";
    return 0;
  }
  return desc->device_id;
}

const char *OH_AI_GetNameFromNNRTDeviceDesc(const NNRTDeviceDesc *desc) {
  if (desc == nullptr) {
    MS_LOG(ERROR) << "NNRT desc is null";
    return nullptr;
  }
  return desc->device_name;
}

OH_AI_NNRTDeviceType OH_AI_GetTypeFromNNRTDeviceDesc(const NNRTDeviceDesc *desc) {
  if (desc == nullptr) {
    MS_LOG(ERROR) << "NNRT desc is null";
    return OH_AI_NNRTDeviceType::OH_AI_NNRTDEVICE_OTHERS;
  }
  return desc->device_type;
}

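// Convenience constructor: looks up an NNRT device by name and, if found, returns an NNRT
// device-info handle bound to that device id. Returns nullptr when the name is null, device
// enumeration fails, or no device matches.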
OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByName(const char *name) {
  if (name == nullptr) {
    MS_LOG(ERROR) << "NNRT device name is nullptr";
    return nullptr;
  }
  size_t num = 0;
  NNRTDeviceDesc *desc = OH_AI_GetAllNNRTDeviceDescs(&num);
  if (desc == nullptr) {
    MS_LOG(ERROR) << "Get all device desc failed";
    return nullptr;
  }
  OH_AI_DeviceInfoHandle handle = nullptr;
  for (size_t i = 0; i < num; i++) {
    if (strncmp(desc[i].device_name, name, NNRT_DEVICE_NAME_MAX - 1) == 0) {
      handle = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
      OH_AI_DeviceInfoSetDeviceId(handle, desc[i].device_id);
      break;
    }
  }
  OH_AI_DestroyAllNNRTDeviceDescs(&desc);
  return handle;
}

OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByType(OH_AI_NNRTDeviceType type) {
  size_t num = 0;
  NNRTDeviceDesc *desc = OH_AI_GetAllNNRTDeviceDescs(&num);
  if (desc == nullptr) {
    MS_LOG(ERROR) << "Get all device desc failed";
    return nullptr;
  }

  OH_AI_DeviceInfoHandle handle = nullptr;
  for (size_t i = 0; i < num; i++) {
    if (desc[i].device_type == type) {
      handle = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_NNRT);
      OH_AI_DeviceInfoSetDeviceId(handle, desc[i].device_id);
      break;
    }
  }
  OH_AI_DestroyAllNNRTDeviceDescs(&desc);
  return handle;
}

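// The device id, performance mode, and priority accessors below only apply to NNRT device
// descriptors; calls on any other device type are rejected and ignored.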
void OH_AI_DeviceInfoSetDeviceId(OH_AI_DeviceInfoHandle device_info, size_t device_id) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Setting device_id on a non-NNRT device is not allowed, ignored";
    return;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  impl->SetDeviceID(device_id);
}

size_t OH_AI_DeviceInfoGetDeviceId(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return 0;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Getting device_id of a non-NNRT device is not allowed, ignored";
    return 0;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  return impl->GetDeviceID();
}

void OH_AI_DeviceInfoSetPerformanceMode(OH_AI_DeviceInfoHandle device_info, OH_AI_PerformanceMode mode) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Setting performance_mode on a non-NNRT device is not allowed, ignored";
    return;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  impl->SetPerformanceMode(mode);
}

OH_AI_PerformanceMode OH_AI_DeviceInfoGetPerformanceMode(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return OH_AI_PERFORMANCE_NONE;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Getting performance_mode of a non-NNRT device is not allowed, ignored";
    return OH_AI_PERFORMANCE_NONE;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  return static_cast<OH_AI_PerformanceMode>(impl->GetPerformanceMode());
}

void OH_AI_DeviceInfoSetPriority(OH_AI_DeviceInfoHandle device_info, OH_AI_Priority priority) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Setting priority on a non-NNRT device is not allowed, ignored";
    return;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  impl->SetPriority(priority);
}

OH_AI_Priority OH_AI_DeviceInfoGetPriority(const OH_AI_DeviceInfoHandle device_info) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return OH_AI_PRIORITY_NONE;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Getting priority of a non-NNRT device is not allowed, ignored";
    return OH_AI_PRIORITY_NONE;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  return static_cast<OH_AI_Priority>(impl->GetPriority());
}

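// Attaches a named extension (key/value blob) to an NNRT device descriptor. Only the whitelisted
// keys below are accepted; the value is copied into the device info as raw bytes.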
OH_AI_API OH_AI_Status OH_AI_DeviceInfoAddExtension(OH_AI_DeviceInfoHandle device_info, const char *name,
                                                    const char *value, size_t value_size) {
  if (device_info == nullptr) {
    MS_LOG(ERROR) << "device info is null";
    return OH_AI_STATUS_LITE_NULLPTR;
  }
  if (name == nullptr || value == nullptr) {
    MS_LOG(ERROR) << "name/value is not valid";
    return OH_AI_STATUS_LITE_NULLPTR;
  }
  if (OH_AI_DeviceInfoGetDeviceType(device_info) != OH_AI_DEVICETYPE_NNRT) {
    MS_LOG(ERROR) << "Adding an extension to a non-NNRT device is not allowed, ignored";
    return OH_AI_STATUS_LITE_ERROR;
  }
  static std::vector<std::string> extension_keys = {"CachePath", "CacheVersion", "ModelName", "QuantBuffer",
                                                    "QuantConfigData", "isProfiling", "opLayout", "InputDims",
                                                    "DynamicDims", "BandMode", "NPU_FM_SHARED"};
  auto it = std::find(extension_keys.begin(), extension_keys.end(), std::string(name));
  if (it == extension_keys.end()) {
    MS_LOG(ERROR) << "The extension name is not allowed; it must be one of {CachePath, CacheVersion, ModelName,"
                  << " QuantBuffer, QuantConfigData, isProfiling, opLayout, InputDims, DynamicDims, BandMode,"
                  << " NPU_FM_SHARED}.";
    return OH_AI_STATUS_LITE_ERROR;
  }
  auto impl = reinterpret_cast<mindspore::NNRTDeviceInfo *>(device_info);
  mindspore::Extension extension;
  extension.name = std::string(name);
  extension.value = std::vector<uint8_t>(value, value + value_size);
  std::vector<mindspore::Extension> extension_list = impl->GetExtensions();
  extension_list.push_back(extension);
  impl->SetExtensions(extension_list);
  return OH_AI_STATUS_SUCCESS;
}