/**
 * Copyright 2021-2023 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_LITE_SRC_RUNTIME_CXX_API_CONVERTERS_H_
#define MINDSPORE_LITE_SRC_RUNTIME_CXX_API_CONVERTERS_H_

#include <vector>
#include <string>
#include <memory>
#include "include/api/context.h"
#include "include/api/status.h"
#include "include/api/cfg.h"
#include "include/train/train_cfg.h"
#include "src/litert/inner_context.h"
#include "src/common/log_adapter.h"

namespace mindspore {
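// Converts the public C++ API mindspore::Context into the internal
// lite::InnerContext consumed by the LiteRT runtime.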
class MS_API ContextUtils {
 public:
  static std::shared_ptr<lite::InnerContext> Convert(Context *context);

 private:
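  // Copies context-wide attributes (thread count, inter-op parallelism, core
  // affinity, delegate settings) from the public Context onto the inner context.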
  static void SetContextAttr(int32_t thread_num, int32_t inter_op_parallel_num, bool enable_parallel,
                             const std::vector<int32_t> &affinity_core_list, int delegate_mode,
                             const std::shared_ptr<Delegate> &delegate, lite::InnerContext *inner_context,
                             bool float_mode = false);
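  // Per-backend helpers: each one appends the corresponding device descriptor
  // (CPU, GPU, NPU, Ascend, custom, NNRt) to the inner context.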
  static Status AddCpuDevice(const std::shared_ptr<Allocator> &allocator, int affinity_mode, bool enable_fp16,
                             const std::string &provider, const std::string &provider_device,
                             lite::InnerContext *inner_context);
  static Status AddGpuDevice(bool enable_fp16, uint32_t device_id, int rank_id, int group_size, bool enable_gl_texture,
                             void *gl_context, void *gl_display, const std::string &provider,
                             const std::string &provider_device, const std::shared_ptr<Allocator> &allocator,
                             lite::InnerContext *inner_context);
  static Status AddNpuDevice(bool enable_fp16, int frequency, lite::InnerContext *inner_context);
  static Status AddAscendDevice(lite::InnerContext *inner_context, DeviceInfoContext *device);
  static Status AddCustomDevice(lite::InnerContext *inner_context, const std::shared_ptr<DeviceInfoContext> &device);
  static Status AddNNRtDevice(lite::InnerContext *inner_context, size_t device_id, int performance_mode, int priority,
                              bool enable_fp16, const std::vector<Extension> &extensions);
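  // An affinity mode is valid only within the range the scheduler understands,
  // i.e. lite::NO_BIND through lite::MID_CPU.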
  static bool IsAffinityModeValid(int affinity_mode) {
    return affinity_mode >= lite::NO_BIND && affinity_mode <= lite::MID_CPU;
  }
  static void ResetContextDefaultParam(Context *context);
};

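// Maps the public mindspore::QuantizationType onto the internal
// lite::QuantizationType; unsupported values fall back to QT_DEFAULT.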
inline lite::QuantizationType A2L_ConvertQT(mindspore::QuantizationType qt) {
  if (qt == kNoQuant) {
    return lite::QT_NONE;
  }
  if (qt == kWeightQuant) {
    return lite::QT_WEIGHT;
  }
  if (qt == kFullQuant || qt == kUnknownQuantType) {
    MS_LOG(WARNING) << "QuantizationType " << qt << " is not supported, using the default quantization type.";
  }
  return lite::QT_DEFAULT;
}

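// Copies the training configuration from the public TrainCfg into the
// internal lite::TrainCfg.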
Status A2L_ConvertConfig(const TrainCfg *a_train_cfg, lite::TrainCfg *l_train_cfg);
}  // namespace mindspore

#endif  // MINDSPORE_LITE_SRC_RUNTIME_CXX_API_CONVERTERS_H_