/**
 * Copyright (C) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
#define MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H

#include <string>
#include <vector>
#include <fstream>
#include "napi/native_api.h"
#include "napi/native_node_api.h"
#include "ms_errors.h"
#include "include/api/types.h"

namespace mindspore {

class CommonNapi {
 public:
  CommonNapi() = delete;
  ~CommonNapi() = delete;

  static std::string getMessageByCode(int32_t &code);
  static int32_t GetPropertyInt32(napi_env env, napi_value config_obj, const std::string &type, int32_t &result);
  static int32_t GetPropertyString(napi_env env, napi_value config_obj, const std::string &type, std::string &result);
  static int32_t GetPropertyInt32Array(napi_env env, napi_value config_obj, const std::string &type,
                                       std::vector<int32_t> &result);
  static int32_t GetPropertyBigIntUint64(napi_env env, napi_value config_obj, const std::string &type,
                                         uint64_t &result);
  static int32_t GetPropertyStringArray(napi_env env, napi_value config_obj, const std::string &type,
                                        std::vector<std::string> &result);
  static int32_t GetStringArray(napi_env env, napi_value value, std::vector<std::string> &result);
  static void WriteTensorData(MSTensor tensor, std::string file_path);
  static void WriteOutputsData(const std::vector<MSTensor> outputs, std::string file_path);
};
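
// Illustrative sketch (not part of the original header): how the property helpers
// above are typically called from a napi binding to read fields of a JS config
// object. The helper name ParseThreadNum and the "threadNum" key are assumptions
// for the example only; SUCCESS comes from ms_errors.h and NAPI_ERR_INPUT_INVALID
// is declared further below.
//
//   int32_t ParseThreadNum(napi_env env, napi_value js_config, int32_t &thread_num) {
//     int32_t ret = CommonNapi::GetPropertyInt32(env, js_config, "threadNum", thread_num);
//     return (ret == SUCCESS) ? SUCCESS : NAPI_ERR_INPUT_INVALID;
//   }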

struct MSLiteAsyncContext {
  explicit MSLiteAsyncContext(napi_env env);
  virtual ~MSLiteAsyncContext();
  int status = SUCCESS;
  std::string errMessage = "";
};
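
// Illustrative sketch (assumption, not from the original source): per-operation
// async contexts commonly derive from MSLiteAsyncContext so the shared
// status/errMessage fields travel through the napi_async_work callbacks. The
// derived type and its members are examples only.
//
//   struct LoadModelAsyncContext : public MSLiteAsyncContext {
//     explicit LoadModelAsyncContext(napi_env env) : MSLiteAsyncContext(env) {}
//     ModelInfo model_info;      // declared later in this header
//     ContextInfo context_info;  // declared later in this header
//   };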

enum ContextThreadAffinityMode : int32_t {
  CONTEXT_AFFINITY_MODE = 0,
  CONTEXT_BIG_CORES_FIRST,
  CONTEXT_LITTLE_CORES_FIRST
};

enum TensorFormat : int32_t {
  TENSOR_DEFAULT_FORMAT = -1,
  TENSOR_NCHW,
  TENSOR_NHWC,
  TENSOR_NHWC4,
  TENSOR_HWKC,
  TENSOR_HWCK,
  TENSOR_KCHW
};

enum TensorDataType : int32_t {
  TENSOR_UNKNOWN = 0,
  TENSOR_INT8 = 32,
  TENSOR_INT16 = 33,
  TENSOR_INT32 = 34,
  TENSOR_INT64 = 35,
  TENSOR_UINT8 = 37,
  TENSOR_UINT16 = 38,
  TENSOR_UINT32 = 39,
  TENSOR_UINT64 = 40,
  TENSOR_FLOAT16 = 42,
  TENSOR_FLOAT32 = 43,
  TENSOR_FLOAT64 = 44
};

enum ModelMode : int32_t {
  kBuffer = 0,
  kPath,
  kFD,
  // add new type here
  kInvalidModelMode = 10,
};

enum ContextQuantizationType : int32_t {
  NO_QUANT = 0,
  WEIGHT_QUANT = 1,
  FULL_QUANT = 2,
};

enum ContextOptimizationLevel : int32_t {
  O0 = 0,
  O2 = 2,
  O3 = 3,
  AUTO = 4,
};

enum ContextPerformanceMode : int32_t {
  PERFORMANCE_NONE = 0,
  PERFORMANCE_LOW = 1,
  PERFORMANCE_MEDIUM = 2,
  PERFORMANCE_HIGH = 3,
  PERFORMANCE_EXTREME = 4,
};

enum ContextPriority : int32_t {
  PRIORITY_NONE = 0,
  PRIORITY_LOW = 1,
  PRIORITY_MEDIUM = 2,
  PRIORITY_HIGH = 3,
};

enum ContextNnrtDeviceType : int32_t {
  NNRTDEVICE_OTHERS = 0,
  NNRTDEVICE_CPU = 1,
  NNRTDEVICE_GPU = 2,
  NNRTDEVICE_ACCELERATOR = 3,
};

struct ModelInfo {
  std::string model_path = "";
  char *model_buffer_data = nullptr;
  size_t model_buffer_total = 0;
  int32_t model_fd = 0;
  ModelMode mode = kBuffer;
  bool train_model = false;
};
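
// Illustrative sketch (assumption): filling a ModelInfo for path-based loading.
// The .ms file location is a placeholder, not a path mandated by this API.
//
//   ModelInfo info;
//   info.mode = kPath;
//   info.model_path = "/data/storage/el2/base/files/model.ms";
//   info.train_model = false;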

struct CpuDevice {
  int thread_num;
  int thread_affinity_mode;
  std::vector<int32_t> thread_affinity_cores;
  std::string precision_mode;
  CpuDevice() {}
  CpuDevice(int thread_num, int affinity_mode, std::vector<int32_t> affinity_cores, std::string precision)
      : thread_num(thread_num),
        thread_affinity_mode(affinity_mode),
        thread_affinity_cores(affinity_cores),
        precision_mode(precision) {}
};

struct NnrtDeviceDesc {
  std::string name;
  ContextNnrtDeviceType type;
  size_t id;
};

struct NNRTDevice {
  size_t device_id;
  int performance_mode{-1};
  int priority{-1};
  NNRTDevice() {}
  NNRTDevice(int device_id, int performance_mode, int priority)
      : device_id(device_id), performance_mode(performance_mode), priority(priority) {}
};

struct TrainConfig {
  std::vector<std::string> loss_names;
  int optimization_level = O0;  // ContextOptimizationLevel value; AUTO is also valid
};

struct ContextInfo {
  std::vector<std::string> target;
  CpuDevice cpu_device;
  NNRTDevice nnrt_device;
  TrainConfig train_cfg;
};
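
// Illustrative sketch (assumption): assembling a ContextInfo that prefers the CPU
// backend. The target string, core list, and precision value are examples only.
//
//   ContextInfo context;
//   context.target = {"cpu"};
//   context.cpu_device = CpuDevice(2, CONTEXT_BIG_CORES_FIRST, {}, "preferred_fp16");
//   context.nnrt_device.performance_mode = PERFORMANCE_HIGH;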

const int32_t NAPI_ERR_INPUT_INVALID = 401;
const int32_t NAPI_ERR_INVALID_PARAM = 1000101;
const int32_t NAPI_ERR_NO_MEMORY = 1000102;
const int32_t NAPI_ERR_ILLEGAL_STATE = 1000103;
const int32_t NAPI_ERR_UNSUPPORTED = 1000104;
const int32_t NAPI_ERR_TIMEOUT = 1000105;
const int32_t NAPI_ERR_STREAM_LIMIT = 1000201;
const int32_t NAPI_ERR_SYSTEM = 1000301;

const std::string NAPI_ERROR_INVALID_PARAM_INFO = "input parameter value error";
const std::string NAPI_ERR_INPUT_INVALID_INFO = "input parameter type or number mismatch";
const std::string NAPI_ERR_INVALID_PARAM_INFO = "invalid parameter";
const std::string NAPI_ERR_NO_MEMORY_INFO = "allocate memory failed";
const std::string NAPI_ERR_ILLEGAL_STATE_INFO = "operation not permitted in current state";
const std::string NAPI_ERR_UNSUPPORTED_INFO = "unsupported option";
const std::string NAPI_ERR_TIMEOUT_INFO = "time out";
const std::string NAPI_ERR_STREAM_LIMIT_INFO = "stream number limited";
const std::string NAPI_ERR_SYSTEM_INFO = "system error";
}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H