/**
 * Copyright 2020-2023 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_API_TYPES_H
#define MINDSPORE_INCLUDE_API_TYPES_H

#include <cstddef>
#include <string>
#include <vector>
#include <memory>
#include <functional>
#include "include/api/data_type.h"
#include "include/api/dual_abi_helper.h"
#include "include/api/format.h"
#include "include/api/visible.h"

namespace mindspore {
enum ModelType : uint32_t {
  kMindIR = 0,               ///< Model type is MindIR
  kAIR = 1,                  ///< Model type is AIR
  kOM = 2,                   ///< Model type is OM
  kONNX = 3,                 ///< Model type is ONNX
  kMindIR_Lite = 4,          ///< Model type is MindIR_Lite
  kDataFlow = 5,             ///< Model type is DataFlow
  kUnknownType = 0xFFFFFFFF  ///< Unknown model type
};

enum QuantizationType : uint32_t {
  kNoQuant = 0,                   ///< Do not quantize
  kWeightQuant = 1,               ///< Quantize weights only
  kFullQuant = 2,                 ///< Quantize the whole network
  kUnknownQuantType = 0xFFFFFFFF  ///< Unknown quantization type
};

enum OptimizationLevel : uint32_t {
  kO0 = 0,                        ///< Do not optimize
  kO2 = 2,                        ///< Cast the network to float16, keep batchnorm and loss in float32
  kO3 = 3,                        ///< Cast the network to float16, including batchnorm
  kAuto = 4,                      ///< Choose the optimization level based on the device
  kOptimizationType = 0xFFFFFFFF  ///< Unknown optimization type
};

struct QuantParam {
  int bit_num;         ///< Quantization bit number
  double scale;        ///< Quantization scale
  int32_t zero_point;  ///< Quantization zero point
  double min;          ///< Quantization min value
  double max;          ///< Quantization max value
};

class Allocator;
/// \brief The MSTensor class defines a tensor in MindSpore.
class MS_API MSTensor {
 public:
  class Impl;
  /// \brief Creates an MSTensor object whose data needs to be copied before it is accessed by Model. Must be used in
  /// pairs with DestroyTensorPtr.
  ///
  /// \param[in] name The name of the MSTensor.
  /// \param[in] type The data type of the MSTensor.
  /// \param[in] shape The shape of the MSTensor.
  /// \param[in] data The data pointer that points to allocated memory.
  /// \param[in] data_len The length of the memory, in bytes.
  /// \param[in] device The device type of the tensor.
  /// \param[in] device_id The device id of the tensor.
  ///
  /// \return A pointer to the created MSTensor.
  static inline MSTensor *CreateTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                       const void *data, size_t data_len, const std::string &device = "",
                                       int device_id = -1) noexcept;
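
  // Usage sketch (illustrative only; the tensor name, data type and shape below are assumptions, not part of
  // this API): CreateTensor copies the host buffer, so the buffer may be released once the call returns.
  //
  //   float host_data[6] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
  //   mindspore::MSTensor *t = mindspore::MSTensor::CreateTensor(
  //     "input0", mindspore::DataType::kNumberTypeFloat32, {2, 3}, host_data, sizeof(host_data));
  //   if (t != nullptr) {
  //     // ... pass `t` to a Model ...
  //     mindspore::MSTensor::DestroyTensorPtr(t);  // CreateTensor must be paired with DestroyTensorPtr
  //   }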

  /// \brief Creates an MSTensor object whose data needs to be copied before it is accessed by Model. Must be used in
  /// pairs with DestroyTensorPtr.
  ///
  /// \param[in] name The name of the MSTensor.
  /// \param[in] tensor The source tensor.
  /// \param[in] device The device type of the tensor.
  /// \param[in] device_id The device id of the tensor.
  ///
  /// \return A pointer to the created MSTensor.
  static inline MSTensor *CreateTensor(const std::string &name, const MSTensor &tensor, const std::string &device = "",
                                       int device_id = -1) noexcept;

  /// \brief Creates an MSTensor object whose data can be directly accessed by Model. Must be used in pairs with
  /// DestroyTensorPtr.
  ///
  /// \param[in] name The name of the MSTensor.
  /// \param[in] type The data type of the MSTensor.
  /// \param[in] shape The shape of the MSTensor.
  /// \param[in] data The data pointer that points to allocated memory.
  /// \param[in] data_len The length of the memory, in bytes.
  /// \param[in] own_data Whether the data memory should be freed when the MSTensor is destroyed.
  ///
  /// \return A pointer to the created MSTensor.
  static inline MSTensor *CreateRefTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                          const void *data, size_t data_len, bool own_data = true) noexcept;
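
  // Usage sketch (illustrative only; the buffer and its lifetime policy are assumptions): unlike CreateTensor,
  // CreateRefTensor wraps the given buffer without copying it. With own_data = false the caller keeps ownership
  // and must keep the buffer alive for as long as the tensor is used.
  //
  //   std::vector<float> buffer(6, 0.0f);
  //   mindspore::MSTensor *ref = mindspore::MSTensor::CreateRefTensor(
  //     "input0", mindspore::DataType::kNumberTypeFloat32, {2, 3},
  //     buffer.data(), buffer.size() * sizeof(float), /*own_data=*/false);
  //   // ... `buffer` must outlive `ref` ...
  //   mindspore::MSTensor::DestroyTensorPtr(ref);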

  /// \brief Creates an MSTensor object whose device data can be directly accessed by Model. Must be used in pairs
  /// with DestroyTensorPtr.
  ///
  /// \param[in] name The name of the MSTensor.
  /// \param[in] type The data type of the MSTensor.
  /// \param[in] shape The shape of the MSTensor.
  /// \param[in] data The data pointer that points to device memory.
  /// \param[in] data_len The length of the memory, in bytes.
  ///
  /// \return An MSTensor object.
  static inline MSTensor CreateDeviceTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                            void *data, size_t data_len) noexcept;

  /// \brief Creates an MSTensor object from a local file. Must be used in pairs with DestroyTensorPtr.
  ///
  /// \param[in] file Path of the file to be read.
  /// \param[in] type The data type of the MSTensor.
  /// \param[in] shape The shape of the MSTensor.
  ///
  /// \return A pointer to the created MSTensor.
  static inline MSTensor *CreateTensorFromFile(const std::string &file, DataType type = DataType::kNumberTypeUInt8,
                                               const std::vector<int64_t> &shape = {}) noexcept;
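
  // Usage sketch (illustrative only; the file path is an assumption): load the raw bytes of a local file as a
  // uint8 tensor. If no shape is given, the implementation derives one from the file contents.
  //
  //   mindspore::MSTensor *img = mindspore::MSTensor::CreateTensorFromFile("/path/to/input.bin");
  //   if (img != nullptr) {
  //     // ... use `img` ...
  //     mindspore::MSTensor::DestroyTensorPtr(img);
  //   }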

  /// \brief Creates a string-type MSTensor object whose data can be accessed by Model only after being copied. Must
  /// be used in pairs with DestroyTensorPtr.
  ///
  /// \param[in] name The name of the MSTensor.
  /// \param[in] str A vector container containing several strings.
  ///
  /// \return A pointer to the created MSTensor.
  static inline MSTensor *StringsToTensor(const std::string &name, const std::vector<std::string> &str);

  /// \brief Parses the string-type MSTensor object into strings.
  ///
  /// \param[in] tensor An MSTensor object.
  ///
  /// \return A vector container containing several strings.
  static inline std::vector<std::string> TensorToStrings(const MSTensor &tensor);
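
  // Usage sketch (illustrative only; the tensor name and contents are assumptions): StringsToTensor and
  // TensorToStrings form a round-trip pair for string tensors.
  //
  //   std::vector<std::string> words = {"hello", "world"};
  //   mindspore::MSTensor *st = mindspore::MSTensor::StringsToTensor("labels", words);
  //   std::vector<std::string> back = mindspore::MSTensor::TensorToStrings(*st);  // {"hello", "world"}
  //   mindspore::MSTensor::DestroyTensorPtr(st);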

  /// \brief Destroys an object created by Clone, StringsToTensor, CreateRefTensor or CreateTensor. Do
  /// not use it to destroy an MSTensor from other sources.
  ///
  /// \param[in] tensor A pointer to the MSTensor object.
  static void DestroyTensorPtr(MSTensor *tensor) noexcept;

  MSTensor();
  explicit MSTensor(const std::shared_ptr<Impl> &impl);
  // If the data was allocated with malloc, the user needs to free it after constructing the MSTensor;
  // otherwise the memory leaks.
  inline MSTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape, const void *data,
                  size_t data_len);
  explicit MSTensor(std::nullptr_t);
  ~MSTensor();

  /// \brief Obtains the name of the MSTensor.
  ///
  /// \return The name of the MSTensor.
  inline std::string Name() const;

  /// \brief Obtains the data type of the MSTensor.
  ///
  /// \return The data type of the MSTensor.
  enum DataType DataType() const;

  /// \brief Obtains the shape of the MSTensor.
  ///
  /// \return The shape of the MSTensor.
  const std::vector<int64_t> &Shape() const;

  /// \brief Obtains the number of elements of the MSTensor.
  ///
  /// \return The number of elements of the MSTensor.
  int64_t ElementNum() const;

  /// \brief Obtains a shared pointer to a copy of the data of the MSTensor. The data can be read on host.
  ///
  /// \return A shared pointer to a copy of the data of the MSTensor.
  std::shared_ptr<const void> Data() const;

  /// \brief Obtains the pointer to the data of the MSTensor. If the MSTensor is a device tensor, the data cannot be
  /// accessed directly on host.
  ///
  /// \return A pointer to the data of the MSTensor.
  void *MutableData();

  /// \brief Obtains the length of the data of the MSTensor, in bytes.
  ///
  /// \return The length of the data of the MSTensor, in bytes.
  size_t DataSize() const;
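
  // Usage sketch (illustrative only; assumes `t` is a valid host tensor holding float32 elements and that
  // <iostream> is included): typical read-only inspection through the accessors declared above.
  //
  //   void DumpTensor(const mindspore::MSTensor &t) {
  //     std::cout << t.Name() << ": " << t.ElementNum() << " elements, " << t.DataSize() << " bytes\n";
  //     std::shared_ptr<const void> data = t.Data();  // host-readable copy of the data
  //     const float *values = static_cast<const float *>(data.get());
  //     if (values != nullptr && t.DataType() == mindspore::DataType::kNumberTypeFloat32) {
  //       std::cout << "first element: " << values[0] << "\n";
  //     }
  //   }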

  /// \brief Gets the device id of the MSTensor.
  ///
  /// \return The device id of the MSTensor.
  int GetDeviceId() const;

  /// \brief Gets the device type of the MSTensor.
  ///
  /// \return The device type of the MSTensor.
  std::string GetDevice() const;

  /// \brief Gets whether the MSTensor data is constant data.
  ///
  /// \return The const flag of the MSTensor.
  bool IsConst() const;

  /// \brief Gets the boolean value that indicates whether the memory of the MSTensor is on device.
  ///
  /// \return The boolean value that indicates whether the memory of the MSTensor is on device.
  bool IsDevice() const;

  /// \brief Gets a deep copy of the MSTensor. Must be used in pairs with DestroyTensorPtr.
  ///
  /// \return A pointer to a deep copy of the MSTensor.
  MSTensor *Clone() const;

  /// \brief Gets the boolean value that indicates whether the MSTensor is null (invalid).
  ///
  /// \return The boolean value that indicates whether the MSTensor is null (invalid).
  bool operator==(std::nullptr_t) const;

  /// \brief Gets the boolean value that indicates whether the MSTensor is valid.
  ///
  /// \return The boolean value that indicates whether the MSTensor is valid.
  bool operator!=(std::nullptr_t) const;

  /// \brief Gets the boolean value that indicates whether this MSTensor equals tensor.
  ///
  /// \param[in] tensor Another MSTensor.
  ///
  /// \return The boolean value that indicates whether this MSTensor equals tensor.
  bool operator==(const MSTensor &tensor) const;

  /// \brief Gets the boolean value that indicates whether this MSTensor does not equal tensor.
  ///
  /// \param[in] tensor Another MSTensor.
  ///
  /// \return The boolean value that indicates whether this MSTensor does not equal tensor.
  bool operator!=(const MSTensor &tensor) const;

  /// \brief Sets the shape of the MSTensor.
  ///
  /// \param[in] shape The shape of the MSTensor, a vector of int64_t.
  void SetShape(const std::vector<int64_t> &shape);

  /// \brief Sets the data type of the MSTensor.
  ///
  /// \param[in] data_type The data type of the MSTensor.
  void SetDataType(enum DataType data_type);

  /// \brief Sets the name of the MSTensor.
  ///
  /// \param[in] name The name of the MSTensor.
  inline void SetTensorName(const std::string &name);

  /// \brief Sets the Allocator for the MSTensor.
  ///
  /// \param[in] allocator A pointer to an Allocator.
  void SetAllocator(std::shared_ptr<Allocator> allocator);

  /// \brief Obtains the Allocator of the MSTensor.
  ///
  /// \return A pointer to the Allocator.
  std::shared_ptr<Allocator> allocator() const;

  /// \brief Sets the format of the MSTensor.
  ///
  /// \param[in] format The format of the MSTensor.
  void SetFormat(mindspore::Format format);

  /// \brief Obtains the format of the MSTensor.
  ///
  /// \return The format of the MSTensor.
  mindspore::Format format() const;

  /// \brief Sets the data for the MSTensor.
  ///
  /// \note Deprecated, this interface will be removed in the next iteration.
  ///
  /// \note The data pointer should be allocated with the malloc interface.
  ///
  /// \note The memory pointed to by the original data pointer of the MSTensor needs to be managed by the user.
  ///
  /// \param[in] data A pointer to the data of the MSTensor.
  /// \param[in] own_data Whether the data memory should be freed when the MSTensor is destroyed.
  void SetData(void *data, bool own_data = true);

  /// \brief Sets the device data address for the MSTensor. Only valid for Lite.
  ///
  /// \note The memory pointed to by the original data pointer of the MSTensor needs to be managed by the user.
  ///
  /// \param[in] data A pointer to the device data of the MSTensor.
  void SetDeviceData(void *data);

  /// \brief Gets the device data address of the MSTensor set by SetDeviceData. Only valid for Lite.
  ///
  /// \return A pointer to the device data of the MSTensor.
  void *GetDeviceData();

  /// \brief Gets the quantization parameters of the MSTensor.
  ///
  /// \return The quantization parameters of the MSTensor.
  std::vector<QuantParam> QuantParams() const;

  /// \brief Sets the quantization parameters for the MSTensor.
  ///
  /// \param[in] quant_params The quantization parameters of the MSTensor.
  void SetQuantParams(std::vector<QuantParam> quant_params);
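
  // Usage sketch (illustrative only; `tensor` and the parameter values are assumptions): attach per-tensor
  // quantization parameters and read them back.
  //
  //   mindspore::QuantParam qp;
  //   qp.bit_num = 8;
  //   qp.scale = 0.0157;
  //   qp.zero_point = 0;
  //   qp.min = -2.0;
  //   qp.max = 2.0;
  //   tensor.SetQuantParams({qp});
  //   std::vector<mindspore::QuantParam> qps = tensor.QuantParams();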

  const std::shared_ptr<Impl> impl() const { return impl_; }

 private:
  // api without std::string
  static MSTensor *CreateTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
                                const void *data, size_t data_len, const std::vector<char> &device = {},
                                int device_id = -1) noexcept;
  static MSTensor *CreateTensor(const std::vector<char> &name, const MSTensor &tensor, const std::vector<char> &device,
                                int device_id = -1) noexcept;
  static MSTensor *CreateRefTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
                                   const void *data, size_t data_len, bool own_data) noexcept;
  static MSTensor CreateDeviceTensor(const std::vector<char> &name, enum DataType type,
                                     const std::vector<int64_t> &shape, void *data, size_t data_len) noexcept;
  static MSTensor *CreateTensorFromFile(const std::vector<char> &file, enum DataType type,
                                        const std::vector<int64_t> &shape) noexcept;
  static MSTensor *CharStringsToTensor(const std::vector<char> &name, const std::vector<std::vector<char>> &str);
  static std::vector<std::vector<char>> TensorToStringChars(const MSTensor &tensor);

  MSTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape, const void *data,
           size_t data_len);
  std::vector<char> CharName() const;
  void SetTensorName(const std::vector<char> &name);

  friend class ModelImpl;
  std::shared_ptr<Impl> impl_;
};

class MS_API Buffer {
 public:
  Buffer();
  Buffer(const void *data, size_t data_len);
  ~Buffer();

  const void *Data() const;
  void *MutableData();
  size_t DataSize() const;

  bool ResizeData(size_t data_len);
  bool SetData(const void *data, size_t data_len);

  Buffer Clone() const;

 private:
  class Impl;
  std::shared_ptr<Impl> impl_;
};
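
// Usage sketch (a minimal sketch, assuming Buffer copies the bytes passed to its constructor; the payload is an
// assumption): hold a byte blob and work on an independent clone of it.
//
//   const char payload[] = "raw model bytes";
//   mindspore::Buffer buf(payload, sizeof(payload));
//   mindspore::Buffer copy = buf.Clone();           // deep copy
//   bool ok = copy.ResizeData(2 * copy.DataSize());  // resizing the clone leaves `buf` untouched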

MSTensor *MSTensor::CreateTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
                                 const void *data, size_t data_len, const std::string &device, int device_id) noexcept {
  return CreateTensor(StringToChar(name), type, shape, data, data_len, StringToChar(device), device_id);
}

MSTensor *MSTensor::CreateTensor(const std::string &name, const MSTensor &tensor, const std::string &device,
                                 int device_id) noexcept {
  return CreateTensor(StringToChar(name), tensor, StringToChar(device), device_id);
}

MSTensor *MSTensor::CreateRefTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
                                    const void *data, size_t data_len, bool own_data) noexcept {
  return CreateRefTensor(StringToChar(name), type, shape, data, data_len, own_data);
}

MSTensor MSTensor::CreateDeviceTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
                                      void *data, size_t data_len) noexcept {
  return CreateDeviceTensor(StringToChar(name), type, shape, data, data_len);
}

MSTensor *MSTensor::CreateTensorFromFile(const std::string &file, enum DataType type,
                                         const std::vector<int64_t> &shape) noexcept {
  return CreateTensorFromFile(StringToChar(file), type, shape);
}

MSTensor *MSTensor::StringsToTensor(const std::string &name, const std::vector<std::string> &str) {
  return CharStringsToTensor(StringToChar(name), VectorStringToChar(str));
}

std::vector<std::string> MSTensor::TensorToStrings(const MSTensor &tensor) {
  return VectorCharToString(TensorToStringChars(tensor));
}

MSTensor::MSTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape, const void *data,
                   size_t data_len)
    : MSTensor(StringToChar(name), type, shape, data, data_len) {}

std::string MSTensor::Name() const { return CharToString(CharName()); }

void MSTensor::SetTensorName(const std::string &name) { SetTensorName(StringToChar(name)); }

using Key = struct MS_API Key {
  const size_t max_key_len = 32;
  size_t len = 0;
  unsigned char key[32] = {0};
  Key() : len(0) {}
  explicit Key(const char *dec_key, size_t key_len);
};
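
// Usage sketch (illustrative only; the key bytes are an assumption): Key carries a decryption key of at most
// max_key_len (32) bytes, e.g. for loading an encrypted model with AES-GCM (see kDecModeAesGcm below).
//
//   const char dec_key_bytes[16] = { /* 128-bit key material */ };
//   mindspore::Key dec_key(dec_key_bytes, sizeof(dec_key_bytes));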

constexpr char kDecModeAesGcm[] = "AES-GCM";

/// \brief CallBackParam defines the input arguments for the callback function.
struct MSCallBackParam {
  std::string node_name; /**< node name argument */
  std::string node_type; /**< node type argument */
  double execute_time;   /**< gpu execution time */
};

/// \brief KernelCallBack defines the function pointer for the callback.
using MSKernelCallBack =
  std::function<bool(const std::vector<MSTensor> & /* inputs */, const std::vector<MSTensor> & /* outputs */,
                     const MSCallBackParam &opInfo)>;
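
// Usage sketch (illustrative only; the logging and the meaning of the return value are assumptions based on common
// callback usage, requiring <iostream>): a lambda matching MSKernelCallBack that reports each executed node.
//
//   mindspore::MSKernelCallBack after_cb =
//     [](const std::vector<mindspore::MSTensor> &inputs, const std::vector<mindspore::MSTensor> &outputs,
//        const mindspore::MSCallBackParam &info) {
//       std::cout << info.node_name << " (" << info.node_type << ") time: " << info.execute_time << "\n";
//       return true;  // continue execution
//     };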

MS_API std::vector<char> CharVersion();
inline std::string Version() { return CharToString(CharVersion()); }

}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_API_TYPES_H