1 /**
2  * Copyright 2020-2022 Huawei Technologies Co., Ltd
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 #ifndef MINDSPORE_LITE_INCLUDE_LITE_TYPES_H_
17 #define MINDSPORE_LITE_INCLUDE_LITE_TYPES_H_
18 
19 #include <memory>
20 
21 namespace mindspore {
// Forward declaration — presumably the runtime memory allocator; full definition lives elsewhere.
class Allocator;
using AllocatorPtr = std::shared_ptr<Allocator>;

// Forward declaration — NOTE(review): looks like the backend-delegation hook; confirm against its definition.
class Delegate;
using DelegatePtr = std::shared_ptr<Delegate>;
27 
28 namespace lite {
29 class Tensor;
30 
/// \brief CpuBindMode defined for holding bind cpu strategy argument.
typedef enum {
  NO_BIND = 0,    /**< do not bind worker threads to any cpu core */
  HIGHER_CPU = 1, /**< prefer binding to the higher-frequency (big) cores */
  MID_CPU = 2     /**< prefer binding to the middle cores */
} CpuBindMode;
37 
/// \brief DeviceType defined for holding user's preferred backend.
typedef enum {
  DT_CPU = 0,    /**< CPU device type */
  DT_GPU = 1,    /**< GPU device type */
  DT_NPU = 2,    /**< NPU device type */
  DT_ASCEND = 3, /**< Ascend device type */
  DT_CUSTOM = 4, /**< custom (user-extended) device type */
  DT_NNRT = 5,   /**< NNRT device type */
  DT_END = 6     /**< sentinel value: no device type */
} DeviceType;
48 
/// \brief FormatType defined for holding the serialization format of a model file.
typedef enum {
  FT_FLATBUFFERS = 0, /**< model serialized with FlatBuffers */
  FT_PROTOBUF = 1     /**< model serialized with Protobuf */
} FormatType;
53 
/// \brief QuantizationType defined for holding the quantization strategy used on export.
typedef enum {
  QT_DEFAULT = 0, /**< keep whatever quantization the original model carries */
  QT_NONE = 1,    /**< export with no quantization applied */
  QT_WEIGHT = 2   /**< quantize the weights only */
} QuantizationType;
59 
/// \brief ModelType defined for holding which parts of a compiled model get serialized.
typedef enum {
  MT_TRAIN = 0,    /**< serialize both the train and the inference part of the compiled model */
  MT_INFERENCE = 1 /**< serialize only the inference part of the compiled model */
} ModelType;
64 }  // namespace lite
65 }  // namespace mindspore
66 #endif  // MINDSPORE_LITE_INCLUDE_LITE_TYPES_H_
67