/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @addtogroup MindSpore
 * @{
 *
 * @brief Provides APIs related to MindSpore Lite model inference.
 *
 * @Syscap SystemCapability.Ai.MindSpore
 * @since 9
 */

/**
 * @file model.h
 *
 * @brief Provides model-related APIs, which can be used for model creation, model inference, and more.
 *
 * @library libmindspore_lite_ndk.so
 * @since 9
 */
#ifndef MINDSPORE_INCLUDE_C_API_MODEL_C_H
#define MINDSPORE_INCLUDE_C_API_MODEL_C_H

#include "mindspore/tensor.h"
#include "mindspore/context.h"
#include "mindspore/status.h"

#ifdef __cplusplus
extern "C" {
#endif

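/**
 * @brief Defines the handle of a model object.
 *
 * @since 9
 */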
typedef void *OH_AI_ModelHandle;

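/**
 * @brief Defines the tensor handle array, which holds the number of tensor handles and the handle list.
 *
 * @since 9
 */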
typedef struct OH_AI_TensorHandleArray {
  size_t handle_num;
  OH_AI_TensorHandle *handle_list;
} OH_AI_TensorHandleArray;

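/**
 * @brief Defines the maximum number of tensor dimensions in OH_AI_ShapeInfo.
 *
 * @since 9
 */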
#define OH_AI_MAX_SHAPE_NUM 32
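
/**
 * @brief Defines the dimension information of a tensor; the maximum dimension count is OH_AI_MAX_SHAPE_NUM.
 *
 * @since 9
 */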
typedef struct OH_AI_ShapeInfo {
  size_t shape_num;
  int64_t shape[OH_AI_MAX_SHAPE_NUM];
} OH_AI_ShapeInfo;

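/**
 * @brief Defines the operator information passed to a callback, including the node name and node type.
 *
 * @since 9
 */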
typedef struct OH_AI_CallBackParam {
  char *node_name;
  char *node_type;
} OH_AI_CallBackParam;

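/**
 * @brief Defines the pointer to the callback function passed to OH_AI_ModelPredict, which receives
 * the input tensors, the output tensors, and the kernel information (node name and node type).
 *
 * @since 9
 */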
typedef bool (*OH_AI_KernelCallBack)(const OH_AI_TensorHandleArray inputs, const OH_AI_TensorHandleArray outputs,
                                     const OH_AI_CallBackParam kernel_Info);

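/*
 * A minimal callback sketch (not part of the API): such a function can be passed as the "before"
 * or "after" argument of OH_AI_ModelPredict. It simply logs the node information (requires <stdio.h>)
 * and returns true so that inference continues.
 *
 *     bool PrintNodeCallBack(const OH_AI_TensorHandleArray inputs, const OH_AI_TensorHandleArray outputs,
 *                            const OH_AI_CallBackParam kernel_Info) {
 *       printf("running node: %s (%s)\n", kernel_Info.node_name, kernel_Info.node_type);
 *       return true;
 *     }
 */
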
/**
 * @brief Create a model object.
 * @return Model object handle.
 * @since 9
 */
OH_AI_API OH_AI_ModelHandle OH_AI_ModelCreate();

/**
 * @brief Destroy the model object.
 * @param model Model object handle address.
 * @since 9
 */
OH_AI_API void OH_AI_ModelDestroy(OH_AI_ModelHandle *model);

/**
 * @brief Build the model from a model file buffer so that it can run on a device.
 * @param model Model object handle.
 * @param model_data Defines the buffer read from a model file.
 * @param data_size Defines the number of bytes in the model file buffer.
 * @param model_type Defines the type of the model file.
 * @param model_context Defines the context used to store options during execution.
 * @return OH_AI_Status.
 * @since 9
 */
OH_AI_API OH_AI_Status OH_AI_ModelBuild(OH_AI_ModelHandle model, const void *model_data, size_t data_size,
                                        OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context);

/**
 * @brief Load and build the model from a model file path so that it can run on a device.
 * @param model Model object handle.
 * @param model_path Defines the model file path.
 * @param model_type Defines the type of the model file.
 * @param model_context Defines the context used to store options during execution.
 * @return OH_AI_Status.
 * @since 9
 */
OH_AI_API OH_AI_Status OH_AI_ModelBuildFromFile(OH_AI_ModelHandle model, const char *model_path,
                                                OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context);

/**
 * @brief Resizes the shapes of the inputs.
 * @param model Model object handle.
 * @param inputs The array that includes all input tensor handles.
 * @param shape_infos Defines the new shapes of the inputs, which should be consistent with the inputs.
 * @param shape_info_num The number of shape_infos.
 * @return OH_AI_Status.
 * @since 9
 */
OH_AI_API OH_AI_Status OH_AI_ModelResize(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
                                         OH_AI_ShapeInfo *shape_infos, size_t shape_info_num);

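/*
 * A minimal resize sketch, assuming a model that has already been built with a single
 * resizable 4-dimensional input; the shape values below are placeholders:
 *
 *     OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
 *     OH_AI_ShapeInfo shape_info = {4, {1, 224, 224, 3}};
 *     OH_AI_Status ret = OH_AI_ModelResize(model, inputs, &shape_info, 1);
 */
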
/**
 * @brief Runs model inference.
 * @param model Model object handle.
 * @param inputs The array that includes all input tensor handles.
 * @param outputs The array that includes all output tensor handles.
 * @param before Callback invoked before prediction.
 * @param after Callback invoked after prediction.
 * @return OH_AI_Status.
 * @since 9
 */
OH_AI_API OH_AI_Status OH_AI_ModelPredict(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
                                          OH_AI_TensorHandleArray *outputs, const OH_AI_KernelCallBack before,
                                          const OH_AI_KernelCallBack after);

/**
 * @brief Obtains all input tensor handles of the model.
 * @param model Model object handle.
 * @return The array that includes all input tensor handles.
 * @since 9
 */
OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetInputs(const OH_AI_ModelHandle model);

/**
 * @brief Obtains all output tensor handles of the model.
 * @param model Model object handle.
 * @return The array that includes all output tensor handles.
 * @since 9
 */
OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetOutputs(const OH_AI_ModelHandle model);

/**
 * @brief Obtains the input tensor handle of the model by name.
 * @param model Model object handle.
 * @param tensor_name The name of the tensor.
 * @return The input tensor handle with the given name; if the name is not found, NULL is returned.
 * @since 9
 */
OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetInputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);

/**
 * @brief Obtains the output tensor handle of the model by name.
 * @param model Model object handle.
 * @param tensor_name The name of the tensor.
 * @return The output tensor handle with the given name; if the name is not found, NULL is returned.
 * @since 9
 */
OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetOutputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);

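/*
 * A minimal end-to-end usage sketch. It assumes the context, device-info, and tensor helpers
 * (OH_AI_ContextCreate, OH_AI_DeviceInfoCreate, OH_AI_ContextAddDeviceInfo, OH_AI_TensorGetMutableData,
 * OH_AI_TensorGetDataSize, OH_AI_ContextDestroy) declared in the included headers, the
 * OH_AI_MODELTYPE_MINDIR model type, and a hypothetical model file path "model.ms":
 *
 *     OH_AI_ContextHandle context = OH_AI_ContextCreate();
 *     OH_AI_DeviceInfoHandle cpu_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *     OH_AI_ContextAddDeviceInfo(context, cpu_info);
 *
 *     OH_AI_ModelHandle model = OH_AI_ModelCreate();
 *     OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "model.ms", OH_AI_MODELTYPE_MINDIR, context);
 *     if (ret == OH_AI_STATUS_SUCCESS) {
 *       OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
 *       for (size_t i = 0; i < inputs.handle_num; ++i) {
 *         void *data = OH_AI_TensorGetMutableData(inputs.handle_list[i]);
 *         // placeholder: copy real input data of size OH_AI_TensorGetDataSize(inputs.handle_list[i]) into data
 *       }
 *       OH_AI_TensorHandleArray outputs;
 *       ret = OH_AI_ModelPredict(model, inputs, &outputs, NULL, NULL);
 *     }
 *     OH_AI_ModelDestroy(&model);
 *     OH_AI_ContextDestroy(&context);
 */
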
#ifdef __cplusplus
}
#endif
#endif  // MINDSPORE_INCLUDE_C_API_MODEL_C_H