/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_C_API_MODEL_C_H
#define MINDSPORE_INCLUDE_C_API_MODEL_C_H

#include "mindspore/tensor.h"
#include "mindspore/context.h"
#include "mindspore/status.h"

#ifdef __cplusplus
extern "C" {
#endif

/// \brief Opaque handle that identifies a model object.
typedef void *OH_AI_ModelHandle;

29 typedef struct OH_AI_TensorHandleArray {
30   size_t handle_num;
31   OH_AI_TensorHandle *handle_list;
32 } OH_AI_TensorHandleArray;
33 
/// \brief Maximum number of dimensions a shape description can carry.
#define OH_AI_MAX_SHAPE_NUM 32

/// \brief Dimension information of one tensor shape.
typedef struct OH_AI_ShapeInfo {
  size_t shape_num;                    ///< Number of valid entries in shape.
  int64_t shape[OH_AI_MAX_SHAPE_NUM];  ///< Dimension sizes; only the first shape_num entries are meaningful.
} OH_AI_ShapeInfo;

/// \brief Node information passed to a kernel callback.
typedef struct OH_AI_CallBackParam {
  char *node_name;  ///< Name of the node being executed.
  char *node_type;  ///< Type of the node being executed.
} OH_AI_CallBackParam;

45 typedef bool (*OH_AI_KernelCallBack)(const OH_AI_TensorHandleArray inputs, const OH_AI_TensorHandleArray outputs,
46                                      const OH_AI_CallBackParam kernel_Info);
47 
48 /// \brief Create a model object.
49 ///
50 /// \return Model object handle.
51 OH_AI_API OH_AI_ModelHandle OH_AI_ModelCreate();
52 
53 /// \brief Destroy the model object.
54 ///
55 /// \param[in] model Model object handle address.
56 OH_AI_API void OH_AI_ModelDestroy(OH_AI_ModelHandle *model);
57 
58 /// \brief Build the model from model file buffer so that it can run on a device.
59 ///
60 /// \param[in] model Model object handle.
61 /// \param[in] model_data Define the buffer read from a model file.
62 /// \param[in] data_size Define bytes number of model file buffer.
63 /// \param[in] model_type Define The type of model file.
64 /// \param[in] model_context Define the context used to store options during execution.
65 ///
66 /// \return OH_AI_Status.
67 OH_AI_API OH_AI_Status OH_AI_ModelBuild(OH_AI_ModelHandle model, const void *model_data, size_t data_size,
68                                         OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context);
69 
70 /// \brief Load and build the model from model path so that it can run on a device.
71 ///
72 /// \param[in] model Model object handle.
73 /// \param[in] model_path Define the model file path.
74 /// \param[in] model_type Define The type of model file.
75 /// \param[in] model_context Define the context used to store options during execution.
76 ///
77 /// \return OH_AI_Status.
78 OH_AI_API OH_AI_Status OH_AI_ModelBuildFromFile(OH_AI_ModelHandle model, const char *model_path,
79                                                 OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context);
80 
81 /// \brief Resizes the shapes of inputs.
82 ///
83 /// \param[in] model Model object handle.
84 /// \param[in] inputs The array that includes all input tensor handles.
85 /// \param[in] shape_infos Defines the new shapes of inputs, should be consistent with inputs.
86 /// \param[in] shape_info_num The num of shape_infos.
87 ///
88 /// \return OH_AI_Status.
89 OH_AI_API OH_AI_Status OH_AI_ModelResize(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
90                                          OH_AI_ShapeInfo *shape_infos, size_t shape_info_num);
91 
92 /// \brief Inference model.
93 ///
94 /// \param[in] model Model object handle.
95 /// \param[in] inputs The array that includes all input tensor handles.
96 /// \param[out] outputs The array that includes all output tensor handles.
97 /// \param[in] before CallBack before predict.
98 /// \param[in] after CallBack after predict.
99 ///
100 /// \return OH_AI_Status.
101 OH_AI_API OH_AI_Status OH_AI_ModelPredict(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
102                                           OH_AI_TensorHandleArray *outputs, const OH_AI_KernelCallBack before,
103                                           const OH_AI_KernelCallBack after);
104 
105 /// \brief Obtains all input tensor handles of the model.
106 ///
107 /// \param[in] model Model object handle.
108 ///
109 /// \return The array that includes all input tensor handles.
110 OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetInputs(const OH_AI_ModelHandle model);
111 
112 /// \brief Obtains all output tensor handles of the model.
113 ///
114 /// \param[in] model Model object handle.
115 ///
116 /// \return The array that includes all output tensor handles.
117 OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetOutputs(const OH_AI_ModelHandle model);
118 
119 /// \brief Obtains the input tensor handle of the model by name.
120 ///
121 /// \param[in] model Model object handle.
122 /// \param[in] tensor_name The name of tensor.
123 ///
124 /// \return The input tensor handle with the given name, if the name is not found, an NULL is returned.
125 OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetInputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);
126 
127 /// \brief Obtains the output tensor handle of the model by name.
128 ///
129 /// \param[in] model Model object handle.
130 /// \param[in] tensor_name The name of tensor.
131 ///
132 /// \return The output tensor handle with the given name, if the name is not found, an NULL is returned.
133 OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetOutputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);

#ifdef __cplusplus
}
#endif
#endif  // MINDSPORE_INCLUDE_C_API_MODEL_C_H