/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_C_API_MODEL_C_H
#define MINDSPORE_INCLUDE_C_API_MODEL_C_H

#include "include/c_api/tensor_c.h"
#include "include/c_api/context_c.h"
#include "include/c_api/status_c.h"

#ifdef __cplusplus
extern "C" {
#endif

typedef void *OH_AI_ModelHandle;

typedef struct OH_AI_TensorHandleArray {
  size_t handle_num;
  OH_AI_TensorHandle *handle_list;
} OH_AI_TensorHandleArray;

#define OH_AI_MAX_SHAPE_NUM 32
typedef struct OH_AI_ShapeInfo {
  size_t shape_num;
  int64_t shape[OH_AI_MAX_SHAPE_NUM];
} OH_AI_ShapeInfo;

typedef struct OH_AI_CallBackParam {
  char *node_name;
  char *node_type;
} OH_AI_CallBackParam;

typedef bool (*OH_AI_KernelCallBack)(const OH_AI_TensorHandleArray inputs, const OH_AI_TensorHandleArray outputs,
                                     const OH_AI_CallBackParam kernel_Info);

/// \brief Create a model object. Only valid for Lite.
///
/// \return Model object handle.
OH_AI_API OH_AI_ModelHandle OH_AI_ModelCreate();

/// \brief Destroy the model object. Only valid for Lite.
///
/// \param[in] model Pointer to the model object handle.
OH_AI_API void OH_AI_ModelDestroy(OH_AI_ModelHandle *model);

/// \brief Set the workspace for the model object. Only valid for IoT.
///
/// \param[in] model Model object handle.
/// \param[in] workspace Address of the workspace.
/// \param[in] workspace_size Size of the workspace, in bytes.
OH_AI_API void OH_AI_ModelSetWorkspace(OH_AI_ModelHandle model, void *workspace, size_t workspace_size);

/// \brief Build the model from a model file buffer so that it can run on a device. Only valid for Lite.
///
/// \param[in] model Model object handle.
/// \param[in] model_data Buffer read from a model file.
/// \param[in] data_size Size of the model file buffer, in bytes.
/// \param[in] model_type Type of the model file.
/// \param[in] model_context Context used to store options during execution.
///
/// \return OH_AI_Status.
OH_AI_API OH_AI_Status OH_AI_ModelBuild(OH_AI_ModelHandle model, const void *model_data, size_t data_size,
                                        OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context);

/// \brief Load and build the model from a model file path so that it can run on a device. Only valid for Lite.
///
/// \param[in] model Model object handle.
/// \param[in] model_path Path of the model file.
/// \param[in] model_type Type of the model file.
/// \param[in] model_context Context used to store options during execution.
///
/// \return OH_AI_Status.
OH_AI_API OH_AI_Status OH_AI_ModelBuildFromFile(OH_AI_ModelHandle model, const char *model_path,
                                                OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context);
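/*
 * Illustrative sketch (not part of the declared API surface): a minimal, hedged example of creating a
 * model and building it from a file. It assumes helpers declared in the companion headers
 * (OH_AI_ContextCreate, OH_AI_DeviceInfoCreate, OH_AI_ContextAddDeviceInfo, OH_AI_DEVICETYPE_CPU from
 * context_c.h; OH_AI_STATUS_SUCCESS from status_c.h), the OH_AI_MODELTYPE_MINDIR model type, and a
 * hypothetical model file path "model.ms"; error handling is abbreviated.
 *
 *   // Prepare a context with a CPU device.
 *   OH_AI_ContextHandle context = OH_AI_ContextCreate();
 *   OH_AI_DeviceInfoHandle cpu_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
 *   OH_AI_ContextAddDeviceInfo(context, cpu_info);
 *
 *   // Create the model object and build it from the (hypothetical) model file.
 *   OH_AI_ModelHandle model = OH_AI_ModelCreate();
 *   OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "model.ms", OH_AI_MODELTYPE_MINDIR, context);
 *   if (ret != OH_AI_STATUS_SUCCESS) {
 *     OH_AI_ModelDestroy(&model);  // release the model if the build fails
 *   }
 */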
/// \brief Resize the shapes of the model inputs.
///
/// \param[in] model Model object handle.
/// \param[in] inputs The array that includes all input tensor handles.
/// \param[in] shape_infos The new shapes of the inputs; must be consistent with the inputs.
/// \param[in] shape_info_num The number of shape_infos.
///
/// \return OH_AI_Status.
OH_AI_API OH_AI_Status OH_AI_ModelResize(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
                                         OH_AI_ShapeInfo *shape_infos, size_t shape_info_num);

/// \brief Run model inference.
///
/// \param[in] model Model object handle.
/// \param[in] inputs The array that includes all input tensor handles.
/// \param[out] outputs The array that includes all output tensor handles.
/// \param[in] before Callback invoked before the prediction.
/// \param[in] after Callback invoked after the prediction.
///
/// \return OH_AI_Status.
OH_AI_API OH_AI_Status OH_AI_ModelPredict(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
                                          OH_AI_TensorHandleArray *outputs, const OH_AI_KernelCallBack before,
                                          const OH_AI_KernelCallBack after);

/// \brief Obtains all input tensor handles of the model.
///
/// \param[in] model Model object handle.
///
/// \return The array that includes all input tensor handles.
OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetInputs(const OH_AI_ModelHandle model);

/// \brief Obtains all output tensor handles of the model.
///
/// \param[in] model Model object handle.
///
/// \return The array that includes all output tensor handles.
OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetOutputs(const OH_AI_ModelHandle model);

/// \brief Obtains the input tensor handle of the model by name.
///
/// \param[in] model Model object handle.
/// \param[in] tensor_name The name of the tensor.
///
/// \return The input tensor handle with the given name; if the name is not found, NULL is returned.
OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetInputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);

/// \brief Obtains the output tensor handle of the model by name.
///
/// \param[in] model Model object handle.
/// \param[in] tensor_name The name of the tensor.
///
/// \return The output tensor handle with the given name; if the name is not found, NULL is returned.
OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetOutputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);

#ifdef __cplusplus
}
#endif
#endif  // MINDSPORE_INCLUDE_C_API_MODEL_C_H
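/*
 * Illustrative sketch (not part of the declared API surface): a hedged end-to-end inference example for a
 * model already built as above. It assumes tensor helpers declared in tensor_c.h
 * (OH_AI_TensorGetMutableData, OH_AI_TensorGetDataSize, OH_AI_TensorGetData), OH_AI_STATUS_SUCCESS from
 * status_c.h, and <string.h> for memset; error handling is abbreviated.
 *
 *   // Obtain the input tensors and fill their buffers (zeros stand in for real input data here).
 *   OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
 *   for (size_t i = 0; i < inputs.handle_num; ++i) {
 *     void *data = OH_AI_TensorGetMutableData(inputs.handle_list[i]);
 *     size_t size = OH_AI_TensorGetDataSize(inputs.handle_list[i]);
 *     memset(data, 0, size);
 *   }
 *
 *   // Run inference without kernel callbacks; outputs is filled by the call.
 *   OH_AI_TensorHandleArray outputs;
 *   OH_AI_Status ret = OH_AI_ModelPredict(model, inputs, &outputs, NULL, NULL);
 *   if (ret == OH_AI_STATUS_SUCCESS) {
 *     for (size_t i = 0; i < outputs.handle_num; ++i) {
 *       const void *out = OH_AI_TensorGetData(outputs.handle_list[i]);
 *       // consume the output data ...
 *       (void)out;
 *     }
 *   }
 *
 *   // Release the model when it is no longer needed.
 *   OH_AI_ModelDestroy(&model);
 */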