# Using MindSpore Lite Native APIs to Develop AI Applications

## Scenarios

You can use the native APIs provided by [MindSpore Lite](../reference/native-apis/_mind_spore.md) to deploy AI algorithms and provide APIs for the UI layer to invoke the algorithms for model inference. A typical scenario is AI SDK development.

## Basic Concepts

- [N-API](../reference/native-lib/third_party_napi/napi.md): a set of native APIs used to build JavaScript components. N-APIs can be used to encapsulate libraries developed using C/C++ into JavaScript modules.

## Preparing the Environment

- Install DevEco Studio 3.1.0.500 or later, and update the SDK to API version 10 or later.

## How to Develop

### 1. Create a native C++ project.

Open DevEco Studio, choose **File** > **New** > **Create Project** to create a native C++ template project. By default, the **entry/src/main/** directory of the created project contains the **cpp/** directory. You can store C/C++ code in this directory and provide JavaScript APIs for the UI layer to call the code.

### 2. Write the C++ inference code.

Assume that you have prepared a model in the **.ms** format.

Before using the native APIs provided by MindSpore Lite for development, you need to reference the corresponding header files.

```c
#include <iostream>
#include <mindspore/model.h>
#include <mindspore/context.h>
#include <mindspore/status.h>
#include <mindspore/tensor.h>
#include <rawfile/raw_file_manager.h>
```
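
The code below also uses `LOGI`/`LOGE` logging helpers and N-API types that are not declared by the headers above. A minimal sketch of what those definitions could look like, assuming the HiLog native API is used for logging (the domain value and tag are placeholders):

```c++
// Assumption: LOGI/LOGE are thin wrappers around the HiLog native API.
// The domain (0x0000) and tag ("[MSLiteNapi]") are illustrative values only.
#include <hilog/log.h>
#include "napi/native_api.h"   // needed by the napi_* glue code further down

#define MSLITE_LOG_TAG "[MSLiteNapi]"
#define LOGI(...) OH_LOG_Print(LOG_APP, LOG_INFO, 0x0000, MSLITE_LOG_TAG, __VA_ARGS__)
#define LOGE(...) OH_LOG_Print(LOG_APP, LOG_ERROR, 0x0000, MSLITE_LOG_TAG, __VA_ARGS__)
```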

(1). Read model files.

```c++
void *ReadModelFile(NativeResourceManager *nativeResourceManager, const std::string &modelName, size_t *modelSize) {
    auto rawFile = OH_ResourceManager_OpenRawFile(nativeResourceManager, modelName.c_str());
    if (rawFile == nullptr) {
        LOGE("Open model file failed");
        return nullptr;
    }
    long fileSize = OH_ResourceManager_GetRawFileSize(rawFile);
    void *modelBuffer = malloc(fileSize);
    if (modelBuffer == nullptr) {
        LOGE("Allocate model buffer failed");
        OH_ResourceManager_CloseRawFile(rawFile);
        return nullptr;
    }
    int ret = OH_ResourceManager_ReadRawFile(rawFile, modelBuffer, fileSize);
    if (ret == 0) {
        LOGE("Read model file failed");
        free(modelBuffer);
        OH_ResourceManager_CloseRawFile(rawFile);
        return nullptr;
    }
    OH_ResourceManager_CloseRawFile(rawFile);
    *modelSize = fileSize;
    return modelBuffer;
}
```

(2). Create a context, set parameters such as the number of threads and device type, and load the model.

```c++
void DestroyModelBuffer(void **buffer) {
    if (buffer == nullptr) {
        return;
    }
    free(*buffer);
    *buffer = nullptr;
}

OH_AI_ModelHandle CreateMSLiteModel(void *modelBuffer, size_t modelSize) {
    // Create a context.
    auto context = OH_AI_ContextCreate();
    if (context == nullptr) {
        DestroyModelBuffer(&modelBuffer);
        LOGE("Create MSLite context failed.\n");
        return nullptr;
    }
    auto cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);

    // Load the .ms model file.
    auto model = OH_AI_ModelCreate();
    if (model == nullptr) {
        DestroyModelBuffer(&modelBuffer);
        LOGE("Allocate MSLite Model failed.\n");
        return nullptr;
    }

    auto build_ret = OH_AI_ModelBuild(model, modelBuffer, modelSize, OH_AI_MODELTYPE_MINDIR, context);
    DestroyModelBuffer(&modelBuffer);
    if (build_ret != OH_AI_STATUS_SUCCESS) {
        OH_AI_ModelDestroy(&model);
        LOGE("Build MSLite model failed.\n");
        return nullptr;
    }
    LOGI("Build MSLite model success.\n");
    return model;
}
```
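
The step description mentions setting the number of threads, but the sample above only adds a CPU device to the context. The sketch below shows how the context could additionally be configured before the device info is added; the thread count and affinity mode are arbitrary example values, not settings required by the sample.

```c++
// A sketch only: create and configure a context before adding device info.
OH_AI_ContextHandle CreateConfiguredContext() {
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    if (context == nullptr) {
        return nullptr;
    }
    OH_AI_ContextSetThreadNum(context, 2);           // example: use 2 inference threads
    OH_AI_ContextSetThreadAffinityMode(context, 1);  // example: bind threads to big cores
    OH_AI_ContextAddDeviceInfo(context, OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU));
    return context;
}
```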

(3). Set the model input data, perform model inference, and obtain the output data.

```c++
#define GET_PARAMS(env, info, num)    \
    size_t argc = num;                \
    napi_value argv[num] = {nullptr}; \
    napi_value thisVar = nullptr;     \
    void *data = nullptr;             \
    napi_get_cb_info(env, info, &argc, argv, &thisVar, &data)

constexpr int kNumPrintOfOutData = 10;
constexpr int RANDOM_RANGE = 128;

void FillTensorWithRandom(OH_AI_TensorHandle msTensor) {
    auto size = OH_AI_TensorGetDataSize(msTensor);
    char *data = (char *)OH_AI_TensorGetMutableData(msTensor);
    for (size_t i = 0; i < size; i++) {
        data[i] = (char)(rand() % RANDOM_RANGE);
    }
}

// Fill random data into the input tensors.
int FillInputTensors(OH_AI_TensorHandleArray &inputs) {
    for (size_t i = 0; i < inputs.handle_num; i++) {
        FillTensorWithRandom(inputs.handle_list[i]);
    }
    return OH_AI_STATUS_SUCCESS;
}

void RunMSLiteModel(OH_AI_ModelHandle model) {
    // Set the model input data.
    auto inputs = OH_AI_ModelGetInputs(model);
    FillInputTensors(inputs);

    auto outputs = OH_AI_ModelGetOutputs(model);

    // Perform inference and print the output.
    auto predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    if (predict_ret != OH_AI_STATUS_SUCCESS) {
        OH_AI_ModelDestroy(&model);
        LOGE("Predict MSLite model error.\n");
        return;
    }
    LOGI("Run MSLite model success.\n");

    LOGI("Get model outputs:\n");
    for (size_t i = 0; i < outputs.handle_num; i++) {
        auto tensor = outputs.handle_list[i];
        LOGI("- Tensor %{public}d name is: %{public}s.\n", static_cast<int>(i), OH_AI_TensorGetName(tensor));
        LOGI("- Tensor %{public}d size is: %{public}d.\n", static_cast<int>(i), (int)OH_AI_TensorGetDataSize(tensor));
        auto out_data = reinterpret_cast<const float *>(OH_AI_TensorGetData(tensor));
        std::cout << "Output data is:";
        for (int j = 0; (j < OH_AI_TensorGetElementNum(tensor)) && (j < kNumPrintOfOutData); j++) {
            std::cout << out_data[j] << " ";
        }
        std::cout << std::endl;
    }
    OH_AI_ModelDestroy(&model);
}
```
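
`FillInputTensors()` above fills the inputs with random values only to demonstrate the call flow. In a real application you would copy preprocessed data into each input tensor instead. The hypothetical helper below (not part of the original sample) sketches that, checking the expected byte size first:

```c++
#include <cstring>  // memcpy

// Hypothetical helper: copy preprocessed input data into a model input tensor.
bool FillTensorFromBuffer(OH_AI_TensorHandle tensor, const void *src, size_t srcBytes) {
    size_t expected = OH_AI_TensorGetDataSize(tensor);  // expected size in bytes
    if (srcBytes != expected) {
        LOGE("Input size mismatch: expected %{public}d bytes, got %{public}d.\n", (int)expected, (int)srcBytes);
        return false;
    }
    memcpy(OH_AI_TensorGetMutableData(tensor), src, expected);
    return true;
}
```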

(4). Implement a complete model inference process.

```c++
static napi_value RunDemo(napi_env env, napi_callback_info info)
{
    LOGI("Enter runDemo()");
    GET_PARAMS(env, info, 2);
    napi_value error_ret;
    napi_create_int32(env, -1, &error_ret);

    const std::string modelName = "ml_headpose.ms";
    size_t modelSize;
    auto resourcesManager = OH_ResourceManager_InitNativeResourceManager(env, argv[1]);
    auto modelBuffer = ReadModelFile(resourcesManager, modelName, &modelSize);
    if (modelBuffer == nullptr) {
        LOGE("Read model failed");
        return error_ret;
    }
    LOGI("Read model file success");

    auto model = CreateMSLiteModel(modelBuffer, modelSize);
    if (model == nullptr) {
        LOGE("MSLiteFwk Build model failed.\n");
        return error_ret;
    }

    RunMSLiteModel(model);

    napi_value success_ret;
    napi_create_int32(env, 0, &success_ret);

    LOGI("Exit runDemo()");
    return success_ret;
}
```

(5). Write the **CMake** script to link the MindSpore Lite dynamic library `libmindspore_lite_ndk.so`.

```cmake
cmake_minimum_required(VERSION 3.4.1)
project(OHOSMSLiteNapi)

set(NATIVERENDER_ROOT_PATH ${CMAKE_CURRENT_SOURCE_DIR})

include_directories(${NATIVERENDER_ROOT_PATH}
                    ${NATIVERENDER_ROOT_PATH}/include)

add_library(mslite_napi SHARED mslite_napi.cpp)
target_link_libraries(mslite_napi PUBLIC mindspore_lite_ndk) # MindSpore Lite dynamic library to link
target_link_libraries(mslite_napi PUBLIC hilog_ndk.z)
target_link_libraries(mslite_napi PUBLIC rawfile.z)
target_link_libraries(mslite_napi PUBLIC ace_napi.z)
```

### 3. Use N-APIs to encapsulate C++ dynamic libraries into JavaScript modules.

Create the **libmslite_api/** subdirectory in **entry/src/main/cpp/types/**, and create the **index.d.ts** file in the subdirectory. The file content is as follows:

```ts
export const runDemo: (a:String, b:Object) => number;
```

Use the preceding code to define the JavaScript API `runDemo()`.
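
The declaration above only describes the API to ArkTS/TypeScript tooling; the native library must still expose the `RunDemo` function under the name `runDemo` when it is loaded. The sketch below shows such registration code in the C++ source file, following the pattern generated by the DevEco Studio native C++ template; the module name is assumed to match the library target `mslite_napi`.

```c++
// Sketch of N-API module registration (e.g. at the end of mslite_napi.cpp).
EXTERN_C_START
static napi_value Init(napi_env env, napi_value exports)
{
    // Expose the native RunDemo() function as the JavaScript API runDemo().
    napi_property_descriptor desc[] = {
        {"runDemo", nullptr, RunDemo, nullptr, nullptr, nullptr, napi_default, nullptr}
    };
    napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]), desc);
    return exports;
}
EXTERN_C_END

static napi_module msLiteNapiModule = {
    .nm_version = 1,
    .nm_flags = 0,
    .nm_filename = nullptr,
    .nm_register_func = Init,
    .nm_modname = "mslite_napi",   // assumed to match the add_library() target name
    .nm_priv = nullptr,
    .reserved = {0},
};

extern "C" __attribute__((constructor)) void RegisterMSLiteNapiModule(void)
{
    napi_module_register(&msLiteNapiModule);
}
```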

In addition, add the **oh-package.json5** file to associate the API with the **.so** file to form a complete JavaScript module.

```json
{
  "name": "libmslite_napi.so",
  "types": "./index.d.ts"
}
```

### 4. Invoke the encapsulated MindSpore module in the UI code.

In **entry/src/ets/MainAbility/pages/index.ets**, define the **onClick()** event and call the encapsulated **runDemo()** API in the event callback.

```ts
import hilog from '@ohos.hilog'
import msliteNapi from 'libmslite_napi.so' // Import the msliteNapi module.
import resManager from '@ohos.resourceManager'

const TAG = 'MSLiteNativeDemo'

@Entry
@Component
struct Index {
    @State message: string = 'MindSpore Lite Demo'
    build() {
        Row() {
            Column() {
                Text(this.message)
                    .fontSize(30)
                    .fontWeight(FontWeight.Bold)
                    .onClick(() => {
                        resManager.getResourceManager().then(mgr => {
                            hilog.info(0x0000, TAG, '*** Start MSLite Demo ***');
                            let ret: number = 0;
                            ret = msliteNapi.runDemo("", mgr); // Call runDemo() to perform AI model inference.
                            if (ret == -1) {
                                hilog.info(0x0000, TAG, 'Error when running MSLite Demo!');
                            }
                            hilog.info(0x0000, TAG, '*** Finished MSLite Demo ***');
                        })
                    })
            }
            .width('100%')
        }
        .height('100%')
    }
}
```

## Debugging and Verification

In DevEco Studio, connect to the device and click **Run entry**. The following log is generated for the application process:

```text
08-08 16:55:33.766 1513-1529/com.mslite.native_demo I A00000/MSLiteNativeDemo: *** Start MSLite Demo ***
08-08 16:55:33.766 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Enter runDemo()
08-08 16:55:33.772 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Read model file success
08-08 16:55:33.799 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Build MSLite model success.
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Run MSLite model success.
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Get model outputs:
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: - Tensor 0 name is: output_node_0.
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: - Tensor 0 size is: 12.
08-08 16:55:33.826 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Exit runDemo()
08-08 16:55:33.827 1513-1529/com.mslite.native_demo I A00000/MSLiteNativeDemo: *** Finished MSLite Demo ***
```