# Developing an AI Application with MindSpore Lite (C/C++)

## When to Use

Developers can use the [native APIs provided by MindSpore Lite](../../reference/apis-mindspore-lite-kit/_mind_spore.md) to deploy AI algorithms and provide high-level APIs for the UI layer to call for AI model inference. A typical scenario is AI suite SDK development.

## Basic Concepts

- N-API: a set of APIs for building native JS components. You can use N-API to wrap libraries written in C/C++ into JS modules.

## Preparing the Environment

- Install DevEco Studio 3.1.0.500 or later, and update the SDK to API version 10 or later.

## How to Develop

### 1. Create a Native Project

Open DevEco Studio and choose **File -> New -> Create Project** to create a project from the **Native C++** template. The generated project contains a **cpp/** directory under **entry/src/main/**, where you can place C/C++ code and expose JS APIs for the UI to call.

### 2. Write the C++ Inference Code

The following assumes that you have prepared a model in the .ms format and placed it in the module's **resources/rawfile** directory, from which the code below reads it.

Before developing with the MindSpore Lite native APIs, include the required header files.

```c++
#include <cstdlib>  // malloc, free, rand
#include <iostream>
#include <string>
#include <hilog/log.h>  // OH_LOG_Print, used by the LOG* macros below
#include <mindspore/model.h>
#include <mindspore/context.h>
#include <mindspore/status.h>
#include <mindspore/tensor.h>
#include <rawfile/raw_file_manager.h>
#include "napi/native_api.h"  // N-API types used by RunDemo()
```

(1) Read the model file.

```c++
#define LOGI(...) ((void)OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "[MSLiteNapi]", __VA_ARGS__))
#define LOGD(...) ((void)OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_DOMAIN, "[MSLiteNapi]", __VA_ARGS__))
#define LOGW(...) ((void)OH_LOG_Print(LOG_APP, LOG_WARN, LOG_DOMAIN, "[MSLiteNapi]", __VA_ARGS__))
#define LOGE(...) ((void)OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "[MSLiteNapi]", __VA_ARGS__))

void *ReadModelFile(NativeResourceManager *nativeResourceManager, const std::string &modelName, size_t *modelSize) {
    auto rawFile = OH_ResourceManager_OpenRawFile(nativeResourceManager, modelName.c_str());
    if (rawFile == nullptr) {
        LOGE("Open model file failed");
        return nullptr;
    }
    long fileSize = OH_ResourceManager_GetRawFileSize(rawFile);
    void *modelBuffer = malloc(fileSize);
    if (modelBuffer == nullptr) {
        LOGE("Allocate model buffer failed");
        OH_ResourceManager_CloseRawFile(rawFile);
        return nullptr;
    }
    int ret = OH_ResourceManager_ReadRawFile(rawFile, modelBuffer, fileSize);
    if (ret == 0) {
        LOGE("Read model file failed");
        free(modelBuffer);
        OH_ResourceManager_CloseRawFile(rawFile);
        return nullptr;
    }
    OH_ResourceManager_CloseRawFile(rawFile);
    *modelSize = fileSize;
    return modelBuffer;
}
```

(2) Create a context, configure parameters such as the device type, and load the model. (Thread-related tuning is shown in the sketch after the listing.)

```c++
void DestroyModelBuffer(void **buffer) {
    if (buffer == nullptr) {
        return;
    }
    free(*buffer);
    *buffer = nullptr;
}

OH_AI_ModelHandle CreateMSLiteModel(void *modelBuffer, size_t modelSize) {
    // Create the context.
    auto context = OH_AI_ContextCreate();
    if (context == nullptr) {
        DestroyModelBuffer(&modelBuffer);
        LOGE("Create MSLite context failed.\n");
        return nullptr;
    }
    auto cpu_device_info = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
    OH_AI_ContextAddDeviceInfo(context, cpu_device_info);

    // Load the .ms model file.
    auto model = OH_AI_ModelCreate();
    if (model == nullptr) {
        DestroyModelBuffer(&modelBuffer);
        OH_AI_ContextDestroy(&context); // release the context we still own
        LOGE("Allocate MSLite Model failed.\n");
        return nullptr;
    }

    auto build_ret = OH_AI_ModelBuild(model, modelBuffer, modelSize, OH_AI_MODELTYPE_MINDIR, context);
    DestroyModelBuffer(&modelBuffer);
    if (build_ret != OH_AI_STATUS_SUCCESS) {
        OH_AI_ModelDestroy(&model);
        LOGE("Build MSLite model failed.\n");
        return nullptr;
    }
    LOGI("Build MSLite model success.\n");
    return model;
}
```
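
Besides the device type, the context exposes further tuning knobs, such as the number of inference threads mentioned above. A minimal sketch of such tuning, assuming the setters declared in `mindspore/context.h`; the chosen values are illustrative, not recommendations:

```c++
// Sketch: optional context tuning, called before OH_AI_ContextAddDeviceInfo().
// These setters come from the MindSpore Lite C API (mindspore/context.h).
void TuneContext(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle cpu_device_info) {
    OH_AI_ContextSetThreadNum(context, 2);                 // run inference on 2 threads
    OH_AI_ContextSetThreadAffinityMode(context, 1);        // 1: prefer big cores
    OH_AI_DeviceInfoSetEnableFP16(cpu_device_info, false); // set true to allow float16 kernels
}
```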

(3) Set the model input data, run inference, and obtain the output data.

```c++
#define GET_PARAMS(env, info, num)    \
    size_t argc = num;                \
    napi_value argv[num] = {nullptr}; \
    napi_value thisVar = nullptr;     \
    void *data = nullptr;             \
    napi_get_cb_info(env, info, &argc, argv, &thisVar, &data)

constexpr int kNumPrintOfOutData = 10;
constexpr int RANDOM_RANGE = 128;

void FillTensorWithRandom(OH_AI_TensorHandle msTensor) {
    auto size = OH_AI_TensorGetDataSize(msTensor);
    char *data = (char *)OH_AI_TensorGetMutableData(msTensor);
    for (size_t i = 0; i < size; i++) {
        data[i] = (char)(rand() % RANDOM_RANGE);
    }
}

// Fill random data into every input tensor.
int FillInputTensors(OH_AI_TensorHandleArray &inputs) {
    for (size_t i = 0; i < inputs.handle_num; i++) {
        FillTensorWithRandom(inputs.handle_list[i]);
    }
    return OH_AI_STATUS_SUCCESS;
}

void RunMSLiteModel(OH_AI_ModelHandle model) {
    // Set the model input data.
    auto inputs = OH_AI_ModelGetInputs(model);
    FillInputTensors(inputs);

    auto outputs = OH_AI_ModelGetOutputs(model);

    // Run inference and print the outputs.
    auto predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    if (predict_ret != OH_AI_STATUS_SUCCESS) {
        OH_AI_ModelDestroy(&model);
        LOGE("Predict MSLite model error.\n");
        return;
    }
    LOGI("Run MSLite model success.\n");

    LOGI("Get model outputs:\n");
    for (size_t i = 0; i < outputs.handle_num; i++) {
        auto tensor = outputs.handle_list[i];
        LOGI("- Tensor %{public}d name is: %{public}s.\n", static_cast<int>(i), OH_AI_TensorGetName(tensor));
        LOGI("- Tensor %{public}d size is: %{public}d.\n", static_cast<int>(i), (int)OH_AI_TensorGetDataSize(tensor));
        auto out_data = reinterpret_cast<const float *>(OH_AI_TensorGetData(tensor));
        std::cout << "Output data is:";
        for (int64_t j = 0; (j < OH_AI_TensorGetElementNum(tensor)) && (j < kNumPrintOfOutData); j++) {
            std::cout << out_data[j] << " ";
        }
        std::cout << std::endl;
    }
    OH_AI_ModelDestroy(&model);
}
```
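
The demo fills the inputs with random bytes only to exercise the pipeline end to end. A real application would copy preprocessed data into each input tensor instead; a minimal sketch, where the `preprocessed` buffer and its byte length are assumptions of this example:

```c++
#include <cstring> // std::memcpy

// Sketch: copy real, preprocessed input data into a tensor instead of random bytes.
// `preprocessed` and `byteLen` are hypothetical outputs of your own preprocessing;
// byteLen must match the tensor's data size exactly.
int FillInputTensorWithData(OH_AI_TensorHandle msTensor, const float *preprocessed, size_t byteLen) {
    if (byteLen != OH_AI_TensorGetDataSize(msTensor)) {
        LOGE("Input size mismatch: expected %{public}d bytes.\n", (int)OH_AI_TensorGetDataSize(msTensor));
        return OH_AI_STATUS_LITE_ERROR;
    }
    std::memcpy(OH_AI_TensorGetMutableData(msTensor), preprocessed, byteLen);
    return OH_AI_STATUS_SUCCESS;
}
```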

(4) Call the three functions above to implement the complete model inference flow.

```c++
static napi_value RunDemo(napi_env env, napi_callback_info info)
{
    LOGI("Enter runDemo()");
    GET_PARAMS(env, info, 2);
    napi_value error_ret;
    napi_create_int32(env, -1, &error_ret);

    const std::string modelName = "ml_headpose.ms";
    size_t modelSize;
    auto resourcesManager = OH_ResourceManager_InitNativeResourceManager(env, argv[1]);
    auto modelBuffer = ReadModelFile(resourcesManager, modelName, &modelSize);
    OH_ResourceManager_ReleaseNativeResourceManager(resourcesManager); // no longer needed once the model is in memory
    if (modelBuffer == nullptr) {
        LOGE("Read model failed");
        return error_ret;
    }
    LOGI("Read model file success");

    auto model = CreateMSLiteModel(modelBuffer, modelSize);
    if (model == nullptr) {
        LOGE("MSLiteFwk Build model failed.\n");
        return error_ret;
    }

    RunMSLiteModel(model);

    napi_value success_ret;
    napi_create_int32(env, 0, &success_ret);

    LOGI("Exit runDemo()");
    return success_ret;
}
```
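
The library also has to register `RunDemo()` with N-API so that JS can resolve it. The Native C++ template generates this registration code for you; a minimal sketch following the standard pattern (the `nm_modname` must match the library name, here `mslite_napi`):

```c++
// Sketch: standard N-API module registration, exposing RunDemo() to JS as runDemo().
EXTERN_C_START
static napi_value Init(napi_env env, napi_value exports)
{
    napi_property_descriptor desc[] = {
        {"runDemo", nullptr, RunDemo, nullptr, nullptr, nullptr, napi_default, nullptr},
    };
    napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]), desc);
    return exports;
}
EXTERN_C_END

static napi_module msLiteModule = {
    .nm_version = 1,
    .nm_flags = 0,
    .nm_filename = nullptr,
    .nm_register_func = Init,
    .nm_modname = "mslite_napi", // must match the library name in CMakeLists.txt
    .nm_priv = nullptr,
    .reserved = {0},
};

extern "C" __attribute__((constructor)) void RegisterMSLiteModule(void)
{
    napi_module_register(&msLiteModule);
}
```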

(5) Write the CMake script to link the MindSpore Lite dynamic library `libmindspore_lite_ndk.so`.

```cmake
cmake_minimum_required(VERSION 3.4.1)
project(OHOSMSLiteNapi)

set(NATIVERENDER_ROOT_PATH ${CMAKE_CURRENT_SOURCE_DIR})

include_directories(${NATIVERENDER_ROOT_PATH}
                    ${NATIVERENDER_ROOT_PATH}/include)

add_library(mslite_napi SHARED mslite_napi.cpp)
target_link_libraries(mslite_napi PUBLIC mindspore_lite_ndk) # Link the MindSpore Lite dynamic library.
target_link_libraries(mslite_napi PUBLIC hilog_ndk.z)
target_link_libraries(mslite_napi PUBLIC rawfile.z)
target_link_libraries(mslite_napi PUBLIC ace_napi.z)
```

### 3. Use N-API to Wrap the C++ Dynamic Library into a JS Module

Create a **libmslite_api/** subdirectory under **entry/src/main/cpp/types/**, and create an **index.d.ts** file in it with the following content:

```ts
export const runDemo: (a: string, b: Object) => number;
```

The code above declares the JS API `runDemo()`.

In addition, add an **oh-package.json5** file to associate the API declarations with the .so file, forming a complete JS module:

```json
{
  "name": "libmslite_napi.so",
  "types": "./index.d.ts"
}
```

### 4. Call the Wrapped MindSpore Module in the UI Code

In **entry/src/ets/MainAbility/pages/index.ets**, define an `onClick()` event and call the wrapped `runDemo()` API in the event callback.

```ts
import hilog from '@ohos.hilog'
import msliteNapi from 'libmslite_napi.so' // Import the msliteNapi module.
import resManager from '@ohos.resourceManager'

const TAG = 'MSLiteNativeDemo'

@Entry
@Component
struct Index {
  @State message: string = 'MindSpore Lite Demo'
  build() {
    Row() {
      Column() {
        Text(this.message)
          .fontSize(30)
          .fontWeight(FontWeight.Bold)
          .onClick(async () => {
            hilog.info(0x0000, TAG, '*** Start MSLite Demo ***');
            let ret: number = 0;
            ret = msliteNapi.runDemo("", getContext(this).resourceManager); // Call runDemo() to run AI model inference.
            if (ret === -1) {
              hilog.error(0x0000, TAG, 'Error when running MSLite Demo!');
            }
            hilog.info(0x0000, TAG, '*** Finished MSLite Demo ***');
          })
      }
      .width('100%')
    }
    .height('100%')
  }
}
```

## Debugging and Verification

Connect a device in DevEco Studio and click **Run entry** to run the app. The application process outputs logs like the following:

```text
08-08 16:55:33.766 1513-1529/com.mslite.native_demo I A00000/MSLiteNativeDemo: *** Start MSLite Demo ***
08-08 16:55:33.766 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Enter runDemo()
08-08 16:55:33.772 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Read model file success
08-08 16:55:33.799 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Build MSLite model success.
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Run MSLite model success.
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Get model outputs:
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: - Tensor 0 name is: output_node_0.
08-08 16:55:33.818 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: - Tensor 0 size is: 12.
08-08 16:55:33.826 1513-1529/com.mslite.native_demo I A00000/[MSLiteNapi]: Exit runDemo()
08-08 16:55:33.827 1513-1529/com.mslite.native_demo I A00000/MSLiteNativeDemo: *** Finished MSLite Demo ***
```