• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1From 53b13af45b23aaac2ad84f42a7c6624509be1333 Mon Sep 17 00:00:00 2001
2From: fangzhou0329 <fangzhou12@huawei.com>
3Date: Mon, 15 May 2023 17:42:41 +0800
4Subject: [PATCH] auto-apply 0008-add-js-api.patch
5
6---
7 build.sh                                      |    0
8 include/js_api/@ohos.ai.mindspore.d.ts        |  222 ++++
9 include/js_api/common_napi.h                  |  104 ++
10 include/js_api/ms_errors.h                    |   39 +
11 include/js_api/ms_info.h                      |   69 ++
12 include/js_api/ms_parameters_napi.h           |   24 +
13 include/js_api/mslite_model_callback_napi.h   |   38 +
14 include/js_api/mslite_model_napi.h            |   85 ++
15 include/js_api/mstensor_napi.h                |   52 +
16 include/js_api/native_module_ohos_ms.h        |   22 +
17 mindspore/lite/BUILD.gn                       |    1 +
18 mindspore/lite/src/runtime/js_api/BUILD.gn    |   55 +
19 .../lite/src/runtime/js_api/common_napi.cc    |  246 ++++
20 .../src/runtime/js_api/mslite_model_napi.cc   | 1053 +++++++++++++++++
21 .../lite/src/runtime/js_api/mstensor_napi.cc  |  426 +++++++
22 .../runtime/js_api/native_module_ohos_ms.cc   |   48 +
23 16 files changed, 2476 insertions(+)
24 mode change 100644 => 100755 build.sh
25 create mode 100644 include/js_api/@ohos.ai.mindspore.d.ts
26 create mode 100644 include/js_api/common_napi.h
27 create mode 100644 include/js_api/ms_errors.h
28 create mode 100644 include/js_api/ms_info.h
29 create mode 100644 include/js_api/ms_parameters_napi.h
30 create mode 100644 include/js_api/mslite_model_callback_napi.h
31 create mode 100644 include/js_api/mslite_model_napi.h
32 create mode 100644 include/js_api/mstensor_napi.h
33 create mode 100644 include/js_api/native_module_ohos_ms.h
34 create mode 100644 mindspore/lite/src/runtime/js_api/BUILD.gn
35 create mode 100644 mindspore/lite/src/runtime/js_api/common_napi.cc
36 create mode 100644 mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
37 create mode 100644 mindspore/lite/src/runtime/js_api/mstensor_napi.cc
38 create mode 100644 mindspore/lite/src/runtime/js_api/native_module_ohos_ms.cc
39
40diff --git a/build.sh b/build.sh
41old mode 100644
42new mode 100755
43diff --git a/include/js_api/@ohos.ai.mindspore.d.ts b/include/js_api/@ohos.ai.mindspore.d.ts
44new file mode 100644
45index 00000000..ccb2c600
46--- /dev/null
47+++ b/include/js_api/@ohos.ai.mindspore.d.ts
48@@ -0,0 +1,222 @@
49+/*
50+* Copyright (C) 2023 Huawei Device Co., Ltd.
51+* Licensed under the Apache License, Version 2.0 (the "License");
52+* you may not use this file except in compliance with the License.
53+* You may obtain a copy of the License at
54+*
55+* http://www.apache.org/licenses/LICENSE-2.0
56+*
57+* Unless required by applicable law or agreed to in writing, software
58+* distributed under the License is distributed on an "AS IS" BASIS,
59+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
60+* See the License for the specific language governing permissions and
61+* limitations under the License.
62+*/
63+
64+import { ErrorCallback, AsyncCallback, Callback } from './basic';
65+
66+/**
67+ * @name mslite
68+ * @since 9
69+ * @import import mslite from '@ohos.mslite'
70+ */
71+declare namespace mslite {
72+  /**
73+   * Creates an MSLiteModel instance.
74+   * @since 9
75+   * @syscap SystemCapability.MsLite.MsLiteModel
76+   * @import import mslite from '@ohos.mslite'
77+   * @param model The path to the model (string)
78+   * @param options Options related to model inference.
79+   * @throws { BusinessError } 401 - invalid path. Return by callback.
80+   * @return A Promise instance used to return MSLiteModel instance if the operation is successful; returns null otherwise.
81+   */
82+  function loadModelFromFile(
83+    model: string,
84+    options?: Context): Promise<MSLiteModel>;
85+
86+  /**
87+   * Creates an MSLiteModel instance.
88+   * @since 9
89+   * @syscap SystemCapability.MsLite.MsLiteModel
90+   * @import import mslite from '@ohos.mslite'
91+   * @param model The model content in memory(ArrayBuffer).
92+   * @param options Options related to model inference.
93+   * @throws { BusinessError } 401 - No memory. Return by callback.
94+   * @return A Promise instance used to return MSLiteModel instance if the operation is successful; returns null otherwise.
95+   */
96+  function loadModelFromBuffer(
97+    model: ArrayBuffer,
98+    options?: Context): Promise<MSLiteModel>;
99+
100+  /**
101+   * Creates an MSLiteModel instance.
102+   * @since 9
103+   * @syscap SystemCapability.MsLite.MsLiteModel
104+   * @import import mslite from '@ohos.mslite'
105+   * @param model The memory fd to the model (number).
106+   * @param options Options related to model inference.
107+   * @throws { BusinessError } 401 - invalid fd. Return by callback.
108+   * @return A Promise instance used to return MSLiteModel instance if the operation is successful; returns null otherwise.
109+   */
110+  function loadModelFromFd(
111+    model: number,
112+    options?: Context): Promise<MSLiteModel>;
113+
114+    /**
115+   * Manages model. Before calling an MSLiteModel method, you must use one of the loadModelFrom* functions
116+   * to create an MSLiteModel instance.
117+   * @since 9
118+   * @syscap SystemCapability.MsLite.MsLiteModel
119+   */
120+  interface MSLiteModel {
121+    /**
122+     * Get model input tensors.
123+     * @since 9
124+     * @syscap SystemCapability.MsLite.MsLiteModel
125+     * @return MSTensor Array
126+     */
127+    getInputs(): MSTensor[];
128+
129+    /**
130+     * Infer model.
131+     * @since 9
132+     * @syscap SystemCapability.MsLite.MsLiteModel
133+     * @inputs inputs tensor
134+     * @return A Promise instance used to return MSTensor array if the operation is successful; returns null otherwise.
135+     */
136+     predict(inputs: MSTensor[]): Promise<MSTensor[]>;
137+
138+     /**
139+     * resize model input.
140+     * @since 9
141+     * @syscap SystemCapability.MsLite.MsLiteModel
142+     * @inputs inputs tensor
143+     * @dims   resize shape; the order is the same as the inputs
144+     * @return  true if the operation is successful; returns false otherwise.
145+     */
146+     resize(inputs: MSTensor[], dims: Array<Array<number>>): boolean;
147+  }
148+
149+  /**
150+   * Provides the device configurations.
151+   * @since 9
152+   * @syscap SystemCapability.MsLite.MsLiteModel
153+   */
154+  interface Context {
155+    target?: string[];
156+    cpu?:CpuDevice;
157+    nnrt?:NnrtDevice;
158+  }
159+
160+  /**
161+   * Provides the CPU device info.
162+   * @since 9
163+   * @syscap SystemCapability.MsLite.MsLiteModel
164+   */
165+  interface CpuDevice {
166+    thread_num?: number
167+    thread_affinity_mode?: ThreadAffinityMode;
168+    thread_affinity_core_list?: number[];
169+    precision_mode?: string;
170+  }
171+
172+  /**
173+   * Provides the NNRT device info.
174+   * @since 9
175+   * @syscap SystemCapability.MsLite.MsLiteModel
176+   */
177+  interface NnrtDevice {
178+  }
179+
180+  /**
181+   * Provides CPU thread affinity mode.
182+   * @since 9
183+   * @syscap SystemCapability.MsLite.Context
184+   */
185+  enum ThreadAffinityMode {
186+    /**
187+     * NO_BIND.
188+     * @since 9
189+     * @syscap SystemCapability.MsLite.Context
190+     */
191+    NO_AFFINITIES = 0,
192+
193+    /**
194+     * BIG_CORES_FIRST.
195+     * @since 9
196+     * @syscap SystemCapability.MsLite.Context
197+     */
198+    BIG_CORES_FIRST = 1,
199+
200+    /**
201+     * LITTLE_CORES_FIRST.
202+     * @since 9
203+     * @syscap SystemCapability.MsLite.Context
204+     */
205+    LITTLE_CORES_FIRST = 2,
206+  }
207+
208+  /**
209+   * Provides MSTensor definition.
210+   * @since 9
211+   * @syscap SystemCapability.MsLite.MsTensor
212+   */
213+  interface MSTensor {
214+    /** The name of the tensor. */
215+    'name': string;
216+    /** The shape of the tensor. */
217+    'shape': number[];
218+    /** Number of elements in the tensor. */
219+    'element_num': number;
220+    /** Number of elements in the tensor. */
221+    'data_size': number;
222+    /** The data type for the array. */
223+    'dtype': number;
224+    /** The format type of the tensor. */
225+    'format': number;
226+
227+    /**
228+     * Get MSTensor data.
229+     * @since 9
230+     * @syscap SystemCapability.MsLite.MsTensor
231+     * @return ArrayBuffer.
232+     */
233+    data(): ArrayBuffer;
234+
235+    /**
236+     * Set MSTensor data.
237+     * @since 9
238+     * @syscap SystemCapability.MsLite.MsTensor
239+     * @param inputArray
240+     */
241+    setData(inputArray: ArrayBuffer): void;
242+  }
243+
244+  enum DataType {
245+    kTypeUnknown = 0,
246+    kNumberTypeInt8 = 32,
247+    kNumberTypeInt16 = 33,
248+    kNumberTypeInt32 = 34,
249+    kNumberTypeInt64 = 35,
250+    kNumberTypeUInt8 = 37,
251+    kNumberTypeUInt16 = 38,
252+    kNumberTypeUInt32 = 39,
253+    kNumberTypeUInt64 = 40,
254+    kNumberTypeFloat16 = 42,
255+    kNumberTypeFloat32 = 43,
256+    kNumberTypeFloat64 = 44,
257+    kNumberTypeEnd = 46,
258+  }
259+
260+  enum Format {
261+    DEFAULT_FORMAT = -1,
262+    NCHW = 0,
263+    NHWC = 1,
264+    NHWC4 = 2,
265+    HWKC = 3,
266+    HWCK = 4,
267+    KCHW = 5,
268+  }
269+}
270+export default mslite;
271\ No newline at end of file
272diff --git a/include/js_api/common_napi.h b/include/js_api/common_napi.h
273new file mode 100644
274index 00000000..c52f3b80
275--- /dev/null
276+++ b/include/js_api/common_napi.h
277@@ -0,0 +1,104 @@
278+/**
279+ * Copyright (C) 2023 Huawei Device Co., Ltd.
280+ * Licensed under the Apache License, Version 2.0 (the "License");
281+ * you may not use this file except in compliance with the License.
282+ * You may obtain a copy of the License at
283+ *
284+ *     http://www.apache.org/licenses/LICENSE-2.0
285+ *
286+ * Unless required by applicable law or agreed to in writing, software
287+ * distributed under the License is distributed on an "AS IS" BASIS,
288+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
289+ * See the License for the specific language governing permissions and
290+ * limitations under the License.
291+ */
292+
293+#ifndef MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
294+#define MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
295+
296+#include <string>
297+#include <fstream>
298+#include "napi/native_api.h"
299+#include "napi/native_node_api.h"
300+#include "ms_errors.h"
301+#include "include/api/types.h"
302+
303+namespace mindspore {
304+
305+class CommonNapi {
306+ public:
307+  CommonNapi() = delete;
308+  ~CommonNapi() = delete;
309+
310+  static std::string getMessageByCode(int32_t &code);
311+  static int32_t GetPropertyInt32(napi_env env, napi_value config_obj, const std::string &type, int32_t &result);
312+  static int32_t GetPropertyString(napi_env env, napi_value config_obj, const std::string &type, std::string &result);
313+  static int32_t GetPropertyInt32Array(napi_env env, napi_value config_obj, const std::string &type,
314+                                       std::vector<int32_t> &result);
315+  static int32_t GetPropertyStringArray(napi_env env, napi_value config_obj, const std::string &type,
316+                                        std::vector<std::string> &result);
317+  static void WriteTensorData(MSTensor tensor, std::string file_path);
318+  static void WriteOutputsData(const std::vector<MSTensor> outputs, std::string file_path);
319+};
320+
321+struct MSLiteAsyncContext {
322+  explicit MSLiteAsyncContext(napi_env env);
323+  virtual ~MSLiteAsyncContext();
324+  int status = SUCCESS;
325+  std::string errMessage = "";
326+};
327+
328+enum ModelMode : int32_t {
329+  kBuffer = 0,
330+  kPath,
331+  kFD,
332+  // add new type here
333+  kInvalidModelMode = 10,
334+};
335+
336+struct ModelInfo {
337+  std::string model_path = "";
338+  char *model_buffer_data = nullptr;
339+  size_t model_buffer_total = 0;
340+  size_t model_fd = 0;
341+  ModelMode mode = kBuffer;
342+};
343+
344+struct CpuDevice {
345+  int thread_num;
346+  int thread_affinity_mode;
347+  std::vector<int32_t> thread_affinity_cores;
348+  std::string precision_mode;
349+  CpuDevice(){};
350+  CpuDevice(int thread_num, int affinity_mode, std::vector<int32_t> affinity_cores, std::string precision)
351+      : thread_num(thread_num),
352+        thread_affinity_mode(affinity_mode),
353+        thread_affinity_cores(affinity_cores),
354+        precision_mode(precision){};
355+};
356+
357+struct ContextInfo {
358+  std::vector<std::string> target;
359+  CpuDevice cpu_device;
360+};
361+
362+const int32_t NAPI_ERR_INPUT_INVALID = 401;
363+const int32_t NAPI_ERR_INVALID_PARAM = 1000101;
364+const int32_t NAPI_ERR_NO_MEMORY = 1000102;
365+const int32_t NAPI_ERR_ILLEGAL_STATE = 1000103;
366+const int32_t NAPI_ERR_UNSUPPORTED = 1000104;
367+const int32_t NAPI_ERR_TIMEOUT = 1000105;
368+const int32_t NAPI_ERR_STREAM_LIMIT = 1000201;
369+const int32_t NAPI_ERR_SYSTEM = 1000301;
370+
371+const std::string NAPI_ERROR_INVALID_PARAM_INFO = "input parameter value error";
372+const std::string NAPI_ERR_INPUT_INVALID_INFO = "input parameter type or number mismatch";
373+const std::string NAPI_ERR_INVALID_PARAM_INFO = "invalid parameter";
374+const std::string NAPI_ERR_NO_MEMORY_INFO = "allocate memory failed";
375+const std::string NAPI_ERR_ILLEGAL_STATE_INFO = "Operation not permit at current state";
376+const std::string NAPI_ERR_UNSUPPORTED_INFO = "unsupported option";
377+const std::string NAPI_ERR_TIMEOUT_INFO = "time out";
378+const std::string NAPI_ERR_STREAM_LIMIT_INFO = "stream number limited";
379+const std::string NAPI_ERR_SYSTEM_INFO = "system error";
380+}  // namespace mindspore
381+#endif  // MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
382\ No newline at end of file
383diff --git a/include/js_api/ms_errors.h b/include/js_api/ms_errors.h
384new file mode 100644
385index 00000000..4a60966f
386--- /dev/null
387+++ b/include/js_api/ms_errors.h
388@@ -0,0 +1,39 @@
389+/**
390+ * Copyright (C) 2023 Huawei Device Co., Ltd.
391+ * Licensed under the Apache License, Version 2.0 (the "License");
392+ * you may not use this file except in compliance with the License.
393+ * You may obtain a copy of the License at
394+ *
395+ *     http://www.apache.org/licenses/LICENSE-2.0
396+ *
397+ * Unless required by applicable law or agreed to in writing, software
398+ * distributed under the License is distributed on an "AS IS" BASIS,
399+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
400+ * See the License for the specific language governing permissions and
401+ * limitations under the License.
402+ */
403+#ifndef MINDSPORE_INCLUDE_JS_API_MS_ERRORS_H
404+#define MINDSPORE_INCLUDE_JS_API_MS_ERRORS_H
405+
406+namespace mindspore {
407+const int32_t BASE_MSLITE_ERR_OFFSET = 1000199;
408+
409+/** Success */
410+const int32_t SUCCESS = 0;
411+
412+/** Fail */
413+const int32_t ERROR = BASE_MSLITE_ERR_OFFSET;
414+
415+/** Status error */
416+const int32_t ERR_ILLEGAL_STATE = BASE_MSLITE_ERR_OFFSET - 1;
417+
418+/** Invalid parameter */
419+const int32_t ERR_INVALID_PARAM = BASE_MSLITE_ERR_OFFSET - 2;
420+
421+/** Not existed parameter */
422+const int32_t ERR_NOT_EXISTED_PARAM = BASE_MSLITE_ERR_OFFSET - 3;
423+
424+/** Invalid operation */
425+const int32_t ERR_INVALID_OPERATION = BASE_MSLITE_ERR_OFFSET - 4;
426+}  // namespace mindspore
427+#endif  // MINDSPORE_INCLUDE_JS_API_MS_ERRORS_H
428\ No newline at end of file
429diff --git a/include/js_api/ms_info.h b/include/js_api/ms_info.h
430new file mode 100644
431index 00000000..6f563231
432--- /dev/null
433+++ b/include/js_api/ms_info.h
434@@ -0,0 +1,69 @@
435+/**
436+ * Copyright (C) 2023 Huawei Device Co., Ltd.
437+ * Licensed under the Apache License, Version 2.0 (the "License");
438+ * you may not use this file except in compliance with the License.
439+ * You may obtain a copy of the License at
440+ *
441+ *     http://www.apache.org/licenses/LICENSE-2.0
442+ *
443+ * Unless required by applicable law or agreed to in writing, software
444+ * distributed under the License is distributed on an "AS IS" BASIS,
445+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
446+ * See the License for the specific language governing permissions and
447+ * limitations under the License.
448+ */
449+#ifndef MINDSPORE_INCLUDE_JS_API_MS_INFO_H
450+#define MINDSPORE_INCLUDE_JS_API_MS_INFO_H
451+
452+namespace mindspore {
453+enum InterruptType {
454+  INTERRUPT_TYPE_BEGIN = 1,
455+  INTERRUPT_TYPE_END = 2,
456+};
457+
458+enum InterruptHint {
459+  INTERRUPT_HINT_NONE = 0,
460+  INTERRUPT_HINT_RESUME,
461+  INTERRUPT_HINT_PAUSE,
462+  INTERRUPT_HINT_STOP,
463+  INTERRUPT_HINT_DUCK,
464+  INTERRUPT_HINT_UNDUCK
465+};
466+
467+enum InterruptForceType {
468+  /**
469+   * Force type, system change audio state.
470+   */
471+  INTERRUPT_FORCE = 0,
472+  /**
473+   * Share type, application change audio state.
474+   */
475+  INTERRUPT_SHARE
476+};
477+
478+struct InterruptEvent {
479+  /**
480+   * Interrupt event type, begin or end
481+   */
482+  InterruptType eventType;
483+  /**
484+   * Interrupt force type, force or share
485+   */
486+  InterruptForceType forceType;
487+  /**
488+   * Interrupt hint type. In force type, the audio state already changed,
489+   * but in share mode, only provide a hint for application to decide.
490+   */
491+  InterruptHint hintType;
492+};
493+
494+// Used internally only by AudioFramework
495+struct InterruptEventInternal {
496+  InterruptType eventType;
497+  InterruptForceType forceType;
498+  InterruptHint hintType;
499+  float duckVolume;
500+};
501+
502+}  // namespace mindspore
503+#endif  // MINDSPORE_INCLUDE_JS_API_MS_INFO_H
504\ No newline at end of file
505diff --git a/include/js_api/ms_parameters_napi.h b/include/js_api/ms_parameters_napi.h
506new file mode 100644
507index 00000000..9585255f
508--- /dev/null
509+++ b/include/js_api/ms_parameters_napi.h
510@@ -0,0 +1,24 @@
511+/**
512+ * Copyright (C) 2023 Huawei Device Co., Ltd.
513+ * Licensed under the Apache License, Version 2.0 (the "License");
514+ * you may not use this file except in compliance with the License.
515+ * You may obtain a copy of the License at
516+ *
517+ *     http://www.apache.org/licenses/LICENSE-2.0
518+ *
519+ * Unless required by applicable law or agreed to in writing, software
520+ * distributed under the License is distributed on an "AS IS" BASIS,
521+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
522+ * See the License for the specific language governing permissions and
523+ * limitations under the License.
524+ */
525+#ifndef MINDSPORE_INCLUDE_JS_API_MS_PARAMETERS_NAPI_H
526+#define MINDSPORE_INCLUDE_JS_API_MS_PARAMETERS_NAPI_H
527+
528+#include <iostream>
529+
530+namespace mindspore {
531+
532+static const std::int32_t REFERENCE_CREATION_COUNT = 1;
533+}
534+#endif  // MINDSPORE_INCLUDE_JS_API_MS_PARAMETERS_NAPI_H
535\ No newline at end of file
536diff --git a/include/js_api/mslite_model_callback_napi.h b/include/js_api/mslite_model_callback_napi.h
537new file mode 100644
538index 00000000..3b3ee595
539--- /dev/null
540+++ b/include/js_api/mslite_model_callback_napi.h
541@@ -0,0 +1,38 @@
542+/**
543+ * Copyright (C) 2023 Huawei Device Co., Ltd.
544+ * Licensed under the Apache License, Version 2.0 (the "License");
545+ * you may not use this file except in compliance with the License.
546+ * You may obtain a copy of the License at
547+ *
548+ *     http://www.apache.org/licenses/LICENSE-2.0
549+ *
550+ * Unless required by applicable law or agreed to in writing, software
551+ * distributed under the License is distributed on an "AS IS" BASIS,
552+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
553+ * See the License for the specific language governing permissions and
554+ * limitations under the License.
555+ */
556+#ifndef MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_CALLBACK_NAPI_H
557+#define MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_CALLBACK_NAPI_H
558+
559+#include <queue>
560+#include <uv.h>
561+#include "mslite_model_napi.h"
562+#include "ms_info.h"
563+#include "common_napi.h"
564+
565+namespace mindspore {
566+enum class AsyncWorkType : int32_t {
567+  ASYNC_WORK_PREPARE = 0,
568+  ASYNC_WORK_PLAY,
569+  ASYNC_WORK_PAUSE,
570+  ASYNC_WORK_STOP,
571+  ASYNC_WORK_RESET,
572+  ASYNC_WORK_SEEK,
573+  ASYNC_WORK_SPEED,
574+  ASYNC_WORK_VOLUME,
575+  ASYNC_WORK_BITRATE,
576+  ASYNC_WORK_INVALID,
577+};
578+}  // namespace mindspore
579+#endif  // MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_CALLBACK_NAPI_H
580\ No newline at end of file
581diff --git a/include/js_api/mslite_model_napi.h b/include/js_api/mslite_model_napi.h
582new file mode 100644
583index 00000000..f570d232
584--- /dev/null
585+++ b/include/js_api/mslite_model_napi.h
586@@ -0,0 +1,85 @@
587+/**
588+ * Copyright 2023 Huawei Technologies Co., Ltd
589+ *
590+ * Licensed under the Apache License, Version 2.0 (the "License");
591+ * you may not use this file except in compliance with the License.
592+ * You may obtain a copy of the License at
593+ *
594+ * http://www.apache.org/licenses/LICENSE-2.0
595+ *
596+ * Unless required by applicable law or agreed to in writing, software
597+ * distributed under the License is distributed on an "AS IS" BASIS,
598+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
599+ * See the License for the specific language governing permissions and
600+ * limitations under the License.
601+ */
602+#ifndef MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_NAPI_H
603+#define MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_NAPI_H
604+
605+#include <memory>
606+#include <mutex>
607+#include "include/api/model.h"
608+#include "include/api/context.h"
609+#include "common_napi.h"
610+#include "mslite_model_callback_napi.h"
611+#include "napi/native_api.h"
612+#include "napi/native_node_api.h"
613+
614+namespace mindspore {
615+class MSLiteModelNapi {
616+ public:
617+  MSLiteModelNapi();
618+  ~MSLiteModelNapi();
619+
620+  static napi_value Init(napi_env env, napi_value exports);
621+  std::shared_ptr<mindspore::Model> native_model_ = nullptr;
622+
623+ private:
624+  struct MSLiteModelAsyncContext {
625+    napi_async_work work;
626+    napi_deferred deferred;
627+    napi_ref callbackRef = nullptr;
628+    int32_t status = SUCCESS;
629+    MSLiteModelNapi *lite_model = nullptr;
630+    ModelInfo model_info;
631+    ContextInfo context;
632+
633+    MSLiteModelAsyncContext() {
634+      // setting context default value
635+      context.target.push_back("cpu");
636+      context.cpu_device.thread_num = 2;
637+      context.cpu_device.thread_affinity_mode = 0;
638+      context.cpu_device.precision_mode = "enforce_fp32";
639+    }
640+  };
641+  static napi_value Constructor(napi_env env, napi_callback_info info);
642+  static void Finalize(napi_env env, void *nativeObject, void *finalize);
643+  static napi_value LoadMSLiteModelFromFile(napi_env env, napi_callback_info info);
644+  static napi_value LoadMSLiteModelFromBuffer(napi_env env, napi_callback_info info);
645+  static napi_value LoadMSLiteModelFromFd(napi_env env, napi_callback_info info);
646+  static napi_value GetInputs(napi_env env, napi_callback_info info);
647+  static napi_value Resize(napi_env env, napi_callback_info info);
648+  static napi_value PredictAsync(napi_env env, napi_callback_info info);
649+  static int32_t ParseModelInfo(napi_env env, napi_value root, ModelInfo &model_info);
650+  static int32_t ParseContextInfo(napi_env env, napi_value root, ContextInfo &info);
651+  static void GetMSLiteModelAsyncCallbackComplete(napi_env env, napi_status status, void *data);
652+  static void PredictAsyncCallbackComplete(napi_env env, napi_status status, void *data);
653+  static napi_value CreateMSLiteModelWrapper(napi_env env, MSLiteModelAsyncContext *async_context);
654+  static void CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContext *&asyncContext, const napi_value &valueParam);
655+  static std::shared_ptr<mindspore::Model> CreateModel(ModelInfo *model_info_ptr, ContextInfo *contex_ptr);
656+  static int32_t GetCpuDeviceInfo(napi_env env, napi_value args, ContextInfo &context);
657+  static int32_t GetDeviceInfoContext(ContextInfo *context_info_ptr,
658+                                      std::vector<std::shared_ptr<DeviceInfoContext>> &device_infos);
659+  static int32_t SetTensorData(napi_env env, napi_value thisVar, napi_value argv,
660+                               MSLiteModelAsyncContext *async_context);
661+
662+  static thread_local napi_ref constructor_;
663+  napi_env env_ = nullptr;
664+  napi_ref wrapper_ = nullptr;
665+
666+  static ModelInfo *model_info_;
667+  static ContextInfo *context_;
668+  static std::mutex create_mutex_;
669+};
670+}  // namespace mindspore
671+#endif  // MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_NAPI_H
672\ No newline at end of file
673diff --git a/include/js_api/mstensor_napi.h b/include/js_api/mstensor_napi.h
674new file mode 100644
675index 00000000..0e9462c8
676--- /dev/null
677+++ b/include/js_api/mstensor_napi.h
678@@ -0,0 +1,52 @@
679+/**
680+ * Copyright 2022 Huawei Technologies Co., Ltd
681+ *
682+ * Licensed under the Apache License, Version 2.0 (the "License");
683+ * you may not use this file except in compliance with the License.
684+ * You may obtain a copy of the License at
685+ *
686+ * http://www.apache.org/licenses/LICENSE-2.0
687+ *
688+ * Unless required by applicable law or agreed to in writing, software
689+ * distributed under the License is distributed on an "AS IS" BASIS,
690+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
691+ * See the License for the specific language governing permissions and
692+ * limitations under the License.
693+ */
694+#ifndef MINDSPORE_INCLUDE_JS_API_MSTENSOR_NAPI_H
695+#define MINDSPORE_INCLUDE_JS_API_MSTENSOR_NAPI_H
696+
697+#include "include/api/types.h"
698+#include "napi/native_api.h"
699+#include "napi/native_node_api.h"
700+
701+namespace mindspore {
702+class MSTensorNapi {
703+ public:
704+  static napi_value NewInstance(napi_env env, mindspore::MSTensor tensor);
705+
706+ private:
707+  static napi_value Constructor(napi_env env, napi_callback_info info);
708+  static void Finalize(napi_env env, void *nativeObject, void *finalize);
709+  static napi_value GetConstructor(napi_env env);
710+
711+  static napi_value GetName(napi_env env, napi_callback_info info);
712+  static napi_value GetShape(napi_env env, napi_callback_info info);
713+  static napi_value GetElementNum(napi_env env, napi_callback_info info);
714+  static napi_value GetDtype(napi_env env, napi_callback_info info);
715+  static napi_value GetFormat(napi_env env, napi_callback_info info);
716+  static napi_value GetDataSize(napi_env env, napi_callback_info info);
717+  static napi_value GetDataBuffer(napi_env env, napi_callback_info info);
718+  static napi_value SetData(napi_env env, napi_callback_info info);
719+
720+  MSTensorNapi();
721+  ~MSTensorNapi();
722+
723+  static thread_local napi_ref constructor_;
724+  napi_env env_ = nullptr;
725+  napi_ref wrapper_ = nullptr;
726+
727+  std::unique_ptr<MSTensor> nativeMSTensor_ = nullptr;
728+};
729+}  // namespace mindspore
730+#endif  // MINDSPORE_INCLUDE_JS_API_MSTENSOR_NAPI_H
731\ No newline at end of file
732diff --git a/include/js_api/native_module_ohos_ms.h b/include/js_api/native_module_ohos_ms.h
733new file mode 100644
734index 00000000..202e8384
735--- /dev/null
736+++ b/include/js_api/native_module_ohos_ms.h
737@@ -0,0 +1,22 @@
738+/**
739+ * Copyright 2023 Huawei Technologies Co., Ltd
740+ *
741+ * Licensed under the Apache License, Version 2.0 (the "License");
742+ * you may not use this file except in compliance with the License.
743+ * You may obtain a copy of the License at
744+ *
745+ * http://www.apache.org/licenses/LICENSE-2.0
746+ *
747+ * Unless required by applicable law or agreed to in writing, software
748+ * distributed under the License is distributed on an "AS IS" BASIS,
749+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
750+ * See the License for the specific language governing permissions and
751+ * limitations under the License.
752+ */
753+#ifndef MINDSPORE_INCLUDE_JS_API_NATIVE_MODULE_OHOS_MS_H
754+#define MINDSPORE_INCLUDE_JS_API_NATIVE_MODULE_OHOS_MS_H
755+
756+#include "mslite_model_napi.h"
757+#include "mstensor_napi.h"
758+
759+#endif  // MINDSPORE_INCLUDE_JS_API_NATIVE_MODULE_OHOS_MS_H
760\ No newline at end of file
761diff --git a/mindspore/lite/BUILD.gn b/mindspore/lite/BUILD.gn
762index d761b69c..b8bac6c4 100644
763--- a/mindspore/lite/BUILD.gn
764+++ b/mindspore/lite/BUILD.gn
765@@ -69,6 +69,7 @@ ohos_group("mindspore") {
766   deps = [
767     ":mindspore_lib",
768     "mindir:mindir_lib",
769+    "src/runtime/js_api:mslite"
770   ]
771 }
772
773diff --git a/mindspore/lite/src/runtime/js_api/BUILD.gn b/mindspore/lite/src/runtime/js_api/BUILD.gn
774new file mode 100644
775index 00000000..44669c26
776--- /dev/null
777+++ b/mindspore/lite/src/runtime/js_api/BUILD.gn
778@@ -0,0 +1,55 @@
779+# Copyright (c) 2023 Huawei Device Co., Ltd.
780+# Licensed under the Apache License, Version 2.0 (the "License");
781+# you may not use this file except in compliance with the License.
782+# You may obtain a copy of the License at
783+#
784+#     http://www.apache.org/licenses/LICENSE-2.0
785+#
786+# Unless required by applicable law or agreed to in writing, software
787+# distributed under the License is distributed on an "AS IS" BASIS,
788+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
789+# See the License for the specific language governing permissions and
790+# limitations under the License.
791+
792+import("//build/ohos.gni")
793+import("//build/ohos/ace/ace.gni")
794+
795+ohos_shared_library("mslite") {
796+  include_dirs = [
797+    "//third_party/mindspore/mindspore-src/source/",
798+    "//third_party/mindspore/mindspore-src/source/include/api",
799+    "//third_party/mindspore/mindspore-src/source/mindspore/core",
800+    "//third_party//mindspore/mindspore-src/source/mindspore/lite",
801+    "//third_party/libuv/include",
802+
803+    "//foundation/arkui/napi",
804+    "//foundation/arkui/napi/interfaces/inner_api",
805+    "//foundation/arkui/napi/interfaces/kits",
806+    "//third_party/libuv/include",
807+    "//third_party/node/src",
808+    "//base/hiviewdfx/hilog/interfaces/native/innerkits/include",
809+  ]
810+
811+  sources = [
812+    "mslite_model_napi.cc",
813+    "mstensor_napi.cc",
814+    "native_module_ohos_ms.cc",
815+    "common_napi.cc",
816+  ]
817+
818+  deps = [
819+    "../../../:mindspore_lib",
820+  ]
821+  external_deps = [
822+    "ability_runtime:abilitykit_native",
823+    "ability_runtime:napi_base_context",
824+    "c_utils:utils",
825+    "hiviewdfx_hilog_native:libhilog",
826+    "napi:ace_napi",
827+    "resource_management:global_resmgr",
828+  ]
829+
830+  relative_install_dir = "module/ai"
831+  part_name = "mindspore"
832+  subsystem_name = "ai"
833+}
834diff --git a/mindspore/lite/src/runtime/js_api/common_napi.cc b/mindspore/lite/src/runtime/js_api/common_napi.cc
835new file mode 100644
836index 00000000..6a07c712
837--- /dev/null
838+++ b/mindspore/lite/src/runtime/js_api/common_napi.cc
839@@ -0,0 +1,246 @@
840+/*
841+ * Copyright (C) 2023 Huawei Device Co., Ltd.
842+ * Licensed under the Apache License, Version 2.0 (the "License");
843+ * you may not use this file except in compliance with the License.
844+ * You may obtain a copy of the License at
845+ *
846+ *     http://www.apache.org/licenses/LICENSE-2.0
847+ *
848+ * Unless required by applicable law or agreed to in writing, software
849+ * distributed under the License is distributed on an "AS IS" BASIS,
850+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
851+ * See the License for the specific language governing permissions and
852+ * limitations under the License.
853+ */
854+
855+#include "include/js_api/common_napi.h"
856+#include <climits>
857+#include "src/common/log.h"
858+
859+namespace mindspore {
860+
861+namespace {
862+const int SIZE = 100;
863+}
864+
865+std::string CommonNapi::getMessageByCode(int32_t &code) {
866+  std::string err_message;
867+  switch (code) {
868+    case NAPI_ERR_INVALID_PARAM:
869+      err_message = NAPI_ERR_INVALID_PARAM_INFO;
870+      break;
871+    case NAPI_ERR_NO_MEMORY:
872+      err_message = NAPI_ERR_NO_MEMORY_INFO;
873+      break;
874+    case NAPI_ERR_ILLEGAL_STATE:
875+      err_message = NAPI_ERR_ILLEGAL_STATE_INFO;
876+      break;
877+    case NAPI_ERR_UNSUPPORTED:
878+      err_message = NAPI_ERR_UNSUPPORTED_INFO;
879+      break;
880+    case NAPI_ERR_TIMEOUT:
881+      err_message = NAPI_ERR_TIMEOUT_INFO;
882+      break;
883+    case NAPI_ERR_STREAM_LIMIT:
884+      err_message = NAPI_ERR_STREAM_LIMIT_INFO;
885+      break;
886+    case NAPI_ERR_SYSTEM:
887+      err_message = NAPI_ERR_SYSTEM_INFO;
888+      break;
889+    case NAPI_ERR_INPUT_INVALID:
890+      err_message = NAPI_ERR_INPUT_INVALID_INFO;
891+      break;
892+    default:
893+      err_message = NAPI_ERR_SYSTEM_INFO;
894+      code = NAPI_ERR_SYSTEM;
895+      break;
896+  }
897+  return err_message;
898+}
899+
900+int32_t CommonNapi::GetPropertyInt32(napi_env env, napi_value config_obj, const std::string &type, int32_t &result) {
901+  napi_value item = nullptr;
902+  bool exist = false;
903+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
904+
905+  if (status != napi_ok || !exist) {
906+    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
907+    return ERR_NOT_EXISTED_PARAM;
908+  }
909+
910+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
911+    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
912+    return ERR_INVALID_PARAM;
913+  }
914+
915+  if (napi_get_value_int32(env, item, &result) != napi_ok) {
916+    MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
917+    return ERR_INVALID_PARAM;
918+  }
919+  return SUCCESS;
920+}
921+
922+int32_t CommonNapi::GetPropertyString(napi_env env, napi_value config_obj, const std::string &type,
923+                                      std::string &result) {
924+  napi_value item = nullptr;
925+  bool exist = false;
926+  char buffer[SIZE];
927+  size_t length = 0;
928+
929+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
930+
931+
932+
933+
934+
935+  if (status != napi_ok || !exist) {
936+    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
937+    return ERR_NOT_EXISTED_PARAM;
938+  }
939+
940+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
941+    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
942+    return ERR_INVALID_PARAM;
943+  }
944+
945+  if (napi_get_value_string_utf8(env, item, buffer, SIZE, &length) != napi_ok) {
946+    MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
947+    return ERR_INVALID_PARAM;
948+  }
949+  result = std::string(buffer, length);
950+  return SUCCESS;
951+}
952+
953+int32_t CommonNapi::GetPropertyInt32Array(napi_env env, napi_value config_obj, const std::string &type,
954+                                          std::vector<int32_t> &result) {
955+  napi_value item = nullptr;
956+  bool exist = false;
957+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
958+  if (status != napi_ok || !exist) {
959+    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
960+    return ERR_NOT_EXISTED_PARAM;
961+  }
962+
963+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
964+    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
965+    return ERR_INVALID_PARAM;
966+  }
967+
968+  uint32_t array_length = 0;
969+  status = napi_get_array_length(env, item, &array_length);
970+  if (status != napi_ok || array_length <= 0) {
971+    MS_LOG(ERROR) << "can not get array length";
972+    return ERR_INVALID_PARAM;
973+  }
974+  MS_LOG(DEBUG) << "GetPropertyInt32Array array_length: " << array_length;
975+
976+  for (size_t i = 0; i < array_length; i++) {
977+    int32_t int_value = 0;
978+    napi_value element = nullptr;
979+    status = napi_get_element(env, item, i, &element);
980+    if (status != napi_ok) {
981+      MS_LOG(ERROR) << "can not get element";
982+      return ERR_INVALID_PARAM;
983+    }
984+
985+    if (napi_get_value_int32(env, element, &int_value) != napi_ok) {
986+      MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
987+      return ERR_INVALID_PARAM;
988+    }
989+    result.push_back(int_value);
990+  }
991+
992+  return SUCCESS;
993+}
994+
995+int32_t CommonNapi::GetPropertyStringArray(napi_env env, napi_value config_obj, const std::string &type,
996+                                           std::vector<std::string> &result) {
997+  napi_value item = nullptr;
998+  bool exist = false;
999+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
1000+
1001+  if (status != napi_ok || !exist) {
1002+    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
1003+    return ERR_NOT_EXISTED_PARAM;
1004+  }
1005+
1006+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
1007+    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
1008+    return ERR_INVALID_PARAM;
1009+  }
1010+
1011+  uint32_t array_length = 0;
1012+  status = napi_get_array_length(env, item, &array_length);
1013+  if (status != napi_ok || array_length <= 0) {
1014+    MS_LOG(ERROR) << "can not get array length";
1015+    return ERR_INVALID_PARAM;
1016+  }
1017+
1018+  for (size_t i = 0; i < array_length; i++) {
1019+    char buffer[SIZE];
1020+    size_t length = 0;
1021+
1022+    napi_value element = nullptr;
1023+    status = napi_get_element(env, item, i, &element);
1024+    if (status != napi_ok) {
1025+      MS_LOG(ERROR) << "can not get element";
1026+      return ERR_INVALID_PARAM;
1027+    }
1028+
1029+    if (napi_get_value_string_utf8(env, element, buffer, SIZE, &length) != napi_ok) {
1030+      MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
1031+      return ERR_INVALID_PARAM;
1032+    }
1033+    result.push_back(std::string(buffer, length));
1034+  }
1035+
1036+  return SUCCESS;
1037+}
1038+
1039+void CommonNapi::WriteTensorData(MSTensor tensor, std::string file_path) {
1040+  std::ofstream out_file;
1041+  out_file.open(file_path, std::ios::out | std::ios::app);
1042+  if (!out_file.is_open()) {
1043+    MS_LOG(ERROR) << "output file open failed";
1044+    return;
1045+  }
1046+  auto out_data = reinterpret_cast<const float *>(tensor.Data().get());
1047+  out_file << tensor.Name() << " ";
1048+  for (auto dim : tensor.Shape()) {
1049+    out_file << dim << " ";
1050+  }
1051+  out_file << std::endl;
1052+  for (int i = 0; i < tensor.ElementNum(); i++) {
1053+    out_file << out_data[i] << " ";
1054+  }
1055+  out_file << std::endl;
1056+  out_file.close();
1057+}
1058+
1059+void CommonNapi::WriteOutputsData(const std::vector<MSTensor> outputs, std::string file_path) {
1060+  std::ofstream out_file;
1061+  out_file.open(file_path, std::ios::out | std::ios::app);
1062+  if (!out_file.is_open()) {
1063+    MS_LOG(ERROR) << "output file open failed";
1064+    return;
1065+  }
1066+  for (auto tensor : outputs) {
1067+    MS_LOG(INFO) << "tensor name is: " << tensor.Name().c_str()
1068+                 << "tensor size is: " << static_cast<int>(tensor.DataSize())
1069+                 << "tensor elements num is: " << static_cast<int>(tensor.ElementNum());
1070+    // dtype float
1071+    auto out_data = reinterpret_cast<const float *>(tensor.Data().get());
1072+    out_file << tensor.Name() << " ";
1073+    for (auto dim : tensor.Shape()) {
1074+      out_file << dim << " ";
1075+    }
1076+    out_file << std::endl;
1077+    for (int i = 0; i < tensor.ElementNum(); i++) {
1078+      out_file << out_data[i] << " ";
1079+    }
1080+    out_file << std::endl;
1081+  }
1082+  out_file.close();
1083+}
1084+
1085+}  // namespace mindspore
1086\ No newline at end of file
1087diff --git a/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc b/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
1088new file mode 100644
1089index 00000000..0ea92e13
1090--- /dev/null
1091+++ b/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
1092@@ -0,0 +1,1053 @@
1093+/**
1094+ * Copyright 2023 Huawei Technologies Co., Ltd
1095+ *
1096+ * Licensed under the Apache License, Version 2.0 (the "License");
1097+ * you may not use this file except in compliance with the License.
1098+ * You may obtain a copy of the License at
1099+ *
1100+ * http://www.apache.org/licenses/LICENSE-2.0
1101+ *
1102+ * Unless required by applicable law or agreed to in writing, software
1103+ * distributed under the License is distributed on an "AS IS" BASIS,
1104+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1105+ * See the License for the specific language governing permissions and
1106+ * limitations under the License.
1107+ */
1108+#include "include/js_api/mslite_model_napi.h"
1109+#include <climits>
1110+#include <algorithm>
1111+#include <random>
1112+#include <cstring>
1113+#include <memory>
1114+#include <map>
1115+#include <vector>
1116+#include <unistd.h>
1117+#include <fcntl.h>
1118+#include <sys/stat.h>
1119+#include <sys/mman.h>
1120+#include "include/js_api/mstensor_napi.h"
1121+#include "include/js_api/common_napi.h"
1122+#include "include/js_api/ms_parameters_napi.h"
1123+#include "include/js_api/ms_errors.h"
1124+#include "include/js_api/mslite_model_callback_napi.h"
1125+#include "src/common/log.h"
1126+
1127+namespace mindspore {
1128+thread_local napi_ref MSLiteModelNapi::constructor_ = nullptr;
1129+ModelInfo *MSLiteModelNapi::model_info_ = nullptr;
1130+ContextInfo *MSLiteModelNapi::context_ = nullptr;
1131+std::mutex MSLiteModelNapi::create_mutex_;
1132+
1133+#define GET_PARAMS(env, info, num) \
1134+  size_t argc = num;               \
1135+  napi_value argv[num] = {0};      \
1136+  napi_value thisVar = nullptr;    \
1137+  void *data;                      \
1138+  napi_get_cb_info(env, info, &argc, argv, &thisVar, &data)
1139+
1140+namespace {
1141+const int ARGS_ONE = 1;
1142+const int ARGS_TWO = 2;
1143+
1144+const int PARAM0 = 0;
1145+const int PARAM1 = 1;
1146+
1147+const int SIZE = 100;
1148+const std::string CLASS_NAME = "MSLiteModel";
1149+
1150+const std::unordered_map<std::string, DeviceType> kDeviceTypes{
1151+  {"cpu", kCPU},
1152+  {"nnrt", kNNRt},
1153+  {"nnrt", kGPU},
1154+};
1155+}  // namespace
1156+
1157+MSLiteModelNapi::MSLiteModelNapi() : env_(nullptr), wrapper_(nullptr) {
1158+  MS_LOG(INFO) << "MSLiteModelNapi Instances create.";
1159+}
1160+
1161+MSLiteModelNapi::~MSLiteModelNapi() {
1162+  if (wrapper_ != nullptr) {
1163+    napi_delete_reference(env_, wrapper_);
1164+  }
1165+  if (model_info_ != nullptr) {
1166+    if (model_info_->model_buffer_data != nullptr) {
1167+      model_info_->model_buffer_data = nullptr;
1168+    }
1169+  }
1170+  MS_LOG(INFO) << "MSLiteModelNapi Instances destroy.";
1171+}
1172+
1173+void MSLiteModelNapi::Finalize(napi_env env, void *nativeObject, void *finalize) {
1174+  (void)env;
1175+  (void)finalize;
1176+  if (nativeObject != nullptr) {
1177+    // delete nativeObject
1178+    auto obj = static_cast<MSLiteModelNapi *>(nativeObject);
1179+    delete obj;
1180+    obj = nullptr;
1181+  }
1182+  MS_LOG(INFO) << "Finalize success";
1183+}
1184+
1185+napi_value MSLiteModelNapi::Init(napi_env env, napi_value exports) {
1186+  napi_property_descriptor properties[] = {DECLARE_NAPI_FUNCTION("getInputs", GetInputs),
1187+                                           DECLARE_NAPI_FUNCTION("resize", Resize),
1188+                                           DECLARE_NAPI_FUNCTION("predict", PredictAsync)};
1189+
1190+  napi_property_descriptor staticProperty[] = {
1191+    DECLARE_NAPI_STATIC_FUNCTION("loadModelFromFile", LoadMSLiteModelFromFile),
1192+    DECLARE_NAPI_STATIC_FUNCTION("loadModelFromBuffer", LoadMSLiteModelFromBuffer),
1193+    DECLARE_NAPI_STATIC_FUNCTION("loadModelFromFd", LoadMSLiteModelFromFd),
1194+  };
1195+
1196+  napi_value constructor = nullptr;
1197+  napi_status status = napi_define_class(env, CLASS_NAME.c_str(), NAPI_AUTO_LENGTH, Constructor, nullptr,
1198+                                         sizeof(properties) / sizeof(properties[0]), properties, &constructor);
1199+  if (status != napi_ok) {
1200+    MS_LOG(ERROR) << "Failed to define MSLiteModel class";
1201+    return nullptr;
1202+  }
1203+
1204+  status = napi_create_reference(env, constructor, REFERENCE_CREATION_COUNT, &constructor_);
1205+  if (status != napi_ok) {
1206+    MS_LOG(ERROR) << "Failed to create reference of constructor";
1207+    return nullptr;
1208+  }
1209+
1210+  status = napi_set_named_property(env, exports, CLASS_NAME.c_str(), constructor);
1211+  if (status != napi_ok) {
1212+    MS_LOG(ERROR) << "Failed to set constructor";
1213+    return nullptr;
1214+  }
1215+
1216+  status = napi_define_properties(env, exports, sizeof(staticProperty) / sizeof(staticProperty[0]), staticProperty);
1217+  if (status != napi_ok) {
1218+    MS_LOG(ERROR) << "Failed to define static function";
1219+    return nullptr;
1220+  }
1221+
1222+  MS_LOG(INFO) << "init success";
1223+  return exports;
1224+}
1225+
1226+std::shared_ptr<mindspore::Model> MSLiteModelNapi::CreateModel(ModelInfo *model_info_ptr,
1227+                                                               ContextInfo *context_info_ptr) {
1228+  // create and init context
1229+  std::string s;
1230+  for (const auto &device_name : context_info_ptr->target) {
1231+    s += device_name + " ";
1232+  }
1233+  MS_LOG(DEBUG) << "target device: " << s.c_str();
1234+
1235+  auto context = std::make_shared<mindspore::Context>();
1236+  if (context == nullptr) {
1237+    MS_LOG(ERROR) << "Failed to new context.";
1238+    return nullptr;
1239+  }
1240+
1241+  auto &device_infos = context->MutableDeviceInfo();
1242+  if (context_info_ptr->target.empty()) {
1243+    MS_LOG(ERROR) << "context is empty.";
1244+    return nullptr;
1245+  }
1246+  if (GetDeviceInfoContext(context_info_ptr, device_infos) != SUCCESS) {
1247+    MS_LOG(ERROR) << "Create context failed.";
1248+    return nullptr;
1249+  }
1250+
1251+  switch (model_info_ptr->mode) {
1252+    case kBuffer: {
1253+      MS_LOG(DEBUG) << "input model buffer, model_buffer_total: " << model_info_ptr->model_buffer_total;
1254+      if (model_info_ptr->model_buffer_data == nullptr || model_info_ptr->model_buffer_total <= 0) {
1255+        MS_LOG(ERROR) << "Failed to build model.";
1256+        return nullptr;
1257+      }
1258+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
1259+      if (model_ptr == nullptr) {
1260+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
1261+        return nullptr;
1262+      }
1263+      auto ret = model_ptr->Build(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total,
1264+                                  mindspore::kMindIR, context);
1265+      if (ret == mindspore::kSuccess) {
1266+        MS_LOG(INFO) << "Build model from buffer success.";
1267+        return model_ptr;
1268+      }
1269+      break;
1270+    }
1271+    case kPath: {
1272+      MS_LOG(DEBUG) << "input model path, model_buffer_total: " << model_info_ptr->model_path.c_str();
1273+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
1274+      if (model_ptr == nullptr) {
1275+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
1276+        return nullptr;
1277+      }
1278+      auto ret = model_ptr->Build(model_info_ptr->model_path, mindspore::kMindIR, context);
1279+      if (ret == mindspore::kSuccess) {
1280+        return model_ptr;
1281+      }
1282+      return nullptr;
1283+    }
1284+    case kFD: {
1285+      MS_LOG(DEBUG) << "input model fd:" << model_info_ptr->model_fd
1286+                    << ", model_buffer_total: " << model_info_ptr->model_buffer_total;
1287+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
1288+      if (model_ptr == nullptr) {
1289+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
1290+        return nullptr;
1291+      }
1292+      auto ret = model_ptr->Build(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total,
1293+                                  mindspore::kMindIR, context);
1294+      if (ret == mindspore::kSuccess) {
1295+        MS_LOG(INFO) << "Build model from buffer success.";
1296+        return model_ptr;
1297+      }
1298+      (void)munmap(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total);
1299+
1300+      break;
1301+    }
1302+    default: {
1303+      MS_LOG(ERROR) << "Invalid model mode.";
1304+    }
1305+  }
1306+  MS_LOG(ERROR) << "Build model failed.";
1307+  return nullptr;
1308+}
1309+
1310+int32_t MSLiteModelNapi::GetDeviceInfoContext(ContextInfo *context_ptr,
1311+                                              std::vector<std::shared_ptr<DeviceInfoContext>> &device_infos) {
1312+  for (auto device_name : context_ptr->target) {
1313+    if (kDeviceTypes.find(device_name) == kDeviceTypes.end()) {
1314+      MS_LOG(ERROR) << "Invalid device: " << device_name.c_str();
1315+      return ERR_INVALID_OPERATION;
1316+    }
1317+
1318+    auto device_type = kDeviceTypes.at(device_name);
1319+    switch (device_type) {
1320+      case kCPU: {
1321+        auto cpu_device = std::make_shared<mindspore::CPUDeviceInfo>();
1322+        if (cpu_device == nullptr) {
1323+          MS_LOG(ERROR) << "Failed to new CPU deviceInfo.";
1324+          return ERR_INVALID_OPERATION;
1325+        }
1326+        bool is_fp16 = (context_ptr->cpu_device.precision_mode.compare("preferred_fp16") == 0) ? true : false;
1327+        cpu_device->SetEnableFP16(is_fp16);
1328+        device_infos.push_back(cpu_device);
1329+        break;
1330+      }
1331+      case kNNRt: {
1332+        auto nnrt_device = std::make_shared<mindspore::NNRTDeviceInfo>();
1333+        if (nnrt_device == nullptr) {
1334+          MS_LOG(ERROR) << "Failed to new NNRT deviceInfo.";
1335+          return ERR_INVALID_OPERATION;
1336+        }
1337+        device_infos.push_back(nnrt_device);
1338+        break;
1339+      }
1340+      default: {
1341+        MS_LOG(ERROR) << "invalid device.";
1342+        return ERR_INVALID_OPERATION;
1343+      }
1344+    }
1345+  }
1346+  return SUCCESS;
1347+}
1348+
1349+napi_value MSLiteModelNapi::Constructor(napi_env env, napi_callback_info info) {
1350+  napi_status status;
1351+  napi_value result = nullptr;
1352+  GET_PARAMS(env, info, ARGS_TWO);
1353+
1354+  MSLiteModelNapi *model_napi = new (std::nothrow) MSLiteModelNapi();
1355+  if (model_napi == nullptr) {
1356+    MS_LOG(ERROR) << "No memory";
1357+    return result;
1358+  }
1359+
1360+  model_napi->env_ = env;
1361+  model_napi->native_model_ = CreateModel(model_info_, context_);
1362+  if (model_napi->native_model_ == nullptr) {
1363+    MS_LOG(ERROR) << "Failed to create model.";
1364+    delete model_napi; return result;
1365+  }
1366+
1367+  status = napi_wrap(env, thisVar, reinterpret_cast<void *>(model_napi), MSLiteModelNapi::Finalize, nullptr,
1368+                     &(model_napi->wrapper_));
1369+  if (status != napi_ok) {
1370+    delete model_napi;
1371+    napi_get_undefined(env, &result);
1372+    MS_LOG(ERROR) << "Failed to wrap native instance";
1373+    return result;
1374+  }
1375+  return thisVar;
1376+}
1377+
1378+int32_t MSLiteModelNapi::ParseModelInfo(napi_env env, napi_value root, ModelInfo &model_info) {
1379+  napi_valuetype valueType;
1380+  napi_status status = napi_typeof(env, root, &valueType);
1381+  if (status != napi_ok) {
1382+    MS_LOG(ERROR) << "napi_typeof error.";
1383+    return ERR_INVALID_PARAM;
1384+  }
1385+  if ((valueType != napi_object) && (valueType != napi_string) && (valueType != napi_number)) {
1386+    MS_LOG(ERROR) << "napi_type not support.";
1387+    return ERR_INVALID_PARAM;
1388+  }
1389+
1390+  bool is_model_buffer = false;
1391+  napi_is_arraybuffer(env, root, &is_model_buffer);
1392+  if (is_model_buffer) {
1393+    // copy buffer
1394+    char *array_buffer_data;
1395+    size_t array_buffer_total;
1396+    status = napi_get_arraybuffer_info(env, root, reinterpret_cast<void **>(&array_buffer_data), &array_buffer_total);
1397+    if ((status != napi_ok) || (array_buffer_total <= 0)) {
1398+      MS_LOG(ERROR) << "Parse model buffer failed.";
1399+      return ERR_INVALID_PARAM;
1400+    }
1401+
1402+    // shallow copy
1403+    model_info.model_buffer_data = array_buffer_data;
1404+    model_info.model_buffer_total = array_buffer_total;
1405+    model_info.mode = kBuffer;
1406+  } else if (valueType == napi_number) {
1407+    int32_t fd;
1408+    status = napi_get_value_int32(env, root, &fd);
1409+    if ((status != napi_ok) || (fd <= 0)) {
1410+      MS_LOG(ERROR) << "Parse model FD failed.";
1411+      return ERR_INVALID_PARAM;
1412+    }
1413+    off_t size = lseek(fd, 0, SEEK_END);
1414+    (void)lseek(fd, 0, SEEK_SET);
1415+    auto mmap_buffers = (size > 0) ? mmap(NULL, size, PROT_READ, MAP_SHARED, fd, 0) : MAP_FAILED;
1416+    if (mmap_buffers == MAP_FAILED) {
1417+      MS_LOG(ERROR) << "mmap_buffers is NULL.";
1418+      return ERR_INVALID_PARAM;
1419+    }
1420+    model_info.model_fd = fd;
1421+    model_info.model_buffer_data = static_cast<char *>(mmap_buffers);
1422+    model_info.model_buffer_total = size;
1423+    model_info.mode = kFD;
1424+    close(fd);
1425+  } else {
1426+    char char_buf[SIZE];
1427+    size_t buf_length = 0;
1428+    status = napi_get_value_string_utf8(env, root, char_buf, SIZE, &buf_length);
1429+    if ((status != napi_ok) || (buf_length <= 0)) {
1430+      MS_LOG(ERROR) << "Parse model file failed.";
1431+      return ERR_INVALID_PARAM;
1432+    }
1433+    model_info.model_path.assign(char_buf, char_buf + buf_length);
1434+    model_info.mode = kPath;
1435+    MS_LOG(DEBUG) << "model_path: " << model_info.model_path.c_str();
1436+  }
1437+  return SUCCESS;
1438+}
1439+
1440+int32_t MSLiteModelNapi::ParseContextInfo(napi_env env, napi_value args, ContextInfo &context) {
1441+  napi_valuetype valueType;
1442+  napi_status status = napi_typeof(env, args, &valueType);
1443+  if ((status != napi_ok) || (valueType != napi_object)) {
1444+    MS_LOG(ERROR) << "napi_typeof check failed.";
1445+    return ERR_NOT_EXISTED_PARAM;
1446+  }
1447+
1448+  std::vector<std::string> str_values;
1449+  auto ret = CommonNapi::GetPropertyStringArray(env, args, "target", str_values);
1450+  if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1451+    MS_LOG(ERROR) << "Get context target failed.";
1452+    return ret;
1453+  }
1454+  context.target.assign(str_values.begin(), str_values.end());
1455+
1456+  ret = GetCpuDeviceInfo(env, args, context);
1457+  if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1458+    MS_LOG(ERROR) << "Get context CpuDeviceInfo failed.";
1459+    return ret;
1460+  }
1461+  return SUCCESS;
1462+}
1463+
1464+napi_value MSLiteModelNapi::CreateMSLiteModelWrapper(napi_env env, MSLiteModelAsyncContext *async_context) {
1465+  std::lock_guard<std::mutex> lock(create_mutex_);
1466+  napi_status status;
1467+  napi_value result = nullptr;
1468+  napi_value constructor;
1469+  napi_get_undefined(env, &result);
1470+
1471+  status = napi_get_reference_value(env, constructor_, &constructor);
1472+  if (status != napi_ok) {
1473+    MS_LOG(ERROR) << "get reference failed.";
1474+    return result;
1475+  }
1476+  model_info_ = &(async_context->model_info);
1477+  context_ = &(async_context->context);
1478+  status = napi_new_instance(env, constructor, 0, nullptr, &result);
1479+  if (status == napi_ok) {
1480+    return result;
1481+  }
1482+
1483+  return result;
1484+}
1485+
1486+void MSLiteModelNapi::GetMSLiteModelAsyncCallbackComplete(napi_env env, napi_status status, void *data) {
1487+  napi_value valueParam = nullptr;
1488+  auto async_context = static_cast<MSLiteModelAsyncContext *>(data);
1489+
1490+  if (async_context != nullptr) {
1491+    if (!async_context->status) {
1492+      valueParam = CreateMSLiteModelWrapper(env, async_context);
1493+    }
1494+    CommonCallbackRoutine(env, async_context, valueParam);
1495+  } else {
1496+    MS_LOG(ERROR) << "GetMSLiteModelAsyncCallbackComplete asyncContext is Null!";
1497+  }
1498+}
1499+
1500+void MSLiteModelNapi::CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContext *&asyncContext,
1501+                                            const napi_value &valueParam) {
1502+  napi_value result[ARGS_TWO] = {0};
1503+  napi_value retVal;
1504+
1505+  if (!asyncContext->status) {
1506+    napi_get_undefined(env, &result[PARAM0]);
1507+    result[PARAM1] = valueParam;
1508+  } else {
1509+    napi_value message = nullptr;
1510+    std::string messageValue = CommonNapi::getMessageByCode(asyncContext->status);
1511+    napi_create_string_utf8(env, messageValue.c_str(), NAPI_AUTO_LENGTH, &message);
1512+
1513+    napi_value code = nullptr;
1514+    napi_create_string_utf8(env, (std::to_string(asyncContext->status)).c_str(), NAPI_AUTO_LENGTH, &code);
1515+
1516+    napi_create_error(env, code, message, &result[PARAM0]);
1517+    napi_get_undefined(env, &result[PARAM1]);
1518+  }
1519+
1520+  if (asyncContext->deferred) {
1521+    if (!asyncContext->status) {
1522+      napi_resolve_deferred(env, asyncContext->deferred, result[PARAM1]);
1523+    } else {
1524+      napi_reject_deferred(env, asyncContext->deferred, result[PARAM0]);
1525+    }
1526+  } else {
1527+    napi_value callback = nullptr;
1528+    napi_get_reference_value(env, asyncContext->callbackRef, &callback);
1529+    napi_call_function(env, nullptr, callback, ARGS_TWO, result, &retVal);
1530+    napi_delete_reference(env, asyncContext->callbackRef);
1531+  }
1532+  napi_delete_async_work(env, asyncContext->work);
1533+
1534+  delete asyncContext;
1535+  asyncContext = nullptr;
1536+}
1537+
1538+napi_value MSLiteModelNapi::LoadMSLiteModelFromFile(napi_env env, napi_callback_info info) {
1539+  napi_status status;
1540+  napi_value result = nullptr;
1541+
1542+  GET_PARAMS(env, info, ARGS_TWO);
1543+
1544+  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1545+
1546+  int32_t ret;
1547+  for (size_t i = PARAM0; i < argc; i++) {
1548+    if (i == PARAM0) {
1549+      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
1550+      if (ret != SUCCESS) {
1551+        MS_LOG(ERROR) << "Parsing model info failed.";
1552+        return result;
1553+      }
1554+    } else if (i == PARAM1) {
1555+      ret = ParseContextInfo(env, argv[i], asyncContext->context);
1556+      if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1557+        MS_LOG(ERROR) << "Parsing context info failed.";
1558+        return result;
1559+      }
1560+    } else {
1561+      MS_LOG(ERROR) << "Invalid input params.";
1562+      return result;
1563+    }
1564+  }
1565+  status = napi_create_promise(env, &asyncContext->deferred, &result);
1566+  if (status != napi_ok) {
1567+    MS_LOG(ERROR) << "create promise failed.";
1568+    return result;
1569+  }
1570+
1571+  napi_value resource = nullptr;
1572+  napi_create_string_utf8(env, "LoadMSLiteModelFromFile", NAPI_AUTO_LENGTH, &resource);
1573+  status = napi_create_async_work(
1574+    env, nullptr, resource,
1575+    [](napi_env env, void *data) {
1576+      auto context = static_cast<MSLiteModelAsyncContext *>(data);
1577+      context->status = SUCCESS;
1578+    },
1579+    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
1580+  if (status != napi_ok) {
1581+    result = nullptr;
1582+  } else {
1583+    status = napi_queue_async_work(env, asyncContext->work);
1584+    if (status == napi_ok) {
1585+      asyncContext.release();
1586+    } else {
1587+      result = nullptr;
1588+    }
1589+  }
1590+  return result;
1591+}
1592+
1593+napi_value MSLiteModelNapi::LoadMSLiteModelFromBuffer(napi_env env, napi_callback_info info) {
1594+  napi_status status;
1595+  napi_value result = nullptr;
1596+
1597+  GET_PARAMS(env, info, ARGS_TWO);
1598+
1599+  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1600+
1601+  int32_t ret;
1602+  for (size_t i = PARAM0; i < argc; i++) {
1603+    if (i == PARAM0) {
1604+      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
1605+      if (ret != SUCCESS) {
1606+        MS_LOG(ERROR) << "Parsing model info failed.";
1607+        return result;
1608+      }
1609+    } else if (i == PARAM1) {
1610+      ret = ParseContextInfo(env, argv[i], asyncContext->context);
1611+      if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1612+        MS_LOG(ERROR) << "Parsing context info failed.";
1613+        return result;
1614+      }
1615+    } else {
1616+      MS_LOG(ERROR) << "Invalid input params.";
1617+      return result;
1618+    }
1619+  }
1620+  status = napi_create_promise(env, &asyncContext->deferred, &result);
1621+  if (status != napi_ok) {
1622+    MS_LOG(ERROR) << "create promise failed.";
1623+    return result;
1624+  }
1625+
1626+  napi_value resource = nullptr;
1627+  napi_create_string_utf8(env, "LoadMSLiteModelFromBuffer", NAPI_AUTO_LENGTH, &resource);
1628+  status = napi_create_async_work(
1629+    env, nullptr, resource,
1630+    [](napi_env env, void *data) {
1631+      auto context = static_cast<MSLiteModelAsyncContext *>(data);
1632+      context->status = SUCCESS;
1633+    },
1634+    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
1635+  if (status != napi_ok) {
1636+    result = nullptr;
1637+  } else {
1638+    status = napi_queue_async_work(env, asyncContext->work);
1639+    if (status == napi_ok) {
1640+      asyncContext.release();
1641+    } else {
1642+      result = nullptr;
1643+    }
1644+  }
1645+  return result;
1646+}
1647+
1648+napi_value MSLiteModelNapi::LoadMSLiteModelFromFd(napi_env env, napi_callback_info info) {
1649+  napi_status status;
1650+  napi_value result = nullptr;
1651+
1652+  GET_PARAMS(env, info, ARGS_TWO);
1653+
1654+  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1655+
1656+  int32_t ret;
1657+  for (size_t i = PARAM0; i < argc; i++) {
1658+    if (i == PARAM0) {
1659+      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
1660+      if (ret != SUCCESS) {
1661+        MS_LOG(ERROR) << "Parsing model info failed.";
1662+        return result;
1663+      }
1664+    } else if (i == PARAM1) {
1665+      ret = ParseContextInfo(env, argv[i], asyncContext->context);
1666+      if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1667+        MS_LOG(ERROR) << "Parsing context info failed.";
1668+        return result;
1669+      }
1670+    } else {
1671+      MS_LOG(ERROR) << "Invalid input params.";
1672+      return result;
1673+    }
1674+  }
1675+  status = napi_create_promise(env, &asyncContext->deferred, &result);
1676+  if (status != napi_ok) {
1677+    MS_LOG(ERROR) << "create promise failed.";
1678+    return result;
1679+  }
1680+
1681+  napi_value resource = nullptr;
1682+  napi_create_string_utf8(env, "LoadMSLiteModelFromFd", NAPI_AUTO_LENGTH, &resource);
1683+  status = napi_create_async_work(
1684+    env, nullptr, resource,
1685+    [](napi_env env, void *data) {
1686+      auto context = static_cast<MSLiteModelAsyncContext *>(data);
1687+      context->status = SUCCESS;
1688+    },
1689+    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
1690+  if (status != napi_ok) {
1691+    result = nullptr;
1692+  } else {
1693+    status = napi_queue_async_work(env, asyncContext->work);
1694+    if (status == napi_ok) {
1695+      asyncContext.release();
1696+    } else {
1697+      result = nullptr;
1698+    }
1699+  }
1700+  return result;
1701+}
1702+
1703+int32_t MSLiteModelNapi::GetCpuDeviceInfo(napi_env env, napi_value args, ContextInfo &context) {
1704+  bool has_cpu_property = false;
1705+  napi_status status = napi_has_named_property(env, args, "cpu", &has_cpu_property);
1706+  if (status != napi_ok) {
1707+    MS_LOG(ERROR) << "can not find cpu property";
1708+    return ERR_INVALID_OPERATION;
1709+  }
1710+  if (!has_cpu_property) {
1711+    return ERR_NOT_EXISTED_PARAM;
1712+  }
1713+
1714+  napi_value config_item = nullptr;
1715+  status = napi_get_named_property(env, args, "cpu", &config_item);
1716+  if (status != napi_ok) {
1717+    MS_LOG(ERROR) << "can not get cpu property";
1718+    return ERR_INVALID_OPERATION;
1719+  }
1720+
1721+  int32_t int_value = 0;
1722+  std::string str_value = "";
1723+  std::vector<int32_t> affinity_cores;
1724+
1725+  if (CommonNapi::GetPropertyInt32(env, config_item, "thread_num", int_value) == SUCCESS) {
1726+    MS_LOG(DEBUG) << "thread_num: " << int_value;
1727+    context.cpu_device.thread_num = int_value;
1728+  }
1729+
1730+  if (CommonNapi::GetPropertyInt32(env, config_item, "thread_affinity_mode", int_value) == SUCCESS) {
1731+    MS_LOG(DEBUG) << "thread_affinity_mode: " << int_value;
1732+    context.cpu_device.thread_num = int_value;
1733+  }
1734+
1735+  if (CommonNapi::GetPropertyInt32Array(env, config_item, "thread_affinity_core_list", affinity_cores) == SUCCESS) {
1736+    MS_LOG(DEBUG) << "affinity_cores size: " << affinity_cores.size();
1737+    context.cpu_device.thread_affinity_cores.assign(affinity_cores.begin(), affinity_cores.end());
1738+  }
1739+
1740+  if (CommonNapi::GetPropertyString(env, config_item, "precision_mode", str_value) == SUCCESS) {
1741+    MS_LOG(DEBUG) << "precision_mode: " << str_value.c_str();
1742+    context.cpu_device.precision_mode = str_value;
1743+  }
1744+  return SUCCESS;
1745+}
1746+
1747+napi_value MSLiteModelNapi::GetInputs(napi_env env, napi_callback_info info) {
1748+  napi_value undefinedResult = nullptr;
1749+  napi_get_undefined(env, &undefinedResult);
1750+
1751+  size_t argCount = 0;
1752+  napi_value jsThis = nullptr;
1753+  napi_value jsResult = nullptr;
1754+  MSLiteModelNapi *modelNapi = nullptr;
1755+
1756+  napi_status status = napi_get_cb_info(env, info, &argCount, nullptr, &jsThis, nullptr);
1757+  if (status != napi_ok || jsThis == nullptr) {
1758+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
1759+    return undefinedResult;
1760+  }
1761+
1762+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
1763+  if (status != napi_ok || modelNapi == nullptr) {
1764+    MS_LOG(ERROR) << "get model napi error";
1765+    return undefinedResult;
1766+  }
1767+
1768+  if (modelNapi->native_model_ == nullptr) {
1769+    MS_LOG(ERROR) << "model is released(null), please create model again";
1770+    return undefinedResult;
1771+  }
1772+  std::vector<MSTensor> inputs = modelNapi->native_model_->GetInputs();
1773+  std::vector<MSTensor> tensor_inputs;
1774+  for (size_t i = 0; i < inputs.size(); i++) {
1775+    auto tensor = mindspore::MSTensor::CreateTensor(inputs.at(i).Name(), inputs.at(i).DataType(), {}, nullptr, 0);
1776+    if (tensor == nullptr) {
1777+      MS_LOG(ERROR) << "create tensor failed.";
1778+      return undefinedResult;
1779+    }
1780+    tensor->SetShape(inputs.at(i).Shape());
1781+    tensor->SetFormat(inputs.at(i).format());
1782+    tensor->SetDataType(inputs.at(i).DataType());
1783+    tensor_inputs.push_back(*tensor);
1784+    delete tensor;
1785+  }
1786+
1787+  size_t size = inputs.size();
1788+  MS_LOG(INFO) << "inputs size: " << size;
1789+
1790+  napi_create_array_with_length(env, size, &jsResult);
1791+  for (size_t i = 0; i < size; i++) {
1792+    status = napi_set_element(env, jsResult, i, MSTensorNapi::NewInstance(env, tensor_inputs[i]));
1793+    if (status != napi_ok) {
1794+      MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
1795+    }
1796+  }
1797+  MS_LOG(INFO) << "get model inputs success: " << inputs[0].Name().c_str();
1798+  return jsResult;
1799+}
1800+
1801+napi_value MSLiteModelNapi::Resize(napi_env env, napi_callback_info info) {
1802+  napi_value undefinedResult = nullptr;
1803+  bool result = false;
1804+  napi_get_undefined(env, &undefinedResult);
1805+  napi_value argv[ARGS_TWO] = {0};
1806+  size_t argCount = 2;
1807+  napi_value jsThis = nullptr;
1808+  napi_value jsResult = nullptr;
1809+  MSLiteModelNapi *modelNapi = nullptr;
1810+
1811+  napi_status status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
1812+  if (status != napi_ok || jsThis == nullptr) {
1813+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
1814+    return undefinedResult;
1815+  }
1816+
1817+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
1818+  if (status != napi_ok || modelNapi == nullptr) {
1819+    MS_LOG(ERROR) << "get model napi error";
1820+    return undefinedResult;
1821+  }
1822+
1823+  if (modelNapi->native_model_ == nullptr) {
1824+    MS_LOG(ERROR) << "model is released(null), please create model again";
1825+    return undefinedResult;
1826+  }
1827+  std::vector<MSTensor> inputs = modelNapi->native_model_->GetInputs();
1828+  std::vector<MSTensor> tensor_inputs;
1829+  std::vector<std::vector<int64_t>> dims;
1830+
1831+  // set inputs data
1832+  uint32_t array_length = 0;
1833+  status = napi_get_array_length(env, argv[PARAM0], &array_length);
1834+  if (status != napi_ok || array_length <= 0) {
1835+    MS_LOG(ERROR) << "Get inputs tensor length failed.";
1836+    return undefinedResult;
1837+  }
1838+  if (inputs.size() != array_length) {
1839+    MS_LOG(ERROR) << "array length not equal to model inputs size.";
1840+    return undefinedResult;
1841+  }
1842+  for (size_t i = 0; i < array_length; i++) {
1843+    napi_value element = nullptr;
1844+    status = napi_get_element(env, argv[PARAM0], i, &element);
1845+    if (status != napi_ok) {
1846+      MS_LOG(ERROR) << "can not get element";
1847+      return undefinedResult;
1848+    }
1849+
1850+    std::string property_name = "data";
1851+    bool exist = false;
1852+    napi_value data_func = nullptr;
1853+
1854+    status = napi_has_named_property(env, element, property_name.c_str(), &exist);
1855+    if (status != napi_ok || !exist) {
1856+      MS_LOG(ERROR) << "can not find target property";
1857+      return undefinedResult;
1858+    }
1859+
1860+    if (status != napi_ok || !exist) {
1861+      MS_LOG(INFO) << "can not find " << property_name.c_str() << " property.";
1862+      return undefinedResult;
1863+    }
1864+
1865+    if (napi_get_named_property(env, element, property_name.c_str(), &data_func) != napi_ok) {
1866+      MS_LOG(ERROR) << "get " << property_name.c_str() << " property fail.";
1867+      return undefinedResult;
1868+    }
1869+    void *js_data = nullptr;
1870+    size_t length = 0;
1871+    napi_value return_val;
1872+
1873+    status = napi_call_function(env, element, data_func, 0, nullptr, &return_val);
1874+    if (status != napi_ok || return_val == nullptr) {
1875+      MS_LOG(ERROR) << "napi call function error.";
1876+      return undefinedResult;
1877+    }
1878+
1879+    status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
1880+    if (status != napi_ok || js_data == nullptr) {
1881+      MS_LOG(ERROR) << "Get js data error.";
1882+      return undefinedResult;
1883+    }
1884+    if (inputs[i].DataSize() != length) {
1885+      MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(inputs[i].DataSize()) << ", but data length got "
1886+                    << static_cast<int>(length);
1887+      return undefinedResult;
1888+    }
1889+
1890+    auto tensor_data = inputs[i].MutableData();
1891+    if (tensor_data == nullptr) {
1892+      MS_LOG(ERROR) << "malloc data for tensor failed.";
1893+      return undefinedResult;
1894+    }
1895+    memcpy(tensor_data, js_data, length);
1896+  }
1897+
1898+  napi_value dim_num = nullptr;
1899+  int64_t dim_ele = 0;
1900+  uint32_t dims_size = 0;
1901+  uint32_t dim_size = 0;
1902+
1903+  status = napi_is_array(env, argv[PARAM1], &result);
1904+  if (status != napi_ok || result == false) {
1905+    MS_LOG(ERROR) << "new dim is not a array";
1906+    return undefinedResult;
1907+  }
1908+
1909+  status = napi_get_array_length(env, argv[PARAM1], &dims_size);
1910+  if (status != napi_ok) {
1911+    MS_LOG(ERROR) << "get new dims size error";
1912+    return undefinedResult;
1913+  }
1914+  for (size_t i = 0; i < dims_size; i++) {
1915+    napi_value dim_element = nullptr;
1916+    status = napi_get_element(env, argv[PARAM1], i, &dim_element);
1917+    if (status != napi_ok) {
1918+      MS_LOG(ERROR) << "can not get element";
1919+      return undefinedResult;
1920+    }
1921+
1922+    status = napi_is_array(env, dim_element, &result);
1923+    if (status != napi_ok || result == false) {
1924+      MS_LOG(ERROR) << "new dim's element is not a array";
1925+      return undefinedResult;
1926+    }
1927+
1928+    status = napi_get_array_length(env, dim_element, &dim_size);
1929+    if (status != napi_ok) {
1930+      MS_LOG(ERROR) << "get new dim size error";
1931+      return undefinedResult;
1932+    }
1933+    std::vector<int64_t> dim(dim_size);
1934+    for (size_t j = 0; j < dim_size; j++) {
1935+      status = napi_get_element(env, dim_element, j, &dim_num);
1936+      if (status != napi_ok) {
1937+        MS_LOG(ERROR) << "get dim num error";
1938+        return undefinedResult;
1939+      }
1940+      status = napi_get_value_int64(env, dim_num, &dim_ele);
1941+      if (status != napi_ok) {
1942+        MS_LOG(ERROR) << "get dim element error";
1943+        return undefinedResult;
1944+      }
1945+      dim[j] = dim_ele;
1946+    }
1947+    dims.push_back(dim);
1948+  }
1949+  if (modelNapi->native_model_->Resize(inputs, dims) != mindspore::kSuccess) {
1950+    MS_LOG(ERROR) << "resize failed";
1951+    return undefinedResult;
1952+  }
1953+  status = napi_get_boolean(env, result, &jsResult);
1954+  if (status != napi_ok) {
1955+    MS_LOG(ERROR) << "get bool error";
1956+    return undefinedResult;
1957+  }
1958+  return jsResult;
1959+}
1960+
// Fills `data` (interpreted as an array of T covering `size` bytes) with
// values drawn from `distribution` using a default-seeded mt19937 engine.
// Deterministic: the same call always produces the same sequence.
template <typename T, typename Distribution>
void GenerateRandomData(int size, void *data, Distribution distribution) {
  std::mt19937 random_engine;
  T *out = static_cast<T *>(data);
  const int elements_num = size / static_cast<int>(sizeof(T));
  for (int i = 0; i < elements_num; ++i) {
    out[i] = static_cast<T>(distribution(random_engine));
  }
}
1968+
1969+int GenerateInputDataWithRandom(std::vector<mindspore::MSTensor> inputs) {
1970+  for (auto tensor : inputs) {
1971+    auto input_data = tensor.MutableData();
1972+    if (input_data == nullptr) {
1973+      std::cerr << "MallocData for inTensor failed." << std::endl;
1974+      return -1;
1975+    }
1976+    GenerateRandomData<float>(tensor.DataSize(), input_data, std::uniform_real_distribution<float>(0.1f, 1.0f));
1977+  }
1978+  return mindspore::kSuccess;
1979+}
1980+
1981+napi_value MSLiteModelNapi::PredictAsync(napi_env env, napi_callback_info info) {
1982+  napi_status status = napi_ok;
1983+  napi_value undefinedResult = nullptr;
1984+  napi_value result = nullptr;
1985+
1986+  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1987+  if (asyncContext == nullptr) {
1988+    MS_LOG(ERROR) << "MSLiteModelAsyncContext object create failed.";
1989+    return undefinedResult;
1990+  }
1991+
1992+  GET_PARAMS(env, info, ARGS_ONE);
1993+
1994+  if (SetTensorData(env, thisVar, argv[PARAM0], asyncContext.get()) != SUCCESS) {
1995+    MS_LOG(ERROR) << "Set tensor data failed.";
1996+    return undefinedResult;
1997+  }
1998+
1999+  napi_create_promise(env, &asyncContext->deferred, &result);
2000+  if (status != napi_ok) {
2001+    MS_LOG(ERROR) << "create promise failed.";
2002+    return result;
2003+  }
2004+
2005+  napi_value resource = nullptr;
2006+  napi_create_string_utf8(env, "Predict", NAPI_AUTO_LENGTH, &resource);
2007+  status = napi_create_async_work(
2008+    env, nullptr, resource,
2009+    [](napi_env env, void *data) {
2010+      auto context = static_cast<MSLiteModelAsyncContext *>(data);
2011+      context->status = SUCCESS;
2012+    },
2013+    PredictAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
2014+  if (status != napi_ok) {
2015+    result = nullptr;
2016+  } else {
2017+    status = napi_queue_async_work(env, asyncContext->work);
2018+    if (status == napi_ok) {
2019+      asyncContext.release();
2020+    } else {
2021+      result = nullptr;
2022+    }
2023+  }
2024+  return result;
2025+}
2026+
2027+int32_t MSLiteModelNapi::SetTensorData(napi_env env, napi_value thisVar, napi_value argv,
2028+                                       MSLiteModelAsyncContext *async_context) {
2029+  uint32_t array_length = 0;
2030+  napi_status status = napi_get_array_length(env, argv, &array_length);
2031+  if (status != napi_ok || array_length <= 0) {
2032+    MS_LOG(ERROR) << "Get inputs tensor length failed.";
2033+    return ERR_INVALID_PARAM;
2034+  }
2035+
2036+  status = napi_unwrap(env, thisVar, reinterpret_cast<void **>(&(async_context->lite_model)));
2037+  if (status != napi_ok || async_context->lite_model == nullptr) {
2038+    MS_LOG(ERROR) << "get model napi error";
2039+    return ERROR;
2040+  }
2041+  auto modelNapi = async_context->lite_model;
2042+  if (modelNapi->native_model_ == nullptr) {
2043+    MS_LOG(ERROR) << "model is released(null), please create model again";
2044+    return ERROR;
2045+  }
2046+
2047+  auto inputs = modelNapi->native_model_->GetInputs();
2048+  if (inputs.size() != array_length) {
2049+    MS_LOG(ERROR) << "array length not equal to model inputs size.";
2050+    return ERR_INVALID_PARAM;
2051+  }
2052+
2053+  for (size_t i = 0; i < array_length; i++) {
2054+    napi_value element = nullptr;
2055+    status = napi_get_element(env, argv, i, &element);
2056+    if (status != napi_ok) {
2057+      MS_LOG(ERROR) << "can not get element";
2058+      return ERROR;
2059+    }
2060+
2061+    std::string property_name = "data";
2062+    bool exist = false;
2063+    napi_value data_func = nullptr;
2064+
2065+    napi_status status = napi_has_named_property(env, element, property_name.c_str(), &exist);
2066+    if (status != napi_ok || !exist) {
2067+      MS_LOG(ERROR) << "can not find target property";
2068+      return ERROR;
2069+    }
2070+
2071+    if (status != napi_ok || !exist) {
2072+      MS_LOG(INFO) << "can not find " << property_name.c_str() << " property.";
2073+      return ERROR;
2074+    }
2075+
2076+    if (napi_get_named_property(env, element, property_name.c_str(), &data_func) != napi_ok) {
2077+      MS_LOG(ERROR) << "get " << property_name.c_str() << " property fail.";
2078+      return ERROR;
2079+    }
2080+    void *js_data = nullptr;
2081+    size_t length = 0;
2082+    napi_value return_val;
2083+
2084+    status = napi_call_function(env, element, data_func, 0, nullptr, &return_val);
2085+    if (status != napi_ok || return_val == nullptr) {
2086+      MS_LOG(ERROR) << "napi call function error.";
2087+      return ERROR;
2088+    }
2089+
2090+    status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
2091+    if (status != napi_ok || js_data == nullptr) {
2092+      MS_LOG(ERROR) << "Get js data error.";
2093+      return ERROR;
2094+    }
2095+    if (inputs[i].DataSize() != length) {
2096+      MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(inputs[i].DataSize()) << ", but data length got "
2097+                    << static_cast<int>(length);
2098+      return ERROR;
2099+    }
2100+
2101+    auto tensor_data = inputs[i].MutableData();
2102+    if (tensor_data == nullptr) {
2103+      MS_LOG(ERROR) << "malloc data for tensor failed.";
2104+      return ERROR;
2105+    }
2106+    memcpy(tensor_data, js_data, length);
2107+  }
2108+  return SUCCESS;
2109+}
2110+
void MSLiteModelNapi::PredictAsyncCallbackComplete(napi_env env, napi_status status, void *data) {
  // Completion callback for PredictAsync: runs the actual inference here and
  // settles the promise (via CommonCallbackRoutine) with an array of output
  // tensor wrappers.
  napi_value valueParam = nullptr;
  auto asyncContext = static_cast<MSLiteModelAsyncContext *>(data);

  if (asyncContext != nullptr) {
    // asyncContext->status is SUCCESS (0) when the async work ran cleanly.
    if (!asyncContext->status) {
      auto modelNapi = asyncContext->lite_model;
      if (modelNapi->native_model_ == nullptr) {
        // NOTE(review): returning here skips CommonCallbackRoutine, so the JS
        // promise is never settled and asyncContext leaks — confirm intended.
        MS_LOG(ERROR) << "model is released(null), please create model again";
        return;
      }
      auto inputs = modelNapi->native_model_->GetInputs();
      auto outputs = modelNapi->native_model_->GetOutputs();

      auto predict_ret = modelNapi->native_model_->Predict(inputs, &outputs);
      if (predict_ret != mindspore::kSuccess) {
        // NOTE(review): same early-return concern as above — the promise is
        // not rejected on predict failure.
        MS_LOG(ERROR) << "model predict failed.";
        return;
      }

      // Wrap each output tensor in a JS MSTensor and collect them in an array.
      napi_create_array_with_length(env, outputs.size(), &valueParam);
      for (size_t i = 0; i < outputs.size(); i++) {
        status = napi_set_element(env, valueParam, i, MSTensorNapi::NewInstance(env, outputs[i]));
        if (status != napi_ok) {
          MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
        }
      }
      MS_LOG(INFO) << "predict model success.";
    }
    // Settles the deferred and releases the async work resources.
    CommonCallbackRoutine(env, asyncContext, valueParam);
  } else {
    MS_LOG(ERROR) << "ERROR: PredictAsyncCallbackComplete asyncContext is Null!";
  }
}
2145+}  // namespace mindspore
2146diff --git a/mindspore/lite/src/runtime/js_api/mstensor_napi.cc b/mindspore/lite/src/runtime/js_api/mstensor_napi.cc
2147new file mode 100644
2148index 00000000..a03bd484
2149--- /dev/null
2150+++ b/mindspore/lite/src/runtime/js_api/mstensor_napi.cc
2151@@ -0,0 +1,426 @@
2152+/**
2153+ * Copyright 2023 Huawei Technologies Co., Ltd
2154+ *
2155+ * Licensed under the Apache License, Version 2.0 (the "License");
2156+ * you may not use this file except in compliance with the License.
2157+ * You may obtain a copy of the License at
2158+ *
2159+ * http://www.apache.org/licenses/LICENSE-2.0
2160+ *
2161+ * Unless required by applicable law or agreed to in writing, software
2162+ * distributed under the License is distributed on an "AS IS" BASIS,
2163+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2164+ * See the License for the specific language governing permissions and
2165+ * limitations under the License.
2166+ */
2167+
#include "include/js_api/mstensor_napi.h"
#include <climits>
#include <string.h>  // NOTE(review): C++ convention would be <cstring> — confirm project style
#include <map>
#include "src/common/log.h"

namespace mindspore {
// JS constructor reference for the MSTensor class; thread_local because napi
// references are tied to a per-thread environment.
thread_local napi_ref MSTensorNapi::constructor_ = nullptr;
const std::string CLASS_NAME = "MSTensor";

// Unpacks the common napi callback arguments into locals:
// argc, argv[num], thisVar, data.
#define GET_PARAMS(env, info, num) \
  size_t argc = num;               \
  napi_value argv[num] = {0};      \
  napi_value thisVar = nullptr;    \
  void *data;                      \
  napi_get_cb_info(env, info, &argc, argv, &thisVar, &data)

// Maps dtype names to napi typed-array tags.
// NOTE(review): not referenced in the visible part of this file — confirm use.
const std::unordered_map<std::string, napi_typedarray_type> kDTypeMap{
  {"int32", napi_int32_array},
  {"float32", napi_float32_array},
  {"int8", napi_int8_array},
  {"uint8", napi_uint8_array},
};

namespace {
const int ARGS_TWO = 2;
}
2195+
// Trivial constructor: the native tensor is attached later by NewInstance().
MSTensorNapi::MSTensorNapi() { MS_LOG(DEBUG) << "MSLITE MSTensorNapi Instances create."; }

MSTensorNapi::~MSTensorNapi() {
  if (wrapper_ != nullptr) {
    napi_delete_reference(env_, wrapper_);
  }
  if (nativeMSTensor_ != nullptr) {
    // Detach the data pointer before the tensor is destroyed — presumably the
    // buffer is owned by the model/runtime rather than this wrapper, so it
    // must not be freed here. TODO confirm ownership.
    nativeMSTensor_->SetData(nullptr);
  }
  MS_LOG(INFO) << "MSLITE MSTensorNapi Instances destroy.";
}
2207+
2208+napi_value MSTensorNapi::Constructor(napi_env env, napi_callback_info info) {
2209+  napi_value jsThis = nullptr;
2210+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2211+  if (status != napi_ok || jsThis == nullptr) {
2212+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2213+    return nullptr;
2214+  }
2215+
2216+  MSTensorNapi *tensprNapi = new (std::nothrow) MSTensorNapi();
2217+  if (tensprNapi == nullptr) {
2218+    MS_LOG(ERROR) << "No memory";
2219+    return nullptr;
2220+  }
2221+
2222+  tensprNapi->env_ = env;
2223+  status = napi_wrap(env, jsThis, tensprNapi, MSTensorNapi::Finalize, nullptr, &(tensprNapi->wrapper_));
2224+  if (status != napi_ok) {
2225+    delete tensprNapi;
2226+    MS_LOG(ERROR) << "Failed to wrap native instance";
2227+    return nullptr;
2228+  }
2229+
2230+  MS_LOG(INFO) << "Constructor success.";
2231+  return jsThis;
2232+}
2233+
2234+void MSTensorNapi::Finalize(napi_env env, void *nativeObject, void *finalize) {
2235+  (void)env;
2236+  (void)finalize;
2237+  if (nativeObject != nullptr) {
2238+    delete reinterpret_cast<MSTensorNapi *>(nativeObject);
2239+  }
2240+  MS_LOG(INFO) << "Finalize success.";
2241+}
2242+
napi_value MSTensorNapi::NewInstance(napi_env env, mindspore::MSTensor tensor) {
  // Creates a JS MSTensor object and attaches a native copy of `tensor`.
  napi_value cons = GetConstructor(env);
  if (cons == nullptr) {
    MS_LOG(ERROR) << "NewInstance GetConstructor is nullptr!";
    return nullptr;
  }
  napi_value instance;
  napi_status status = napi_new_instance(env, cons, 0, nullptr, &instance);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "NewInstance napi_new_instance failed! code: " << status;
    return nullptr;
  }

  MSTensorNapi *proxy = nullptr;
  status = napi_unwrap(env, instance, reinterpret_cast<void **>(&proxy));
  if (proxy == nullptr) {
    MS_LOG(ERROR) << "NewInstance native instance is nullptr! code: " << status;
    return instance;
  }
  // The MSTensor needs no freshly allocated memory; it is obtained directly
  // from Model.getInputs(). (Translated from the original Chinese comment.)
  proxy->nativeMSTensor_ = std::make_unique<mindspore::MSTensor>(tensor);
  // NOTE(review): std::make_unique throws std::bad_alloc rather than
  // returning null, so this branch is effectively dead — kept as-is.
  if (proxy->nativeMSTensor_ == nullptr) {
    MS_LOG(ERROR) << "NewInstance native tensor unique ptr is nullptr!";
    return instance;
  }
  return instance;
}
2270+
napi_value MSTensorNapi::GetConstructor(napi_env env) {
  // Lazily defines the JS "MSTensor" class (getters plus data accessors) and
  // caches its constructor in the thread-local `constructor_` reference.
  napi_value cons;
  if (constructor_ != nullptr) {
    napi_get_reference_value(env, constructor_, &cons);
    return cons;
  }

  MS_LOG(INFO) << "Get msTensorNapi constructor.";
  napi_property_descriptor properties[] = {
    DECLARE_NAPI_GETTER("name", GetName),
    DECLARE_NAPI_GETTER("shape", GetShape),
    DECLARE_NAPI_GETTER("element_num", GetElementNum),
    DECLARE_NAPI_GETTER("dtype", GetDtype),
    DECLARE_NAPI_GETTER("format", GetFormat),
    DECLARE_NAPI_GETTER("data_size", GetDataSize),

    DECLARE_NAPI_FUNCTION("data", GetDataBuffer),
    DECLARE_NAPI_FUNCTION("setData", SetData),
  };

  napi_status status = napi_define_class(env, CLASS_NAME.c_str(), NAPI_AUTO_LENGTH, Constructor, nullptr,
                                         sizeof(properties) / sizeof(napi_property_descriptor), properties, &cons);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "MSLITE Failed to define MSTensor class";
    return nullptr;
  }

  // Pin the constructor with a strong reference so it survives GC.
  status = napi_create_reference(env, cons, 1, &constructor_);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "MSLITE Failed to create reference of constructor";
    return nullptr;
  }

  return cons;
}
2306+
2307+napi_value MSTensorNapi::GetName(napi_env env, napi_callback_info info) {
2308+  napi_value undefinedResult = nullptr;
2309+  napi_get_undefined(env, &undefinedResult);
2310+  napi_value jsThis = nullptr;
2311+  napi_value jsResult = nullptr;
2312+  MSTensorNapi *tensor = nullptr;
2313+
2314+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2315+  if (status != napi_ok || jsThis == nullptr) {
2316+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2317+    return undefinedResult;
2318+  }
2319+
2320+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2321+  if (status != napi_ok || tensor == nullptr) {
2322+    MS_LOG(ERROR) << "get tensor napi error";
2323+    return undefinedResult;
2324+  }
2325+
2326+  status = napi_create_string_utf8(env, tensor->nativeMSTensor_->Name().c_str(), NAPI_AUTO_LENGTH, &jsResult);
2327+  if (status != napi_ok) {
2328+    MS_LOG(ERROR) << "napi_create_string_utf8 error";
2329+    return undefinedResult;
2330+  }
2331+
2332+  MS_LOG(INFO) << "GetName success.";
2333+  return jsResult;
2334+}
2335+
2336+napi_value MSTensorNapi::GetShape(napi_env env, napi_callback_info info) {
2337+  napi_value undefinedResult = nullptr;
2338+  napi_get_undefined(env, &undefinedResult);
2339+  napi_value jsThis = nullptr;
2340+  napi_value jsResult = nullptr;
2341+  MSTensorNapi *tensor = nullptr;
2342+
2343+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2344+  if (status != napi_ok || jsThis == nullptr) {
2345+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2346+    return undefinedResult;
2347+  }
2348+
2349+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2350+  if (status != napi_ok || tensor == nullptr) {
2351+    MS_LOG(ERROR) << "get tensor napi error";
2352+    return undefinedResult;
2353+  }
2354+
2355+  // return array
2356+  auto shape = tensor->nativeMSTensor_->Shape();
2357+  size_t size = shape.size();
2358+  napi_create_array_with_length(env, size, &jsResult);
2359+  for (size_t i = 0; i < size; i++) {
2360+    napi_value num;
2361+    status = napi_create_int32(env, shape.at(i), &num);
2362+    if (status != napi_ok) {
2363+      MS_LOG(ERROR) << "napi_create_int32 error";
2364+      return undefinedResult;
2365+    }
2366+    napi_set_element(env, jsResult, i, num);
2367+  }
2368+
2369+  MS_LOG(INFO) << "GetShape success.";
2370+  return jsResult;
2371+}
2372+
2373+napi_value MSTensorNapi::GetElementNum(napi_env env, napi_callback_info info) {
2374+  napi_value undefinedResult = nullptr;
2375+  napi_get_undefined(env, &undefinedResult);
2376+  napi_value jsThis = nullptr;
2377+  napi_value jsResult = nullptr;
2378+  MSTensorNapi *tensor = nullptr;
2379+
2380+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2381+  if (status != napi_ok || jsThis == nullptr) {
2382+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2383+    return undefinedResult;
2384+  }
2385+
2386+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2387+  if (status != napi_ok || tensor == nullptr) {
2388+    MS_LOG(ERROR) << "get tensor napi error";
2389+    return undefinedResult;
2390+  }
2391+
2392+  status = napi_create_int32(env, tensor->nativeMSTensor_->ElementNum(), &jsResult);
2393+  if (status != napi_ok) {
2394+    MS_LOG(ERROR) << "napi_create_int32 error";
2395+    return undefinedResult;
2396+  }
2397+
2398+  MS_LOG(INFO) << "GetElementNum success.";
2399+  return jsResult;
2400+}
2401+
2402+napi_value MSTensorNapi::GetDtype(napi_env env, napi_callback_info info) {
2403+  napi_value undefinedResult = nullptr;
2404+  napi_get_undefined(env, &undefinedResult);
2405+  napi_value jsThis = nullptr;
2406+  napi_value jsResult = nullptr;
2407+  MSTensorNapi *tensor = nullptr;
2408+
2409+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2410+  if (status != napi_ok || jsThis == nullptr) {
2411+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2412+    return undefinedResult;
2413+  }
2414+
2415+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2416+  if (status != napi_ok || tensor == nullptr) {
2417+    MS_LOG(ERROR) << "get tensor napi error";
2418+    return undefinedResult;
2419+  }
2420+
2421+  status = napi_create_int32(env, static_cast<int32_t>(tensor->nativeMSTensor_->DataType()), &jsResult);
2422+  if (status != napi_ok) {
2423+    MS_LOG(ERROR) << "napi_create_int32 error";
2424+    return undefinedResult;
2425+  }
2426+
2427+  MS_LOG(INFO) << "GetDtype success.";
2428+  return jsResult;
2429+}
2430+
2431+napi_value MSTensorNapi::GetFormat(napi_env env, napi_callback_info info) {
2432+  napi_value undefinedResult = nullptr;
2433+  napi_get_undefined(env, &undefinedResult);
2434+  napi_value jsThis = nullptr;
2435+  napi_value jsResult = nullptr;
2436+  MSTensorNapi *tensor = nullptr;
2437+
2438+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2439+  if (status != napi_ok || jsThis == nullptr) {
2440+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2441+    return undefinedResult;
2442+  }
2443+
2444+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2445+  if (status != napi_ok || tensor == nullptr) {
2446+    MS_LOG(ERROR) << "get tensor napi error";
2447+    return undefinedResult;
2448+  }
2449+
2450+  status = napi_create_int32(env, static_cast<int32_t>(tensor->nativeMSTensor_->format()), &jsResult);
2451+  if (status != napi_ok) {
2452+    MS_LOG(ERROR) << "napi_create_int32 error";
2453+    return undefinedResult;
2454+  }
2455+
2456+  MS_LOG(INFO) << "GetFormat success.";
2457+  return jsResult;
2458+}
2459+
2460+napi_value MSTensorNapi::GetDataSize(napi_env env, napi_callback_info info) {
2461+  napi_value undefinedResult = nullptr;
2462+  napi_get_undefined(env, &undefinedResult);
2463+  napi_value jsThis = nullptr;
2464+  napi_value jsResult = nullptr;
2465+  MSTensorNapi *tensor = nullptr;
2466+
2467+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2468+  if (status != napi_ok || jsThis == nullptr) {
2469+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2470+    return undefinedResult;
2471+  }
2472+
2473+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2474+  if (status != napi_ok || tensor == nullptr) {
2475+    MS_LOG(ERROR) << "get tensor napi error";
2476+    return undefinedResult;
2477+  }
2478+
2479+  status = napi_create_int32(env, tensor->nativeMSTensor_->DataSize(), &jsResult);
2480+  if (status != napi_ok) {
2481+    MS_LOG(ERROR) << "napi_create_int32 error";
2482+    return undefinedResult;
2483+  }
2484+
2485+  MS_LOG(INFO) << "GetDataSize success.";
2486+  return jsResult;
2487+}
2488+
2489+napi_value MSTensorNapi::GetDataBuffer(napi_env env, napi_callback_info info) {
2490+  napi_value undefinedResult = nullptr;
2491+  napi_get_undefined(env, &undefinedResult);
2492+
2493+  napi_value jsThis = nullptr;
2494+  napi_value jsResult = nullptr;
2495+  MSTensorNapi *tensor = nullptr;
2496+
2497+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
2498+  if (status != napi_ok || jsThis == nullptr) {
2499+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
2500+    return undefinedResult;
2501+  }
2502+
2503+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
2504+  if (status != napi_ok || tensor == nullptr) {
2505+    MS_LOG(ERROR) << "get tensor napi error";
2506+    return undefinedResult;
2507+  }
2508+
2509+  size_t byte_length = tensor->nativeMSTensor_->DataSize();
2510+  auto tensor_data = tensor->nativeMSTensor_->MutableData();
2511+  if (tensor_data == nullptr) {
2512+    MS_LOG(ERROR) << "tensor_data is null.";
2513+    return undefinedResult;
2514+  }
2515+
2516+  void *data = nullptr;
2517+  status = napi_create_arraybuffer(env, byte_length, &data, &jsResult);
2518+  if (status != napi_ok) {
2519+    MS_LOG(ERROR) << "napi_create_arraybuffer error";
2520+    return undefinedResult;
2521+  }
2522+  if (data == nullptr || jsResult == nullptr) {
2523+    MS_LOG(ERROR) << "napi_create_arraybuffer error";
2524+    return undefinedResult;
2525+  }
2526+
2527+  memcpy(data, tensor_data, byte_length);
2528+  MS_LOG(INFO) << "GetDataBuffer success.";
2529+  return jsResult;
2530+}
2531+
2532+napi_value MSTensorNapi::SetData(napi_env env, napi_callback_info info) {
2533+  napi_value undefinedResult = nullptr;
2534+  napi_get_undefined(env, &undefinedResult);
2535+  MSTensorNapi *tensor = nullptr;
2536+
2537+  GET_PARAMS(env, info, ARGS_TWO);
2538+
2539+  napi_status status = napi_unwrap(env, thisVar, reinterpret_cast<void **>(&tensor));
2540+  if (status != napi_ok || tensor == nullptr) {
2541+    MS_LOG(ERROR) << "get tensor napi error";
2542+    return undefinedResult;
2543+  }
2544+
2545+  if (tensor->nativeMSTensor_->DataType() != mindspore::DataType::kNumberTypeFloat32) {
2546+    MS_LOG(ERROR) << "tensor data type must be Float32(43), but got "
2547+                  << static_cast<int>(tensor->nativeMSTensor_->DataType());
2548+    return undefinedResult;
2549+  }
2550+
2551+  // convert napi_value to c++ type data
2552+  void *js_data = nullptr;
2553+  size_t length = 0;
2554+  status = napi_get_arraybuffer_info(env, argv[0], &js_data, &length);
2555+  if (status != napi_ok || js_data == nullptr) {
2556+    MS_LOG(ERROR) << "Get js data error.";
2557+    return undefinedResult;
2558+  }
2559+
2560+  if (tensor->nativeMSTensor_->DataSize() != length) {
2561+    MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(tensor->nativeMSTensor_->DataSize())
2562+                  << " but data length got " << length;
2563+    return undefinedResult;
2564+  }
2565+
2566+  // memcpy
2567+  auto tensor_data = tensor->nativeMSTensor_->MutableData();
2568+  if (tensor_data == nullptr) {
2569+    MS_LOG(ERROR) << "malloc data for tensor failed.";
2570+    return undefinedResult;
2571+  }
2572+  memcpy(tensor_data, js_data, length);
2573+
2574+  MS_LOG(INFO) << "SetData success.";
2575+  return undefinedResult;
2576+}
2577+}  // namespace mindspore
2578\ No newline at end of file
2579diff --git a/mindspore/lite/src/runtime/js_api/native_module_ohos_ms.cc b/mindspore/lite/src/runtime/js_api/native_module_ohos_ms.cc
2580new file mode 100644
2581index 00000000..a1954ae1
2582--- /dev/null
2583+++ b/mindspore/lite/src/runtime/js_api/native_module_ohos_ms.cc
2584@@ -0,0 +1,48 @@
2585+/**
2586+ * Copyright 2023 Huawei Technologies Co., Ltd
2587+ *
2588+ * Licensed under the Apache License, Version 2.0 (the "License");
2589+ * you may not use this file except in compliance with the License.
2590+ * You may obtain a copy of the License at
2591+ *
2592+ * http://www.apache.org/licenses/LICENSE-2.0
2593+ *
2594+ * Unless required by applicable law or agreed to in writing, software
2595+ * distributed under the License is distributed on an "AS IS" BASIS,
2596+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2597+ * See the License for the specific language governing permissions and
2598+ * limitations under the License.
2599+ */
2600+
2601+#include "include/js_api/native_module_ohos_ms.h"
2602+#include "src/common/log_adapter.h"
2603+
2604+/*
2605+ * Function registering all props and functions of ohos.ai.mslite module
2606+ * which involves the MindSpore Lite model and tensor interfaces
2607+ */
2608+static napi_value Export(napi_env env, napi_value exports) {
2609+  MS_LOG(INFO) << "Export() is called.";
2610+
2611+  mindspore::MSLiteModelNapi::Init(env, exports);
2612+  return exports;
2613+}
2614+
2615+/*
2616+ * module define
2617+ */
2618+static napi_module g_module = {.nm_version = 1,
2619+                               .nm_flags = 0,
2620+                               .nm_filename = nullptr,
2621+                               .nm_register_func = Export,
2622+                               .nm_modname = "ai.mslite",
2623+                               .nm_priv = ((void *)0),
2624+                               .reserved = {0}};
2625+
2626+/*
2627+ * module register
2628+ */
2629+extern "C" __attribute__((constructor)) void RegisterModule(void) {
2630+  MS_LOG(INFO) << "RegisterModule() is called";
2631+  napi_module_register(&g_module);
2632+}
2633--
26342.17.1
2635
2636