1From b33b431abbacc69132377aca203859c5466eb68d Mon Sep 17 00:00:00 2001
2From: fangzhou0329 <fangzhou12@huawei.com>
3Date: Mon, 5 Jun 2023 17:55:14 +0800
4Subject: [PATCH] add ohos js callback api and bugfix
5
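Add a JS N-API for MindSpore Lite on OpenHarmony with callback-style overloads:
replace include/js_api/@ohos.ai.mindspore.d.ts with @ohos.ai.mindSporeLite.d.ts,
add callback overloads for loadModelFromFile/loadModelFromBuffer/loadModelFromFd
and Model.predict, rename the shared library target from "mslite" to
"mindspore_lite_napi", and switch Context/MSTensor property names to camelCase.

A minimal usage sketch of the new callback- and promise-style APIs (illustration
only; the model path, the CPU context values, and the input data below are
assumptions, not part of this change):

    import mindSporeLite from '@ohos.ai.mindSporeLite';

    // Callback style: load a model from a file path, then run inference.
    mindSporeLite.loadModelFromFile('/data/storage/model.ms', (model) => {
      const inputs = model.getInputs();
      // Fill the first input with caller-provided float32 data (assumed layout).
      inputs[0].setData(new Float32Array(inputs[0].elementNum).buffer);
      model.predict(inputs, (outputs) => {
        const out = new Float32Array(outputs[0].getData());
        console.info('first output value: ' + out[0]);
      });
    });

    // Promise style with an explicit CPU context.
    // (Input data would be set the same way as above before calling predict.)
    const context: mindSporeLite.Context = { target: ['cpu'], cpu: { threadNum: 2 } };
    mindSporeLite.loadModelFromFile('/data/storage/model.ms', context)
      .then((model) => model.predict(model.getInputs()))
      .then((outputs) => console.info('output tensor name: ' + outputs[0].name));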
6---
7 include/js_api/@ohos.ai.mindSporeLite.d.ts    | 485 ++++++++++++++++++
8 include/js_api/@ohos.ai.mindspore.d.ts        | 222 --------
9 mindspore/lite/BUILD.gn                       |   2 +-
10 mindspore/lite/src/runtime/js_api/BUILD.gn    |   2 +-
11 .../lite/src/runtime/js_api/common_napi.cc    |  38 +-
12 .../src/runtime/js_api/mslite_model_napi.cc   | 231 ++++++---
13 .../lite/src/runtime/js_api/mstensor_napi.cc  |  12 +-
14 7 files changed, 662 insertions(+), 330 deletions(-)
15 create mode 100644 include/js_api/@ohos.ai.mindSporeLite.d.ts
16 delete mode 100644 include/js_api/@ohos.ai.mindspore.d.ts
17
18diff --git a/include/js_api/@ohos.ai.mindSporeLite.d.ts b/include/js_api/@ohos.ai.mindSporeLite.d.ts
19new file mode 100644
20index 00000000..005f68eb
21--- /dev/null
22+++ b/include/js_api/@ohos.ai.mindSporeLite.d.ts
23@@ -0,0 +1,485 @@
24+/*
25+ * Copyright (c) 2023 Huawei Device Co., Ltd.
26+ * Licensed under the Apache License, Version 2.0 (the "License");
27+ * you may not use this file except in compliance with the License.
28+ * You may obtain a copy of the License at
29+ *
30+ * http://www.apache.org/licenses/LICENSE-2.0
31+ *
32+ * Unless required by applicable law or agreed to in writing, software
33+ * distributed under the License is distributed on an "AS IS" BASIS,
34+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
35+ * See the License for the specific language governing permissions and
36+ * limitations under the License.
37+ */
38+
39+import { Callback } from './@ohos.base';
40+
41+/**
42+ * @namespace mindSporeLite
43+ * @syscap SystemCapability.AI.MindSporeLite
44+ * @stagemodelonly
45+ * @since 10
46+ */
47+declare namespace mindSporeLite {
48+  /**
49+   * Creates a Model instance from a file path.
50+   * @param { string } model - model indicates the path of the model file to be loaded
51+   * @param { Callback<Model> } callback - the callback used to return the Model instance
52+   * @syscap SystemCapability.AI.MindSporeLite
53+   * @stagemodelonly
54+   * @since 10
55+   */
56+  function loadModelFromFile(
57+    model: string, callback: Callback<Model>): void;
58+
59+  /**
60+   * Creates a Model instance from a file path.
61+   * @param { string } model - model indicates the path of the model file to be loaded
62+   * @param { Context } context - context indicates the model context information
63+   * @param { Callback<Model> } callback - the callback used to return the Model instance
64+   * @syscap SystemCapability.AI.MindSporeLite
65+   * @stagemodelonly
66+   * @since 10
67+   */
68+  function loadModelFromFile(
69+    model: string,
70+    context: Context, callback: Callback<Model>): void;
71+
72+  /**
73+   * Creates a Model instance from a file path.
74+   * @param { string } model - model indicates the path of the model file to be loaded
75+   * @param { Context } [context] - context indicates the model context information
76+   * @return { Promise<Model> } the promise returned by the function.
77+   * @syscap SystemCapability.AI.MindSporeLite
78+   * @stagemodelonly
79+   * @since 10
80+   */
81+  function loadModelFromFile(
82+    model: string,
83+    context?: Context): Promise<Model>;
84+
85+  /**
86+   * Creates a Model instance from a model buffer.
87+   * @param { ArrayBuffer } model - model indicates the model buffer to be loaded
88+   * @param { Callback<Model> } callback - the callback used to return the Model instance
89+   * @syscap SystemCapability.AI.MindSporeLite
90+   * @stagemodelonly
91+   * @since 10
92+   */
93+  function loadModelFromBuffer(
94+    model: ArrayBuffer, callback: Callback<Model>): void;
95+
96+  /**
97+   * Creates a Model instance from a model buffer.
98+   * @param { ArrayBuffer } model - model indicates the model buffer to be loaded
99+   * @param { Context } context - context indicates the model context information
100+   * @param { Callback<Model> } callback - the callback used to return the Model instance
101+   * @syscap SystemCapability.AI.MindSporeLite
102+   * @stagemodelonly
103+   * @since 10
104+   */
105+  function loadModelFromBuffer(
106+    model: ArrayBuffer,
107+    context: Context, callback: Callback<Model>): void;
108+
109+  /**
110+   * Creates a Model instance from a model buffer.
111+   * @param { ArrayBuffer } model - model indicates the model buffer to be loaded
112+   * @param { Context } [context] - context indicates the model context information
113+   * @return { Promise<Model> } the promise returned by the function.
114+   * @syscap SystemCapability.AI.MindSporeLite
115+   * @stagemodelonly
116+   * @since 10
117+   */
118+  function loadModelFromBuffer(
119+    model: ArrayBuffer,
120+    context?: Context): Promise<Model>;
121+
122+  /**
123+   * Creates a Model instance from a file descriptor.
124+   * @param { number } model - model indicates the file descriptor of the model to be loaded
125+   * @param { Callback<Model> } callback - the callback used to return the Model instance
126+   * @syscap SystemCapability.AI.MindSporeLite
127+   * @stagemodelonly
128+   * @since 10
129+   */
130+  function loadModelFromFd(
131+    model: number, callback: Callback<Model>): void;
132+
133+  /**
134+   * Creates a Model instance from a file descriptor.
135+   * @param { number } model - model indicates the file descriptor of the model to be loaded
136+   * @param { Context } context - context indicates the model context information
137+   * @param { Callback<Model> } callback - the callback used to return the Model instance
138+   * @syscap SystemCapability.AI.MindSporeLite
139+   * @stagemodelonly
140+   * @since 10
141+   */
142+  function loadModelFromFd(
143+    model: number,
144+    context: Context, callback: Callback<Model>): void;
145+
146+  /**
147+   * Creates a Model instance from a file descriptor.
148+   * @param { number } model - model indicates the file descriptor of the model to be loaded
149+   * @param { Context } [context] - context indicates the model context information
150+   * @return { Promise<Model> } the promise returned by the function.
151+   * @syscap SystemCapability.AI.MindSporeLite
152+   * @stagemodelonly
153+   * @since 10
154+   */
155+  function loadModelFromFd(
156+    model: number,
157+    context?: Context): Promise<Model>;
158+
159+  /**
160+   * Provides model management functions, including getting inputs, prediction, and resizing.
161+   * @typedef Model
162+   * @syscap SystemCapability.AI.MindSporeLite
163+   * @stagemodelonly
164+   * @since 10
165+   */
166+  interface Model {
167+    /**
168+     * Get model input tensors.
169+     * @return { MSTensor[] } the MSTensor array of the inputs.
170+     * @syscap SystemCapability.AI.MindSporeLite
171+     * @stagemodelonly
172+     * @since 10
173+     */
174+    getInputs(): MSTensor[];
175+
176+    /**
177+     * Infers the model.
178+     * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
179+     * @param { Callback<MSTensor[]> } callback - the callback used to return the MSTensor array of the outputs.
180+     * @syscap SystemCapability.AI.MindSporeLite
181+     * @stagemodelonly
182+     * @since 10
183+     */
184+     predict(inputs: MSTensor[], callback: Callback<MSTensor[]>): void;
185+
186+    /**
187+     * Infers the model.
188+     * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
189+     * @return { Promise<MSTensor[]> } the promise of the MSTensor array of the outputs.
190+     * @syscap SystemCapability.AI.MindSporeLite
191+     * @stagemodelonly
192+     * @since 10
193+     */
194+     predict(inputs: MSTensor[]): Promise<MSTensor[]>;
195+
196+    /**
197+     * Resizes the model inputs.
198+     * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
199+     * @param { Array<Array<number>> } dims - indicates the target shapes of the inputs, in the same order as the inputs
200+     * @return { boolean } true if the resize operation is successful; returns false otherwise
201+     * @syscap SystemCapability.AI.MindSporeLite
202+     * @stagemodelonly
203+     * @since 10
204+     */
205+     resize(inputs: MSTensor[], dims: Array<Array<number>>): boolean;
206+  }
207+
208+  /**
209+   * Provides the device configurations
210+   * @typedef Context
211+   * @syscap SystemCapability.AI.MindSporeLite
212+   * @stagemodelonly
213+   * @since 10
214+   */
215+  interface Context {
216+    /**
217+      * The target device. Optional device names are "cpu" and "nnrt". If not set, the default is "cpu".
218+      * @type {string[]}
219+      * @since 10
220+      */
221+    target?: string[];
222+    /**
223+      * The CPU device information.
224+      * @type {CpuDevice}
225+      * @since 10
226+      */
227+    cpu?: CpuDevice;
228+    /**
229+      * The NNRT device information.
230+      * @type {NNRTDevice}
231+      * @since 10
232+      */
233+    nnrt?: NNRTDevice;
234+  }
235+
236+  /**
237+   * Provides the CPU device info
238+   * @typedef CpuDevice
239+   * @syscap SystemCapability.AI.MindSporeLite
240+   * @stagemodelonly
241+   * @since 10
242+   */
243+  interface CpuDevice {
244+    /**
245+      * The number of threads used in model prediction.
246+      * @type {number}
247+      * @since 10
248+      */
249+    threadNum?: number;
250+    /**
251+      * The thread affinity mode
252+      * @type {ThreadAffinityMode}
253+      * @since 10
254+      */
255+    threadAffinityMode?: ThreadAffinityMode;
256+    /**
257+      * The thread affinity core list
258+      * @type {number[]}
259+      * @since 10
260+      */
261+    threadAffinityCoreList?: number[];
262+    /**
263+      * The precision mode
264+      * @type {string}
265+      * @since 10
266+      */
267+    precisionMode?: string;
268+  }
269+
270+  /**
271+   * Provides the NNRT device info
272+   * @typedef NNRTDevice
273+   * @syscap SystemCapability.AI.MindSporeLite
274+   * @stagemodelonly
275+   * @since 10
276+   */
277+  interface NNRTDevice {
278+  }
279+
280+  /**
281+   * Enum for the CPU thread affinity mode.
282+   * @enum {number}
283+   * @syscap SystemCapability.AI.MindSporeLite
284+   * @stagemodelonly
285+   * @since 10
286+   */
287+  export enum ThreadAffinityMode {
288+    /**
289+     * Thread affinity mode is no bind.
290+     * @syscap SystemCapability.AI.MindSporeLite
291+     * @since 10
292+     */
293+    NO_AFFINITIES = 0,
294+
295+    /**
296+     * Thread affinity mode is big cores first
297+     * @syscap SystemCapability.AI.MindSporeLite
298+     * @since 10
299+     */
300+    BIG_CORES_FIRST = 1,
301+
302+    /**
303+     * Thread affinity mode is little cores first
304+     * @syscap SystemCapability.AI.MindSporeLite
305+     * @since 10
306+     */
307+    LITTLE_CORES_FIRST = 2,
308+  }
309+
310+  /**
311+   * Provides the MSTensor definition.
312+   * @typedef MSTensor
313+   * @syscap SystemCapability.AI.MindSporeLite
314+   * @stagemodelonly
315+   * @since 10
316+   */
317+  interface MSTensor {
318+    /**
319+      * The name of the tensor.
320+      * @type {string}
321+      * @since 10
322+      */
323+    name: string;
324+    /**
325+      * The shape of the tensor.
326+      * @type {number[]}
327+      * @since 10
328+      */
329+    shape: number[];
330+    /**
331+      * The number of elements in the tensor.
332+      * @type {number}
333+      * @since 10
334+      */
335+    elementNum: number;
336+    /**
337+      * The data size of the tensor.
338+      * @type {number}
339+      * @since 10
340+      */
341+    dataSize: number;
342+    /**
343+      * The data type of the tensor.
344+      * @type {DataType}
345+      * @since 10
346+      */
347+    dtype: DataType;
348+    /**
349+      * The format of the tensor.
350+      * @type {Format}
351+      * @since 10
352+      */
353+    format: Format;
354+
355+    /**
356+     * Get MSTensor data
357+     * @return { ArrayBuffer } the data of tensor
358+     * @syscap SystemCapability.AI.MindSporeLite
359+     * @stagemodelonly
360+     * @since 10
361+     */
362+    getData(): ArrayBuffer;
363+
364+    /**
365+     * Set MSTensor data
366+     * @param { ArrayBuffer } inputArray - indicates the buffer of tensor
367+     * @syscap SystemCapability.AI.MindSporeLite
368+     * @stagemodelonly
369+     * @since 10
370+     */
371+    setData(inputArray: ArrayBuffer): void;
372+  }
373+
374+  /**
375+   * Enum for the MSTensor data type.
376+   * @enum {number}
377+   * @syscap SystemCapability.AI.MindSporeLite
378+   * @stagemodelonly
379+   * @since 10
380+   */
381+  export enum DataType {
382+   /**
383+     * data type is unknown
384+     * @syscap SystemCapability.AI.MindSporeLite
385+     * @since 10
386+     */
387+    TYPE_UNKNOWN = 0,
388+   /**
389+     * data type is int8
390+     * @syscap SystemCapability.AI.MindSporeLite
391+     * @since 10
392+     */
393+    NUMBER_TYPE_INT8 = 32,
394+   /**
395+     * data type is int16
396+     * @syscap SystemCapability.AI.MindSporeLite
397+     * @since 10
398+     */
399+    NUMBER_TYPE_INT16 = 33,
400+   /**
401+     * data type is int32
402+     * @syscap SystemCapability.AI.MindSporeLite
403+     * @since 10
404+     */
405+    NUMBER_TYPE_INT32 = 34,
406+   /**
407+     * data type is int64
408+     * @syscap SystemCapability.AI.MindSporeLite
409+     * @since 10
410+     */
411+    NUMBER_TYPE_INT64 = 35,
412+   /**
413+     * data type is uint8
414+     * @syscap SystemCapability.AI.MindSporeLite
415+     * @since 10
416+     */
417+    NUMBER_TYPE_UINT8 = 37,
418+   /**
419+     * data type is uint16
420+     * @syscap SystemCapability.AI.MindSporeLite
421+     * @since 10
422+     */
423+    NUMBER_TYPE_UINT16 = 38,
424+   /**
425+     * data type is uint32
426+     * @syscap SystemCapability.AI.MindSporeLite
427+     * @since 10
428+     */
429+    NUMBER_TYPE_UINT32 = 39,
430+   /**
431+     * data type is uint64
432+     * @syscap SystemCapability.AI.MindSporeLite
433+     * @since 10
434+     */
435+    NUMBER_TYPE_UINT64 = 40,
436+   /**
437+     * data type is float16
438+     * @syscap SystemCapability.AI.MindSporeLite
439+     * @since 10
440+     */
441+    NUMBER_TYPE_FLOAT16 = 42,
442+   /**
443+     * data type is float32
444+     * @syscap SystemCapability.AI.MindSporeLite
445+     * @since 10
446+     */
447+    NUMBER_TYPE_FLOAT32 = 43,
448+   /**
449+     * data type is float64
450+     * @syscap SystemCapability.AI.MindSporeLite
451+     * @since 10
452+     */
453+    NUMBER_TYPE_FLOAT64 = 44,
454+  }
455+
456+  /**
457+   * Enum for the MSTensor format.
458+   * @enum {number}
459+   * @syscap SystemCapability.AI.MindSporeLite
460+   * @stagemodelonly
461+   * @since 10
462+   */
463+  export enum Format {
464+   /**
465+     * data format is default
466+     * @syscap SystemCapability.AI.MindSporeLite
467+     * @since 10
468+     */
469+    DEFAULT_FORMAT = -1,
470+   /**
471+     * data format is NCHW
472+     * @syscap SystemCapability.AI.MindSporeLite
473+     * @since 10
474+     */
475+    NCHW = 0,
476+   /**
477+     * data format is NHWC
478+     * @syscap SystemCapability.AI.MindSporeLite
479+     * @since 10
480+     */
481+    NHWC = 1,
482+   /**
483+     * data format is NHWC4
484+     * @syscap SystemCapability.AI.MindSporeLite
485+     * @since 10
486+     */
487+    NHWC4 = 2,
488+   /**
489+     * data format is HWKC
490+     * @syscap SystemCapability.AI.MindSporeLite
491+     * @since 10
492+     */
493+    HWKC = 3,
494+   /**
495+     * data format is HWCK
496+     * @syscap SystemCapability.AI.MindSporeLite
497+     * @since 10
498+     */
499+    HWCK = 4,
500+   /**
501+     * data format is KCHW
502+     * @syscap SystemCapability.AI.MindSporeLite
503+     * @since 10
504+     */
505+    KCHW = 5,
506+  }
507+}
508+export default mindSporeLite;
509diff --git a/include/js_api/@ohos.ai.mindspore.d.ts b/include/js_api/@ohos.ai.mindspore.d.ts
510deleted file mode 100644
511index ce59f029..00000000
512--- a/include/js_api/@ohos.ai.mindspore.d.ts
513+++ /dev/null
514@@ -1,222 +0,0 @@
515-/*
516-* Copyright (C) 2023 Huawei Device Co., Ltd.
517-* Licensed under the Apache License, Version 2.0 (the "License");
518-* you may not use this file except in compliance with the License.
519-* You may obtain a copy of the License at
520-*
521-* http://www.apache.org/licenses/LICENSE-2.0
522-*
523-* Unless required by applicable law or agreed to in writing, software
524-* distributed under the License is distributed on an "AS IS" BASIS,
525-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
526-* See the License for the specific language governing permissions and
527-* limitations under the License.
528-*/
529-
530-import { ErrorCallback, AsyncCallback, Callback } from './basic';
531-
532-/**
533- * @name mslite
534- * @since 9
535- * @import import mslite from '@ohos.mslite'
536- */
537-declare namespace mslite {
538-  /**
539-   * Creates an MSLiteModel instance.
540-   * @since 9
541-   * @syscap SystemCapability.MsLite.MsLiteModel
542-   * @import import mslite from '@ohos.mslite'
543-   * @param model The path to the model (string)
544-   * @param options Options related to model inference.
545-   * @throws { BusinessError } 401 - invaild path. Return by callback.
546-   * @return A Promise instance used to return MSLiteModel instance if the operation is successful; returns null otherwise.
547-   */
548-  function loadModelFromFile(
549-    model: string,
550-    options?: Context): Promise<MSLiteModel>;
551-
552-  /**
553-   * Creates an MSLiteModel instance.
554-   * @since 9
555-   * @syscap SystemCapability.MsLite.MsLiteModel
556-   * @import import mslite from '@ohos.mslite'
557-   * @param model The model content in memory(ArrayBuffer).
558-   * @param options Options related to model inference.
559-   * @throws { BusinessError } 401 - No memory. Return by callback.
560-   * @return A Promise instance used to return MSLiteModel instance if the operation is successful; returns null otherwise.
561-   */
562-  function loadModelFromBuffer(
563-    model: ArrayBuffer,
564-    options?: Context): Promise<MSLiteModel>;
565-
566-  /**
567-   * Creates an MSLiteModel instance.
568-   * @since 9
569-   * @syscap SystemCapability.MsLite.MsLiteModel
570-   * @import import mslite from '@ohos.mslite'
571-   * @param model The memory fd to the model (number).
572-   * @param options Options related to model inference.
573-   * @throws { BusinessError } 401 - invaild fd. Return by callback.
574-   * @return A Promise instance used to return MSLiteModel instance if the operation is successful; returns null otherwise.
575-   */
576-  function loadModelFromFd(
577-    model: number,
578-    options?: Context): Promise<MSLiteModel>;
579-
580-    /**
581-   * Manages model. Before calling an MSLiteModel method, you must use loadMSLiteModel()
582-   * to create an MSLiteModel instance.
583-   * @since 9
584-   * @syscap SystemCapability.MsLite.MsLiteModel
585-   */
586-  interface MSLiteModel {
587-    /**
588-     * Get model input tensors.
589-     * @since 9
590-     * @syscap SystemCapability.MsLite.MsLiteModel
591-     * @return MSTensor Array
592-     */
593-    getInputs(): MSTensor[];
594-
595-    /**
596-     * Infer model.
597-     * @since 9
598-     * @syscap SystemCapability.MsLite.MsLiteModel
599-     * @inputs inputs tensor
600-     * @return A Promise instance used to return MSTensor array if the operation is successful; returns null otherwise.
601-     */
602-     predict(inputs: MSTensor[]): Promise<MSTensor[]>;
603-
604-     /**
605-     * resize model input.
606-     * @since 9
607-     * @syscap SystemCapability.MsLite.MsLiteModel
608-     * @inputs inputs tensor
609-     * @dims   resize shape,the order is same with inputs
610-     * @return  true if the operation is successful; returns false otherwise.
611-     */
612-     resize(inputs: MSTensor[], dims: Array<Array<number>>): boolean;
613-  }
614-
615-  /**
616-   * Provides the device configurations.
617-   * @since 9
618-   * @syscap SystemCapability.MsLite.MsLiteModel
619-   */
620-  interface Context {
621-    target?: string[];
622-    cpu?:CpuDevice;
623-    nnrt?:NnrtDevice;
624-  }
625-
626-  /**
627-   * Provides the CPU device info.
628-   * @since 9
629-   * @syscap SystemCapability.MsLite.MsLiteModel
630-   */
631-  interface CpuDevice {
632-    thread_num?: number
633-    thread_affinity_mode?: ThreadAffinityMode;
634-    thread_affinity_core_list?: number[];
635-    precision_mode?: string;
636-  }
637-
638-  /**
639-   * Provides the NNRT device info.
640-   * @since 9
641-   * @syscap SystemCapability.MsLite.MsLiteModel
642-   */
643-  interface NnrtDevice {
644-  }
645-
646-  /**
647-   * Provides CPU thread affinity mode.
648-   * @since 9
649-   * @syscap SystemCapability.MsLite.Context
650-   */
651-  enum ThreadAffinityMode {
652-    /**
653-     * NO_BIND.
654-     * @since 9
655-     * @syscap SystemCapability.MsLite.Context
656-     */
657-    NO_AFFINITIES = 0,
658-
659-    /**
660-     * BIG_CORES_FIRST.
661-     * @since 9
662-     * @syscap SystemCapability.MsLite.Context
663-     */
664-    BIG_CORES_FIRST = 1,
665-
666-    /**
667-     * LITTLE_CORES_FIRST.
668-     * @since 9
669-     * @syscap SystemCapability.MsLite.Context
670-     */
671-    LITTLE_CORES_FIRST = 2,
672-  }
673-
674-  /**
675-   * Provides MSTensor defination.
676-   * @since 9
677-   * @syscap SystemCapability.MsLite.MsTensor
678-   */
679-  interface MSTensor {
680-    /** The name of the tensor. */
681-    'name': string;
682-    /** The shape of the tensor. */
683-    'shape': number[];
684-    /** Number of elements in the tensor. */
685-    'element_num': number;
686-    /** Number of elements in the tensor. */
687-    'data_size': number;
688-    /** The data type for the array. */
689-    'dtype': number;
690-    /** The format type of the tensor. */
691-    'format': number;
692-
693-    /**
694-     * Get MSTensor data.
695-     * @since 9
696-     * @syscap SystemCapability.MsLite.MsTensor
697-     * @return ArrayBuffer.
698-     */
699-    data(): ArrayBuffer;
700-
701-    /**
702-     * Set MSTensor data.
703-     * @since 9
704-     * @syscap SystemCapability.MsLite.MsTensor
705-     * @param inputArray
706-     */
707-    setData(inputArray: ArrayBuffer): void;
708-  }
709-
710-  enum DataType {
711-    kTypeUnknown = 0,
712-    kNumberTypeInt8 = 32,
713-    kNumberTypeInt16 = 33,
714-    kNumberTypeInt32 = 34,
715-    kNumberTypeInt64 = 35,
716-    kNumberTypeUInt8 = 37,
717-    kNumberTypeUInt16 = 38,
718-    kNumberTypeUInt32 = 39,
719-    kNumberTypeUInt64 = 40,
720-    kNumberTypeFloat16 = 42,
721-    kNumberTypeFloat32 = 43,
722-    kNumberTypeFloat64 = 44,
723-    kNumberTypeEnd = 46,
724-  }
725-
726-  enum Format {
727-    DEFAULT_FORMAT = -1,
728-    NCHW = 0,
729-    NHWC = 1,
730-    NHWC4 = 2,
731-    HWKC = 3,
732-    HWCK = 4,
733-    KCHW = 5,
734-  }
735-}
736-export default mslite;
737\ No newline at end of file
738diff --git a/mindspore/lite/BUILD.gn b/mindspore/lite/BUILD.gn
739index 7032b028..9ad0caf7 100644
740--- a/mindspore/lite/BUILD.gn
741+++ b/mindspore/lite/BUILD.gn
742@@ -70,7 +70,7 @@ ohos_group("mindspore") {
743     ":mindspore_lib",
744     ":mindspore_train_lib",
745     "mindir:mindir_lib",
746-    "src/runtime/js_api:mslite"
747+    "src/runtime/js_api:mindspore_lite_napi"
748   ]
749 }
750
751diff --git a/mindspore/lite/src/runtime/js_api/BUILD.gn b/mindspore/lite/src/runtime/js_api/BUILD.gn
752index 44669c26..04031370 100644
753--- a/mindspore/lite/src/runtime/js_api/BUILD.gn
754+++ b/mindspore/lite/src/runtime/js_api/BUILD.gn
755@@ -14,7 +14,7 @@
756 import("//build/ohos.gni")
757 import("//build/ohos/ace/ace.gni")
758
759-ohos_shared_library("mslite") {
760+ohos_shared_library("mindspore_lite_napi") {
761   include_dirs = [
762     "//third_party/mindspore/mindspore-src/source/",
763     "//third_party/mindspore/mindspore-src/source/include/api",
764diff --git a/mindspore/lite/src/runtime/js_api/common_napi.cc b/mindspore/lite/src/runtime/js_api/common_napi.cc
765index 6fa2501e..5054a317 100644
766--- a/mindspore/lite/src/runtime/js_api/common_napi.cc
767+++ b/mindspore/lite/src/runtime/js_api/common_napi.cc
768@@ -64,17 +64,17 @@ int32_t CommonNapi::GetPropertyInt32(napi_env env, napi_value config_obj, const
769   napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
770
771   if (status != napi_ok || !exist) {
772-    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
773+    MS_LOG(WARNING) << "can not find " << type.c_str() << " property";
774     return ERR_NOT_EXISTED_PARAM;
775   }
776
777   if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
778-    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
779+    MS_LOG(WARNING) << "get " << type.c_str() << " property fail";
780     return ERR_INVALID_PARAM;
781   }
782
783   if (napi_get_value_int32(env, item, &result) != napi_ok) {
784-    MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
785+    MS_LOG(WARNING) << "get " << type.c_str() << " property value fail";
786     return ERR_INVALID_PARAM;
787   }
788   return SUCCESS;
789@@ -88,23 +88,19 @@ int32_t CommonNapi::GetPropertyString(napi_env env, napi_value config_obj, const
790   size_t length = 0;
791
792   napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
793-  if (status != napi_ok || !exist) {
794-    MS_LOG(ERROR) << "can not find target property";
795-    return ERR_NOT_EXISTED_PARAM;
796-  }
797
798   if (status != napi_ok || !exist) {
799-    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
800+    MS_LOG(WARNING) << "can not find " << type.c_str() << " property";
801     return ERR_NOT_EXISTED_PARAM;
802   }
803
804   if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
805-    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
806+    MS_LOG(WARNING) << "get " << type.c_str() << " property fail";
807     return ERR_INVALID_PARAM;
808   }
809
810   if (napi_get_value_string_utf8(env, item, buffer, SIZE, &length) != napi_ok) {
811-    MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
812+    MS_LOG(WARNING) << "get " << type.c_str() << " property value fail";
813     return ERR_INVALID_PARAM;
814   }
815   result = std::string(buffer);
816@@ -117,34 +113,34 @@ int32_t CommonNapi::GetPropertyInt32Array(napi_env env, napi_value config_obj, c
817   bool exist = false;
818   napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
819   if (status != napi_ok || !exist) {
820-    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
821+    MS_LOG(WARNING) << "can not find " << type.c_str() << " property";
822     return ERR_NOT_EXISTED_PARAM;
823   }
824
825   if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
826-    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
827+    MS_LOG(WARNING) << "get " << type.c_str() << " property fail";
828     return ERR_INVALID_PARAM;
829   }
830
831   uint32_t array_length = 0;
832   status = napi_get_array_length(env, item, &array_length);
833   if (status != napi_ok || array_length <= 0) {
834-    MS_LOG(ERROR) << "can not get array length";
835+    MS_LOG(WARNING) << "can not get array length";
836     return ERR_INVALID_PARAM;
837   }
838   MS_LOG(DEBUG) << "GetPropertyInt32Array array_length: " << array_length;
839
840   for (size_t i = 0; i < array_length; i++) {
841-    int32_t int_value = 0;
842+    int32_t int_value = {0};
843     napi_value element = nullptr;
844     status = napi_get_element(env, item, i, &element);
845     if (status != napi_ok) {
846-      MS_LOG(ERROR) << "can not get element";
847+      MS_LOG(WARNING) << "can not get element";
848       return ERR_INVALID_PARAM;
849     }
850
851     if (napi_get_value_int32(env, element, &int_value) != napi_ok) {
852-      MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
853+      MS_LOG(WARNING) << "get " << type.c_str() << " property value fail";
854       return ERR_INVALID_PARAM;
855     }
856     result.push_back(int_value);
857@@ -160,19 +156,19 @@ int32_t CommonNapi::GetPropertyStringArray(napi_env env, napi_value config_obj,
858   napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
859
860   if (status != napi_ok || !exist) {
861-    MS_LOG(ERROR) << "can not find " << type.c_str() << " property";
862+    MS_LOG(WARNING) << "can not find " << type.c_str() << " property";
863     return ERR_NOT_EXISTED_PARAM;
864   }
865
866   if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
867-    MS_LOG(ERROR) << "get " << type.c_str() << " property fail";
868+    MS_LOG(WARNING) << "get " << type.c_str() << " property fail";
869     return ERR_INVALID_PARAM;
870   }
871
872   uint32_t array_length = 0;
873   status = napi_get_array_length(env, item, &array_length);
874   if (status != napi_ok || array_length <= 0) {
875-    MS_LOG(ERROR) << "can not get array length";
876+    MS_LOG(WARNING) << "can not get array length";
877     return ERR_INVALID_PARAM;
878   }
879
880@@ -183,12 +179,12 @@ int32_t CommonNapi::GetPropertyStringArray(napi_env env, napi_value config_obj,
881     napi_value element = nullptr;
882     status = napi_get_element(env, item, i, &element);
883     if (status != napi_ok) {
884-      MS_LOG(ERROR) << "can not get element";
885+      MS_LOG(WARNING) << "can not get element";
886       return ERR_INVALID_PARAM;
887     }
888
889     if (napi_get_value_string_utf8(env, element, buffer, SIZE, &length) != napi_ok) {
890-      MS_LOG(ERROR) << "get " << type.c_str() << " property value fail";
891+      MS_LOG(WARNING) << "get " << type.c_str() << " property value fail";
892       return ERR_INVALID_PARAM;
893     }
894     result.push_back(std::string(buffer));
895diff --git a/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc b/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
896index 31f7fdc3..61ed5d28 100644
897--- a/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
898+++ b/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
899@@ -23,8 +23,8 @@
900 #include <vector>
901 #include <unistd.h>
902 #include <fcntl.h>
903-#include <sys/stat.h>
904 #include <sys/mman.h>
905+#include <sys/stat.h>
906 #include "include/js_api/mstensor_napi.h"
907 #include "include/js_api/common_napi.h"
908 #include "include/js_api/ms_parameters_napi.h"
909@@ -48,12 +48,15 @@ std::mutex MSLiteModelNapi::create_mutex_;
910 namespace {
911 const int ARGS_ONE = 1;
912 const int ARGS_TWO = 2;
913+const int ARGS_THREE = 3;
914
915 const int PARAM0 = 0;
916 const int PARAM1 = 1;
917+const int PARAM2 = 2;
918
919 const int SIZE = 100;
920-const std::string CLASS_NAME = "MSLiteModel";
921+
922+const std::string CLASS_NAME = "Model";
923
924 const std::unordered_map<std::string, DeviceType> kDeviceTypes{
925   {"cpu", kCPU},
926@@ -185,6 +188,7 @@ std::shared_ptr<mindspore::Model> MSLiteModelNapi::CreateModel(ModelInfo *model_
927       }
928       auto ret = model_ptr->Build(model_info_ptr->model_path, mindspore::kMindIR, context);
929       if (ret == mindspore::kSuccess) {
930+        MS_LOG(INFO) << "Build model from path success.";
931         return model_ptr;
932       }
933       return nullptr;
934@@ -199,8 +203,9 @@ std::shared_ptr<mindspore::Model> MSLiteModelNapi::CreateModel(ModelInfo *model_
935       }
936       auto ret = model_ptr->Build(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total,
937                                   mindspore::kMindIR, context);
938+
939       if (ret == mindspore::kSuccess) {
940-        MS_LOG(INFO) << "Build model from buffer success.";
941+        MS_LOG(INFO) << "Build model from fd success.";
942         return model_ptr;
943       }
944       (void)munmap(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total);
945@@ -291,7 +296,7 @@ int32_t MSLiteModelNapi::ParseModelInfo(napi_env env, napi_value root, ModelInfo
946     return ERR_INVALID_PARAM;
947   }
948   if ((valueType != napi_object) && (valueType != napi_string) && (valueType != napi_number)) {
949-    MS_LOG(ERROR) << "napi_type not support.";
950+    MS_LOG(ERROR) << "model is invalid.";
951     return ERR_INVALID_PARAM;
952   }
953
954@@ -318,18 +323,18 @@ int32_t MSLiteModelNapi::ParseModelInfo(napi_env env, napi_value root, ModelInfo
955       MS_LOG(ERROR) << "Parse model FD failed.";
956       return ERR_INVALID_PARAM;
957     }
958+
959     int size = lseek(fd, 0, SEEK_END);
960     (void)lseek(fd, 0, SEEK_SET);
961     auto mmap_buffers = mmap(NULL, size, PROT_READ, MAP_SHARED, fd, 0);
962     if (mmap_buffers == NULL) {
963       MS_LOG(ERROR) << "mmap_buffers is NULL.";
964       return ERR_INVALID_PARAM;
965-    }
966+    }
967     model_info.model_fd = fd;
968     model_info.model_buffer_data = static_cast<char *>(mmap_buffers);
969     model_info.model_buffer_total = size;
970     model_info.mode = kFD;
971-    close(fd);
972   } else {
973     char char_buf[SIZE];
974     size_t buf_length = 0;
975@@ -349,20 +354,20 @@ int32_t MSLiteModelNapi::ParseContextInfo(napi_env env, napi_value args, Context
976   napi_valuetype valueType;
977   napi_status status = napi_typeof(env, args, &valueType);
978   if ((status != napi_ok) || (valueType != napi_object)) {
979-    MS_LOG(ERROR) << "napi_typeof check failed.";
980+    MS_LOG(ERROR) << "context is invalid.";
981     return ERR_NOT_EXISTED_PARAM;
982   }
983
984   std::vector<std::string> str_values;
985   auto ret = CommonNapi::GetPropertyStringArray(env, args, "target", str_values);
986-  if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
987+  if (ret != SUCCESS) {
988     MS_LOG(ERROR) << "Get context target failed.";
989     return ret;
990   }
991   context.target.assign(str_values.begin(), str_values.end());
992
993   ret = GetCpuDeviceInfo(env, args, context);
994-  if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
995+  if (ret != ERR_NOT_EXISTED_PARAM && ret != SUCCESS) {
996     MS_LOG(ERROR) << "Get context CpuDeviceInfo failed.";
997     return ret;
998   }
999@@ -407,12 +412,12 @@ void MSLiteModelNapi::GetMSLiteModelAsyncCallbackComplete(napi_env env, napi_sta
1000
1001 void MSLiteModelNapi::CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContext *&asyncContext,
1002                                             const napi_value &valueParam) {
1003-  napi_value result[ARGS_TWO] = {0};
1004+  napi_value result[ARGS_ONE] = {0};
1005   napi_value retVal;
1006+  napi_value error = nullptr;
1007
1008-  if (!asyncContext->status) {
1009-    napi_get_undefined(env, &result[PARAM0]);
1010-    result[PARAM1] = valueParam;
1011+  if (!asyncContext->status) {
1012+    result[PARAM0] = valueParam;
1013   } else {
1014     napi_value message = nullptr;
1015     std::string messageValue = CommonNapi::getMessageByCode(asyncContext->status);
1016@@ -421,20 +426,20 @@ void MSLiteModelNapi::CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContex
1017     napi_value code = nullptr;
1018     napi_create_string_utf8(env, (std::to_string(asyncContext->status)).c_str(), NAPI_AUTO_LENGTH, &code);
1019
1020-    napi_create_error(env, code, message, &result[PARAM0]);
1021-    napi_get_undefined(env, &result[PARAM1]);
1022+    napi_create_error(env, code, message, &error);
1023+    napi_get_undefined(env, &result[PARAM0]);
1024   }
1025
1026-  if (asyncContext->deferred) {
1027+  if (asyncContext->deferred != nullptr) {
1028     if (!asyncContext->status) {
1029-      napi_resolve_deferred(env, asyncContext->deferred, result[PARAM1]);
1030+      napi_resolve_deferred(env, asyncContext->deferred, result[PARAM0]);
1031     } else {
1032-      napi_reject_deferred(env, asyncContext->deferred, result[PARAM0]);
1033+      napi_reject_deferred(env, asyncContext->deferred, error);
1034     }
1035   } else {
1036     napi_value callback = nullptr;
1037     napi_get_reference_value(env, asyncContext->callbackRef, &callback);
1038-    napi_call_function(env, nullptr, callback, ARGS_TWO, result, &retVal);
1039+    napi_call_function(env, nullptr, callback, ARGS_ONE, result, &retVal);
1040     napi_delete_reference(env, asyncContext->callbackRef);
1041   }
1042   napi_delete_async_work(env, asyncContext->work);
1043@@ -446,34 +451,50 @@ void MSLiteModelNapi::CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContex
1044 napi_value MSLiteModelNapi::LoadMSLiteModelFromFile(napi_env env, napi_callback_info info) {
1045   napi_status status;
1046   napi_value result = nullptr;
1047-
1048-  GET_PARAMS(env, info, ARGS_TWO);
1049+  const int32_t refCount = 1;
1050+  GET_PARAMS(env, info, ARGS_THREE);
1051
1052   std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1053
1054   int32_t ret;
1055   for (size_t i = PARAM0; i < argc; i++) {
1056+    napi_valuetype valueType = napi_undefined;
1057+    napi_typeof(env, argv[i], &valueType);
1058     if (i == PARAM0) {
1059       ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
1060       if (ret != SUCCESS) {
1061-        MS_LOG(ERROR) << "Parsing model info failed.";
1062+        MS_LOG(ERROR) << "Parsing model failed.";
1063         return result;
1064       }
1065     } else if (i == PARAM1) {
1066       ret = ParseContextInfo(env, argv[i], asyncContext->context);
1067-      if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1068-        MS_LOG(ERROR) << "Parsing context info failed.";
1069+      if (ret != SUCCESS) {
1070+        MS_LOG(ERROR) << "Parsing context failed.";
1071         return result;
1072       }
1073+    } else if (i == PARAM2) {
1074+      if (valueType == napi_function) {
1075+        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
1076+      }
1077+      break;
1078     } else {
1079       MS_LOG(ERROR) << "Invalid input params.";
1080       return result;
1081     }
1082   }
1083-  status = napi_create_promise(env, &asyncContext->deferred, &result);
1084-  if (status != napi_ok) {
1085-    MS_LOG(ERROR) << "create promise failed.";
1086-    return result;
1087+
1088+  if (asyncContext->callbackRef == nullptr) {
1089+    status = napi_create_promise(env, &asyncContext->deferred, &result);
1090+    if (status != napi_ok) {
1091+      MS_LOG(ERROR) << "create promise failed.";
1092+      return result;
1093+    }
1094+  } else {
1095+    status = napi_get_undefined(env, &result);
1096+    if (status != napi_ok) {
1097+      MS_LOG(ERROR) << "napi_get_undefined failed.";
1098+      return result;
1099+    }
1100   }
1101
1102   napi_value resource = nullptr;
1103@@ -501,34 +522,50 @@ napi_value MSLiteModelNapi::LoadMSLiteModelFromFile(napi_env env, napi_callback_
1104 napi_value MSLiteModelNapi::LoadMSLiteModelFromBuffer(napi_env env, napi_callback_info info) {
1105   napi_status status;
1106   napi_value result = nullptr;
1107-
1108-  GET_PARAMS(env, info, ARGS_TWO);
1109+  const int32_t refCount = 1;
1110+  GET_PARAMS(env, info, ARGS_THREE);
1111
1112   std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1113
1114   int32_t ret;
1115   for (size_t i = PARAM0; i < argc; i++) {
1116+    napi_valuetype valueType = napi_undefined;
1117+    napi_typeof(env, argv[i], &valueType);
1118     if (i == PARAM0) {
1119       ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
1120       if (ret != SUCCESS) {
1121-        MS_LOG(ERROR) << "Parsing model info failed.";
1122+        MS_LOG(ERROR) << "Parsing model failed.";
1123         return result;
1124       }
1125     } else if (i == PARAM1) {
1126       ret = ParseContextInfo(env, argv[i], asyncContext->context);
1127-      if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1128-        MS_LOG(ERROR) << "Parsing context info failed.";
1129+      if (ret != SUCCESS) {
1130+        MS_LOG(ERROR) << "Parsing context failed.";
1131         return result;
1132       }
1133+    } else if (i == PARAM2) {
1134+      if (valueType == napi_function) {
1135+        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
1136+      }
1137+      break;
1138     } else {
1139       MS_LOG(ERROR) << "Invalid input params.";
1140       return result;
1141     }
1142   }
1143-  status = napi_create_promise(env, &asyncContext->deferred, &result);
1144-  if (status != napi_ok) {
1145-    MS_LOG(ERROR) << "create promise failed.";
1146-    return result;
1147+
1148+  if (asyncContext->callbackRef == nullptr) {
1149+    status = napi_create_promise(env, &asyncContext->deferred, &result);
1150+    if (status != napi_ok) {
1151+      MS_LOG(ERROR) << "create promise failed.";
1152+      return result;
1153+    }
1154+  } else {
1155+    status = napi_get_undefined(env, &result);
1156+    if (status != napi_ok) {
1157+      MS_LOG(ERROR) << "napi_get_undefined failed.";
1158+      return result;
1159+    }
1160   }
1161
1162   napi_value resource = nullptr;
1163@@ -556,34 +593,50 @@ napi_value MSLiteModelNapi::LoadMSLiteModelFromBuffer(napi_env env, napi_callbac
1164 napi_value MSLiteModelNapi::LoadMSLiteModelFromFd(napi_env env, napi_callback_info info) {
1165   napi_status status;
1166   napi_value result = nullptr;
1167-
1168-  GET_PARAMS(env, info, ARGS_TWO);
1169+  const int32_t refCount = 1;
1170+  GET_PARAMS(env, info, ARGS_THREE);
1171
1172   std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1173
1174   int32_t ret;
1175   for (size_t i = PARAM0; i < argc; i++) {
1176+    napi_valuetype valueType = napi_undefined;
1177+    napi_typeof(env, argv[i], &valueType);
1178     if (i == PARAM0) {
1179       ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
1180       if (ret != SUCCESS) {
1181-        MS_LOG(ERROR) << "Parsing model info failed.";
1182+        MS_LOG(ERROR) << "Parsing model failed.";
1183         return result;
1184       }
1185     } else if (i == PARAM1) {
1186       ret = ParseContextInfo(env, argv[i], asyncContext->context);
1187-      if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
1188-        MS_LOG(ERROR) << "Parsing context info failed.";
1189+      if (ret != SUCCESS) {
1190+        MS_LOG(ERROR) << "Parsing context failed.";
1191         return result;
1192       }
1193+    } else if (i == PARAM2) {
1194+      if (valueType == napi_function) {
1195+        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
1196+      }
1197+      break;
1198     } else {
1199       MS_LOG(ERROR) << "Invalid input params.";
1200       return result;
1201     }
1202   }
1203-  status = napi_create_promise(env, &asyncContext->deferred, &result);
1204-  if (status != napi_ok) {
1205-    MS_LOG(ERROR) << "create promise failed.";
1206-    return result;
1207+
1208+  if (asyncContext->callbackRef == nullptr) {
1209+    status = napi_create_promise(env, &asyncContext->deferred, &result);
1210+    if (status != napi_ok) {
1211+      MS_LOG(ERROR) << "create promise failed.";
1212+      return result;
1213+    }
1214+  } else {
1215+    status = napi_get_undefined(env, &result);
1216+    if (status != napi_ok) {
1217+      MS_LOG(ERROR) << "napi_get_undefined failed.";
1218+      return result;
1219+    }
1220   }
1221
1222   napi_value resource = nullptr;
1223@@ -630,24 +683,32 @@ int32_t MSLiteModelNapi::GetCpuDeviceInfo(napi_env env, napi_value args, Context
1224   std::string str_value = "";
1225   std::vector<int32_t> affinity_cores;
1226
1227-  if (CommonNapi::GetPropertyInt32(env, config_item, "thread_num", int_value) == SUCCESS) {
1228-    MS_LOG(DEBUG) << "thread_num: " << int_value;
1229+  if (CommonNapi::GetPropertyInt32(env, config_item, "threadNum", int_value) == SUCCESS) {
1230+    MS_LOG(DEBUG) << "threadNum: " << int_value;
1231     context.cpu_device.thread_num = int_value;
1232+  } else {
1233+    context.cpu_device.thread_num = PARAM2;
1234   }
1235
1236-  if (CommonNapi::GetPropertyInt32(env, config_item, "thread_affinity_mode", int_value) == SUCCESS) {
1237-    MS_LOG(DEBUG) << "thread_affinity_mode: " << int_value;
1238-    context.cpu_device.thread_num = int_value;
1239+  if (CommonNapi::GetPropertyInt32(env, config_item, "threadAffinityMode", int_value) == SUCCESS) {
1240+    MS_LOG(DEBUG) << "threadAffinityMode: " << int_value;
1241+    context.cpu_device.thread_affinity_mode = int_value;
1242+  } else {
1243+    context.cpu_device.thread_affinity_mode = PARAM0;
1244   }
1245
1246-  if (CommonNapi::GetPropertyInt32Array(env, config_item, "thread_affinity_core_list", affinity_cores) == SUCCESS) {
1247-    MS_LOG(DEBUG) << "affinity_cores size: " << affinity_cores.size();
1248+  if (CommonNapi::GetPropertyInt32Array(env, config_item, "threadAffinityCoreList", affinity_cores) == SUCCESS) {
1249+    MS_LOG(DEBUG) << "affinityCores size: " << affinity_cores.size();
1250     context.cpu_device.thread_affinity_cores.assign(affinity_cores.begin(), affinity_cores.end());
1251+  } else {
1252+    context.cpu_device.thread_affinity_cores = {};
1253   }
1254
1255-  if (CommonNapi::GetPropertyString(env, config_item, "precision_mode", str_value) == SUCCESS) {
1256-    MS_LOG(DEBUG) << "precision_mode: " << str_value.c_str();
1257+  if (CommonNapi::GetPropertyString(env, config_item, "precisionMode", str_value) == SUCCESS) {
1258+    MS_LOG(DEBUG) << "precisionMode: " << str_value.c_str();
1259     context.cpu_device.precision_mode = str_value;
1260+  } else {
1261+    context.cpu_device.precision_mode = "enforce_fp32";
1262   }
1263   return SUCCESS;
1264 }
1265@@ -694,13 +755,12 @@ napi_value MSLiteModelNapi::GetInputs(napi_env env, napi_callback_info info) {
1266
1267   size_t size = inputs.size();
1268   MS_LOG(INFO) << "inputs size: " << size;
1269-
1270   napi_create_array_with_length(env, size, &jsResult);
1271   for (size_t i = 0; i < size; i++) {
1272     status = napi_set_element(env, jsResult, i, MSTensorNapi::NewInstance(env, tensor_inputs[i]));
1273     if (status != napi_ok) {
1274       MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
1275-    }
1276+    }
1277   }
1278   MS_LOG(INFO) << "get model inputs success: " << inputs[0].Name().c_str();
1279   return jsResult;
1280@@ -710,18 +770,17 @@ napi_value MSLiteModelNapi::Resize(napi_env env, napi_callback_info info) {
1281   napi_value undefinedResult = nullptr;
1282   bool result = false;
1283   napi_get_undefined(env, &undefinedResult);
1284-  napi_value argv[ARGS_TWO] = {0};
1285-  size_t argCount = 2;
1286+
1287   napi_value jsThis = nullptr;
1288   napi_value jsResult = nullptr;
1289   MSLiteModelNapi *modelNapi = nullptr;
1290-
1291+  napi_value argv[ARGS_TWO] = {0};
1292+  size_t argCount = PARAM2;
1293   napi_status status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
1294   if (status != napi_ok || jsThis == nullptr) {
1295     MS_LOG(ERROR) << "Failed to retrieve details about the callback";
1296     return undefinedResult;
1297   }
1298-
1299   status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
1300   if (status != napi_ok || modelNapi == nullptr) {
1301     MS_LOG(ERROR) << "get model napi error";
1302@@ -755,7 +814,7 @@ napi_value MSLiteModelNapi::Resize(napi_env env, napi_callback_info info) {
1303       return undefinedResult;
1304     }
1305
1306-    std::string property_name = "data";
1307+    std::string property_name = "getData";
1308     bool exist = false;
1309     napi_value data_func = nullptr;
1310
1311@@ -766,7 +825,7 @@ napi_value MSLiteModelNapi::Resize(napi_env env, napi_callback_info info) {
1312     }
1313
1314     if (status != napi_ok || !exist) {
1315-      MS_LOG(INFO) << "can not find " << property_name.c_str() << " property.";
1316+      MS_LOG(ERROR) << "can not find " << property_name.c_str() << " property.";
1317       return undefinedResult;
1318     }
1319
1320@@ -834,6 +893,7 @@ napi_value MSLiteModelNapi::Resize(napi_env env, napi_callback_info info) {
1321     }
1322
1323     status = napi_get_array_length(env, dim_element, &dim_size);
1324+    MS_LOG(DEBUG) << "dim size is: " << dim_size;
1325     if (status != napi_ok) {
1326       MS_LOG(ERROR) << "get new dim size error";
1327       return undefinedResult;
1328@@ -890,24 +950,48 @@ napi_value MSLiteModelNapi::PredictAsync(napi_env env, napi_callback_info info)
1329   napi_status status = napi_ok;
1330   napi_value undefinedResult = nullptr;
1331   napi_value result = nullptr;
1332-
1333+  const int32_t refCount = 1;
1334+  napi_valuetype valueType;
1335+
1336   std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
1337   if (asyncContext == nullptr) {
1338     MS_LOG(ERROR) << "MSLiteModelAsyncContext object create failed.";
1339     return undefinedResult;
1340   }
1341
1342-  GET_PARAMS(env, info, ARGS_ONE);
1343+  GET_PARAMS(env, info, ARGS_TWO);
1344+  for (size_t i = PARAM0; i < argc; i++) {
1345+    if (i == PARAM1) {
1346+      status = napi_typeof(env, argv[i], &valueType);
1347+      if ((status != napi_ok) || (valueType != napi_function)) {
1348+        MS_LOG(ERROR) << "napi_typeof check callback failed.";
1349+        return result;
1350+      }
1351+      status = napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
1352+      if (status != napi_ok) {
1353+        MS_LOG(ERROR) << "Failed to create reference of callback";
1354+        return result;
1355+      }
1356+    }
1357+  }
1358
1359   if (SetTensorData(env, thisVar, argv[PARAM0], asyncContext.get()) != SUCCESS) {
1360     MS_LOG(ERROR) << "Set tensor data failed.";
1361     return undefinedResult;
1362   }
1363
1364-  napi_create_promise(env, &asyncContext->deferred, &result);
1365-  if (status != napi_ok) {
1366-    MS_LOG(ERROR) << "create promise failed.";
1367-    return result;
1368+  if (asyncContext->callbackRef == nullptr) {
1369+    status = napi_create_promise(env, &asyncContext->deferred, &result);
1370+    if (status != napi_ok) {
1371+      MS_LOG(ERROR) << "create promise failed.";
1372+      return result;
1373+    }
1374+  } else {
1375+    status = napi_get_undefined(env, &result);
1376+    if (status != napi_ok) {
1377+      MS_LOG(ERROR) << "napi_get_undefined failed.";
1378+      return result;
1379+    }
1380   }
1381
1382   napi_value resource = nullptr;
1383@@ -966,18 +1050,14 @@ int32_t MSLiteModelNapi::SetTensorData(napi_env env, napi_value thisVar, napi_va
1384       return ERROR;
1385     }
1386
1387-    std::string property_name = "data";
1388+    std::string property_name = "getData";
1389     bool exist = false;
1390     napi_value data_func = nullptr;
1391
1392     napi_status status = napi_has_named_property(env, element, property_name.c_str(), &exist);
1393-    if (status != napi_ok || !exist) {
1394-      MS_LOG(ERROR) << "can not find target property";
1395-      return ERROR;
1396-    }
1397
1398     if (status != napi_ok || !exist) {
1399-      MS_LOG(INFO) << "can not find " << property_name.c_str() << " property.";
1400+      MS_LOG(ERROR) << "can not find " << property_name.c_str() << " property.";
1401       return ERROR;
1402     }
1403
1404@@ -994,7 +1074,6 @@ int32_t MSLiteModelNapi::SetTensorData(napi_env env, napi_value thisVar, napi_va
1405       MS_LOG(ERROR) << "napi call function error.";
1406       return ERROR;
1407     }
1408-
1409     status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
1410     if (status != napi_ok || js_data == nullptr) {
1411       MS_LOG(ERROR) << "Get js data error.";
1412diff --git a/mindspore/lite/src/runtime/js_api/mstensor_napi.cc b/mindspore/lite/src/runtime/js_api/mstensor_napi.cc
1413index a03bd484..cd8044e9 100644
1414--- a/mindspore/lite/src/runtime/js_api/mstensor_napi.cc
1415+++ b/mindspore/lite/src/runtime/js_api/mstensor_napi.cc
1416@@ -128,12 +128,12 @@ napi_value MSTensorNapi::GetConstructor(napi_env env) {
1417   napi_property_descriptor properties[] = {
1418     DECLARE_NAPI_GETTER("name", GetName),
1419     DECLARE_NAPI_GETTER("shape", GetShape),
1420-    DECLARE_NAPI_GETTER("element_num", GetElementNum),
1421+    DECLARE_NAPI_GETTER("elementNum", GetElementNum),
1422     DECLARE_NAPI_GETTER("dtype", GetDtype),
1423     DECLARE_NAPI_GETTER("format", GetFormat),
1424-    DECLARE_NAPI_GETTER("data_size", GetDataSize),
1425+    DECLARE_NAPI_GETTER("dataSize", GetDataSize),
1426
1427-    DECLARE_NAPI_FUNCTION("data", GetDataBuffer),
1428+    DECLARE_NAPI_FUNCTION("getData", GetDataBuffer),
1429     DECLARE_NAPI_FUNCTION("setData", SetData),
1430   };
1431
1432@@ -391,12 +391,6 @@ napi_value MSTensorNapi::SetData(napi_env env, napi_callback_info info) {
1433     return undefinedResult;
1434   }
1435
1436-  if (tensor->nativeMSTensor_->DataType() != mindspore::DataType::kNumberTypeFloat32) {
1437-    MS_LOG(ERROR) << "tensor data type must be Float32(43), but got "
1438-                  << static_cast<int>(tensor->nativeMSTensor_->DataType());
1439-    return undefinedResult;
1440-  }
1441-
1442   // convert napi_value to c++ type data
1443   void *js_data = nullptr;
1444   size_t length = 0;
1445--
14462.17.1
1447
1448