From b76ed2aef929564040297d967eb0f5fa402eacc5 Mon Sep 17 00:00:00 2001
From: fangzhou12
Date: Fri, 7 Jul 2023 16:27:31 +0800
Subject: [PATCH] enable enum

---
 include/js_api/common_napi.h                |  31 +++++
 include/js_api/mslite_model_napi.h          |  36 ++++++
 .../src/runtime/js_api/mslite_model_napi.cc | 109 ++++++++++++++++++
 3 files changed, 176 insertions(+)

diff --git a/include/js_api/common_napi.h b/include/js_api/common_napi.h
index 415f813e..d6e161dd 100644
--- a/include/js_api/common_napi.h
+++ b/include/js_api/common_napi.h
@@ -48,6 +48,37 @@ struct MSLiteAsyncContext {
   std::string errMessage = "";
 };
 
+enum ContextThreadAffinityMode : int32_t {
+  CONTEXT_AFFINITY_MODE = 0,
+  CONTEXT_BIG_CORES_FIRST,
+  CONTEXT_LITTLE_CORES_FIRST
+};
+
+enum TensorFormat : int32_t {
+  TENSOR_DEFAULT_FORMAT = -1,
+  TENSOR_NCHW,
+  TENSOR_NHWC,
+  TENSOR_NHWC4,
+  TENSOR_HWKC,
+  TENSOR_HWCK,
+  TENSOR_KCHW
+};
+
+enum TensorDataType : int32_t {
+  TENSOR_UNKNOWN = 0,
+  TENSOR_INT8 = 32,
+  TENSOR_INT16 = 33,
+  TENSOR_INT32 = 34,
+  TENSOR_INT64 = 35,
+  TENSOR_UINT8 = 37,
+  TENSOR_UINT16 = 38,
+  TENSOR_UINT32 = 39,
+  TENSOR_UINT64 = 40,
+  TENSOR_FLOAT16 = 42,
+  TENSOR_FLOAT32 = 43,
+  TENSOR_FLOAT64 = 44
+};
+
 enum ModelMode : int32_t {
   kBuffer = 0,
   kPath,
diff --git a/include/js_api/mslite_model_napi.h b/include/js_api/mslite_model_napi.h
index 717b595a..e85fc2f4 100644
--- a/include/js_api/mslite_model_napi.h
+++ b/include/js_api/mslite_model_napi.h
@@ -24,8 +24,37 @@
 #include "mslite_model_callback_napi.h"
 #include "napi/native_api.h"
 #include "napi/native_node_api.h"
+#include "include/js_api/common_napi.h"
 
 namespace mindspore {
+static const std::map<std::string, int32_t> tensorFormatMap = {
+  {"DEFAULT_FORMAT", TENSOR_DEFAULT_FORMAT},
+  {"NCHW", TENSOR_NCHW},
+  {"NHWC", TENSOR_NHWC},
+  {"NHWC4", TENSOR_NHWC4},
+  {"HWKC", TENSOR_HWKC},
+  {"HWCK", TENSOR_HWCK},
+  {"KCHW", TENSOR_KCHW}
+};
+static const std::map<std::string, int32_t> tensorDataTypeMap = {
+  {"TYPE_UNKNOWN", TENSOR_UNKNOWN},
+  {"NUMBER_TYPE_INT8", TENSOR_INT8},
+  {"NUMBER_TYPE_INT16", TENSOR_INT16},
+  {"NUMBER_TYPE_INT32", TENSOR_INT32},
+  {"NUMBER_TYPE_INT64", TENSOR_INT64},
+  {"NUMBER_TYPE_UINT8", TENSOR_UINT8},
+  {"NUMBER_TYPE_UINT16", TENSOR_UINT16},
+  {"NUMBER_TYPE_UINT32", TENSOR_UINT32},
+  {"NUMBER_TYPE_UINT64", TENSOR_UINT64},
+  {"NUMBER_TYPE_FLOAT16", TENSOR_FLOAT16},
+  {"NUMBER_TYPE_FLOAT32", TENSOR_FLOAT32},
+  {"NUMBER_TYPE_FLOAT64", TENSOR_FLOAT64}
+};
+static const std::map<std::string, int32_t> contextThreadAffinityModeMap = {
+  {"NO_AFFINITIES", CONTEXT_AFFINITY_MODE},
+  {"BIG_CORES_FIRST", CONTEXT_BIG_CORES_FIRST},
+  {"LITTLE_CORES_FIRST", CONTEXT_LITTLE_CORES_FIRST},
+};
 class MSLiteModelNapi {
  public:
   MSLiteModelNapi();
@@ -72,9 +101,16 @@ class MSLiteModelNapi {
                                      std::vector<std::shared_ptr<DeviceInfoContext>> &device_infos);
   static int32_t SetTensorData(napi_env env, napi_value thisVar, napi_value argv,
                                MSLiteModelAsyncContext *async_context);
+  static napi_status AddNamedProperty(napi_env env, napi_value object, const std::string name, int32_t enumValue);
+  static napi_value CreateFormatObject(napi_env env);
+  static napi_value CreateDataTypeObject(napi_env env);
+  static napi_value CreateThreadAffinityModeObject(napi_env env);
 
   static thread_local napi_ref constructor_;
   napi_env env_ = nullptr;
+  static napi_ref tensorFormat_;
+  static napi_ref tensorDataType_;
+  static napi_ref contextThreadAffinityMode_;
   static ModelInfo *model_info_;
   static ContextInfo *context_;
 
diff --git a/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc b/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
index 0f1934a7..2ecc478d 100644
--- a/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
+++ b/mindspore/lite/src/runtime/js_api/mslite_model_napi.cc
@@ -37,6 +37,9 @@ thread_local napi_ref MSLiteModelNapi::constructor_ = nullptr;
 ModelInfo *MSLiteModelNapi::model_info_ = nullptr;
 ContextInfo *MSLiteModelNapi::context_ = nullptr;
 std::mutex MSLiteModelNapi::create_mutex_;
+napi_ref MSLiteModelNapi::tensorFormat_ = nullptr;
+napi_ref MSLiteModelNapi::tensorDataType_ = nullptr;
+napi_ref MSLiteModelNapi::contextThreadAffinityMode_ = nullptr;
 
 #define GET_PARAMS(env, info, num) \
   size_t argc = num;               \
@@ -101,6 +104,9 @@ napi_value MSLiteModelNapi::Init(napi_env env, napi_value exports) {
     DECLARE_NAPI_STATIC_FUNCTION("loadModelFromFile", LoadMSLiteModelFromFile),
     DECLARE_NAPI_STATIC_FUNCTION("loadModelFromBuffer", LoadMSLiteModelFromBuffer),
     DECLARE_NAPI_STATIC_FUNCTION("loadModelFromFd", LoadMSLiteModelFromFd),
+    DECLARE_NAPI_PROPERTY("Format", CreateFormatObject(env)),
+    DECLARE_NAPI_PROPERTY("DataType", CreateDataTypeObject(env)),
+    DECLARE_NAPI_PROPERTY("ThreadAffinityMode", CreateThreadAffinityModeObject(env)),
   };
 
   napi_value constructor = nullptr;
@@ -133,6 +139,109 @@ napi_value MSLiteModelNapi::Init(napi_env env, napi_value exports) {
   return exports;
 }
 
+napi_value MSLiteModelNapi::CreateFormatObject(napi_env env)
+{
+  napi_value result = nullptr;
+  napi_status status;
+  std::string propName;
+  int32_t refCount = 1;
+
+  status = napi_create_object(env, &result);
+  if (status == napi_ok) {
+    for (auto &iter: tensorFormatMap) {
+      propName = iter.first;
+      status = AddNamedProperty(env, result, propName, iter.second);
+      if (status != napi_ok) {
+        MS_LOG(ERROR) << "Failed to add named prop in CreateFormatObject.";
+        break;
+      }
+      propName.clear();
+    }
+    if (status == napi_ok) {
+      status = napi_create_reference(env, result, refCount, &tensorFormat_);
+      if (status == napi_ok) {
+        return result;
+      }
+    }
+  }
+  MS_LOG(ERROR) << "CreateFormatObject failed.";
+  napi_get_undefined(env, &result);
+  return result;
+}
+
+napi_value MSLiteModelNapi::CreateDataTypeObject(napi_env env)
+{
+  napi_value result = nullptr;
+  napi_status status;
+  std::string propName;
+  int32_t refCount = 1;
+
+  status = napi_create_object(env, &result);
+  if (status == napi_ok) {
+    for (auto &iter: tensorDataTypeMap) {
+      propName = iter.first;
+      status = AddNamedProperty(env, result, propName, iter.second);
+      if (status != napi_ok) {
+        MS_LOG(ERROR) << "Failed to add named prop in CreateDataTypeObject.";
+        break;
+      }
+      propName.clear();
+    }
+    if (status == napi_ok) {
+      status = napi_create_reference(env, result, refCount, &tensorDataType_);
+      if (status == napi_ok) {
+        return result;
+      }
+    }
+  }
+  MS_LOG(ERROR) << "CreateDataTypeObject failed.";
+  napi_get_undefined(env, &result);
+  return result;
+}
+
+napi_value MSLiteModelNapi::CreateThreadAffinityModeObject(napi_env env)
+{
+  napi_value result = nullptr;
+  napi_status status;
+  std::string propName;
+  int32_t refCount = 1;
+
+  status = napi_create_object(env, &result);
+  if (status == napi_ok) {
+    for (auto &iter: contextThreadAffinityModeMap) {
+      propName = iter.first;
+      status = AddNamedProperty(env, result, propName, iter.second);
+      if (status != napi_ok) {
+        MS_LOG(ERROR) << "Failed to add named prop in CreateThreadAffinityModeObject.";
+        break;
+      }
+      propName.clear();
+    }
+    if (status == napi_ok) {
+      status = napi_create_reference(env, result, refCount, &contextThreadAffinityMode_);
+      if (status == napi_ok) {
+        return result;
+      }
+    }
+  }
Failed!"; + napi_get_undefined(env, &result); + return result; +} + +napi_status MSLiteModelNapi::AddNamedProperty(napi_env env, napi_value object, + const std::string name, int32_t enumValue) +{ + napi_status status; + napi_value enumNapiValue; + + status = napi_create_int32(env, enumValue, &enumNapiValue); + if (status == napi_ok) { + status = napi_set_named_property(env, object, name.c_str(), enumNapiValue); + } + return status; +} + std::shared_ptr MSLiteModelNapi::CreateModel(ModelInfo *model_info_ptr, ContextInfo *context_info_ptr) { // create and init context -- 2.17.1