From 7d6c8c9c2913622866630427022f6f0888cf278c Mon Sep 17 00:00:00 2001 From: Zhu Guodong Date: Wed, 19 Jul 2023 19:00:41 +0800 Subject: [PATCH] auto-apply 0037-npu-support-custom-model.patch --- .../runtime/delegate/nnrt/nnrt_delegate.cc | 42 +++++++++++++++---- .../src/runtime/delegate/nnrt/nnrt_delegate.h | 1 + 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc index db95727f..15b117ca 100644 --- a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc +++ b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc @@ -23,6 +23,7 @@ #include "interfaces/innerkits/c/neural_network_runtime_inner.h" #include "nnrt_model_kernel.h" #include "schema/model_generated.h" +#include "schema/ops_generated.h" #include "flatbuffers/flatbuffers.h" #include "runtime/tensor_category.h" @@ -62,6 +63,24 @@ Status NNRTDelegate::Build(DelegateModel *model) { return BuildNormalModel(model); } +bool NNRTDelegate::IsCustomModel() const { + // check if there is only one Custom kernel in LiteModel. 
+ if (lite_graph_ == nullptr) { + return false; + } + if (lite_graph_->all_nodes_.size() != 1) { + return false; + } + auto node = lite_graph_->all_nodes_[0]; + if (node == nullptr) { + return false; + } + if (node->node_type_ != mindspore::schema::PrimitiveType_Custom) { + return false; + } + return true; +} + #ifdef SUPPORT_NNRT_METAGRAPH bool NNRTDelegate::IsKirinNPU() const { const std::string kirin_npu_name_prefix = "NPU_"; @@ -87,8 +106,6 @@ Status NNRTDelegate::BuildKirinNPUModel(DelegateModel *model) return kLiteNullptr; } - SetKirinModelInputsAndOutputs(nn_model); - size_t extension_size = nnrt_device_info_.extensions_.size(); OH_NN_Extension *extensions = (OH_NN_Extension *)malloc(sizeof(OH_NN_Extension) * extension_size); if (extensions == nullptr) { @@ -105,11 +122,22 @@ Status NNRTDelegate::BuildKirinNPUModel(DelegateModel *model) MS_LOG_DEBUG << "set extension, item name: " << extensions[i].name << ", value size: " << extensions[i].valueSize; } - auto ret = OH_NNModel_BuildFromMetaGraph(nn_model, meta_graph_, extensions, extension_size); - if (ret != OH_NN_SUCCESS) { - MS_LOG(ERROR) << "Build NNModel failed, ret: " << ret; - OH_NNModel_Destroy(&nn_model); - return kLiteError; + if (IsCustomModel()) { + auto ret = OH_NNModel_BuildFromLiteGraph(nn_model, lite_graph_); + if (ret != OH_NN_SUCCESS) { + MS_LOG(ERROR) << "Build NNModel failed, ret: " << ret; + OH_NNModel_Destroy(&nn_model); + return kLiteError; + } + } else { + SetKirinModelInputsAndOutputs(nn_model); + + auto ret = OH_NNModel_BuildFromMetaGraph(nn_model, meta_graph_, extensions, extension_size); + if (ret != OH_NN_SUCCESS) { + MS_LOG(ERROR) << "Build NNModel failed, ret: " << ret; + OH_NNModel_Destroy(&nn_model); + return kLiteError; + } } auto ret2 = CreateFullModelKernel(model, nn_model); diff --git a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h index 0441139b..99f6f89b 100644 --- 
a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h +++ b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h @@ -73,6 +73,7 @@ class NNRTDelegate : public Delegate { Status InitNNCompilation(OH_NNCompilation *nn_compilation) const; static OH_NN_DataType CastToNNRTDataType(mindspore::DataType data_type); static OH_NN_Format CastToNNRTFormat(Format format); + bool IsCustomModel() const; #ifdef SUPPORT_NNRT_METAGRAPH bool IsKirinNPU() const; -- 2.34.1