From 7d6c8c9c2913622866630427022f6f0888cf278c Mon Sep 17 00:00:00 2001
From: Zhu Guodong <zhuguodong0001@163.com>
Date: Wed, 19 Jul 2023 19:00:41 +0800
Subject: [PATCH] auto-apply 0037-npu-support-custom-model.patch

---
 .../runtime/delegate/nnrt/nnrt_delegate.cc    | 42 +++++++++++++++----
 .../src/runtime/delegate/nnrt/nnrt_delegate.h |  1 +
 2 files changed, 36 insertions(+), 7 deletions(-)

diff --git a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc
index db95727f..15b117ca 100644
--- a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc
+++ b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.cc
@@ -23,6 +23,7 @@
 #include "interfaces/innerkits/c/neural_network_runtime_inner.h"
 #include "nnrt_model_kernel.h"
 #include "schema/model_generated.h"
+#include "schema/ops_generated.h"
 #include "flatbuffers/flatbuffers.h"
 #include "runtime/tensor_category.h"
 
@@ -62,6 +63,24 @@ Status NNRTDelegate::Build(DelegateModel<schema::Primitive> *model) {
   return BuildNormalModel(model);
 }
 
+bool NNRTDelegate::IsCustomModel() const {
+  // Check whether the LiteModel contains exactly one Custom kernel.
+  if (lite_graph_ == nullptr) {
+    return false;
+  }
+  if (lite_graph_->all_nodes_.size() != 1) {
+    return false;
+  }
+  auto node = lite_graph_->all_nodes_[0];
+  if (node == nullptr) {
+    return false;
+  }
+  if (node->node_type_ != mindspore::schema::PrimitiveType_Custom) {
+    return false;
+  }
+  return true;
+}
+
 #ifdef SUPPORT_NNRT_METAGRAPH
 bool NNRTDelegate::IsKirinNPU() const {
   const std::string kirin_npu_name_prefix = "NPU_";
@@ -87,8 +106,6 @@ Status NNRTDelegate::BuildKirinNPUModel(DelegateModel<schema::Primitive> *model)
     return kLiteNullptr;
   }
 
-  SetKirinModelInputsAndOutputs(nn_model);
-
   size_t extension_size = nnrt_device_info_.extensions_.size();
   OH_NN_Extension *extensions = (OH_NN_Extension *)malloc(sizeof(OH_NN_Extension) * extension_size);
   if (extensions == nullptr) {
@@ -105,11 +122,22 @@ Status NNRTDelegate::BuildKirinNPUModel(DelegateModel<schema::Primitive> *model)
     MS_LOG_DEBUG << "set extension, item name: " << extensions[i].name << ", value size: " << extensions[i].valueSize;
   }
 
-  auto ret = OH_NNModel_BuildFromMetaGraph(nn_model, meta_graph_, extensions, extension_size);
-  if (ret != OH_NN_SUCCESS) {
-    MS_LOG(ERROR) << "Build NNModel failed, ret: " << ret;
-    OH_NNModel_Destroy(&nn_model);
-    return kLiteError;
+  if (IsCustomModel()) {
+    auto ret = OH_NNModel_BuildFromLiteGraph(nn_model, lite_graph_);
+    if (ret != OH_NN_SUCCESS) {
+      MS_LOG(ERROR) << "Build NNModel failed, ret: " << ret;
+      OH_NNModel_Destroy(&nn_model);
+      return kLiteError;
+    }
+  } else {
+    SetKirinModelInputsAndOutputs(nn_model);
+
+    auto ret = OH_NNModel_BuildFromMetaGraph(nn_model, meta_graph_, extensions, extension_size);
+    if (ret != OH_NN_SUCCESS) {
+      MS_LOG(ERROR) << "Build NNModel failed, ret: " << ret;
+      OH_NNModel_Destroy(&nn_model);
+      return kLiteError;
+    }
   }
 
   auto ret2 =  CreateFullModelKernel(model, nn_model);
diff --git a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h
index 0441139b..99f6f89b 100644
--- a/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h
+++ b/mindspore/lite/src/runtime/delegate/nnrt/nnrt_delegate.h
@@ -73,6 +73,7 @@ class NNRTDelegate : public Delegate {
   Status InitNNCompilation(OH_NNCompilation *nn_compilation) const;
   static OH_NN_DataType CastToNNRTDataType(mindspore::DataType data_type);
   static OH_NN_Format CastToNNRTFormat(Format format);
+  bool IsCustomModel() const;
 
 #ifdef SUPPORT_NNRT_METAGRAPH
   bool IsKirinNPU() const;
--
2.34.1