/**
 * Copyright 2022 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_LITE_SRC_LITERT_DELEGATE_NNAPI_NNAPI_SUBGRAPH_H_
#define MINDSPORE_LITE_SRC_LITERT_DELEGATE_NNAPI_NNAPI_SUBGRAPH_H_

#include <vector>
#include <utility>
#include <unordered_map>
#include "include/api/kernel.h"
#include "src/common/log_adapter.h"
#include "src/litert/delegate/nnapi/op/nnapi_op.h"
#include "src/litert/delegate/nnapi/nnapi_implementation.h"

namespace mindspore {
namespace lite {
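// NNAPISubGraph wraps a group of NNAPI-convertible ops as a single kernel::Kernel
// and holds the ANeuralNetworksModel/Compilation/Execution handles built for the
// selected devices.
//
// Illustrative call sequence (an assumption drawn from the declared interface, not
// a verbatim excerpt of the delegate code); `ops`, `inputs`, `outputs` and `devices`
// are hypothetical variables prepared by the caller:
//
//   auto *subgraph = new (std::nothrow) NNAPISubGraph(ops, inputs, outputs, devices,
//                                                     /*relax_fp32_to_fp16=*/true);
//   if (subgraph == nullptr || subgraph->Init() != RET_OK || subgraph->CreateNNAPIModel() != RET_OK ||
//       subgraph->CompileNNAPIModel() != RET_OK) {
//     delete subgraph;  // fall back to the default CPU kernels.
//   } else {
//     subgraph->Prepare();  // afterwards, call Execute() for every inference request.
//   }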
class NNAPISubGraph : public kernel::Kernel {
 public:
  NNAPISubGraph(std::vector<NNAPIOp *> ops, const std::vector<mindspore::MSTensor> &inputs,
                const std::vector<mindspore::MSTensor> &outputs, std::vector<ANeuralNetworksDevice *> devices,
                bool relax_fp32_to_fp16)
      : kernel::Kernel(inputs, outputs, nullptr, nullptr),
        ops_(std::move(ops)),
        relax_fp32_to_fp16_(relax_fp32_to_fp16),
        devices_(std::move(devices)) {}

  ~NNAPISubGraph() override;

  int Init();

  // Builds the ANeuralNetworksModel from ops_.
  int CreateNNAPIModel();

  // Compiles the built model for the selected devices_.
  int CompileNNAPIModel();

  int Prepare() override;

  int ReSize() override {
    MS_LOG(ERROR) << "The NNAPI delegate does not support resizing yet.";
    return RET_ERROR;
  }

  int Execute() override;

 private:
  int PreProcess();

  std::vector<NNAPIOp *> ops_;
  std::vector<MSTensor> all_tensors_;
  std::vector<uint32_t> input_indices_;
  std::vector<uint32_t> output_indices_;

  bool relax_fp32_to_fp16_ = true;
  std::vector<ANeuralNetworksDevice *> devices_;
  // NNAPI handles for model building, compilation and execution.
  ANeuralNetworksModel *nnapi_model_ = nullptr;
  ANeuralNetworksCompilation *nnapi_compilation_ = nullptr;
  ANeuralNetworksExecution *nnapi_execution_ = nullptr;
};
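// Sketch (assumption, not the actual implementation): CompileNNAPIModel() presumably
// finishes the model and creates a compilation for devices_ via the NNAPI C API,
// roughly as below. The real code dispatches through the function table loaded by
// nnapi_implementation.h rather than calling the NDK symbols directly.
//
//   int NNAPISubGraph::CompileNNAPIModel() {
//     if (ANeuralNetworksModel_relaxComputationFloat32toFloat16(nnapi_model_, relax_fp32_to_fp16_) !=
//         ANEURALNETWORKS_NO_ERROR) {
//       return RET_ERROR;
//     }
//     if (ANeuralNetworksModel_finish(nnapi_model_) != ANEURALNETWORKS_NO_ERROR) {
//       return RET_ERROR;
//     }
//     int ret = devices_.empty()
//                 ? ANeuralNetworksCompilation_create(nnapi_model_, &nnapi_compilation_)
//                 : ANeuralNetworksCompilation_createForDevices(nnapi_model_, devices_.data(),
//                                                               static_cast<uint32_t>(devices_.size()),
//                                                               &nnapi_compilation_);
//     if (ret != ANEURALNETWORKS_NO_ERROR) {
//       return RET_ERROR;
//     }
//     return ANeuralNetworksCompilation_finish(nnapi_compilation_) == ANEURALNETWORKS_NO_ERROR ? RET_OK : RET_ERROR;
//   }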
}  // namespace lite
}  // namespace mindspore

#endif  // MINDSPORE_LITE_SRC_LITERT_DELEGATE_NNAPI_NNAPI_SUBGRAPH_H_