/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_LITE_MICRO_CODER_GRAPH_H_
#define MINDSPORE_LITE_MICRO_CODER_GRAPH_H_

#include <map>
#include <memory>
#include <unordered_map>
#include <vector>
#include <string>
#include "coder/config.h"
#include "include/context.h"
#include "include/model.h"
#include "schema/inner/model_generated.h"
#include "src/common/graph_util.h"
#include "src/tensor.h"

namespace mindspore::lite::micro {
class CoderGraph {
 public:
  explicit CoderGraph(Model *model) : model_(model) {}
  ~CoderGraph();

  int ConvertTensors();
  int InitGraphInOutTensors();

  void SetAllTensors(const std::vector<Tensor *> &all_tensors);

  void InitInputs();
  void InitOutputs();

  void SetInputIndices(const std::vector<uint32_t> &input_indices);

  void SetOutputIndices(const std::vector<uint32_t> &output_indices);

  void AddInputMap(const std::string &node_id, Tensor *input_tensor);

  void AddOutputMap(const std::string &node_id, Tensor *output_tensor);

  std::vector<uint32_t> input_indices() const;

  std::vector<uint32_t> output_indices() const;

  std::vector<Tensor *> input_tensors() const;

  std::vector<Tensor *> output_tensors() const;

  std::vector<Tensor *> all_tensors() const;

  const std::map<NODE_ID, std::vector<Tensor *>> &GetOutputsMap() const;

  const Model *model() const { return this->model_; }

  void DumpUnSupportLayer(Target target);

 private:
  // graph_inputs, weight and bias are value_node;
  // the others are parameter_node
  std::vector<Tensor *> all_tensors_;

  std::vector<Tensor *> input_tensors_;

  std::vector<Tensor *> output_tensors_;

  std::vector<uint32_t> input_indices_;

  std::vector<uint32_t> output_indices_;

  std::map<std::string, std::vector<Tensor *>> inputs_map_;

  std::map<std::string, std::vector<Tensor *>> outputs_map_;

  Model *model_{nullptr};
};
}  // namespace mindspore::lite::micro
#endif  // MINDSPORE_LITE_MICRO_CODER_GRAPH_H_
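
A minimal usage sketch of CoderGraph follows (it is not part of the header above). It assumes the serialized model buffer has already been read from disk, that Model::Import from include/model.h has its MindSpore Lite 1.x signature Import(const char *, size_t), and that the coder's int status codes follow the usual convention of 0 for success; the include path "coder/graph.h" and the helper name BuildCoderGraph are illustrative only.

// Sketch only: BuildCoderGraph is a hypothetical helper; exact include
// paths depend on the build setup of the micro coder.
#include <cstddef>
#include <memory>

#include "coder/graph.h"
#include "include/model.h"

int BuildCoderGraph(const char *model_buf, size_t size) {
  // Parse the flatbuffer into a lite::Model (assumed Lite 1.x API).
  mindspore::lite::Model *model = mindspore::lite::Model::Import(model_buf, size);
  if (model == nullptr) {
    return -1;
  }
  auto graph = std::make_unique<mindspore::lite::micro::CoderGraph>(model);
  // Materialize lite::Tensor objects from the schema tensors, then collect
  // the graph-level inputs and outputs declared in the header above.
  if (graph->ConvertTensors() != 0 || graph->InitGraphInOutTensors() != 0) {
    return -1;
  }
  // graph->input_tensors() and graph->output_tensors() are now available
  // for the subsequent code-generation passes.
  return 0;
}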