/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_PIPELINE_JIT_PIPELINE_H_
#define MINDSPORE_CCSRC_PIPELINE_JIT_PIPELINE_H_

#include <vector>
#include <utility>
#include <string>
#include <memory>
#include <unordered_map>
#include <map>
#include <mutex>

#include "pybind11/pybind11.h"

#include "ir/anf.h"
#include "ir/tensor.h"
#include "pipeline/jit/action.h"
#include "vm/segment_runner.h"
#include "vm/transform.h"
#include "pipeline/jit/base.h"

namespace mindspore {
extern const char kMsConvert[];
extern const char kMsVm[];

// namespace to support pipeline structures definition
namespace pipeline {

namespace py = pybind11;

class Pipeline {
 public:
  Pipeline(const ResourcePtr &res, const std::vector<ActionItem> &actions) : resource_(res), actions_(actions) {}

  ~Pipeline() = default;

  void Run(const std::string &phase);

  ResourcePtr resource() { return resource_; }

 private:
  ResourcePtr resource_;
  std::vector<ActionItem> actions_;
};
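
// A minimal usage sketch of Pipeline (hedged: the resource and action list are normally produced by
// the compile pipeline itself; Resource and the VmPipeline() action factory are assumed to come from
// pipeline/jit/resource.h and pipeline/jit/action.h, and the phase string below is illustrative):
//
//   ResourcePtr resource = std::make_shared<Resource>();
//   std::vector<ActionItem> actions = VmPipeline();
//   Pipeline pipeline(resource, actions);
//   pipeline.Run("train.0");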

// A function pipeline.
class GraphExecutorPy : public std::enable_shared_from_this<GraphExecutorPy> {
 public:
  static std::shared_ptr<GraphExecutorPy> GetInstance() {
    std::lock_guard<std::mutex> i_lock(instance_lock_);
    if (executor_ == nullptr) {
      executor_ = std::shared_ptr<GraphExecutorPy>(new (std::nothrow) GraphExecutorPy());
    }
    return executor_;
  }

  ~GraphExecutorPy();

  const std::string &phase() const { return phase_; }
  void SaveCompiledGraph(const std::string &phase);
  bool CompileInner(const py::object &source_obj, const py::tuple &args, const py::object &phase_obj, bool use_vm,
                    const std::string &queue_name);
  bool Compile(const py::object &source_obj, const py::tuple &args, const py::object &phase_obj, bool use_vm,
               const std::string &queue_name);

  void ProcessVmArg(const py::tuple &args, const std::string &phase, VectorRef *arg_list);

  // For PyNative mode when use_vm is on.
  py::object Run(const py::tuple &args, const py::object &phase_obj);
  ResourcePtr GetResource(const std::string &phase);
  FuncGraphPtr GetFuncGraph(const std::string &phase);
  FuncGraphPtr GetGradGraph(const std::string &phase);
  void SetGradGraph(const FuncGraphPtr &grad_graph, const std::string &phase);
  py::bytes GetFuncGraphProto(const std::string &phase, const std::string &type);
  compile::VmEvalFuncPtr GetVmEvalFunc(const std::string &phase);
  bool HasCompiled(const std::string &phase) const;

  FuncGraphPtr BuildGraph(const py::dict &init_params, const std::string &phase,
                          const py::object &broadcast_params = {});
  void UpdataParamNodeDefaultInput(const std::string &phase,
                                   const std::unordered_map<std::string, tensor::TensorPtr> &params);
  void RunInitGraph(const py::dict &init_params, const std::string &phase) const;
  void PyExePath(const py::object &py_exe_path);
  void KernelBuildServerDir(const py::object &kernel_build_server_dir);
  py::dict GetParameterLayout(const std::string &phase);
  py::dict GetCNodeStrategy(const std::string &phase);
  py::list GetParallelParameterNameList(const std::string &phase);
  void SetCNodeStrategy(const std::string &name, const parallel::Strategys &strategy);
  size_t GetNumOpsInfo(const std::string &phase);
  void SetNumOpsInfo(size_t);
  py::dict GetAllreduceFusion(const std::string &phase);
  void DelNetRes(const std::string &id);
  void ReleaseResource(const py::object &phase_obj);
  static void ClearRes();
#ifdef ENABLE_DEBUGGER
  static bool GetDebugTerminate() { return debugger_terminate_; }
  static void DebugTerminate(bool val, bool exit_success) {
    debugger_terminate_ = val;
    exit_success_ = exit_success;
  }
  void TerminateDebugger();
#endif

  std::map<std::string, std::pair<PrimitivePyAdapterPtr, std::string>> FetchInfoForQuantExport(
    const std::string &phase);

 private:
  GraphExecutorPy();
  void ConvertObjectToTensors(const py::dict &dict, std::map<std::string, tensor::TensorPtr> *tensors);
  void GetWeightInfo(const CNodePtr &root_node, const AnfNodePtr &weight_node,
                     std::map<std::string, std::pair<PrimitivePyAdapterPtr, std::string>> *fake_quant_table);
  void GetGeBackendPolicy() const;
  // Filter some pipeline actions according to the phase, e.g. when exporting ONNX there is no need to execute the
  // actions after the 'validate' stage.
  static std::vector<ActionItem> FilterActions(const std::vector<ActionItem> &actions, const std::string &phase);

  std::map<std::string, ExecutorInfoPtr> info_;
  static std::shared_ptr<GraphExecutorPy> executor_;
  static std::mutex instance_lock_;
#ifdef ENABLE_DEBUGGER
  static bool debugger_terminate_;
  static bool exit_success_;
#endif
  std::map<std::string, py::dict> stra_dict_;
  std::string phase_ = "";
  std::map<std::string, size_t> phase_to_num_op_info_;
};
using GraphExecutorPyPtr = std::shared_ptr<GraphExecutorPy>;
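
// A hedged sketch of how the Python frontend drives GraphExecutorPy through pybind11 (the network
// object, argument tuple and phase object below are illustrative placeholders forwarded from the
// Python side, not values defined in this header):
//
//   auto executor = GraphExecutorPy::GetInstance();
//   if (executor->Compile(network_obj, args_tuple, phase_obj, /*use_vm=*/true, /*queue_name=*/"")) {
//     py::object output = executor->Run(args_tuple, phase_obj);
//   }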

void CheckArgsValid(const py::tuple &args);
// Generate a key for mapping function graph
py::object GenerateArgumentsKey(const std::unordered_map<std::string, py::object> &args);
py::bool_ VerifyInputSignature(const py::list &input_signature, const py::tuple &inputs);

bool InitDistribute(const std::map<std::string, std::string> &options);

void ResetOpId();
void InitHccl();
void FinalizeHccl();
uint32_t GetHcclRankId();
uint32_t GetHcclRankSize();
void InitPipeline();
void FinalizeBackend();
void ClearResAtexit();
void ReleaseGeTsd();

void ExportGraph(const std::string &file_name, const std::string &, const std::string &phase);
FuncGraphPtr LoadMindIR(const std::string &file_name, char *dec_key, const size_t key_len, const std::string &dec_mode);
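
// A hedged example of loading a serialized graph with LoadMindIR (the file name and dec_mode string
// are illustrative; an unencrypted file is assumed, so the key is null and the mode is effectively
// unused):
//
//   FuncGraphPtr func_graph = LoadMindIR("net.mindir", nullptr, 0, "AES-GCM");
//   if (func_graph == nullptr) {
//     // Loading failed; handle the error on the caller side.
//   }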

// Init and exec the dataset subgraph.
bool InitExecDataset(const std::string &queue_name, int64_t iter_num, int64_t batch_size,
                     const std::vector<TypePtr> &types, const std::vector<std::vector<int64_t>> &shapes,
                     const std::vector<int64_t> &input_indexes, const std::string &phase, bool need_run);

// Build and run the dataset subgraph for the MS backend.
bool InitExecDatasetVm(const std::string &queue_name, int64_t size, int64_t batch_size,
                       const std::vector<TypePtr> &types, const std::vector<std::vector<int64_t>> &shapes,
                       const std::vector<int64_t> &input_indexes, bool need_run);
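
// A hedged sketch of initializing a dataset subgraph for the MS backend (all values are
// illustrative; in practice the queue name, element types, shapes and input indexes are supplied
// by the Python-side dataset helpers):
//
//   std::vector<TypePtr> types = {kFloat32, kInt32};
//   std::vector<std::vector<int64_t>> shapes = {{32, 3, 224, 224}, {32}};
//   std::vector<int64_t> input_indexes = {0, 1};
//   bool ok = InitExecDatasetVm("dataset_queue_0", /*size=*/100, /*batch_size=*/32,
//                               types, shapes, input_indexes, /*need_run=*/true);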

void ProcessVmArgInner(const py::tuple &args, const ResourcePtr &res, VectorRef *const arg_list);

py::bytes PyEncrypt(char *plain_data, size_t plain_len, char *key, size_t key_len, const std::string &enc_mode);
py::bytes PyDecrypt(const std::string &encrypt_data_path, char *key, size_t key_len, const std::string &dec_mode);
bool PyIsCipherFile(const std::string &file_path);
}  // namespace pipeline
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_PIPELINE_JIT_PIPELINE_H_