/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <iostream>
#include "common/common_test.h"
#include "transform/transform_base_test.h"
#include "pybind_api/ir/tensor_py.h"
#include "pipeline/jit/parse/resolve.h"

using mindspore::tensor::TensorPy;

namespace mindspore {
namespace transform {
using mindspore::parse::ParsePythonCode;
namespace python_adapter = mindspore::parse::python_adapter;
using mindspore::parse::ResolveAll;
// Parse the Python function `function` in module `package` into an ANF
// func_graph, resolve all symbols, and return every graph held by the manager.
// Returns an empty vector if parsing or resolution fails.
std::vector<FuncGraphPtr> getAnfGraph(std::string package, std::string function) {
  py::function fn_ = python_adapter::GetPyFn(package, function);
  FuncGraphPtr func_graph = ParsePythonCode(fn_);
  std::vector<FuncGraphPtr> graphVector;
  if (func_graph == nullptr) return graphVector;

  // save the func_graph to manager
  std::shared_ptr<FuncGraphManager> manager = Manage(func_graph);

  // call resolve
  bool ret_ = ResolveAll(manager);
  if (!ret_) return graphVector;

  // collect all graphs known to the manager
  for (auto fg : manager->func_graphs()) {
    graphVector.push_back(fg);
  }
  return graphVector;
}
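
// A minimal usage sketch for getAnfGraph; the module and function names below
// are hypothetical placeholders for a real Python test fixture:
//
//   std::vector<FuncGraphPtr> graphs = getAnfGraph("gtest_input.transform.transform_test", "test_fn");
//   ASSERT_FALSE(graphs.empty());  // one entry per func_graph resolved by the manager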
50
// Dump a tensor's raw memory block, its numpy representation, and its dtype
// to stdout. The buffer is interpreted as float data.
void PrintMeTensor(MeTensor* tensor) {
  using DTYPE = float;
  DTYPE* me_data = reinterpret_cast<DTYPE*>(tensor->data_c());
  size_t elements = tensor->ElementsNum();
  std::cout << "the in memory block data size is: " << std::dec << tensor->data().nbytes() << " bytes" << std::endl;
  std::cout << "the in memory block data is: " << std::endl;
  for (size_t i = 0; i < elements; i++) {
    std::cout << me_data[i] << std::endl;
  }

  std::cout << "the py::str() data is: " << std::endl;
  py::array tensor_data = TensorPy::AsNumpy(*tensor);
  std::cout << std::string(py::str(tensor_data)) << std::endl;

  std::cout << "tensor dtype is: " << py::str(tensor_data.dtype()) << std::endl;
}
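
// A minimal usage sketch for PrintMeTensor; a float32 tensor is assumed here
// to match the DTYPE the function casts the buffer to:
//
//   MeTensorPtr tensor = MakeTensor(kFloat32, {2, 3});
//   PrintMeTensor(tensor.get());  // prints 6 elements, the numpy repr, and the dtype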
67
FuncGraphPtr MakeFuncGraph(const PrimitivePtr prim, unsigned int nparam) {
  // build the func_graph manually, eg:
  // MakeFuncGraph(std::make_shared<Primitive>("scalar_add"), 2) means:
  /* python source code:
   * @mindspore
   * def f(x, y):
   *     return x + y
   */
  FuncGraphPtr func_graph = std::make_shared<FuncGraph>();
  std::vector<AnfNodePtr> inputs;
  inputs.push_back(NewValueNode(prim));
  for (unsigned int i = 0; i < nparam; i++) {
    // Summary primitives take a tag string as their first input instead of a parameter.
    if ((prim->name() == "ScalarSummary" || prim->name() == "TensorSummary" ||
         prim->name() == "ImageSummary" || prim->name() == "HistogramSummary") &&
        i == 0) {
      auto input = NewValueNode("testSummary");
      inputs.push_back(input);
    } else {
      auto input = func_graph->add_parameter();
      input->set_name("x" + std::to_string(i));
      inputs.push_back(input);
    }
  }
  CNodePtr cnode_prim = func_graph->NewCNode(inputs);
  inputs.clear();
  inputs.push_back(NewValueNode(std::make_shared<Primitive>("Return")));
  inputs.push_back(cnode_prim);
  CNodePtr cnode_return = func_graph->NewCNode(inputs);
  func_graph->set_return(cnode_return);
  return func_graph;
}
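
// A minimal usage sketch for MakeFuncGraph, mirroring the scalar_add example
// in the comment above:
//
//   FuncGraphPtr fg = MakeFuncGraph(std::make_shared<Primitive>("scalar_add"), 2);
//   // fg now computes: return scalar_add(x0, x1), with parameters named "x0" and "x1"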
99
// Create an uninitialized tensor with element type `t` and shape `shp`.
MeTensorPtr MakeTensor(const TypePtr& t, std::initializer_list<int64_t> shp) {
  auto shape = std::vector<int64_t>(shp);
  auto tensor = std::make_shared<tensor::Tensor>(t->type_id(), shape);
  return tensor;
}
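
// A minimal usage sketch for MakeTensor; kFloat32 is the element type and
// {2, 3} the shape, giving an uninitialized 2x3 float32 tensor:
//
//   MeTensorPtr t = MakeTensor(kFloat32, {2, 3});
//   // t->ElementsNum() == 6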
105
}  // namespace transform
}  // namespace mindspore