/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include <memory>
#include <string>
#include <vector>

#include "absl/container/flat_hash_map.h"
#include "absl/memory/memory.h"
#include "absl/types/variant.h"
#include "pybind11/pybind11.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/profiler/convert/xplane_to_tools_data.h"
#include "tensorflow/core/profiler/rpc/profiler_server.h"
#include "tensorflow/python/lib/core/pybind11_status.h"
#include "tensorflow/python/profiler/internal/profiler_pywrap_impl.h"

namespace py = ::pybind11;

using ::tensorflow::profiler::pywrap::ProfilerSessionWrapper;

namespace {

// This must be called while holding the GIL because it reads Python objects.
// Reading Python objects requires the GIL because they can be mutated by
// other Python threads. In addition, Python objects are reference counted;
// reading a py::dict increases its reference count.
absl::flat_hash_map<std::string, absl::variant<int>> ConvertDictToMap(
    const py::dict& dict) {
  absl::flat_hash_map<std::string, absl::variant<int>> map;
  for (const auto& kw : dict) {
    if (!kw.second.is_none()) {
      map.emplace(kw.first.cast<std::string>(), kw.second.cast<int>());
    }
  }
  return map;
}

}  // namespace

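// Illustrative usage from Python (the exact import path and option keys are
// assumptions; they depend on how this extension module is packaged):
//
//   from tensorflow.python.profiler.internal import _pywrap_profiler
//
//   session = _pywrap_profiler.ProfilerSession()
//   session.start(logdir, {"host_tracer_level": 2})
//   ...  # run the workload to be profiled
//   serialized_trace = session.stop()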
PYBIND11_MODULE(_pywrap_profiler, m) {
  py::class_<ProfilerSessionWrapper> profiler_session_class(m,
                                                            "ProfilerSession");
  profiler_session_class.def(py::init<>())
      // Starts profiling with the given options.
      .def("start",
           [](ProfilerSessionWrapper& wrapper, const char* logdir,
              const py::dict& options) {
             tensorflow::Status status;
             absl::flat_hash_map<std::string, absl::variant<int>> opts =
                 ConvertDictToMap(options);
             {
               py::gil_scoped_release release;
               status = wrapper.Start(logdir, opts);
             }
             // Py_INCREF and Py_DECREF must be called holding the GIL.
             tensorflow::MaybeRaiseRegisteredFromStatus(status);
           })
      // Stops profiling and returns the serialized profile data as bytes.
      .def("stop",
           [](ProfilerSessionWrapper& wrapper) {
             tensorflow::string content;
             tensorflow::Status status;
             {
               py::gil_scoped_release release;
               status = wrapper.Stop(&content);
             }
             // Py_INCREF and Py_DECREF must be called holding the GIL.
             tensorflow::MaybeRaiseRegisteredFromStatus(status);
             // The content is not valid UTF-8. It must be converted to bytes.
             return py::bytes(content);
           })
      // Exports the collected profile data in a format TensorBoard can read.
      .def("export_to_tb", [](ProfilerSessionWrapper& wrapper) {
        tensorflow::Status status;
        {
          py::gil_scoped_release release;
          status = wrapper.ExportToTensorBoard();
        }
        // Py_INCREF and Py_DECREF must be called holding the GIL.
        tensorflow::MaybeRaiseRegisteredFromStatus(status);
      });

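  // Starts a ProfilerServer listening on `port`, which allows this process to
  // be profiled remotely.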
92 m.def("start_server", [](int port) {
93 auto profiler_server =
94 absl::make_unique<tensorflow::profiler::ProfilerServer>();
95 profiler_server->StartProfilerServer(port);
96 // Intentionally release profiler server. Should transfer ownership to
97 // caller instead.
98 profiler_server.release();
99 });
100
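  // Collects a trace from the profiler service(s) at `service_addr` and saves
  // the results under `logdir`. Raises an exception if tracing fails.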
101 m.def("trace",
102 [](const char* service_addr, const char* logdir,
103 const char* worker_list, bool include_dataset_ops, int duration_ms,
104 int num_tracing_attempts, py::dict options) {
105 tensorflow::Status status;
106 absl::flat_hash_map<std::string, absl::variant<int>> opts =
107 ConvertDictToMap(options);
108 {
109 py::gil_scoped_release release;
110 status = tensorflow::profiler::pywrap::Trace(
111 service_addr, logdir, worker_list, include_dataset_ops,
112 duration_ms, num_tracing_attempts, opts);
113 }
114 // Py_INCREF and Py_DECREF must be called holding the GIL.
115 tensorflow::MaybeRaiseRegisteredFromStatus(status);
116 });
117
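  // Queries the profiler service at `service_addr` for `duration_ms` and
  // returns the monitoring result as a string.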
118 m.def("monitor", [](const char* service_addr, int duration_ms,
119 int monitoring_level, bool display_timestamp) {
120 tensorflow::string content;
121 tensorflow::Status status;
122 {
123 py::gil_scoped_release release;
124 status = tensorflow::profiler::pywrap::Monitor(
125 service_addr, duration_ms, monitoring_level, display_timestamp,
126 &content);
127 }
128 // Py_INCREF and Py_DECREF must be called holding the GIL.
129 tensorflow::MaybeRaiseRegisteredFromStatus(status);
130 return content;
131 });
132
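  // Reads serialized XSpace protos from the given file paths and converts
  // them into data for the requested tool. Returns (tool_data, success).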
133 m.def("xspace_to_tools_data",
134 [](const py::list& xspace_path_list, const py::str& py_tool_name) {
135 std::vector<tensorflow::profiler::XSpace> xspaces;
136 xspaces.reserve(xspace_path_list.size());
137 std::vector<std::string> filenames;
138 filenames.reserve(xspace_path_list.size());
139 for (py::handle obj : xspace_path_list) {
140 std::string filename = std::string(py::cast<py::str>(obj));
141
142 tensorflow::profiler::XSpace xspace;
143 tensorflow::Status status;
144
145 status = tensorflow::ReadBinaryProto(tensorflow::Env::Default(),
146 filename, &xspace);
147
148 if (!status.ok()) {
149 return py::make_tuple(py::bytes(""), py::bool_(false));
150 }
151
152 xspaces.push_back(xspace);
153 filenames.push_back(filename);
154 }
155 std::string tool_name = std::string(py_tool_name);
156 auto tool_data_and_success =
157 tensorflow::profiler::ConvertMultiXSpacesToToolData(
158 xspaces, filenames, tool_name);
159 return py::make_tuple(py::bytes(tool_data_and_success.first),
160 py::bool_(tool_data_and_success.second));
161 });
162
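  // Same as xspace_to_tools_data, but parses the XSpace protos from in-memory
  // byte strings paired with their original file names.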
163 m.def("xspace_to_tools_data_from_byte_string",
164 [](const py::list& xspace_string_list, const py::list& filenames_list,
165 const py::str& py_tool_name) {
166 std::vector<tensorflow::profiler::XSpace> xspaces;
167 xspaces.reserve(xspace_string_list.size());
168 std::vector<std::string> filenames;
169 filenames.reserve(filenames_list.size());
170
171 // XSpace string inputs
172 for (py::handle obj : xspace_string_list) {
173 std::string xspace_string = std::string(py::cast<py::bytes>(obj));
174
175 tensorflow::profiler::XSpace xspace;
176
177 if (!xspace.ParseFromString(xspace_string)) {
178 return py::make_tuple(py::bytes(""), py::bool_(false));
179 }
180
181 xspaces.push_back(xspace);
182 }
183
184 // Filenames
185 for (py::handle obj : filenames_list) {
186 filenames.push_back(std::string(py::cast<py::str>(obj)));
187 }
188
189 std::string tool_name = std::string(py_tool_name);
190 auto tool_data_and_success =
191 tensorflow::profiler::ConvertMultiXSpacesToToolData(
192 xspaces, filenames, tool_name);
193 return py::make_tuple(py::bytes(tool_data_and_success.first),
194 py::bool_(tool_data_and_success.second));
195 });
196 };