/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/lite/tools/benchmark/profiling_listener.h"

#include <fstream>

#include "tensorflow/lite/tools/logging.h"

namespace tflite {
namespace benchmark {

ProfilingListener::ProfilingListener(
    Interpreter* interpreter, uint32_t max_num_entries,
    const std::string& csv_file_path,
    std::shared_ptr<profiling::ProfileSummaryFormatter> summarizer_formatter)
    : run_summarizer_(summarizer_formatter),
      init_summarizer_(summarizer_formatter),
      csv_file_path_(csv_file_path),
      interpreter_(interpreter),
      profiler_(max_num_entries) {
  TFLITE_TOOLS_CHECK(interpreter);
  interpreter_->SetProfiler(&profiler_);

  // We start profiling here in order to catch events that are recorded during
  // the benchmark run preparation stage, where the TFLite interpreter is
  // initialized and the model graph is prepared.
  profiler_.Reset();
  profiler_.StartProfiling();
}

void ProfilingListener::OnBenchmarkStart(const BenchmarkParams& params) {
  // At this point we have completed the preparation for benchmark runs,
  // including TFLite interpreter initialization, so process the profiling
  // events recorded during this stage.
  profiler_.StopProfiling();
  auto profile_events = profiler_.GetProfileEvents();
  init_summarizer_.ProcessProfiles(profile_events, *interpreter_);
  profiler_.Reset();
}

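// Profile only regular benchmark runs; other run types (e.g. warm-up runs)
// are not recorded.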
void ProfilingListener::OnSingleRunStart(RunType run_type) {
  if (run_type == REGULAR) {
    profiler_.Reset();
    profiler_.StartProfiling();
  }
}

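// Stops profiling for the run that just finished and folds the recorded
// events into the per-run summarizer.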
void ProfilingListener::OnSingleRunEnd() {
  profiler_.StopProfiling();
  auto profile_events = profiler_.GetProfileEvents();
  run_summarizer_.ProcessProfiles(profile_events, *interpreter_);
}

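// Writes the collected profiling summaries to the CSV file when it can be
// opened; otherwise falls back to the benchmark log.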
void ProfilingListener::OnBenchmarkEnd(const BenchmarkResults& results) {
  std::ofstream output_file(csv_file_path_);
  std::ostream* output_stream = nullptr;
  if (output_file.good()) {
    output_stream = &output_file;
  }
  if (init_summarizer_.HasProfiles()) {
    WriteOutput("Profiling Info for Benchmark Initialization:",
                init_summarizer_.GetOutputString(),
                output_stream == nullptr ? &TFLITE_LOG(INFO) : output_stream);
  }
  if (run_summarizer_.HasProfiles()) {
    WriteOutput("Operator-wise Profiling Info for Regular Benchmark Runs:",
                run_summarizer_.GetOutputString(),
                output_stream == nullptr ? &TFLITE_LOG(INFO) : output_stream);
  }
}

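// Emits the header followed by the formatted profiling data on the given
// stream.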
void ProfilingListener::WriteOutput(const std::string& header,
                                    const string& data, std::ostream* stream) {
  (*stream) << header << std::endl;
  (*stream) << data << std::endl;
}

}  // namespace benchmark
}  // namespace tflite