/*
 *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "test/scenario/stats_collection.h"

#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/memory_usage.h"
#include "rtc_base/thread.h"

namespace webrtc {
namespace test {

VideoQualityAnalyzer::VideoQualityAnalyzer(
    VideoQualityAnalyzerConfig config,
    std::unique_ptr<RtcEventLogOutput> writer)
    : config_(config), writer_(std::move(writer)) {
  if (writer_) {
    PrintHeaders();
  }
}

VideoQualityAnalyzer::~VideoQualityAnalyzer() = default;

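// Writes the space-separated column headers for the per-frame log produced by
// HandleFramePair().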
void VideoQualityAnalyzer::PrintHeaders() {
  writer_->Write(
      "capture_time render_time capture_width capture_height render_width "
      "render_height psnr\n");
}

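// Returns a callback that feeds captured/decoded frame pairs into this
// analyzer.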
std::function<void(const VideoFramePair&)> VideoQualityAnalyzer::Handler() {
  return [this](VideoFramePair pair) { HandleFramePair(pair); };
}

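// Routes the sample to the analyzer for its layer and invalidates the cached
// aggregate stats.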
void VideoQualityAnalyzer::HandleFramePair(VideoFramePair sample, double psnr) {
  layer_analyzers_[sample.layer_id].HandleFramePair(sample, psnr,
                                                    writer_.get());
  cached_.reset();
}

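// Computes PSNR between the captured and decoded frames, then processes the
// sample on the configured thread if one was provided, otherwise inline on
// the calling thread.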
void VideoQualityAnalyzer::HandleFramePair(VideoFramePair sample) {
  double psnr = NAN;
  if (sample.decoded)
    psnr = I420PSNR(*sample.captured->ToI420(), *sample.decoded->ToI420());

  if (config_.thread) {
    config_.thread->PostTask(RTC_FROM_HERE, [this, sample, psnr] {
      HandleFramePair(std::move(sample), psnr);
    });
  } else {
    HandleFramePair(std::move(sample), psnr);
  }
}

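// Returns the accumulated stats for each video layer separately.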
std::vector<VideoQualityStats> VideoQualityAnalyzer::layer_stats() const {
  std::vector<VideoQualityStats> res;
  for (auto& layer : layer_analyzers_)
    res.push_back(layer.second.stats_);
  return res;
}

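// Lazily merges the per-layer stats into one aggregate. The cache is reset
// whenever a new frame pair is handled.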
VideoQualityStats& VideoQualityAnalyzer::stats() {
  if (!cached_) {
    cached_ = VideoQualityStats();
    for (auto& layer : layer_analyzers_)
      cached_->AddStats(layer.second.stats_);
  }
  return *cached_;
}

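// Classifies the frame pair as lost, frozen (repeated) or rendered, updates
// the corresponding counters and PSNR statistics, and optionally writes one
// log line per frame.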
void VideoLayerAnalyzer::HandleFramePair(VideoFramePair sample,
                                         double psnr,
                                         RtcEventLogOutput* writer) {
  RTC_CHECK(sample.captured);
  HandleCapturedFrame(sample);
  if (!sample.decoded) {
    // Can only happen at the beginning of a call or if the resolution is
    // reduced. Otherwise we will detect a freeze.
    ++stats_.lost_count;
    ++skip_count_;
  } else {
    stats_.psnr_with_freeze.AddSample(psnr);
    if (sample.repeated) {
      ++stats_.freeze_count;
      ++skip_count_;
    } else {
      stats_.psnr.AddSample(psnr);
      HandleRenderedFrame(sample);
    }
  }
  if (writer) {
    LogWriteFormat(writer, "%.3f %.3f %i %i %i %i %.3f\n",
                   sample.capture_time.seconds<double>(),
                   sample.render_time.seconds<double>(),
                   sample.captured->width(), sample.captured->height(),
                   sample.decoded ? sample.decoded->width() : 0,
                   sample.decoded ? sample.decoded->height() : 0, psnr);
  }
}

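// Records capture-side frame info and starts the freeze timer on the first
// captured frame.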
void VideoLayerAnalyzer::HandleCapturedFrame(const VideoFramePair& sample) {
  stats_.capture.AddFrameInfo(*sample.captured, sample.capture_time);
  if (last_freeze_time_.IsInfinite())
    last_freeze_time_ = sample.capture_time;
}

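// Updates delay and render statistics for a rendered frame. A render interval
// that exceeds the mean interval by more than 150 ms, or is more than three
// times the mean, is counted as a freeze.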
void VideoLayerAnalyzer::HandleRenderedFrame(const VideoFramePair& sample) {
  stats_.capture_to_decoded_delay.AddSample(sample.decoded_time -
                                            sample.capture_time);
  stats_.end_to_end_delay.AddSample(sample.render_time - sample.capture_time);
  stats_.render.AddFrameInfo(*sample.decoded, sample.render_time);
  stats_.skipped_between_rendered.AddSample(skip_count_);
  skip_count_ = 0;

  if (last_render_time_.IsFinite()) {
    RTC_DCHECK(sample.render_time.IsFinite());
    TimeDelta render_interval = sample.render_time - last_render_time_;
    TimeDelta mean_interval = stats_.render.frames.interval().Mean();
    if (render_interval > TimeDelta::Millis(150) + mean_interval ||
        render_interval > 3 * mean_interval) {
      stats_.freeze_duration.AddSample(render_interval);
      stats_.time_between_freezes.AddSample(last_render_time_ -
                                            last_freeze_time_);
      last_freeze_time_ = sample.render_time;
    }
  }
  last_render_time_ = sample.render_time;
}

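// Samples call-wide stats, skipping fields that have no estimate yet
// (reported as non-positive), and records current process memory usage.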
void CallStatsCollector::AddStats(Call::Stats sample) {
  if (sample.send_bandwidth_bps > 0)
    stats_.target_rate.AddSampleBps(sample.send_bandwidth_bps);
  if (sample.pacer_delay_ms > 0)
    stats_.pacer_delay.AddSample(TimeDelta::Millis(sample.pacer_delay_ms));
  if (sample.rtt_ms > 0)
    stats_.round_trip_time.AddSample(TimeDelta::Millis(sample.rtt_ms));
  stats_.memory_usage.AddSample(rtc::GetProcessResidentSizeBytes());
}

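// Samples audio receive-side expand and accelerate rates and jitter buffer
// delay.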
void AudioReceiveStatsCollector::AddStats(AudioReceiveStream::Stats sample) {
  stats_.expand_rate.AddSample(sample.expand_rate);
  stats_.accelerate_rate.AddSample(sample.accelerate_rate);
  stats_.jitter_buffer.AddSampleMs(sample.jitter_buffer_ms);
}

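// Samples send-side encoder stats. FEC bitrate is derived from the growth in
// cumulative FEC payload and padding bytes across all substreams since the
// previous update.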
void VideoSendStatsCollector::AddStats(VideoSendStream::Stats sample,
                                       Timestamp at_time) {
  // It's not certain that we yet have estimates for any of these stats.
  // Check that they are positive before mixing them in.
  if (sample.encode_frame_rate <= 0)
    return;

  stats_.encode_frame_rate.AddSample(sample.encode_frame_rate);
  stats_.encode_time.AddSampleMs(sample.avg_encode_time_ms);
  stats_.encode_usage.AddSample(sample.encode_usage_percent / 100.0);
  stats_.media_bitrate.AddSampleBps(sample.media_bitrate_bps);

  size_t fec_bytes = 0;
  for (const auto& kv : sample.substreams) {
    fec_bytes += kv.second.rtp_stats.fec.payload_bytes +
                 kv.second.rtp_stats.fec.padding_bytes;
  }
  if (last_update_.IsFinite()) {
    auto fec_delta = DataSize::Bytes(fec_bytes - last_fec_bytes_);
    auto time_delta = at_time - last_update_;
    stats_.fec_bitrate.AddSample(fec_delta / time_delta);
  }
  last_fec_bytes_ = fec_bytes;
  last_update_ = at_time;
}

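// Samples receive-side decode time and resolution stats, skipping fields that
// are not yet populated.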
void VideoReceiveStatsCollector::AddStats(VideoReceiveStream::Stats sample) {
  if (sample.decode_ms > 0)
    stats_.decode_time.AddSampleMs(sample.decode_ms);
  if (sample.max_decode_ms > 0)
    stats_.decode_time_max.AddSampleMs(sample.max_decode_ms);
  if (sample.width > 0 && sample.height > 0) {
    stats_.decode_pixels.AddSample(sample.width * sample.height);
    stats_.resolution.AddSample(sample.height);
  }
}
}  // namespace test
}  // namespace webrtc