/*
 * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "video/receive_statistics_proxy2.h"

#include <algorithm>
#include <cmath>
#include <utility>

#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/thread.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/metrics.h"
#include "video/video_receive_stream2.h"

namespace webrtc {
namespace internal {
namespace {
// Periodic time interval for processing samples for `freq_offset_counter_`.
const int64_t kFreqOffsetProcessIntervalMs = 40000;

// Configuration for bad call detection.
const int kBadCallMinRequiredSamples = 10;
const int kMinSampleLengthMs = 990;
const int kNumMeasurements = 10;
const int kNumMeasurementsVariance = kNumMeasurements * 1.5;
const float kBadFraction = 0.8f;
// For fps:
// Low means low enough to be bad, high means high enough to be good
const int kLowFpsThreshold = 12;
const int kHighFpsThreshold = 14;
// For qp and fps variance:
// Low means low enough to be good, high means high enough to be bad
const int kLowQpThresholdVp8 = 60;
const int kHighQpThresholdVp8 = 70;
const int kLowVarianceThreshold = 1;
const int kHighVarianceThreshold = 2;

// Some metrics are reported as a maximum over this period.
// This should be synchronized with a typical getStats polling interval in
// the clients.
const int kMovingMaxWindowMs = 1000;

// How large a window we use to calculate the framerate/bitrate.
const int kRateStatisticsWindowSizeMs = 1000;

// Some sane ballpark estimate for maximum common value of inter-frame delay.
// Values below that will be stored explicitly in the array,
// values above - in the map.
const int kMaxCommonInterframeDelayMs = 500;

const char* UmaPrefixForContentType(VideoContentType content_type) {
  if (videocontenttypehelpers::IsScreenshare(content_type))
    return "WebRTC.Video.Screenshare";
  return "WebRTC.Video";
}

std::string UmaSuffixForContentType(VideoContentType content_type) {
  char ss_buf[1024];
  rtc::SimpleStringBuilder ss(ss_buf);
  int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type);
  if (simulcast_id > 0) {
    ss << ".S" << simulcast_id - 1;
  }
  int experiment_id = videocontenttypehelpers::GetExperimentId(content_type);
  if (experiment_id > 0) {
    ss << ".ExperimentGroup" << experiment_id - 1;
  }
  return ss.str();
}

// TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some
// rtc::Thread instances and/or implementations that don't register as the
// current task queue.
bool IsCurrentTaskQueueOrThread(TaskQueueBase* task_queue) {
  if (task_queue->IsCurrent())
    return true;

  rtc::Thread* current_thread = rtc::ThreadManager::Instance()->CurrentThread();
  if (!current_thread)
    return false;

  return static_cast<TaskQueueBase*>(current_thread) == task_queue;
}

}  // namespace

ReceiveStatisticsProxy::ReceiveStatisticsProxy(uint32_t remote_ssrc,
                                               Clock* clock,
                                               TaskQueueBase* worker_thread)
    : clock_(clock),
      start_ms_(clock->TimeInMilliseconds()),
      last_sample_time_(clock->TimeInMilliseconds()),
      fps_threshold_(kLowFpsThreshold,
                     kHighFpsThreshold,
                     kBadFraction,
                     kNumMeasurements),
      qp_threshold_(kLowQpThresholdVp8,
                    kHighQpThresholdVp8,
                    kBadFraction,
                    kNumMeasurements),
      variance_threshold_(kLowVarianceThreshold,
                          kHighVarianceThreshold,
                          kBadFraction,
                          kNumMeasurementsVariance),
      num_bad_states_(0),
      num_certain_states_(0),
      remote_ssrc_(remote_ssrc),
      // 1000ms window, scale 1000 for ms to s.
      decode_fps_estimator_(1000, 1000),
      renders_fps_estimator_(1000, 1000),
      render_fps_tracker_(100, 10u),
      render_pixel_tracker_(100, 10u),
      video_quality_observer_(new VideoQualityObserver()),
      interframe_delay_max_moving_(kMovingMaxWindowMs),
      freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
      last_content_type_(VideoContentType::UNSPECIFIED),
      last_codec_type_(kVideoCodecVP8),
      num_delayed_frames_rendered_(0),
      sum_missed_render_deadline_ms_(0),
      timing_frame_info_counter_(kMovingMaxWindowMs),
      worker_thread_(worker_thread) {
  RTC_DCHECK(worker_thread);
  decode_queue_.Detach();
  incoming_render_queue_.Detach();
  stats_.ssrc = remote_ssrc_;
}

ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
  RTC_DCHECK_RUN_ON(&main_thread_);
}

void ReceiveStatisticsProxy::UpdateHistograms(
    absl::optional<int> fraction_lost,
    const StreamDataCounters& rtp_stats,
    const StreamDataCounters* rtx_stats) {
  RTC_DCHECK_RUN_ON(&main_thread_);

  char log_stream_buf[8 * 1024];
  rtc::SimpleStringBuilder log_stream(log_stream_buf);

  int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;

  if (stats_.frame_counts.key_frames > 0 ||
      stats_.frame_counts.delta_frames > 0) {
    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds",
                                stream_duration_sec);
    log_stream << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
               << stream_duration_sec << '\n';
  }

  log_stream << "Frames decoded " << stats_.frames_decoded << '\n';

  if (num_unique_frames_) {
    int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded;
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver",
                              num_dropped_frames);
    log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames
               << '\n';
  }

  if (fraction_lost && stream_duration_sec >= metrics::kMinRunTimeInSeconds) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent",
                             *fraction_lost);
    log_stream << "WebRTC.Video.ReceivedPacketsLostInPercent " << *fraction_lost
               << '\n';
  }

  if (first_decoded_frame_time_ms_) {
    const int64_t elapsed_ms =
        (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_);
    if (elapsed_ms >=
        metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) {
      int decoded_fps = static_cast<int>(
          (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f);
      RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond",
                               decoded_fps);
      log_stream << "WebRTC.Video.DecodedFramesPerSecond " << decoded_fps
                 << '\n';

      const uint32_t frames_rendered = stats_.frames_rendered;
      if (frames_rendered > 0) {
        RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer",
                                 static_cast<int>(num_delayed_frames_rendered_ *
                                                  100 / frames_rendered));
        if (num_delayed_frames_rendered_ > 0) {
          RTC_HISTOGRAM_COUNTS_1000(
              "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs",
              static_cast<int>(sum_missed_render_deadline_ms_ /
                               num_delayed_frames_rendered_));
        }
      }
    }
  }

  const int kMinRequiredSamples = 200;
  int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount());
  if (samples >= kMinRequiredSamples) {
    int rendered_fps = round(render_fps_tracker_.ComputeTotalRate());
    RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond",
                             rendered_fps);
    log_stream << "WebRTC.Video.RenderFramesPerSecond " << rendered_fps << '\n';
    RTC_HISTOGRAM_COUNTS_100000(
        "WebRTC.Video.RenderSqrtPixelsPerSecond",
        round(render_pixel_tracker_.ComputeTotalRate()));
  }

  absl::optional<int> sync_offset_ms =
      sync_offset_counter_.Avg(kMinRequiredSamples);
  if (sync_offset_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs",
                               *sync_offset_ms);
    log_stream << "WebRTC.Video.AVSyncOffsetInMs " << *sync_offset_ms << '\n';
  }
  AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats();
  if (freq_offset_stats.num_samples > 0) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz",
                               freq_offset_stats.average);
    log_stream << "WebRTC.Video.RtpToNtpFreqOffsetInKhz "
               << freq_offset_stats.ToString() << '\n';
  }

  int num_total_frames =
      stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames;
  if (num_total_frames >= kMinRequiredSamples) {
    int num_key_frames = stats_.frame_counts.key_frames;
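    // Adding num_total_frames / 2 before dividing rounds the permille to the
    // nearest integer instead of truncating.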
    int key_frames_permille =
        (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
                              key_frames_permille);
    log_stream << "WebRTC.Video.KeyFramesReceivedInPermille "
               << key_frames_permille << '\n';
  }

  absl::optional<int> qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
  if (qp) {
    RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp);
    log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n';
  }

  absl::optional<int> decode_ms = decode_time_counter_.Avg(kMinRequiredSamples);
  if (decode_ms) {
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms);
    log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n';
  }
  absl::optional<int> jb_delay_ms =
      jitter_buffer_delay_counter_.Avg(kMinRequiredSamples);
  if (jb_delay_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
                               *jb_delay_ms);
    log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n';
  }

  absl::optional<int> target_delay_ms =
      target_delay_counter_.Avg(kMinRequiredSamples);
  if (target_delay_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs",
                               *target_delay_ms);
    log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n';
  }
  absl::optional<int> current_delay_ms =
      current_delay_counter_.Avg(kMinRequiredSamples);
  if (current_delay_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs",
                               *current_delay_ms);
    log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n';
  }
  absl::optional<int> delay_ms = delay_counter_.Avg(kMinRequiredSamples);
  if (delay_ms)
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms);

  // Aggregate content_specific_stats_ by removing experiment or simulcast
  // information;
  std::map<VideoContentType, ContentSpecificStats> aggregated_stats;
  for (const auto& it : content_specific_stats_) {
    // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes).
    VideoContentType content_type = it.first;
    if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) {
      // Aggregate on experiment id.
      videocontenttypehelpers::SetExperimentId(&content_type, 0);
      aggregated_stats[content_type].Add(it.second);
    }
    // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes).
    content_type = it.first;
    if (videocontenttypehelpers::GetExperimentId(content_type) > 0) {
      // Aggregate on simulcast id.
      videocontenttypehelpers::SetSimulcastId(&content_type, 0);
      aggregated_stats[content_type].Add(it.second);
    }
    // Calculate aggregated metrics (no suffixes. Aggregated on everything).
    content_type = it.first;
    videocontenttypehelpers::SetSimulcastId(&content_type, 0);
    videocontenttypehelpers::SetExperimentId(&content_type, 0);
    aggregated_stats[content_type].Add(it.second);
  }

  for (const auto& it : aggregated_stats) {
    // For the metric Foo we report the following slices:
    // WebRTC.Video.Foo,
    // WebRTC.Video.Screenshare.Foo,
    // WebRTC.Video.Foo.S[0-3],
    // WebRTC.Video.Foo.ExperimentGroup[0-7],
    // WebRTC.Video.Screenshare.Foo.S[0-3],
    // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7].
    auto content_type = it.first;
    auto stats = it.second;
    std::string uma_prefix = UmaPrefixForContentType(content_type);
    std::string uma_suffix = UmaSuffixForContentType(content_type);
    // Metrics can be sliced on either simulcast id or experiment id but not
    // both.
    RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 ||
               videocontenttypehelpers::GetSimulcastId(content_type) == 0);

    absl::optional<int> e2e_delay_ms =
        stats.e2e_delay_counter.Avg(kMinRequiredSamples);
    if (e2e_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms);
      log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
                 << *e2e_delay_ms << '\n';
    }
    absl::optional<int> e2e_delay_max_ms = stats.e2e_delay_counter.Max();
    if (e2e_delay_max_ms && e2e_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_100000(
          uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms);
      log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
                 << *e2e_delay_max_ms << '\n';
    }
    absl::optional<int> interframe_delay_ms =
        stats.interframe_delay_counter.Avg(kMinRequiredSamples);
    if (interframe_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".InterframeDelayInMs" + uma_suffix,
          *interframe_delay_ms);
      log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
                 << *interframe_delay_ms << '\n';
    }
    absl::optional<int> interframe_delay_max_ms =
        stats.interframe_delay_counter.Max();
    if (interframe_delay_max_ms && interframe_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
          *interframe_delay_max_ms);
      log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " "
                 << *interframe_delay_max_ms << '\n';
    }

    absl::optional<uint32_t> interframe_delay_95p_ms =
        stats.interframe_delay_percentiles.GetPercentile(0.95f);
    if (interframe_delay_95p_ms && interframe_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix,
          *interframe_delay_95p_ms);
      log_stream << uma_prefix << ".InterframeDelay95PercentileInMs"
                 << uma_suffix << " " << *interframe_delay_95p_ms << '\n';
    }

    absl::optional<int> width = stats.received_width.Avg(kMinRequiredSamples);
    if (width) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width);
      log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " "
                 << *width << '\n';
    }

    absl::optional<int> height = stats.received_height.Avg(kMinRequiredSamples);
    if (height) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height);
      log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " "
                 << *height << '\n';
    }

    if (content_type != VideoContentType::UNSPECIFIED) {
      // Don't report these 3 metrics unsliced, as more precise variants
      // are reported separately in this method.
      float flow_duration_sec = stats.flow_duration_ms / 1000.0;
      if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) {
        int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 /
                                                  flow_duration_sec / 1000);
        RTC_HISTOGRAM_COUNTS_SPARSE_10000(
            uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
            media_bitrate_kbps);
        log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix
                   << " " << media_bitrate_kbps << '\n';
      }

      int num_total_frames =
          stats.frame_counts.key_frames + stats.frame_counts.delta_frames;
      if (num_total_frames >= kMinRequiredSamples) {
        int num_key_frames = stats.frame_counts.key_frames;
        int key_frames_permille =
            (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
        RTC_HISTOGRAM_COUNTS_SPARSE_1000(
            uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix,
            key_frames_permille);
        log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix
                   << " " << key_frames_permille << '\n';
      }

      absl::optional<int> qp = stats.qp_counter.Avg(kMinRequiredSamples);
      if (qp) {
        RTC_HISTOGRAM_COUNTS_SPARSE_200(
            uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp);
        log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " "
                   << *qp << '\n';
      }
    }
  }

  StreamDataCounters rtp_rtx_stats = rtp_stats;
  if (rtx_stats)
    rtp_rtx_stats.Add(*rtx_stats);

  int64_t elapsed_sec =
      rtp_rtx_stats.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) /
      1000;
  if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.BitrateReceivedInKbps",
        static_cast<int>(rtp_rtx_stats.transmitted.TotalBytes() * 8 /
                         elapsed_sec / 1000));
    int media_bitrate_kbs = static_cast<int>(rtp_stats.MediaPayloadBytes() * 8 /
                                             elapsed_sec / 1000);
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps",
                               media_bitrate_kbs);
    log_stream << "WebRTC.Video.MediaBitrateReceivedInKbps "
               << media_bitrate_kbs << '\n';
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.PaddingBitrateReceivedInKbps",
        static_cast<int>(rtp_rtx_stats.transmitted.padding_bytes * 8 /
                         elapsed_sec / 1000));
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
        static_cast<int>(rtp_rtx_stats.retransmitted.TotalBytes() * 8 /
                         elapsed_sec / 1000));
    if (rtx_stats) {
      RTC_HISTOGRAM_COUNTS_10000(
          "WebRTC.Video.RtxBitrateReceivedInKbps",
          static_cast<int>(rtx_stats->transmitted.TotalBytes() * 8 /
                           elapsed_sec / 1000));
    }
    const RtcpPacketTypeCounter& counters = stats_.rtcp_packet_type_counts;
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute",
                               counters.nack_packets * 60 / elapsed_sec);
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute",
                               counters.fir_packets * 60 / elapsed_sec);
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute",
                               counters.pli_packets * 60 / elapsed_sec);
    if (counters.nack_requests > 0) {
      RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent",
                               counters.UniqueNackRequestsInPercent());
    }
  }

  if (num_certain_states_ >= kBadCallMinRequiredSamples) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any",
                             100 * num_bad_states_ / num_certain_states_);
  }
  absl::optional<double> fps_fraction =
      fps_threshold_.FractionHigh(kBadCallMinRequiredSamples);
  if (fps_fraction) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRate",
                             static_cast<int>(100 * (1 - *fps_fraction)));
  }
  absl::optional<double> variance_fraction =
      variance_threshold_.FractionHigh(kBadCallMinRequiredSamples);
  if (variance_fraction) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRateVariance",
                             static_cast<int>(100 * *variance_fraction));
  }
  absl::optional<double> qp_fraction =
      qp_threshold_.FractionHigh(kBadCallMinRequiredSamples);
  if (qp_fraction) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Qp",
                             static_cast<int>(100 * *qp_fraction));
  }

  RTC_LOG(LS_INFO) << log_stream.str();
  video_quality_observer_->UpdateHistograms(
      videocontenttypehelpers::IsScreenshare(last_content_type_));
}

void ReceiveStatisticsProxy::QualitySample(Timestamp now) {
  RTC_DCHECK_RUN_ON(&main_thread_);

  if (last_sample_time_ + kMinSampleLengthMs > now.ms())
    return;

  double fps =
      render_fps_tracker_.ComputeRateForInterval(now.ms() - last_sample_time_);
  absl::optional<int> qp = qp_sample_.Avg(1);

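  // For fps, IsHigh() means "good enough" (above kHighFpsThreshold), so a bad
  // state is its negation; for qp and variance, IsHigh() means "bad".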
  bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true);
  bool prev_qp_bad = qp_threshold_.IsHigh().value_or(false);
  bool prev_variance_bad = variance_threshold_.IsHigh().value_or(false);
  bool prev_any_bad = prev_fps_bad || prev_qp_bad || prev_variance_bad;

  fps_threshold_.AddMeasurement(static_cast<int>(fps));
  if (qp)
    qp_threshold_.AddMeasurement(*qp);
  absl::optional<double> fps_variance_opt = fps_threshold_.CalculateVariance();
  double fps_variance = fps_variance_opt.value_or(0);
  if (fps_variance_opt) {
    variance_threshold_.AddMeasurement(static_cast<int>(fps_variance));
  }

  bool fps_bad = !fps_threshold_.IsHigh().value_or(true);
  bool qp_bad = qp_threshold_.IsHigh().value_or(false);
  bool variance_bad = variance_threshold_.IsHigh().value_or(false);
  bool any_bad = fps_bad || qp_bad || variance_bad;

  if (!prev_any_bad && any_bad) {
    RTC_LOG(LS_INFO) << "Bad call (any) start: " << now.ms();
  } else if (prev_any_bad && !any_bad) {
    RTC_LOG(LS_INFO) << "Bad call (any) end: " << now.ms();
  }

  if (!prev_fps_bad && fps_bad) {
    RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now.ms();
  } else if (prev_fps_bad && !fps_bad) {
    RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now.ms();
  }

  if (!prev_qp_bad && qp_bad) {
    RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now.ms();
  } else if (prev_qp_bad && !qp_bad) {
    RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now.ms();
  }

  if (!prev_variance_bad && variance_bad) {
    RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now.ms();
  } else if (prev_variance_bad && !variance_bad) {
    RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now.ms();
  }

  RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: "
                      << (now.ms() - last_sample_time_) << " fps: " << fps
                      << " fps_bad: " << fps_bad << " qp: " << qp.value_or(-1)
                      << " qp_bad: " << qp_bad
                      << " variance_bad: " << variance_bad
                      << " fps_variance: " << fps_variance;

  last_sample_time_ = now.ms();
  qp_sample_.Reset();

  if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() ||
      qp_threshold_.IsHigh()) {
    if (any_bad)
      ++num_bad_states_;
    ++num_certain_states_;
  }
}

void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const {
  RTC_DCHECK_RUN_ON(&main_thread_);

  int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs;
  while (!frame_window_.empty() &&
         frame_window_.begin()->first < old_frames_ms) {
    frame_window_.erase(frame_window_.begin());
  }

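  // Frames per second over the window, rounded to the nearest integer.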
  size_t framerate =
      (frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs;

  stats_.network_frame_rate = static_cast<int>(framerate);
}

absl::optional<int64_t>
ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs(
    int64_t now_ms) const {
  RTC_DCHECK_RUN_ON(&main_thread_);
  if (!last_estimated_playout_ntp_timestamp_ms_ ||
      !last_estimated_playout_time_ms_) {
    return absl::nullopt;
  }
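  // Extrapolate the last reported NTP playout timestamp by the wall-clock time
  // elapsed since it was captured.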
  int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_;
  return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms;
}

VideoReceiveStreamInterface::Stats ReceiveStatisticsProxy::GetStats() const {
  RTC_DCHECK_RUN_ON(&main_thread_);

  // Like VideoReceiveStreamInterface::GetStats, called on the worker thread
  // from StatsCollector::ExtractMediaInfo via worker_thread()->BlockingCall().
  // WebRtcVideoChannel::GetStats(), GetVideoReceiverInfo.

  // Get current frame rates here, as only updating them on new frames prevents
  // us from ever correctly displaying frame rate of 0.
  int64_t now_ms = clock_->TimeInMilliseconds();
  UpdateFramerate(now_ms);

  stats_.render_frame_rate = renders_fps_estimator_.Rate(now_ms).value_or(0);
  stats_.decode_frame_rate = decode_fps_estimator_.Rate(now_ms).value_or(0);

  if (last_decoded_frame_time_ms_) {
    // Avoid using a newer timestamp than might be pending for decoded frames.
    // If we do use now_ms, we might roll the max window to a value that is
    // higher than that of a decoded frame timestamp that we haven't yet
    // captured the data for (i.e. pending call to OnDecodedFrame).
    stats_.interframe_delay_max_ms =
        interframe_delay_max_moving_.Max(*last_decoded_frame_time_ms_)
            .value_or(-1);
  } else {
    // We're paused. Avoid changing the state of `interframe_delay_max_moving_`.
    stats_.interframe_delay_max_ms = -1;
  }

  stats_.freeze_count = video_quality_observer_->NumFreezes();
  stats_.pause_count = video_quality_observer_->NumPauses();
  stats_.total_freezes_duration_ms =
      video_quality_observer_->TotalFreezesDurationMs();
  stats_.total_pauses_duration_ms =
      video_quality_observer_->TotalPausesDurationMs();
  stats_.total_frames_duration_ms =
      video_quality_observer_->TotalFramesDurationMs();
  stats_.sum_squared_frame_durations =
      video_quality_observer_->SumSquaredFrameDurationsSec();
  stats_.content_type = last_content_type_;
  stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms);
  stats_.jitter_buffer_delay_seconds =
      static_cast<double>(current_delay_counter_.Sum(1).value_or(0)) /
      rtc::kNumMillisecsPerSec;
  stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples();
  stats_.estimated_playout_ntp_timestamp_ms =
      GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms);
  return stats_;
}

void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
  RTC_DCHECK_RUN_ON(&decode_queue_);
  worker_thread_->PostTask(SafeTask(task_safety_.flag(), [payload_type, this] {
    RTC_DCHECK_RUN_ON(&main_thread_);
    stats_.current_payload_type = payload_type;
  }));
}

void ReceiveStatisticsProxy::OnDecoderInfo(
    const VideoDecoder::DecoderInfo& decoder_info) {
  RTC_DCHECK_RUN_ON(&decode_queue_);
  worker_thread_->PostTask(SafeTask(
      task_safety_.flag(),
      [this, name = decoder_info.implementation_name,
       is_hardware_accelerated = decoder_info.is_hardware_accelerated]() {
        RTC_DCHECK_RUN_ON(&main_thread_);
        stats_.decoder_implementation_name = name;
        stats_.power_efficient_decoder = is_hardware_accelerated;
      }));
}

void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
    int max_decode_ms,
    int current_delay_ms,
    int target_delay_ms,
    int jitter_buffer_ms,
    int min_playout_delay_ms,
    int render_delay_ms) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  stats_.max_decode_ms = max_decode_ms;
  stats_.current_delay_ms = current_delay_ms;
  stats_.target_delay_ms = target_delay_ms;
  stats_.jitter_buffer_ms = jitter_buffer_ms;
  stats_.min_playout_delay_ms = min_playout_delay_ms;
  stats_.render_delay_ms = render_delay_ms;
  jitter_buffer_delay_counter_.Add(jitter_buffer_ms);
  target_delay_counter_.Add(target_delay_ms);
  current_delay_counter_.Add(current_delay_ms);
  // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time +
  // render delay).
  delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2);
}

void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  num_unique_frames_.emplace(num_unique_frames);
}

void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
    const TimingFrameInfo& info) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  if (info.flags != VideoSendTiming::kInvalid) {
    int64_t now_ms = clock_->TimeInMilliseconds();
    timing_frame_info_counter_.Add(info, now_ms);
  }

  // Measure initial decoding latency between the first frame arriving and
  // the first frame being decoded.
  if (!first_frame_received_time_ms_.has_value()) {
    first_frame_received_time_ms_ = info.receive_finish_ms;
  }
  if (stats_.first_frame_received_to_decoded_ms == -1 &&
      first_decoded_frame_time_ms_) {
    stats_.first_frame_received_to_decoded_ms =
        *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_;
  }
}

void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
    uint32_t ssrc,
    const RtcpPacketTypeCounter& packet_counter) {
  if (ssrc != remote_ssrc_)
    return;

  if (!IsCurrentTaskQueueOrThread(worker_thread_)) {
    // RtpRtcpInterface::Configuration has a single
    // RtcpPacketTypeCounterObserver and that same configuration may be used for
    // both receiver and sender (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl). The
    // RTCPSender implementation currently makes calls to this function on a
    // process thread whereas the RTCPReceiver implementation calls back on the
    // [main] worker thread.
    // So until the sender implementation has been updated, we work around this
    // here by posting the update to the expected thread. We make a by value
    // copy of the `task_safety_` to handle the case if the queued task
    // runs after the `ReceiveStatisticsProxy` has been deleted. In such a
    // case the packet_counter update won't be recorded.
    worker_thread_->PostTask(
        SafeTask(task_safety_.flag(), [ssrc, packet_counter, this]() {
          RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
        }));
    return;
  }

  RTC_DCHECK_RUN_ON(&main_thread_);
  stats_.rtcp_packet_type_counts = packet_counter;
}

void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  // TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we
  // receive stats from one of them.
  if (remote_ssrc_ != ssrc)
    return;

  stats_.c_name = std::string(cname);
}

void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame,
                                            absl::optional<uint8_t> qp,
                                            TimeDelta decode_time,
                                            VideoContentType content_type) {
  TimeDelta processing_delay = TimeDelta::Zero();
  webrtc::Timestamp current_time = clock_->CurrentTime();
  // TODO(bugs.webrtc.org/13984): some tests do not fill packet_infos().
  TimeDelta assembly_time = TimeDelta::Zero();
  if (frame.packet_infos().size() > 0) {
    const auto [first_packet, last_packet] = std::minmax_element(
        frame.packet_infos().cbegin(), frame.packet_infos().cend(),
        [](const webrtc::RtpPacketInfo& a, const webrtc::RtpPacketInfo& b) {
          return a.receive_time() < b.receive_time();
        });
    if (first_packet->receive_time().IsFinite()) {
      processing_delay = current_time - first_packet->receive_time();
      // Extract frame assembly time (i.e. time between earliest and latest
      // packet arrival). Note: for single-packet frames this will be 0.
      assembly_time =
          last_packet->receive_time() - first_packet->receive_time();
    }
  }
  // See VCMDecodedFrameCallback::Decoded for more info on what thread/queue we
  // may be on. E.g. on iOS this gets called on
  // "com.apple.coremedia.decompressionsession.clientcallback"
  VideoFrameMetaData meta(frame, current_time);
  worker_thread_->PostTask(
      SafeTask(task_safety_.flag(), [meta, qp, decode_time, processing_delay,
                                     assembly_time, content_type, this]() {
        OnDecodedFrame(meta, qp, decode_time, processing_delay, assembly_time,
                       content_type);
      }));
}

void ReceiveStatisticsProxy::OnDecodedFrame(
    const VideoFrameMetaData& frame_meta,
    absl::optional<uint8_t> qp,
    TimeDelta decode_time,
    TimeDelta processing_delay,
    TimeDelta assembly_time,
    VideoContentType content_type) {
  RTC_DCHECK_RUN_ON(&main_thread_);

  const bool is_screenshare =
      videocontenttypehelpers::IsScreenshare(content_type);
  const bool was_screenshare =
      videocontenttypehelpers::IsScreenshare(last_content_type_);

  if (is_screenshare != was_screenshare) {
    // Reset the quality observer if content type is switched. But first report
    // stats for the previous part of the call.
    video_quality_observer_->UpdateHistograms(was_screenshare);
    video_quality_observer_.reset(new VideoQualityObserver());
  }

  video_quality_observer_->OnDecodedFrame(frame_meta.rtp_timestamp, qp,
                                          last_codec_type_);

  ContentSpecificStats* content_specific_stats =
      &content_specific_stats_[content_type];

  ++stats_.frames_decoded;
  if (qp) {
    if (!stats_.qp_sum) {
      if (stats_.frames_decoded != 1) {
        RTC_LOG(LS_WARNING)
            << "Frames decoded was not 1 when first qp value was received.";
      }
      stats_.qp_sum = 0;
    }
    *stats_.qp_sum += *qp;
    content_specific_stats->qp_counter.Add(*qp);
  } else if (stats_.qp_sum) {
    RTC_LOG(LS_WARNING)
        << "QP sum was already set and no QP was given for a frame.";
    stats_.qp_sum.reset();
  }
  decode_time_counter_.Add(decode_time.ms());
  stats_.decode_ms = decode_time.ms();
  stats_.total_decode_time += decode_time;
  stats_.total_processing_delay += processing_delay;
  stats_.total_assembly_time += assembly_time;
  if (!assembly_time.IsZero()) {
    ++stats_.frames_assembled_from_multiple_packets;
  }

  last_content_type_ = content_type;
  decode_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms());

  if (last_decoded_frame_time_ms_) {
    int64_t interframe_delay_ms =
        frame_meta.decode_timestamp.ms() - *last_decoded_frame_time_ms_;
    RTC_DCHECK_GE(interframe_delay_ms, 0);
    double interframe_delay = interframe_delay_ms / 1000.0;
    stats_.total_inter_frame_delay += interframe_delay;
    stats_.total_squared_inter_frame_delay +=
        interframe_delay * interframe_delay;
    interframe_delay_max_moving_.Add(interframe_delay_ms,
                                     frame_meta.decode_timestamp.ms());
    content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms);
    content_specific_stats->interframe_delay_percentiles.Add(
        interframe_delay_ms);
    content_specific_stats->flow_duration_ms += interframe_delay_ms;
  }
  if (stats_.frames_decoded == 1) {
    first_decoded_frame_time_ms_.emplace(frame_meta.decode_timestamp.ms());
  }
  last_decoded_frame_time_ms_.emplace(frame_meta.decode_timestamp.ms());
}

void ReceiveStatisticsProxy::OnRenderedFrame(
    const VideoFrameMetaData& frame_meta) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  // Called from VideoReceiveStream2::OnFrame.

  RTC_DCHECK_GT(frame_meta.width, 0);
  RTC_DCHECK_GT(frame_meta.height, 0);

  video_quality_observer_->OnRenderedFrame(frame_meta);

  ContentSpecificStats* content_specific_stats =
      &content_specific_stats_[last_content_type_];
  renders_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms());

  ++stats_.frames_rendered;
  stats_.width = frame_meta.width;
  stats_.height = frame_meta.height;

  render_fps_tracker_.AddSamples(1);
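  // The pixel tracker accumulates sqrt(width * height) per rendered frame and
  // feeds the "WebRTC.Video.RenderSqrtPixelsPerSecond" histogram reported in
  // UpdateHistograms().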
  render_pixel_tracker_.AddSamples(sqrt(frame_meta.width * frame_meta.height));
  content_specific_stats->received_width.Add(frame_meta.width);
  content_specific_stats->received_height.Add(frame_meta.height);

  // Consider taking stats_.render_delay_ms into account.
  const int64_t time_until_rendering_ms =
      frame_meta.render_time_ms() - frame_meta.decode_timestamp.ms();
  if (time_until_rendering_ms < 0) {
    sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
    ++num_delayed_frames_rendered_;
  }

  if (frame_meta.ntp_time_ms > 0) {
    int64_t delay_ms =
        clock_->CurrentNtpInMilliseconds() - frame_meta.ntp_time_ms;
    if (delay_ms >= 0) {
      content_specific_stats->e2e_delay_counter.Add(delay_ms);
    }
  }

  QualitySample(frame_meta.decode_timestamp);
}

void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
                                                 int64_t sync_offset_ms,
                                                 double estimated_freq_khz) {
  RTC_DCHECK_RUN_ON(&main_thread_);

  const int64_t now_ms = clock_->TimeInMilliseconds();
  sync_offset_counter_.Add(std::abs(sync_offset_ms));
  stats_.sync_offset_ms = sync_offset_ms;
  last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
  last_estimated_playout_time_ms_ = now_ms;

  const double kMaxFreqKhz = 10000.0;
  int offset_khz = kMaxFreqKhz;
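  // Video RTP timestamps use a nominal 90 kHz clock, so the reported offset is
  // the estimated clock rate's deviation from 90 kHz, rounded to the nearest
  // kHz.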
  // Should not be zero or negative. If so, report max.
  if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0)
    offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);

  freq_offset_counter_.Add(offset_khz);
}

void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
                                             size_t size_bytes,
                                             VideoContentType content_type) {
  RTC_DCHECK_RUN_ON(&main_thread_);

  if (is_keyframe) {
    ++stats_.frame_counts.key_frames;
  } else {
    ++stats_.frame_counts.delta_frames;
  }

  // Content type extension is set only for keyframes and should be propagated
  // for all the following delta frames. Here we may receive frames out of order
  // and miscategorise some delta frames near the layer switch.
  // This may slightly offset calculated bitrate and keyframes permille metrics.
  VideoContentType propagated_content_type =
      is_keyframe ? content_type : last_content_type_;

  ContentSpecificStats* content_specific_stats =
      &content_specific_stats_[propagated_content_type];

  content_specific_stats->total_media_bytes += size_bytes;
  if (is_keyframe) {
    ++content_specific_stats->frame_counts.key_frames;
  } else {
    ++content_specific_stats->frame_counts.delta_frames;
  }

  int64_t now_ms = clock_->TimeInMilliseconds();
  frame_window_.insert(std::make_pair(now_ms, size_bytes));
  UpdateFramerate(now_ms);
}

void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) {
  // Can be called on either the decode queue or the worker thread
  // See FrameBuffer2 for more details.
  worker_thread_->PostTask(
      SafeTask(task_safety_.flag(), [frames_dropped, this]() {
        RTC_DCHECK_RUN_ON(&main_thread_);
        stats_.frames_dropped += frames_dropped;
      }));
}

void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  last_codec_type_ = codec_type;
  if (last_codec_type_ == kVideoCodecVP8 && qp != -1) {
    qp_counters_.vp8.Add(qp);
    qp_sample_.Add(qp);
  }
}

void ReceiveStatisticsProxy::OnStreamInactive() {
  RTC_DCHECK_RUN_ON(&main_thread_);

  // TODO(sprang): Figure out any other state that should be reset.

  // Don't report inter-frame delay if stream was paused.
  last_decoded_frame_time_ms_.reset();

  video_quality_observer_->OnStreamInactive();
}

void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms) {
  RTC_DCHECK_RUN_ON(&main_thread_);
  avg_rtt_ms_ = avg_rtt_ms;
}

void ReceiveStatisticsProxy::DecoderThreadStarting() {
  RTC_DCHECK_RUN_ON(&main_thread_);
}

void ReceiveStatisticsProxy::DecoderThreadStopped() {
  RTC_DCHECK_RUN_ON(&main_thread_);
  decode_queue_.Detach();
}

ReceiveStatisticsProxy::ContentSpecificStats::ContentSpecificStats()
    : interframe_delay_percentiles(kMaxCommonInterframeDelayMs) {}

ReceiveStatisticsProxy::ContentSpecificStats::~ContentSpecificStats() = default;

void ReceiveStatisticsProxy::ContentSpecificStats::Add(
    const ContentSpecificStats& other) {
  e2e_delay_counter.Add(other.e2e_delay_counter);
  interframe_delay_counter.Add(other.interframe_delay_counter);
  flow_duration_ms += other.flow_duration_ms;
  total_media_bytes += other.total_media_bytes;
  received_height.Add(other.received_height);
  received_width.Add(other.received_width);
  qp_counter.Add(other.qp_counter);
  frame_counts.key_frames += other.frame_counts.key_frames;
  frame_counts.delta_frames += other.frame_counts.delta_frames;
  interframe_delay_percentiles.Add(other.interframe_delay_percentiles);
}

}  // namespace internal
}  // namespace webrtc