1 /*
2 * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "video/receive_statistics_proxy2.h"
12
13 #include <algorithm>
14 #include <cmath>
15 #include <utility>
16
17 #include "modules/video_coding/include/video_codec_interface.h"
18 #include "rtc_base/checks.h"
19 #include "rtc_base/logging.h"
20 #include "rtc_base/strings/string_builder.h"
21 #include "rtc_base/task_utils/to_queued_task.h"
22 #include "rtc_base/thread.h"
23 #include "rtc_base/time_utils.h"
24 #include "system_wrappers/include/clock.h"
25 #include "system_wrappers/include/field_trial.h"
26 #include "system_wrappers/include/metrics.h"
27 #include "video/video_receive_stream2.h"
28
29 namespace webrtc {
30 namespace internal {
31 namespace {
32 // Periodic time interval for processing samples for |freq_offset_counter_|.
33 const int64_t kFreqOffsetProcessIntervalMs = 40000;
34
35 // Configuration for bad call detection.
36 const int kBadCallMinRequiredSamples = 10;
37 const int kMinSampleLengthMs = 990;
38 const int kNumMeasurements = 10;
39 const int kNumMeasurementsVariance = kNumMeasurements * 1.5;
40 const float kBadFraction = 0.8f;
41 // For fps:
42 // Low means low enough to be bad, high means high enough to be good
43 const int kLowFpsThreshold = 12;
44 const int kHighFpsThreshold = 14;
45 // For qp and fps variance:
46 // Low means low enough to be good, high means high enough to be bad
47 const int kLowQpThresholdVp8 = 60;
48 const int kHighQpThresholdVp8 = 70;
49 const int kLowVarianceThreshold = 1;
50 const int kHighVarianceThreshold = 2;
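// These constants configure the QualityThreshold trackers constructed below
// (fps_threshold_, qp_threshold_ and variance_threshold_); QualitySample()
// feeds them roughly once per second (kMinSampleLengthMs) and the resulting
// verdicts are reported as the WebRTC.Video.BadCall.* histograms in
// UpdateHistograms().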
51
52 // Some metrics are reported as a maximum over this period.
53 // This should be synchronized with a typical getStats polling interval in
54 // the clients.
55 const int kMovingMaxWindowMs = 1000;
56
57 // Size of the window used to calculate the framerate/bitrate.
58 const int kRateStatisticsWindowSizeMs = 1000;
59
60 // A ballpark estimate of the largest commonly occurring inter-frame delay.
61 // Values below this are stored explicitly in the array,
62 // values above it in the map.
63 const int kMaxCommonInterframeDelayMs = 500;
64
65 const char* UmaPrefixForContentType(VideoContentType content_type) {
66 if (videocontenttypehelpers::IsScreenshare(content_type))
67 return "WebRTC.Video.Screenshare";
68 return "WebRTC.Video";
69 }
70
71 std::string UmaSuffixForContentType(VideoContentType content_type) {
72 char ss_buf[1024];
73 rtc::SimpleStringBuilder ss(ss_buf);
74 int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type);
75 if (simulcast_id > 0) {
76 ss << ".S" << simulcast_id - 1;
77 }
78 int experiment_id = videocontenttypehelpers::GetExperimentId(content_type);
79 if (experiment_id > 0) {
80 ss << ".ExperimentGroup" << experiment_id - 1;
81 }
82 return ss.str();
83 }
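// For example, a content type carrying simulcast id 2 and experiment id 1
// yields the suffix ".S1.ExperimentGroup0".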
84
85 // TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some
86 // rtc::Thread instances and/or implementations that don't register as the
87 // current task queue.
88 bool IsCurrentTaskQueueOrThread(TaskQueueBase* task_queue) {
89 if (task_queue->IsCurrent())
90 return true;
91
92 rtc::Thread* current_thread = rtc::ThreadManager::Instance()->CurrentThread();
93 if (!current_thread)
94 return false;
95
96 return static_cast<TaskQueueBase*>(current_thread) == task_queue;
97 }
98
99 } // namespace
100
101 ReceiveStatisticsProxy::ReceiveStatisticsProxy(
102 const VideoReceiveStream::Config* config,
103 Clock* clock,
104 TaskQueueBase* worker_thread)
105 : clock_(clock),
106 start_ms_(clock->TimeInMilliseconds()),
107 enable_decode_time_histograms_(
108 !field_trial::IsEnabled("WebRTC-DecodeTimeHistogramsKillSwitch")),
109 last_sample_time_(clock->TimeInMilliseconds()),
110 fps_threshold_(kLowFpsThreshold,
111 kHighFpsThreshold,
112 kBadFraction,
113 kNumMeasurements),
114 qp_threshold_(kLowQpThresholdVp8,
115 kHighQpThresholdVp8,
116 kBadFraction,
117 kNumMeasurements),
118 variance_threshold_(kLowVarianceThreshold,
119 kHighVarianceThreshold,
120 kBadFraction,
121 kNumMeasurementsVariance),
122 num_bad_states_(0),
123 num_certain_states_(0),
124 remote_ssrc_(config->rtp.remote_ssrc),
125 // 1000ms window, scale 1000 for ms to s.
126 decode_fps_estimator_(1000, 1000),
127 renders_fps_estimator_(1000, 1000),
128 render_fps_tracker_(100, 10u),
129 render_pixel_tracker_(100, 10u),
130 video_quality_observer_(new VideoQualityObserver()),
131 interframe_delay_max_moving_(kMovingMaxWindowMs),
132 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
133 last_content_type_(VideoContentType::UNSPECIFIED),
134 last_codec_type_(kVideoCodecVP8),
135 num_delayed_frames_rendered_(0),
136 sum_missed_render_deadline_ms_(0),
137 timing_frame_info_counter_(kMovingMaxWindowMs),
138 worker_thread_(worker_thread) {
139 RTC_DCHECK(worker_thread);
140 decode_queue_.Detach();
141 incoming_render_queue_.Detach();
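// The decode/render sequence checkers are detached here so that they
// (re)attach to whichever thread or queue first uses them.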
142 stats_.ssrc = config->rtp.remote_ssrc;
143 }
144
145 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
146 RTC_DCHECK_RUN_ON(&main_thread_);
147 }
148
149 void ReceiveStatisticsProxy::UpdateHistograms(
150 absl::optional<int> fraction_lost,
151 const StreamDataCounters& rtp_stats,
152 const StreamDataCounters* rtx_stats) {
153 RTC_DCHECK_RUN_ON(&main_thread_);
154
155 char log_stream_buf[8 * 1024];
156 rtc::SimpleStringBuilder log_stream(log_stream_buf);
157
158 int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
159
160 if (stats_.frame_counts.key_frames > 0 ||
161 stats_.frame_counts.delta_frames > 0) {
162 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds",
163 stream_duration_sec);
164 log_stream << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
165 << stream_duration_sec << '\n';
166 }
167
168 log_stream << "Frames decoded " << stats_.frames_decoded << '\n';
169
170 if (num_unique_frames_) {
171 int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded;
172 RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver",
173 num_dropped_frames);
174 log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames
175 << '\n';
176 }
177
178 if (fraction_lost && stream_duration_sec >= metrics::kMinRunTimeInSeconds) {
179 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent",
180 *fraction_lost);
181 log_stream << "WebRTC.Video.ReceivedPacketsLostInPercent " << *fraction_lost
182 << '\n';
183 }
184
185 if (first_decoded_frame_time_ms_) {
186 const int64_t elapsed_ms =
187 (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_);
188 if (elapsed_ms >=
189 metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) {
190 int decoded_fps = static_cast<int>(
191 (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f);
192 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond",
193 decoded_fps);
194 log_stream << "WebRTC.Video.DecodedFramesPerSecond " << decoded_fps
195 << '\n';
196
197 const uint32_t frames_rendered = stats_.frames_rendered;
198 if (frames_rendered > 0) {
199 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer",
200 static_cast<int>(num_delayed_frames_rendered_ *
201 100 / frames_rendered));
202 if (num_delayed_frames_rendered_ > 0) {
203 RTC_HISTOGRAM_COUNTS_1000(
204 "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs",
205 static_cast<int>(sum_missed_render_deadline_ms_ /
206 num_delayed_frames_rendered_));
207 }
208 }
209 }
210 }
211
212 const int kMinRequiredSamples = 200;
213 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount());
214 if (samples >= kMinRequiredSamples) {
215 int rendered_fps = round(render_fps_tracker_.ComputeTotalRate());
216 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond",
217 rendered_fps);
218 log_stream << "WebRTC.Video.RenderFramesPerSecond " << rendered_fps << '\n';
219 RTC_HISTOGRAM_COUNTS_100000(
220 "WebRTC.Video.RenderSqrtPixelsPerSecond",
221 round(render_pixel_tracker_.ComputeTotalRate()));
222 }
223
224 absl::optional<int> sync_offset_ms =
225 sync_offset_counter_.Avg(kMinRequiredSamples);
226 if (sync_offset_ms) {
227 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs",
228 *sync_offset_ms);
229 log_stream << "WebRTC.Video.AVSyncOffsetInMs " << *sync_offset_ms << '\n';
230 }
231 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats();
232 if (freq_offset_stats.num_samples > 0) {
233 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz",
234 freq_offset_stats.average);
235 log_stream << "WebRTC.Video.RtpToNtpFreqOffsetInKhz "
236 << freq_offset_stats.ToString() << '\n';
237 }
238
239 int num_total_frames =
240 stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames;
241 if (num_total_frames >= kMinRequiredSamples) {
242 int num_key_frames = stats_.frame_counts.key_frames;
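// Rounded to the nearest permille, e.g. 25 key frames out of 300 frames in
// total gives 83.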
243 int key_frames_permille =
244 (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
245 RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
246 key_frames_permille);
247 log_stream << "WebRTC.Video.KeyFramesReceivedInPermille "
248 << key_frames_permille << '\n';
249 }
250
251 absl::optional<int> qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
252 if (qp) {
253 RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp);
254 log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n';
255 }
256
257 absl::optional<int> decode_ms = decode_time_counter_.Avg(kMinRequiredSamples);
258 if (decode_ms) {
259 RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms);
260 log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n';
261 }
262 absl::optional<int> jb_delay_ms =
263 jitter_buffer_delay_counter_.Avg(kMinRequiredSamples);
264 if (jb_delay_ms) {
265 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
266 *jb_delay_ms);
267 log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n';
268 }
269
270 absl::optional<int> target_delay_ms =
271 target_delay_counter_.Avg(kMinRequiredSamples);
272 if (target_delay_ms) {
273 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs",
274 *target_delay_ms);
275 log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n';
276 }
277 absl::optional<int> current_delay_ms =
278 current_delay_counter_.Avg(kMinRequiredSamples);
279 if (current_delay_ms) {
280 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs",
281 *current_delay_ms);
282 log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n';
283 }
284 absl::optional<int> delay_ms = delay_counter_.Avg(kMinRequiredSamples);
285 if (delay_ms)
286 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms);
287
288 // Aggregate content_specific_stats_ by removing experiment or simulcast
289 // information.
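// For example, stats recorded under a content type that has both a simulcast
// id and an experiment id contribute to three buckets: one keyed on the
// simulcast id alone, one keyed on the experiment id alone, and one fully
// aggregated bucket. The screenshare bit is preserved in all of them.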
290 std::map<VideoContentType, ContentSpecificStats> aggregated_stats;
291 for (const auto& it : content_specific_stats_) {
292 // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes).
293 VideoContentType content_type = it.first;
294 if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) {
295 // Aggregate on experiment id.
296 videocontenttypehelpers::SetExperimentId(&content_type, 0);
297 aggregated_stats[content_type].Add(it.second);
298 }
299 // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes).
300 content_type = it.first;
301 if (videocontenttypehelpers::GetExperimentId(content_type) > 0) {
302 // Aggregate on simulcast id.
303 videocontenttypehelpers::SetSimulcastId(&content_type, 0);
304 aggregated_stats[content_type].Add(it.second);
305 }
306 // Calculate aggregated metrics (no suffixes. Aggregated on everything).
307 content_type = it.first;
308 videocontenttypehelpers::SetSimulcastId(&content_type, 0);
309 videocontenttypehelpers::SetExperimentId(&content_type, 0);
310 aggregated_stats[content_type].Add(it.second);
311 }
312
313 for (const auto& it : aggregated_stats) {
314 // For the metric Foo we report the following slices:
315 // WebRTC.Video.Foo,
316 // WebRTC.Video.Screenshare.Foo,
317 // WebRTC.Video.Foo.S[0-3],
318 // WebRTC.Video.Foo.ExperimentGroup[0-7],
319 // WebRTC.Video.Screenshare.Foo.S[0-3],
320 // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7].
321 auto content_type = it.first;
322 auto stats = it.second;
323 std::string uma_prefix = UmaPrefixForContentType(content_type);
324 std::string uma_suffix = UmaSuffixForContentType(content_type);
325 // Metrics can be sliced on either simulcast id or experiment id but not
326 // both.
327 RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 ||
328 videocontenttypehelpers::GetSimulcastId(content_type) == 0);
329
330 absl::optional<int> e2e_delay_ms =
331 stats.e2e_delay_counter.Avg(kMinRequiredSamples);
332 if (e2e_delay_ms) {
333 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
334 uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms);
335 log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
336 << *e2e_delay_ms << '\n';
337 }
338 absl::optional<int> e2e_delay_max_ms = stats.e2e_delay_counter.Max();
339 if (e2e_delay_max_ms && e2e_delay_ms) {
340 RTC_HISTOGRAM_COUNTS_SPARSE_100000(
341 uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms);
342 log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
343 << *e2e_delay_max_ms << '\n';
344 }
345 absl::optional<int> interframe_delay_ms =
346 stats.interframe_delay_counter.Avg(kMinRequiredSamples);
347 if (interframe_delay_ms) {
348 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
349 uma_prefix + ".InterframeDelayInMs" + uma_suffix,
350 *interframe_delay_ms);
351 log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
352 << *interframe_delay_ms << '\n';
353 }
354 absl::optional<int> interframe_delay_max_ms =
355 stats.interframe_delay_counter.Max();
356 if (interframe_delay_max_ms && interframe_delay_ms) {
357 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
358 uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
359 *interframe_delay_max_ms);
360 log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " "
361 << *interframe_delay_max_ms << '\n';
362 }
363
364 absl::optional<uint32_t> interframe_delay_95p_ms =
365 stats.interframe_delay_percentiles.GetPercentile(0.95f);
366 if (interframe_delay_95p_ms && interframe_delay_ms != -1) {
367 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
368 uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix,
369 *interframe_delay_95p_ms);
370 log_stream << uma_prefix << ".InterframeDelay95PercentileInMs"
371 << uma_suffix << " " << *interframe_delay_95p_ms << '\n';
372 }
373
374 absl::optional<int> width = stats.received_width.Avg(kMinRequiredSamples);
375 if (width) {
376 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
377 uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width);
378 log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " "
379 << *width << '\n';
380 }
381
382 absl::optional<int> height = stats.received_height.Avg(kMinRequiredSamples);
383 if (height) {
384 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
385 uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height);
386 log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " "
387 << *height << '\n';
388 }
389
390 if (content_type != VideoContentType::UNSPECIFIED) {
391 // Don't report these 3 metrics unsliced, as more precise variants
392 // are reported separately in this method.
393 float flow_duration_sec = stats.flow_duration_ms / 1000.0;
394 if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) {
395 int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 /
396 flow_duration_sec / 1000);
397 RTC_HISTOGRAM_COUNTS_SPARSE_10000(
398 uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
399 media_bitrate_kbps);
400 log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix
401 << " " << media_bitrate_kbps << '\n';
402 }
403
404 int num_total_frames =
405 stats.frame_counts.key_frames + stats.frame_counts.delta_frames;
406 if (num_total_frames >= kMinRequiredSamples) {
407 int num_key_frames = stats.frame_counts.key_frames;
408 int key_frames_permille =
409 (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
410 RTC_HISTOGRAM_COUNTS_SPARSE_1000(
411 uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix,
412 key_frames_permille);
413 log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix
414 << " " << key_frames_permille << '\n';
415 }
416
417 absl::optional<int> qp = stats.qp_counter.Avg(kMinRequiredSamples);
418 if (qp) {
419 RTC_HISTOGRAM_COUNTS_SPARSE_200(
420 uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp);
421 log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " "
422 << *qp << '\n';
423 }
424 }
425 }
426
427 StreamDataCounters rtp_rtx_stats = rtp_stats;
428 if (rtx_stats)
429 rtp_rtx_stats.Add(*rtx_stats);
430
431 int64_t elapsed_sec =
432 rtp_rtx_stats.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) /
433 1000;
434 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
435 RTC_HISTOGRAM_COUNTS_10000(
436 "WebRTC.Video.BitrateReceivedInKbps",
437 static_cast<int>(rtp_rtx_stats.transmitted.TotalBytes() * 8 /
438 elapsed_sec / 1000));
439 int media_bitrate_kbs = static_cast<int>(rtp_stats.MediaPayloadBytes() * 8 /
440 elapsed_sec / 1000);
441 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps",
442 media_bitrate_kbs);
443 log_stream << "WebRTC.Video.MediaBitrateReceivedInKbps "
444 << media_bitrate_kbs << '\n';
445 RTC_HISTOGRAM_COUNTS_10000(
446 "WebRTC.Video.PaddingBitrateReceivedInKbps",
447 static_cast<int>(rtp_rtx_stats.transmitted.padding_bytes * 8 /
448 elapsed_sec / 1000));
449 RTC_HISTOGRAM_COUNTS_10000(
450 "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
451 static_cast<int>(rtp_rtx_stats.retransmitted.TotalBytes() * 8 /
452 elapsed_sec / 1000));
453 if (rtx_stats) {
454 RTC_HISTOGRAM_COUNTS_10000(
455 "WebRTC.Video.RtxBitrateReceivedInKbps",
456 static_cast<int>(rtx_stats->transmitted.TotalBytes() * 8 /
457 elapsed_sec / 1000));
458 }
459 const RtcpPacketTypeCounter& counters = stats_.rtcp_packet_type_counts;
460 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute",
461 counters.nack_packets * 60 / elapsed_sec);
462 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute",
463 counters.fir_packets * 60 / elapsed_sec);
464 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute",
465 counters.pli_packets * 60 / elapsed_sec);
466 if (counters.nack_requests > 0) {
467 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent",
468 counters.UniqueNackRequestsInPercent());
469 }
470 }
471
472 if (num_certain_states_ >= kBadCallMinRequiredSamples) {
473 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any",
474 100 * num_bad_states_ / num_certain_states_);
475 }
476 absl::optional<double> fps_fraction =
477 fps_threshold_.FractionHigh(kBadCallMinRequiredSamples);
478 if (fps_fraction) {
479 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRate",
480 static_cast<int>(100 * (1 - *fps_fraction)));
481 }
482 absl::optional<double> variance_fraction =
483 variance_threshold_.FractionHigh(kBadCallMinRequiredSamples);
484 if (variance_fraction) {
485 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRateVariance",
486 static_cast<int>(100 * *variance_fraction));
487 }
488 absl::optional<double> qp_fraction =
489 qp_threshold_.FractionHigh(kBadCallMinRequiredSamples);
490 if (qp_fraction) {
491 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Qp",
492 static_cast<int>(100 * *qp_fraction));
493 }
494
495 RTC_LOG(LS_INFO) << log_stream.str();
496 video_quality_observer_->UpdateHistograms(
497 videocontenttypehelpers::IsScreenshare(last_content_type_));
498 }
499
500 void ReceiveStatisticsProxy::QualitySample(Timestamp now) {
501 RTC_DCHECK_RUN_ON(&main_thread_);
502
503 if (last_sample_time_ + kMinSampleLengthMs > now.ms())
504 return;
505
506 double fps =
507 render_fps_tracker_.ComputeRateForInterval(now.ms() - last_sample_time_);
508 absl::optional<int> qp = qp_sample_.Avg(1);
509
510 bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true);
511 bool prev_qp_bad = qp_threshold_.IsHigh().value_or(false);
512 bool prev_variance_bad = variance_threshold_.IsHigh().value_or(false);
513 bool prev_any_bad = prev_fps_bad || prev_qp_bad || prev_variance_bad;
514
515 fps_threshold_.AddMeasurement(static_cast<int>(fps));
516 if (qp)
517 qp_threshold_.AddMeasurement(*qp);
518 absl::optional<double> fps_variance_opt = fps_threshold_.CalculateVariance();
519 double fps_variance = fps_variance_opt.value_or(0);
520 if (fps_variance_opt) {
521 variance_threshold_.AddMeasurement(static_cast<int>(fps_variance));
522 }
523
524 bool fps_bad = !fps_threshold_.IsHigh().value_or(true);
525 bool qp_bad = qp_threshold_.IsHigh().value_or(false);
526 bool variance_bad = variance_threshold_.IsHigh().value_or(false);
527 bool any_bad = fps_bad || qp_bad || variance_bad;
528
529 if (!prev_any_bad && any_bad) {
530 RTC_LOG(LS_INFO) << "Bad call (any) start: " << now.ms();
531 } else if (prev_any_bad && !any_bad) {
532 RTC_LOG(LS_INFO) << "Bad call (any) end: " << now.ms();
533 }
534
535 if (!prev_fps_bad && fps_bad) {
536 RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now.ms();
537 } else if (prev_fps_bad && !fps_bad) {
538 RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now.ms();
539 }
540
541 if (!prev_qp_bad && qp_bad) {
542 RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now.ms();
543 } else if (prev_qp_bad && !qp_bad) {
544 RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now.ms();
545 }
546
547 if (!prev_variance_bad && variance_bad) {
548 RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now.ms();
549 } else if (prev_variance_bad && !variance_bad) {
550 RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now.ms();
551 }
552
553 RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: "
554 << (now.ms() - last_sample_time_) << " fps: " << fps
555 << " fps_bad: " << fps_bad << " qp: " << qp.value_or(-1)
556 << " qp_bad: " << qp_bad
557 << " variance_bad: " << variance_bad
558 << " fps_variance: " << fps_variance;
559
560 last_sample_time_ = now.ms();
561 qp_sample_.Reset();
562
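// Only count a sample towards the BadCall metrics when at least one of the
// thresholds has gathered enough measurements to return a verdict (IsHigh()
// returns a value); the sample is bad if any individual metric was bad.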
563 if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() ||
564 qp_threshold_.IsHigh()) {
565 if (any_bad)
566 ++num_bad_states_;
567 ++num_certain_states_;
568 }
569 }
570
571 void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const {
572 RTC_DCHECK_RUN_ON(&main_thread_);
573
574 int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs;
575 while (!frame_window_.empty() &&
576 frame_window_.begin()->first < old_frames_ms) {
577 frame_window_.erase(frame_window_.begin());
578 }
579
580 size_t framerate =
581 (frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs;
582
583 stats_.network_frame_rate = static_cast<int>(framerate);
584 }
585
586 void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms(
587 int width,
588 int height,
589 int decode_time_ms) const {
590 RTC_DCHECK_RUN_ON(&main_thread_);
591
592 bool is_4k = (width == 3840 || width == 4096) && height == 2160;
593 bool is_hd = width == 1920 && height == 1080;
594 // Only update histograms for 4k/HD and VP9/H264.
595 if ((is_4k || is_hd) && (last_codec_type_ == kVideoCodecVP9 ||
596 last_codec_type_ == kVideoCodecH264)) {
597 const std::string kDecodeTimeUmaPrefix =
598 "WebRTC.Video.DecodeTimePerFrameInMs.";
599
600 // Each histogram needs its own line for it to not be reused in the wrong
601 // way when the format changes.
602 if (last_codec_type_ == kVideoCodecVP9) {
603 bool is_sw_decoder =
604 stats_.decoder_implementation_name.compare(0, 6, "libvpx") == 0;
605 if (is_4k) {
606 if (is_sw_decoder)
607 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Sw",
608 decode_time_ms);
609 else
610 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Hw",
611 decode_time_ms);
612 } else {
613 if (is_sw_decoder)
614 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Sw",
615 decode_time_ms);
616 else
617 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Hw",
618 decode_time_ms);
619 }
620 } else {
621 bool is_sw_decoder =
622 stats_.decoder_implementation_name.compare(0, 6, "FFmpeg") == 0;
623 if (is_4k) {
624 if (is_sw_decoder)
625 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Sw",
626 decode_time_ms);
627 else
628 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Hw",
629 decode_time_ms);
630
631 } else {
632 if (is_sw_decoder)
633 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Sw",
634 decode_time_ms);
635 else
636 RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Hw",
637 decode_time_ms);
638 }
639 }
640 }
641 }
642
643 absl::optional<int64_t>
644 ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs(
645 int64_t now_ms) const {
646 RTC_DCHECK_RUN_ON(&main_thread_);
647 if (!last_estimated_playout_ntp_timestamp_ms_ ||
648 !last_estimated_playout_time_ms_) {
649 return absl::nullopt;
650 }
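// Extrapolate the most recent estimate by the wall-clock time that has
// passed since it was stored.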
651 int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_;
652 return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms;
653 }
654
655 VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
656 RTC_DCHECK_RUN_ON(&main_thread_);
657
658 // Like VideoReceiveStream::GetStats, called on the worker thread from
659 // StatsCollector::ExtractMediaInfo via worker_thread()->Invoke().
660 // WebRtcVideoChannel::GetStats(), GetVideoReceiverInfo.
661
662 // Get current frame rates here, as only updating them on new frames prevents
663 // us from ever correctly displaying frame rate of 0.
664 int64_t now_ms = clock_->TimeInMilliseconds();
665 UpdateFramerate(now_ms);
666
667 stats_.render_frame_rate = renders_fps_estimator_.Rate(now_ms).value_or(0);
668 stats_.decode_frame_rate = decode_fps_estimator_.Rate(now_ms).value_or(0);
669
670 if (last_decoded_frame_time_ms_) {
671 // Avoid using a newer timestamp than might be pending for decoded frames.
672 // If we do use now_ms, we might roll the max window to a value that is
673 // higher than that of a decoded frame timestamp that we haven't yet
674 // captured the data for (i.e. pending call to OnDecodedFrame).
675 stats_.interframe_delay_max_ms =
676 interframe_delay_max_moving_.Max(*last_decoded_frame_time_ms_)
677 .value_or(-1);
678 } else {
679 // We're paused. Avoid changing the state of |interframe_delay_max_moving_|.
680 stats_.interframe_delay_max_ms = -1;
681 }
682
683 stats_.freeze_count = video_quality_observer_->NumFreezes();
684 stats_.pause_count = video_quality_observer_->NumPauses();
685 stats_.total_freezes_duration_ms =
686 video_quality_observer_->TotalFreezesDurationMs();
687 stats_.total_pauses_duration_ms =
688 video_quality_observer_->TotalPausesDurationMs();
689 stats_.total_frames_duration_ms =
690 video_quality_observer_->TotalFramesDurationMs();
691 stats_.sum_squared_frame_durations =
692 video_quality_observer_->SumSquaredFrameDurationsSec();
693 stats_.content_type = last_content_type_;
694 stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms);
695 stats_.jitter_buffer_delay_seconds =
696 static_cast<double>(current_delay_counter_.Sum(1).value_or(0)) /
697 rtc::kNumMillisecsPerSec;
698 stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples();
699 stats_.estimated_playout_ntp_timestamp_ms =
700 GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms);
701 return stats_;
702 }
703
704 void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
705 RTC_DCHECK_RUN_ON(&decode_queue_);
706 worker_thread_->PostTask(ToQueuedTask(task_safety_, [payload_type, this]() {
707 RTC_DCHECK_RUN_ON(&main_thread_);
708 stats_.current_payload_type = payload_type;
709 }));
710 }
711
712 void ReceiveStatisticsProxy::OnDecoderImplementationName(
713 const char* implementation_name) {
714 RTC_DCHECK_RUN_ON(&decode_queue_);
715 worker_thread_->PostTask(ToQueuedTask(
716 task_safety_, [name = std::string(implementation_name), this]() {
717 RTC_DCHECK_RUN_ON(&main_thread_);
718 stats_.decoder_implementation_name = name;
719 }));
720 }
721
722 void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
723 int max_decode_ms,
724 int current_delay_ms,
725 int target_delay_ms,
726 int jitter_buffer_ms,
727 int min_playout_delay_ms,
728 int render_delay_ms) {
729 RTC_DCHECK_RUN_ON(&decode_queue_);
730 worker_thread_->PostTask(ToQueuedTask(
731 task_safety_,
732 [max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms,
733 min_playout_delay_ms, render_delay_ms, this]() {
734 RTC_DCHECK_RUN_ON(&main_thread_);
735 stats_.max_decode_ms = max_decode_ms;
736 stats_.current_delay_ms = current_delay_ms;
737 stats_.target_delay_ms = target_delay_ms;
738 stats_.jitter_buffer_ms = jitter_buffer_ms;
739 stats_.min_playout_delay_ms = min_playout_delay_ms;
740 stats_.render_delay_ms = render_delay_ms;
741 jitter_buffer_delay_counter_.Add(jitter_buffer_ms);
742 target_delay_counter_.Add(target_delay_ms);
743 current_delay_counter_.Add(current_delay_ms);
744 // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time +
745 // render delay).
746 delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2);
747 }));
748 }
749
750 void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) {
751 RTC_DCHECK_RUN_ON(&main_thread_);
752 num_unique_frames_.emplace(num_unique_frames);
753 }
754
755 void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
756 const TimingFrameInfo& info) {
757 RTC_DCHECK_RUN_ON(&decode_queue_);
758 worker_thread_->PostTask(ToQueuedTask(task_safety_, [info, this]() {
759 RTC_DCHECK_RUN_ON(&main_thread_);
760 if (info.flags != VideoSendTiming::kInvalid) {
761 int64_t now_ms = clock_->TimeInMilliseconds();
762 timing_frame_info_counter_.Add(info, now_ms);
763 }
764
765 // Measure initial decoding latency between the first frame arriving and
766 // the first frame being decoded.
767 if (!first_frame_received_time_ms_.has_value()) {
768 first_frame_received_time_ms_ = info.receive_finish_ms;
769 }
770 if (stats_.first_frame_received_to_decoded_ms == -1 &&
771 first_decoded_frame_time_ms_) {
772 stats_.first_frame_received_to_decoded_ms =
773 *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_;
774 }
775 }));
776 }
777
778 void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
779 uint32_t ssrc,
780 const RtcpPacketTypeCounter& packet_counter) {
781 if (ssrc != remote_ssrc_)
782 return;
783
784 if (!IsCurrentTaskQueueOrThread(worker_thread_)) {
785 // RtpRtcpInterface::Configuration has a single
786 // RtcpPacketTypeCounterObserver and that same configuration may be used for
787 // both receiver and sender (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl). The
788 // RTCPSender implementation currently makes calls to this function on a
789 // process thread whereas the RTCPReceiver implementation calls back on the
790 // [main] worker thread.
791 // So until the sender implementation has been updated, we work around this
792 // here by posting the update to the expected thread. We make a by value
793 // copy of the |task_safety_| to handle the case if the queued task
794 // runs after the |ReceiveStatisticsProxy| has been deleted. In such a
795 // case the packet_counter update won't be recorded.
796 worker_thread_->PostTask(
797 ToQueuedTask(task_safety_, [ssrc, packet_counter, this]() {
798 RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
799 }));
800 return;
801 }
802
803 RTC_DCHECK_RUN_ON(&main_thread_);
804 stats_.rtcp_packet_type_counts = packet_counter;
805 }
806
807 void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) {
808 RTC_DCHECK_RUN_ON(&main_thread_);
809 // TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we
810 // receive stats from one of them.
811 if (remote_ssrc_ != ssrc)
812 return;
813
814 stats_.c_name = std::string(cname);
815 }
816
817 void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame,
818 absl::optional<uint8_t> qp,
819 int32_t decode_time_ms,
820 VideoContentType content_type) {
821 // See VCMDecodedFrameCallback::Decoded for more info on what thread/queue we
822 // may be on. E.g. on iOS this gets called on
823 // "com.apple.coremedia.decompressionsession.clientcallback"
824 VideoFrameMetaData meta(frame, clock_->CurrentTime());
825 worker_thread_->PostTask(ToQueuedTask(
826 task_safety_, [meta, qp, decode_time_ms, content_type, this]() {
827 OnDecodedFrame(meta, qp, decode_time_ms, content_type);
828 }));
829 }
830
831 void ReceiveStatisticsProxy::OnDecodedFrame(
832 const VideoFrameMetaData& frame_meta,
833 absl::optional<uint8_t> qp,
834 int32_t decode_time_ms,
835 VideoContentType content_type) {
836 RTC_DCHECK_RUN_ON(&main_thread_);
837
838 const bool is_screenshare =
839 videocontenttypehelpers::IsScreenshare(content_type);
840 const bool was_screenshare =
841 videocontenttypehelpers::IsScreenshare(last_content_type_);
842
843 if (is_screenshare != was_screenshare) {
844 // Reset the quality observer if content type is switched. But first report
845 // stats for the previous part of the call.
846 video_quality_observer_->UpdateHistograms(was_screenshare);
847 video_quality_observer_.reset(new VideoQualityObserver());
848 }
849
850 video_quality_observer_->OnDecodedFrame(frame_meta.rtp_timestamp, qp,
851 last_codec_type_);
852
853 ContentSpecificStats* content_specific_stats =
854 &content_specific_stats_[content_type];
855
856 ++stats_.frames_decoded;
857 if (qp) {
858 if (!stats_.qp_sum) {
859 if (stats_.frames_decoded != 1) {
860 RTC_LOG(LS_WARNING)
861 << "Frames decoded was not 1 when first qp value was received.";
862 }
863 stats_.qp_sum = 0;
864 }
865 *stats_.qp_sum += *qp;
866 content_specific_stats->qp_counter.Add(*qp);
867 } else if (stats_.qp_sum) {
868 RTC_LOG(LS_WARNING)
869 << "QP sum was already set and no QP was given for a frame.";
870 stats_.qp_sum.reset();
871 }
872 decode_time_counter_.Add(decode_time_ms);
873 stats_.decode_ms = decode_time_ms;
874 stats_.total_decode_time_ms += decode_time_ms;
875 if (enable_decode_time_histograms_) {
876 UpdateDecodeTimeHistograms(frame_meta.width, frame_meta.height,
877 decode_time_ms);
878 }
879
880 last_content_type_ = content_type;
881 decode_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms());
882
883 if (last_decoded_frame_time_ms_) {
884 int64_t interframe_delay_ms =
885 frame_meta.decode_timestamp.ms() - *last_decoded_frame_time_ms_;
886 RTC_DCHECK_GE(interframe_delay_ms, 0);
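// Accumulate the delays in seconds; the corresponding getStats() fields
// (totalInterFrameDelay, totalSquaredInterFrameDelay) are defined in seconds.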
887 double interframe_delay = interframe_delay_ms / 1000.0;
888 stats_.total_inter_frame_delay += interframe_delay;
889 stats_.total_squared_inter_frame_delay +=
890 interframe_delay * interframe_delay;
891 interframe_delay_max_moving_.Add(interframe_delay_ms,
892 frame_meta.decode_timestamp.ms());
893 content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms);
894 content_specific_stats->interframe_delay_percentiles.Add(
895 interframe_delay_ms);
896 content_specific_stats->flow_duration_ms += interframe_delay_ms;
897 }
898 if (stats_.frames_decoded == 1) {
899 first_decoded_frame_time_ms_.emplace(frame_meta.decode_timestamp.ms());
900 }
901 last_decoded_frame_time_ms_.emplace(frame_meta.decode_timestamp.ms());
902 }
903
904 void ReceiveStatisticsProxy::OnRenderedFrame(
905 const VideoFrameMetaData& frame_meta) {
906 RTC_DCHECK_RUN_ON(&main_thread_);
907 // Called from VideoReceiveStream2::OnFrame.
908
909 RTC_DCHECK_GT(frame_meta.width, 0);
910 RTC_DCHECK_GT(frame_meta.height, 0);
911
912 video_quality_observer_->OnRenderedFrame(frame_meta);
913
914 ContentSpecificStats* content_specific_stats =
915 &content_specific_stats_[last_content_type_];
916 renders_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms());
917
918 ++stats_.frames_rendered;
919 stats_.width = frame_meta.width;
920 stats_.height = frame_meta.height;
921
922 render_fps_tracker_.AddSamples(1);
923 render_pixel_tracker_.AddSamples(sqrt(frame_meta.width * frame_meta.height));
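// Resolution is tracked as sqrt(width * height) so that it can be reported
// by the WebRTC.Video.RenderSqrtPixelsPerSecond histogram in
// UpdateHistograms().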
924 content_specific_stats->received_width.Add(frame_meta.width);
925 content_specific_stats->received_height.Add(frame_meta.height);
926
927 // Consider taking stats_.render_delay_ms into account.
928 const int64_t time_until_rendering_ms =
929 frame_meta.render_time_ms() - frame_meta.decode_timestamp.ms();
930 if (time_until_rendering_ms < 0) {
931 sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
932 ++num_delayed_frames_rendered_;
933 }
934
935 if (frame_meta.ntp_time_ms > 0) {
936 int64_t delay_ms =
937 clock_->CurrentNtpInMilliseconds() - frame_meta.ntp_time_ms;
938 if (delay_ms >= 0) {
939 content_specific_stats->e2e_delay_counter.Add(delay_ms);
940 }
941 }
942
943 QualitySample(frame_meta.decode_timestamp);
944 }
945
946 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
947 int64_t sync_offset_ms,
948 double estimated_freq_khz) {
949 RTC_DCHECK_RUN_ON(&incoming_render_queue_);
950 int64_t now_ms = clock_->TimeInMilliseconds();
951 worker_thread_->PostTask(
952 ToQueuedTask(task_safety_, [video_playout_ntp_ms, sync_offset_ms,
953 estimated_freq_khz, now_ms, this]() {
954 RTC_DCHECK_RUN_ON(&main_thread_);
955 sync_offset_counter_.Add(std::abs(sync_offset_ms));
956 stats_.sync_offset_ms = sync_offset_ms;
957 last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
958 last_estimated_playout_time_ms_ = now_ms;
959
960 const double kMaxFreqKhz = 10000.0;
961 int offset_khz = kMaxFreqKhz;
962 // Should not be zero or negative. If so, report max.
963 if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0)
964 offset_khz =
965 static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);
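// The video RTP clock rate is nominally 90 kHz, so this is the absolute
// deviation from 90, rounded to the nearest kHz.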
966
967 freq_offset_counter_.Add(offset_khz);
968 }));
969 }
970
971 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
972 size_t size_bytes,
973 VideoContentType content_type) {
974 RTC_DCHECK_RUN_ON(&main_thread_);
975
976 if (is_keyframe) {
977 ++stats_.frame_counts.key_frames;
978 } else {
979 ++stats_.frame_counts.delta_frames;
980 }
981
982 // Content type extension is set only for keyframes and should be propagated
983 // for all the following delta frames. Here we may receive frames out of order
984 // and miscategorise some delta frames near the layer switch.
985 // This may slightly offset calculated bitrate and keyframes permille metrics.
986 VideoContentType propagated_content_type =
987 is_keyframe ? content_type : last_content_type_;
988
989 ContentSpecificStats* content_specific_stats =
990 &content_specific_stats_[propagated_content_type];
991
992 content_specific_stats->total_media_bytes += size_bytes;
993 if (is_keyframe) {
994 ++content_specific_stats->frame_counts.key_frames;
995 } else {
996 ++content_specific_stats->frame_counts.delta_frames;
997 }
998
999 int64_t now_ms = clock_->TimeInMilliseconds();
1000 frame_window_.insert(std::make_pair(now_ms, size_bytes));
1001 UpdateFramerate(now_ms);
1002 }
1003
1004 void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) {
1005 // Can be called on either the decode queue or the worker thread
1006 // See FrameBuffer2 for more details.
1007 worker_thread_->PostTask(ToQueuedTask(task_safety_, [frames_dropped, this]() {
1008 RTC_DCHECK_RUN_ON(&main_thread_);
1009 stats_.frames_dropped += frames_dropped;
1010 }));
1011 }
1012
1013 void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) {
1014 RTC_DCHECK_RUN_ON(&decode_queue_);
1015 worker_thread_->PostTask(ToQueuedTask(task_safety_, [codec_type, qp, this]() {
1016 RTC_DCHECK_RUN_ON(&main_thread_);
1017 last_codec_type_ = codec_type;
1018 if (last_codec_type_ == kVideoCodecVP8 && qp != -1) {
1019 qp_counters_.vp8.Add(qp);
1020 qp_sample_.Add(qp);
1021 }
1022 }));
1023 }
1024
1025 void ReceiveStatisticsProxy::OnStreamInactive() {
1026 RTC_DCHECK_RUN_ON(&main_thread_);
1027
1028 // TODO(sprang): Figure out any other state that should be reset.
1029
1030 // Don't report inter-frame delay if stream was paused.
1031 last_decoded_frame_time_ms_.reset();
1032
1033 video_quality_observer_->OnStreamInactive();
1034 }
1035
1036 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms) {
1037 RTC_DCHECK_RUN_ON(&main_thread_);
1038 avg_rtt_ms_ = avg_rtt_ms;
1039 }
1040
1041 void ReceiveStatisticsProxy::DecoderThreadStarting() {
1042 RTC_DCHECK_RUN_ON(&main_thread_);
1043 }
1044
1045 void ReceiveStatisticsProxy::DecoderThreadStopped() {
1046 RTC_DCHECK_RUN_ON(&main_thread_);
1047 decode_queue_.Detach();
1048 }
1049
1050 ReceiveStatisticsProxy::ContentSpecificStats::ContentSpecificStats()
1051 : interframe_delay_percentiles(kMaxCommonInterframeDelayMs) {}
1052
1053 ReceiveStatisticsProxy::ContentSpecificStats::~ContentSpecificStats() = default;
1054
1055 void ReceiveStatisticsProxy::ContentSpecificStats::Add(
1056 const ContentSpecificStats& other) {
1057 e2e_delay_counter.Add(other.e2e_delay_counter);
1058 interframe_delay_counter.Add(other.interframe_delay_counter);
1059 flow_duration_ms += other.flow_duration_ms;
1060 total_media_bytes += other.total_media_bytes;
1061 received_height.Add(other.received_height);
1062 received_width.Add(other.received_width);
1063 qp_counter.Add(other.qp_counter);
1064 frame_counts.key_frames += other.frame_counts.key_frames;
1065 frame_counts.delta_frames += other.frame_counts.delta_frames;
1066 interframe_delay_percentiles.Add(other.interframe_delay_percentiles);
1067 }
1068
1069 } // namespace internal
1070 } // namespace webrtc
1071