1 /*
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "video/send_statistics_proxy.h"
12
13 #include <algorithm>
14 #include <array>
15 #include <cmath>
16 #include <limits>
17 #include <utility>
18
19 #include "absl/strings/match.h"
20 #include "api/video/video_codec_constants.h"
21 #include "api/video/video_codec_type.h"
22 #include "api/video_codecs/video_codec.h"
23 #include "modules/video_coding/include/video_codec_interface.h"
24 #include "rtc_base/checks.h"
25 #include "rtc_base/logging.h"
26 #include "rtc_base/numerics/mod_ops.h"
27 #include "rtc_base/strings/string_builder.h"
28 #include "system_wrappers/include/field_trial.h"
29 #include "system_wrappers/include/metrics.h"
30
31 namespace webrtc {
32 namespace {
// Weight for the exponentially filtered average encode time (see the
// encode_time_ member initialized with this value); 0.5 balances the newest
// sample against the accumulated history.
const float kEncodeTimeWeigthFactor = 0.5f;
// Hard cap on the encoded-frame bookkeeping map; when exceeded, the map is
// cleared wholesale in InsertEncodedFrame() to bound memory use.
const size_t kMaxEncodedFrameMapSize = 150;
// Frames older than this window are flushed into the UMA counters by
// RemoveOld().
const int64_t kMaxEncodedFrameWindowMs = 800;
// Largest tolerated RTP timestamp gap between the oldest tracked frame and a
// new one before the tracking map is reset (900000 ticks == 10 s at the
// standard 90 kHz video RTP clock — see the "10 sec" note).
const uint32_t kMaxEncodedFrameTimestampDiff = 900000;  // 10 sec.
// Rate-tracker granularity: 10 buckets of 100 ms = 1 second of history.
const int64_t kBucketSizeMs = 100;
const size_t kBucketCount = 10;

// Field trial that controls forced software (libvpx) fallback for VP8; its
// group string is parsed by GetFallbackMaxPixels() below.
const char kVp8ForcedFallbackEncoderFieldTrial[] =
    "WebRTC-VP8-Forced-Fallback-Encoder-v2";
const char kVp8SwCodecName[] = "libvpx";
43
// Used by histograms. Values of entries should not be changed.
// (Persisted to UMA logs; reordering or renumbering would corrupt historical
// data. kVideoMax is the histogram boundary, not a codec.)
enum HistogramCodecType {
  kVideoUnknown = 0,
  kVideoVp8 = 1,
  kVideoVp9 = 2,
  kVideoH264 = 3,
  kVideoMax = 64,
};

// UMA metric-name prefixes. Screenshare streams report under a separate
// namespace so the two content types can be analyzed independently; the
// pointers themselves are also compared for identity in UpdateHistograms().
const char* kRealtimePrefix = "WebRTC.Video.";
const char* kScreenPrefix = "WebRTC.Video.Screenshare.";
55
GetUmaPrefix(VideoEncoderConfig::ContentType content_type)56 const char* GetUmaPrefix(VideoEncoderConfig::ContentType content_type) {
57 switch (content_type) {
58 case VideoEncoderConfig::ContentType::kRealtimeVideo:
59 return kRealtimePrefix;
60 case VideoEncoderConfig::ContentType::kScreen:
61 return kScreenPrefix;
62 }
63 RTC_NOTREACHED();
64 return nullptr;
65 }
66
PayloadNameToHistogramCodecType(const std::string & payload_name)67 HistogramCodecType PayloadNameToHistogramCodecType(
68 const std::string& payload_name) {
69 VideoCodecType codecType = PayloadStringToCodecType(payload_name);
70 switch (codecType) {
71 case kVideoCodecVP8:
72 return kVideoVp8;
73 case kVideoCodecVP9:
74 return kVideoVp9;
75 case kVideoCodecH264:
76 return kVideoH264;
77 default:
78 return kVideoUnknown;
79 }
80 }
81
UpdateCodecTypeHistogram(const std::string & payload_name)82 void UpdateCodecTypeHistogram(const std::string& payload_name) {
83 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.Encoder.CodecType",
84 PayloadNameToHistogramCodecType(payload_name),
85 kVideoMax);
86 }
87
IsForcedFallbackPossible(const CodecSpecificInfo * codec_info,int simulcast_index)88 bool IsForcedFallbackPossible(const CodecSpecificInfo* codec_info,
89 int simulcast_index) {
90 return codec_info->codecType == kVideoCodecVP8 && simulcast_index == 0 &&
91 (codec_info->codecSpecific.VP8.temporalIdx == 0 ||
92 codec_info->codecSpecific.VP8.temporalIdx == kNoTemporalIdx);
93 }
94
GetFallbackMaxPixels(const std::string & group)95 absl::optional<int> GetFallbackMaxPixels(const std::string& group) {
96 if (group.empty())
97 return absl::nullopt;
98
99 int min_pixels;
100 int max_pixels;
101 int min_bps;
102 if (sscanf(group.c_str(), "-%d,%d,%d", &min_pixels, &max_pixels, &min_bps) !=
103 3) {
104 return absl::optional<int>();
105 }
106
107 if (min_pixels <= 0 || max_pixels <= 0 || max_pixels < min_pixels)
108 return absl::optional<int>();
109
110 return absl::optional<int>(max_pixels);
111 }
112
GetFallbackMaxPixelsIfFieldTrialEnabled()113 absl::optional<int> GetFallbackMaxPixelsIfFieldTrialEnabled() {
114 std::string group =
115 webrtc::field_trial::FindFullName(kVp8ForcedFallbackEncoderFieldTrial);
116 return (absl::StartsWith(group, "Enabled"))
117 ? GetFallbackMaxPixels(group.substr(7))
118 : absl::optional<int>();
119 }
120
GetFallbackMaxPixelsIfFieldTrialDisabled()121 absl::optional<int> GetFallbackMaxPixelsIfFieldTrialDisabled() {
122 std::string group =
123 webrtc::field_trial::FindFullName(kVp8ForcedFallbackEncoderFieldTrial);
124 return (absl::StartsWith(group, "Disabled"))
125 ? GetFallbackMaxPixels(group.substr(8))
126 : absl::optional<int>();
127 }
128 } // namespace
129
// Substream width/height not refreshed within this many ms are zeroed out by
// PurgeOldStats() before stats are returned.
const int SendStatisticsProxy::kStatsTimeoutMs = 5000;
131
// `clock` must outlive this object (it is also handed to the nested
// UmaSamplesContainer). RTP settings and payload name are copied from
// `config` up front so the config need not outlive the proxy.
SendStatisticsProxy::SendStatisticsProxy(
    Clock* clock,
    const VideoSendStream::Config& config,
    VideoEncoderConfig::ContentType content_type)
    : clock_(clock),
      payload_name_(config.rtp.payload_name),
      rtp_config_(config.rtp),
      // Forced-fallback pixel thresholds, parsed once from the field trial.
      fallback_max_pixels_(GetFallbackMaxPixelsIfFieldTrialEnabled()),
      fallback_max_pixels_disabled_(GetFallbackMaxPixelsIfFieldTrialDisabled()),
      content_type_(content_type),
      start_ms_(clock->TimeInMilliseconds()),
      encode_time_(kEncodeTimeWeigthFactor),
      quality_limitation_reason_tracker_(clock_),
      media_byte_rate_tracker_(kBucketSizeMs, kBucketCount),
      encoded_frame_rate_tracker_(kBucketSizeMs, kBucketCount),
      last_num_spatial_layers_(0),
      last_num_simulcast_streams_(0),
      last_spatial_layer_use_{},
      bw_limited_layers_(false),
      internal_encoder_scaler_(false),
      // Per-content-type UMA aggregation; replaced when content type changes
      // (see OnEncoderReconfigured()).
      uma_container_(
          new UmaSamplesContainer(GetUmaPrefix(content_type_), stats_, clock)) {
}
155
// Flushes all pending UMA histograms for the current content type and
// records stream lifetime / codec type before teardown.
SendStatisticsProxy::~SendStatisticsProxy() {
  MutexLock lock(&mutex_);
  uma_container_->UpdateHistograms(rtp_config_, stats_);

  int64_t elapsed_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
  RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.SendStreamLifetimeInSeconds",
                              elapsed_sec);

  // Only streams that lived long enough contribute to the codec-type metric.
  if (elapsed_sec >= metrics::kMinRunTimeInSeconds)
    UpdateCodecTypeHistogram(payload_name_);
}
167
// Out-of-line defaulted constructor; member defaults come from the header.
SendStatisticsProxy::FallbackEncoderInfo::FallbackEncoderInfo() = default;
169
// One container per content type; created fresh whenever the content type
// switches so each type's histograms aggregate independently.
// `prefix` must be one of kRealtimePrefix/kScreenPrefix (checked in
// UpdateHistograms()); `clock` must outlive the container.
SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
    const char* prefix,
    const VideoSendStream::Stats& stats,
    Clock* const clock)
    : uma_prefix_(prefix),
      clock_(clock),
      input_frame_rate_tracker_(100, 10u),
      // NOTE(review): the trailing `true` presumably enables counting empty
      // intervals — confirm against the counter constructor declaration.
      input_fps_counter_(clock, nullptr, true),
      sent_fps_counter_(clock, nullptr, true),
      total_byte_counter_(clock, nullptr, true),
      media_byte_counter_(clock, nullptr, true),
      rtx_byte_counter_(clock, nullptr, true),
      padding_byte_counter_(clock, nullptr, true),
      retransmit_byte_counter_(clock, nullptr, true),
      fec_byte_counter_(clock, nullptr, true),
      first_rtcp_stats_time_ms_(-1),
      first_rtp_stats_time_ms_(-1),
      // Snapshot at creation time; UpdateHistograms() reports deltas against
      // this so a content-type switch restarts the counts.
      start_stats_(stats),
      num_streams_(0),
      num_pixels_highest_stream_(0) {
  InitializeBitrateCounters(stats);
  // ForwardDiff() on 32-bit RTP timestamps is only unambiguous when the
  // tolerated gap is below half the wrap-around range.
  static_assert(
      kMaxEncodedFrameTimestampDiff < std::numeric_limits<uint32_t>::max() / 2,
      "has to be smaller than half range");
}
195
~UmaSamplesContainer()196 SendStatisticsProxy::UmaSamplesContainer::~UmaSamplesContainer() {}
197
InitializeBitrateCounters(const VideoSendStream::Stats & stats)198 void SendStatisticsProxy::UmaSamplesContainer::InitializeBitrateCounters(
199 const VideoSendStream::Stats& stats) {
200 for (const auto& it : stats.substreams) {
201 uint32_t ssrc = it.first;
202 total_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(),
203 ssrc);
204 padding_byte_counter_.SetLast(it.second.rtp_stats.transmitted.padding_bytes,
205 ssrc);
206 retransmit_byte_counter_.SetLast(
207 it.second.rtp_stats.retransmitted.TotalBytes(), ssrc);
208 fec_byte_counter_.SetLast(it.second.rtp_stats.fec.TotalBytes(), ssrc);
209 switch (it.second.type) {
210 case VideoSendStream::StreamStats::StreamType::kMedia:
211 media_byte_counter_.SetLast(it.second.rtp_stats.MediaPayloadBytes(),
212 ssrc);
213 break;
214 case VideoSendStream::StreamStats::StreamType::kRtx:
215 rtx_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(),
216 ssrc);
217 break;
218 case VideoSendStream::StreamStats::StreamType::kFlexfec:
219 break;
220 }
221 }
222 }
223
RemoveOld(int64_t now_ms)224 void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(int64_t now_ms) {
225 while (!encoded_frames_.empty()) {
226 auto it = encoded_frames_.begin();
227 if (now_ms - it->second.send_ms < kMaxEncodedFrameWindowMs)
228 break;
229
230 // Use max per timestamp.
231 sent_width_counter_.Add(it->second.max_width);
232 sent_height_counter_.Add(it->second.max_height);
233
234 // Check number of encoded streams per timestamp.
235 if (num_streams_ > static_cast<size_t>(it->second.max_simulcast_idx)) {
236 if (num_streams_ > 1) {
237 int disabled_streams =
238 static_cast<int>(num_streams_ - 1 - it->second.max_simulcast_idx);
239 // Can be limited in resolution or framerate.
240 uint32_t pixels = it->second.max_width * it->second.max_height;
241 bool bw_limited_resolution =
242 disabled_streams > 0 && pixels < num_pixels_highest_stream_;
243 bw_limited_frame_counter_.Add(bw_limited_resolution);
244 if (bw_limited_resolution) {
245 bw_resolutions_disabled_counter_.Add(disabled_streams);
246 }
247 }
248 }
249 encoded_frames_.erase(it);
250 }
251 }
252
InsertEncodedFrame(const EncodedImage & encoded_frame,int simulcast_idx)253 bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
254 const EncodedImage& encoded_frame,
255 int simulcast_idx) {
256 int64_t now_ms = clock_->TimeInMilliseconds();
257 RemoveOld(now_ms);
258 if (encoded_frames_.size() > kMaxEncodedFrameMapSize) {
259 encoded_frames_.clear();
260 }
261
262 // Check for jump in timestamp.
263 if (!encoded_frames_.empty()) {
264 uint32_t oldest_timestamp = encoded_frames_.begin()->first;
265 if (ForwardDiff(oldest_timestamp, encoded_frame.Timestamp()) >
266 kMaxEncodedFrameTimestampDiff) {
267 // Gap detected, clear frames to have a sequence where newest timestamp
268 // is not too far away from oldest in order to distinguish old and new.
269 encoded_frames_.clear();
270 }
271 }
272
273 auto it = encoded_frames_.find(encoded_frame.Timestamp());
274 if (it == encoded_frames_.end()) {
275 // First frame with this timestamp.
276 encoded_frames_.insert(
277 std::make_pair(encoded_frame.Timestamp(),
278 Frame(now_ms, encoded_frame._encodedWidth,
279 encoded_frame._encodedHeight, simulcast_idx)));
280 sent_fps_counter_.Add(1);
281 return true;
282 }
283
284 it->second.max_width =
285 std::max(it->second.max_width, encoded_frame._encodedWidth);
286 it->second.max_height =
287 std::max(it->second.max_height, encoded_frame._encodedHeight);
288 it->second.max_simulcast_idx =
289 std::max(it->second.max_simulcast_idx, simulcast_idx);
290 return false;
291 }
292
// Emits every accumulated UMA histogram for this container's content type and
// logs a one-shot summary. Called on stream teardown and on content-type
// switches (after which the container is replaced). `kIndex` selects the
// histogram slot: 0 = realtime video, 1 = screenshare.
void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
    const RtpConfig& rtp_config,
    const VideoSendStream::Stats& current_stats) {
  RTC_DCHECK(uma_prefix_ == kRealtimePrefix || uma_prefix_ == kScreenPrefix);
  const int kIndex = uma_prefix_ == kScreenPrefix ? 1 : 0;
  const int kMinRequiredPeriodicSamples = 6;
  char log_stream_buf[8 * 1024];
  rtc::SimpleStringBuilder log_stream(log_stream_buf);
  // --- Input (captured) resolution and frame rate ---
  int in_width = input_width_counter_.Avg(kMinRequiredMetricsSamples);
  int in_height = input_height_counter_.Avg(kMinRequiredMetricsSamples);
  if (in_width != -1) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "InputWidthInPixels",
                                in_width);
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "InputHeightInPixels",
                                in_height);
    log_stream << uma_prefix_ << "InputWidthInPixels " << in_width << "\n"
               << uma_prefix_ << "InputHeightInPixels " << in_height << "\n";
  }
  AggregatedStats in_fps = input_fps_counter_.GetStats();
  if (in_fps.num_samples >= kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "InputFramesPerSecond",
                              in_fps.average);
    log_stream << uma_prefix_ << "InputFramesPerSecond " << in_fps.ToString()
               << "\n";
  }

  // --- Sent (encoded) resolution and frame rate ---
  int sent_width = sent_width_counter_.Avg(kMinRequiredMetricsSamples);
  int sent_height = sent_height_counter_.Avg(kMinRequiredMetricsSamples);
  if (sent_width != -1) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "SentWidthInPixels",
                                sent_width);
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "SentHeightInPixels",
                                sent_height);
    log_stream << uma_prefix_ << "SentWidthInPixels " << sent_width << "\n"
               << uma_prefix_ << "SentHeightInPixels " << sent_height << "\n";
  }
  AggregatedStats sent_fps = sent_fps_counter_.GetStats();
  if (sent_fps.num_samples >= kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "SentFramesPerSecond",
                              sent_fps.average);
    log_stream << uma_prefix_ << "SentFramesPerSecond " << sent_fps.ToString()
               << "\n";
  }

  // Sent-to-input FPS ratio, rounded to nearest percent and capped at 100.
  // NOTE(review): the in_fps check uses '>' while sent_fps uses '>=' —
  // possibly intentional; confirm before relying on the exact threshold.
  if (in_fps.num_samples > kMinRequiredPeriodicSamples &&
      sent_fps.num_samples >= kMinRequiredPeriodicSamples) {
    int in_fps_avg = in_fps.average;
    if (in_fps_avg > 0) {
      int sent_fps_avg = sent_fps.average;
      int sent_to_in_fps_ratio_percent =
          (100 * sent_fps_avg + in_fps_avg / 2) / in_fps_avg;
      // If reported period is small, it may happen that sent_fps is larger than
      // input_fps briefly on average. This should be treated as 100% sent to
      // input ratio.
      if (sent_to_in_fps_ratio_percent > 100)
        sent_to_in_fps_ratio_percent = 100;
      RTC_HISTOGRAMS_PERCENTAGE(kIndex,
                                uma_prefix_ + "SentToInputFpsRatioPercent",
                                sent_to_in_fps_ratio_percent);
      log_stream << uma_prefix_ << "SentToInputFpsRatioPercent "
                 << sent_to_in_fps_ratio_percent << "\n";
    }
  }

  // --- Encode time and key-frame rate ---
  int encode_ms = encode_time_counter_.Avg(kMinRequiredMetricsSamples);
  if (encode_ms != -1) {
    RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "EncodeTimeInMs",
                               encode_ms);
    log_stream << uma_prefix_ << "EncodeTimeInMs " << encode_ms << "\n";
  }
  int key_frames_permille =
      key_frame_counter_.Permille(kMinRequiredMetricsSamples);
  if (key_frames_permille != -1) {
    RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "KeyFramesSentInPermille",
                               key_frames_permille);
    log_stream << uma_prefix_ << "KeyFramesSentInPermille "
               << key_frames_permille << "\n";
  }

  // --- Adaptation-limitation shares (quality / CPU / bandwidth) ---
  int quality_limited =
      quality_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
  if (quality_limited != -1) {
    RTC_HISTOGRAMS_PERCENTAGE(kIndex,
                              uma_prefix_ + "QualityLimitedResolutionInPercent",
                              quality_limited);
    log_stream << uma_prefix_ << "QualityLimitedResolutionInPercent "
               << quality_limited << "\n";
  }
  int downscales = quality_downscales_counter_.Avg(kMinRequiredMetricsSamples);
  if (downscales != -1) {
    RTC_HISTOGRAMS_ENUMERATION(
        kIndex, uma_prefix_ + "QualityLimitedResolutionDownscales", downscales,
        20);
  }
  int cpu_limited =
      cpu_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
  if (cpu_limited != -1) {
    RTC_HISTOGRAMS_PERCENTAGE(
        kIndex, uma_prefix_ + "CpuLimitedResolutionInPercent", cpu_limited);
  }
  int bw_limited =
      bw_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
  if (bw_limited != -1) {
    RTC_HISTOGRAMS_PERCENTAGE(
        kIndex, uma_prefix_ + "BandwidthLimitedResolutionInPercent",
        bw_limited);
  }
  int num_disabled =
      bw_resolutions_disabled_counter_.Avg(kMinRequiredMetricsSamples);
  if (num_disabled != -1) {
    RTC_HISTOGRAMS_ENUMERATION(
        kIndex, uma_prefix_ + "BandwidthLimitedResolutionsDisabled",
        num_disabled, 10);
  }

  // --- Send-side delay ---
  int delay_ms = delay_counter_.Avg(kMinRequiredMetricsSamples);
  if (delay_ms != -1)
    RTC_HISTOGRAMS_COUNTS_100000(kIndex, uma_prefix_ + "SendSideDelayInMs",
                                 delay_ms);

  int max_delay_ms = max_delay_counter_.Avg(kMinRequiredMetricsSamples);
  if (max_delay_ms != -1) {
    RTC_HISTOGRAMS_COUNTS_100000(kIndex, uma_prefix_ + "SendSideDelayMaxInMs",
                                 max_delay_ms);
  }

  // --- Average QP per codec, keyed by spatial index (-1 = no explicit
  // spatial layering; only S0..S2 get dedicated histograms) ---
  for (const auto& it : qp_counters_) {
    int qp_vp8 = it.second.vp8.Avg(kMinRequiredMetricsSamples);
    if (qp_vp8 != -1) {
      int spatial_idx = it.first;
      if (spatial_idx == -1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8",
                                  qp_vp8);
      } else if (spatial_idx == 0) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S0",
                                  qp_vp8);
      } else if (spatial_idx == 1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S1",
                                  qp_vp8);
      } else if (spatial_idx == 2) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S2",
                                  qp_vp8);
      } else {
        RTC_LOG(LS_WARNING)
            << "QP stats not recorded for VP8 spatial idx " << spatial_idx;
      }
    }
    // VP9 QP range is wider (0-255), hence the COUNTS_500 macro.
    int qp_vp9 = it.second.vp9.Avg(kMinRequiredMetricsSamples);
    if (qp_vp9 != -1) {
      int spatial_idx = it.first;
      if (spatial_idx == -1) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9",
                                  qp_vp9);
      } else if (spatial_idx == 0) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S0",
                                  qp_vp9);
      } else if (spatial_idx == 1) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S1",
                                  qp_vp9);
      } else if (spatial_idx == 2) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S2",
                                  qp_vp9);
      } else {
        RTC_LOG(LS_WARNING)
            << "QP stats not recorded for VP9 spatial layer " << spatial_idx;
      }
    }
    int qp_h264 = it.second.h264.Avg(kMinRequiredMetricsSamples);
    if (qp_h264 != -1) {
      int spatial_idx = it.first;
      if (spatial_idx == -1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264",
                                  qp_h264);
      } else if (spatial_idx == 0) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S0",
                                  qp_h264);
      } else if (spatial_idx == 1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S1",
                                  qp_h264);
      } else if (spatial_idx == 2) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S2",
                                  qp_h264);
      } else {
        RTC_LOG(LS_WARNING)
            << "QP stats not recorded for H264 spatial idx " << spatial_idx;
      }
    }
  }

  // --- Adaptation-change rates (per minute of active send time) ---
  if (first_rtp_stats_time_ms_ != -1) {
    quality_adapt_timer_.Stop(clock_->TimeInMilliseconds());
    int64_t elapsed_sec = quality_adapt_timer_.total_ms / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      int quality_changes = current_stats.number_of_quality_adapt_changes -
                            start_stats_.number_of_quality_adapt_changes;
      // Only base stats on changes during a call, discard initial changes.
      int initial_changes =
          initial_quality_changes_.down + initial_quality_changes_.up;
      if (initial_changes <= quality_changes)
        quality_changes -= initial_changes;
      RTC_HISTOGRAMS_COUNTS_100(kIndex,
                                uma_prefix_ + "AdaptChangesPerMinute.Quality",
                                quality_changes * 60 / elapsed_sec);
    }
    cpu_adapt_timer_.Stop(clock_->TimeInMilliseconds());
    elapsed_sec = cpu_adapt_timer_.total_ms / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      int cpu_changes = current_stats.number_of_cpu_adapt_changes -
                        start_stats_.number_of_cpu_adapt_changes;
      RTC_HISTOGRAMS_COUNTS_100(kIndex,
                                uma_prefix_ + "AdaptChangesPerMinute.Cpu",
                                cpu_changes * 60 / elapsed_sec);
    }
  }

  // --- RTCP-derived metrics (loss, NACK/FIR/PLI rates) ---
  if (first_rtcp_stats_time_ms_ != -1) {
    int64_t elapsed_sec =
        (clock_->TimeInMilliseconds() - first_rtcp_stats_time_ms_) / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      int fraction_lost = report_block_stats_.FractionLostInPercent();
      if (fraction_lost != -1) {
        RTC_HISTOGRAMS_PERCENTAGE(
            kIndex, uma_prefix_ + "SentPacketsLostInPercent", fraction_lost);
        log_stream << uma_prefix_ << "SentPacketsLostInPercent "
                   << fraction_lost << "\n";
      }

      // The RTCP packet type counters, delivered via the
      // RtcpPacketTypeCounterObserver interface, are aggregates over the entire
      // life of the send stream and are not reset when switching content type.
      // For the purpose of these statistics though, we want new counts when
      // switching since we switch histogram name. On every reset of the
      // UmaSamplesContainer, we save the initial state of the counters, so that
      // we can calculate the delta here and aggregate over all ssrcs.
      RtcpPacketTypeCounter counters;
      for (uint32_t ssrc : rtp_config.ssrcs) {
        auto kv = current_stats.substreams.find(ssrc);
        if (kv == current_stats.substreams.end())
          continue;

        RtcpPacketTypeCounter stream_counters =
            kv->second.rtcp_packet_type_counts;
        kv = start_stats_.substreams.find(ssrc);
        if (kv != start_stats_.substreams.end())
          stream_counters.Subtract(kv->second.rtcp_packet_type_counts);

        counters.Add(stream_counters);
      }
      RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                  uma_prefix_ + "NackPacketsReceivedPerMinute",
                                  counters.nack_packets * 60 / elapsed_sec);
      RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                  uma_prefix_ + "FirPacketsReceivedPerMinute",
                                  counters.fir_packets * 60 / elapsed_sec);
      RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                  uma_prefix_ + "PliPacketsReceivedPerMinute",
                                  counters.pli_packets * 60 / elapsed_sec);
      if (counters.nack_requests > 0) {
        RTC_HISTOGRAMS_PERCENTAGE(
            kIndex, uma_prefix_ + "UniqueNackRequestsReceivedInPercent",
            counters.UniqueNackRequestsInPercent());
      }
    }
  }

  // --- Pause events and paused-time share ---
  if (first_rtp_stats_time_ms_ != -1) {
    int64_t elapsed_sec =
        (clock_->TimeInMilliseconds() - first_rtp_stats_time_ms_) / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "NumberOfPauseEvents",
                                target_rate_updates_.pause_resume_events);
      log_stream << uma_prefix_ << "NumberOfPauseEvents "
                 << target_rate_updates_.pause_resume_events << "\n";

      int paused_time_percent =
          paused_time_counter_.Percent(metrics::kMinRunTimeInSeconds * 1000);
      if (paused_time_percent != -1) {
        RTC_HISTOGRAMS_PERCENTAGE(kIndex, uma_prefix_ + "PausedTimeInPercent",
                                  paused_time_percent);
        log_stream << uma_prefix_ << "PausedTimeInPercent "
                   << paused_time_percent << "\n";
      }
    }
  }

  // --- Forced software-fallback metrics (VP8 only) ---
  if (fallback_info_.is_possible) {
    // Double interval since there is some time before fallback may occur.
    const int kMinRunTimeMs = 2 * metrics::kMinRunTimeInSeconds * 1000;
    int64_t elapsed_ms = fallback_info_.elapsed_ms;
    int fallback_time_percent = fallback_active_counter_.Percent(kMinRunTimeMs);
    if (fallback_time_percent != -1 && elapsed_ms >= kMinRunTimeMs) {
      RTC_HISTOGRAMS_PERCENTAGE(
          kIndex, uma_prefix_ + "Encoder.ForcedSwFallbackTimeInPercent.Vp8",
          fallback_time_percent);
      RTC_HISTOGRAMS_COUNTS_100(
          kIndex, uma_prefix_ + "Encoder.ForcedSwFallbackChangesPerMinute.Vp8",
          fallback_info_.on_off_events * 60 / (elapsed_ms / 1000));
    }
  }

  // --- Bitrates (total / media / padding / retransmit / RTX / FEC).
  // Counters track bytes per second; * 8 / 1000 converts to kbps. ---
  AggregatedStats total_bytes_per_sec = total_byte_counter_.GetStats();
  if (total_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "BitrateSentInKbps",
                                total_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "BitrateSentInBps "
               << total_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  AggregatedStats media_bytes_per_sec = media_byte_counter_.GetStats();
  if (media_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "MediaBitrateSentInKbps",
                                media_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "MediaBitrateSentInBps "
               << media_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  AggregatedStats padding_bytes_per_sec = padding_byte_counter_.GetStats();
  if (padding_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                uma_prefix_ + "PaddingBitrateSentInKbps",
                                padding_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "PaddingBitrateSentInBps "
               << padding_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  AggregatedStats retransmit_bytes_per_sec =
      retransmit_byte_counter_.GetStats();
  if (retransmit_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                uma_prefix_ + "RetransmittedBitrateSentInKbps",
                                retransmit_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "RetransmittedBitrateSentInBps "
               << retransmit_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  if (!rtp_config.rtx.ssrcs.empty()) {
    AggregatedStats rtx_bytes_per_sec = rtx_byte_counter_.GetStats();
    int rtx_bytes_per_sec_avg = -1;
    if (rtx_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
      rtx_bytes_per_sec_avg = rtx_bytes_per_sec.average;
      log_stream << uma_prefix_ << "RtxBitrateSentInBps "
                 << rtx_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
    } else if (total_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
      rtx_bytes_per_sec_avg = 0;  // RTX enabled but no RTX data sent, record 0.
    }
    if (rtx_bytes_per_sec_avg != -1) {
      RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "RtxBitrateSentInKbps",
                                  rtx_bytes_per_sec_avg * 8 / 1000);
    }
  }
  // FEC bitrate is only reported when FlexFEC or ULPFEC(+RED) is configured.
  if (rtp_config.flexfec.payload_type != -1 ||
      rtp_config.ulpfec.red_payload_type != -1) {
    AggregatedStats fec_bytes_per_sec = fec_byte_counter_.GetStats();
    if (fec_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
      RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "FecBitrateSentInKbps",
                                  fec_bytes_per_sec.average * 8 / 1000);
      log_stream << uma_prefix_ << "FecBitrateSentInBps "
                 << fec_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
    }
  }

  // --- Dropped-frame counts by pipeline stage (congestion-window drops are
  // logged but have no histogram here) ---
  log_stream << "Frames encoded " << current_stats.frames_encoded << "\n"
             << uma_prefix_ << "DroppedFrames.Capturer "
             << current_stats.frames_dropped_by_capturer << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Capturer",
                             current_stats.frames_dropped_by_capturer);
  log_stream << uma_prefix_ << "DroppedFrames.EncoderQueue "
             << current_stats.frames_dropped_by_encoder_queue << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.EncoderQueue",
                             current_stats.frames_dropped_by_encoder_queue);
  log_stream << uma_prefix_ << "DroppedFrames.Encoder "
             << current_stats.frames_dropped_by_encoder << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Encoder",
                             current_stats.frames_dropped_by_encoder);
  log_stream << uma_prefix_ << "DroppedFrames.Ratelimiter "
             << current_stats.frames_dropped_by_rate_limiter << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Ratelimiter",
                             current_stats.frames_dropped_by_rate_limiter);
  log_stream << uma_prefix_ << "DroppedFrames.CongestionWindow "
             << current_stats.frames_dropped_by_congestion_window;

  RTC_LOG(LS_INFO) << log_stream.str();
}
669
OnEncoderReconfigured(const VideoEncoderConfig & config,const std::vector<VideoStream> & streams)670 void SendStatisticsProxy::OnEncoderReconfigured(
671 const VideoEncoderConfig& config,
672 const std::vector<VideoStream>& streams) {
673 MutexLock lock(&mutex_);
674
675 if (content_type_ != config.content_type) {
676 uma_container_->UpdateHistograms(rtp_config_, stats_);
677 uma_container_.reset(new UmaSamplesContainer(
678 GetUmaPrefix(config.content_type), stats_, clock_));
679 content_type_ = config.content_type;
680 }
681 uma_container_->encoded_frames_.clear();
682 uma_container_->num_streams_ = streams.size();
683 uma_container_->num_pixels_highest_stream_ =
684 streams.empty() ? 0 : (streams.back().width * streams.back().height);
685 }
686
OnEncodedFrameTimeMeasured(int encode_time_ms,int encode_usage_percent)687 void SendStatisticsProxy::OnEncodedFrameTimeMeasured(int encode_time_ms,
688 int encode_usage_percent) {
689 RTC_DCHECK_GE(encode_time_ms, 0);
690 MutexLock lock(&mutex_);
691 uma_container_->encode_time_counter_.Add(encode_time_ms);
692 encode_time_.Apply(1.0f, encode_time_ms);
693 stats_.avg_encode_time_ms = std::round(encode_time_.filtered());
694 stats_.total_encode_time_ms += encode_time_ms;
695 stats_.encode_usage_percent = encode_usage_percent;
696 }
697
// Pauses/resumes the periodic UMA counters and adaptation timers when the
// stream is suspended/resumed, so suspended intervals do not skew averages.
void SendStatisticsProxy::OnSuspendChange(bool is_suspended) {
  int64_t now_ms = clock_->TimeInMilliseconds();
  MutexLock lock(&mutex_);
  stats_.suspended = is_suspended;
  if (is_suspended) {
    // Pause framerate (add min pause time since there may be frames/packets
    // that are not yet sent).
    const int64_t kMinMs = 500;
    uma_container_->input_fps_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->sent_fps_counter_.ProcessAndPauseForDuration(kMinMs);
    // Pause bitrate stats.
    uma_container_->total_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->media_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->rtx_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->padding_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->retransmit_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->fec_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    // Stop adaptation stats.
    uma_container_->cpu_adapt_timer_.Stop(now_ms);
    uma_container_->quality_adapt_timer_.Stop(now_ms);
  } else {
    // Start adaptation stats if scaling is enabled.
    if (adaptation_limitations_.MaskedCpuCounts()
            .resolution_adaptations.has_value())
      uma_container_->cpu_adapt_timer_.Start(now_ms);
    if (adaptation_limitations_.MaskedQualityCounts()
            .resolution_adaptations.has_value())
      uma_container_->quality_adapt_timer_.Start(now_ms);
    // Stop pause explicitly for stats that may be zero/not updated for some
    // time.
    uma_container_->rtx_byte_counter_.ProcessAndStopPause();
    uma_container_->padding_byte_counter_.ProcessAndStopPause();
    uma_container_->retransmit_byte_counter_.ProcessAndStopPause();
    uma_container_->fec_byte_counter_.ProcessAndStopPause();
  }
}
734
GetStats()735 VideoSendStream::Stats SendStatisticsProxy::GetStats() {
736 MutexLock lock(&mutex_);
737 PurgeOldStats();
738 stats_.input_frame_rate =
739 round(uma_container_->input_frame_rate_tracker_.ComputeRate());
740 stats_.content_type =
741 content_type_ == VideoEncoderConfig::ContentType::kRealtimeVideo
742 ? VideoContentType::UNSPECIFIED
743 : VideoContentType::SCREENSHARE;
744 stats_.encode_frame_rate = round(encoded_frame_rate_tracker_.ComputeRate());
745 stats_.media_bitrate_bps = media_byte_rate_tracker_.ComputeRate() * 8;
746 stats_.quality_limitation_durations_ms =
747 quality_limitation_reason_tracker_.DurationsMs();
748 return stats_;
749 }
750
PurgeOldStats()751 void SendStatisticsProxy::PurgeOldStats() {
752 int64_t old_stats_ms = clock_->TimeInMilliseconds() - kStatsTimeoutMs;
753 for (std::map<uint32_t, VideoSendStream::StreamStats>::iterator it =
754 stats_.substreams.begin();
755 it != stats_.substreams.end(); ++it) {
756 uint32_t ssrc = it->first;
757 if (update_times_[ssrc].resolution_update_ms <= old_stats_ms) {
758 it->second.width = 0;
759 it->second.height = 0;
760 }
761 }
762 }
763
// Looks up the StreamStats entry for |ssrc|, lazily creating one for SSRCs
// that belong to this stream's RTP configuration (media, RTX or FlexFEC).
// Returns nullptr for SSRCs this stream does not own. Called with |mutex_|
// held (all callers in this file take the lock first).
VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry(
    uint32_t ssrc) {
  std::map<uint32_t, VideoSendStream::StreamStats>::iterator it =
      stats_.substreams.find(ssrc);
  if (it != stats_.substreams.end())
    return &it->second;

  // Classify the SSRC against the configured RTP streams.
  bool is_media = rtp_config_.IsMediaSsrc(ssrc);
  bool is_flexfec = rtp_config_.flexfec.payload_type != -1 &&
                    ssrc == rtp_config_.flexfec.ssrc;
  bool is_rtx = rtp_config_.IsRtxSsrc(ssrc);
  if (!is_media && !is_flexfec && !is_rtx)
    return nullptr;

  // Insert new entry and return ptr.
  VideoSendStream::StreamStats* entry = &stats_.substreams[ssrc];
  if (is_media) {
    entry->type = VideoSendStream::StreamStats::StreamType::kMedia;
  } else if (is_rtx) {
    entry->type = VideoSendStream::StreamStats::StreamType::kRtx;
  } else if (is_flexfec) {
    entry->type = VideoSendStream::StreamStats::StreamType::kFlexfec;
  } else {
    RTC_NOTREACHED();
  }
  // RTX and FlexFEC substreams are tied back to the media SSRC they protect.
  switch (entry->type) {
    case VideoSendStream::StreamStats::StreamType::kMedia:
      break;
    case VideoSendStream::StreamStats::StreamType::kRtx:
      entry->referenced_media_ssrc =
          rtp_config_.GetMediaSsrcAssociatedWithRtxSsrc(ssrc);
      break;
    case VideoSendStream::StreamStats::StreamType::kFlexfec:
      entry->referenced_media_ssrc =
          rtp_config_.GetMediaSsrcAssociatedWithFlexfecSsrc(ssrc);
      break;
  }

  return entry;
}
804
OnInactiveSsrc(uint32_t ssrc)805 void SendStatisticsProxy::OnInactiveSsrc(uint32_t ssrc) {
806 MutexLock lock(&mutex_);
807 VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
808 if (!stats)
809 return;
810
811 stats->total_bitrate_bps = 0;
812 stats->retransmit_bitrate_bps = 0;
813 stats->height = 0;
814 stats->width = 0;
815 }
816
// Records a new encoder target bitrate. A target of zero is treated as
// "paused"; this method accumulates paused/active time and counts
// pause/resume transitions for UMA reporting.
void SendStatisticsProxy::OnSetEncoderTargetRate(uint32_t bitrate_bps) {
  MutexLock lock(&mutex_);
  if (uma_container_->target_rate_updates_.last_ms == -1 && bitrate_bps == 0)
    return;  // Start on first non-zero bitrate, may initially be zero.

  int64_t now = clock_->TimeInMilliseconds();
  if (uma_container_->target_rate_updates_.last_ms != -1) {
    // Attribute the interval since the previous update to the paused or
    // active bucket, based on the *previous* target rate.
    bool was_paused = stats_.target_media_bitrate_bps == 0;
    int64_t diff_ms = now - uma_container_->target_rate_updates_.last_ms;
    uma_container_->paused_time_counter_.Add(was_paused, diff_ms);

    // Use last to not include update when stream is stopped and video disabled.
    if (uma_container_->target_rate_updates_.last_paused_or_resumed)
      ++uma_container_->target_rate_updates_.pause_resume_events;

    // Check if video is paused/resumed.
    uma_container_->target_rate_updates_.last_paused_or_resumed =
        (bitrate_bps == 0) != was_paused;
  }
  uma_container_->target_rate_updates_.last_ms = now;

  stats_.target_media_bitrate_bps = bitrate_bps;
}
840
// Updates UMA stats about forced VP8 software-fallback usage for the encoded
// frame described by |codec_info|/|pixels|/|simulcast_index|. Called from
// OnSendEncodedImage() with |mutex_| held. Tracking is abandoned
// (is_possible = false) once a precondition for the experiment is violated.
void SendStatisticsProxy::UpdateEncoderFallbackStats(
    const CodecSpecificInfo* codec_info,
    int pixels,
    int simulcast_index) {
  UpdateFallbackDisabledStats(codec_info, pixels, simulcast_index);

  // Only track when the forced-fallback field trial is enabled and tracking
  // has not already been abandoned.
  if (!fallback_max_pixels_ || !uma_container_->fallback_info_.is_possible) {
    return;
  }

  if (!IsForcedFallbackPossible(codec_info, simulcast_index)) {
    uma_container_->fallback_info_.is_possible = false;
    return;
  }

  FallbackEncoderInfo* fallback_info = &uma_container_->fallback_info_;

  const int64_t now_ms = clock_->TimeInMilliseconds();
  bool is_active = fallback_info->is_active;
  if (encoder_changed_) {
    // Implementation changed.
    const bool last_was_vp8_software =
        encoder_changed_->previous_encoder_implementation == kVp8SwCodecName;
    is_active = encoder_changed_->new_encoder_implementation == kVp8SwCodecName;
    encoder_changed_.reset();
    if (!is_active && !last_was_vp8_software) {
      // First or not a VP8 SW change, update stats on next call.
      return;
    }
    if (is_active && (pixels > *fallback_max_pixels_)) {
      // Pixels should not be above |fallback_max_pixels_|. If above skip to
      // avoid fallbacks due to failure.
      fallback_info->is_possible = false;
      return;
    }
    stats_.has_entered_low_resolution = true;
    ++fallback_info->on_off_events;
  }

  if (fallback_info->last_update_ms) {
    int64_t diff_ms = now_ms - *(fallback_info->last_update_ms);
    // If the time diff since last update is greater than |max_frame_diff_ms|,
    // video is considered paused/muted and the change is not included.
    if (diff_ms < fallback_info->max_frame_diff_ms) {
      uma_container_->fallback_active_counter_.Add(fallback_info->is_active,
                                                   diff_ms);
      fallback_info->elapsed_ms += diff_ms;
    }
  }
  fallback_info->is_active = is_active;
  fallback_info->last_update_ms.emplace(now_ms);
}
893
// Companion to UpdateEncoderFallbackStats for the "fallback disabled" arm of
// the experiment: records whether the stream *would have* entered the
// low-resolution fallback range had fallback been enabled.
void SendStatisticsProxy::UpdateFallbackDisabledStats(
    const CodecSpecificInfo* codec_info,
    int pixels,
    int simulcast_index) {
  // Stop once tracking is impossible or low resolution was already entered.
  if (!fallback_max_pixels_disabled_ ||
      !uma_container_->fallback_info_disabled_.is_possible ||
      stats_.has_entered_low_resolution) {
    return;
  }

  // A VP8 SW implementation here means fallback actually happened, which
  // contradicts the "disabled" arm; abandon tracking.
  if (!IsForcedFallbackPossible(codec_info, simulcast_index) ||
      stats_.encoder_implementation_name == kVp8SwCodecName) {
    uma_container_->fallback_info_disabled_.is_possible = false;
    return;
  }

  if (pixels <= *fallback_max_pixels_disabled_ ||
      uma_container_->fallback_info_disabled_.min_pixel_limit_reached) {
    stats_.has_entered_low_resolution = true;
  }
}
915
OnMinPixelLimitReached()916 void SendStatisticsProxy::OnMinPixelLimitReached() {
917 MutexLock lock(&mutex_);
918 uma_container_->fallback_info_disabled_.min_pixel_limit_reached = true;
919 }
920
// Main per-encoded-frame bookkeeping: updates frame counts, encode rates,
// per-substream resolution, QP histograms, huge-frame counters and fallback
// stats for every frame handed to the RTP layer.
void SendStatisticsProxy::OnSendEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_info) {
  // Simulcast is used for VP8, H264 and Generic.
  int simulcast_idx =
      (codec_info && (codec_info->codecType == kVideoCodecVP8 ||
                      codec_info->codecType == kVideoCodecH264 ||
                      codec_info->codecType == kVideoCodecGeneric))
          ? encoded_image.SpatialIndex().value_or(0)
          : 0;

  MutexLock lock(&mutex_);
  ++stats_.frames_encoded;
  // The current encode frame rate is based on previously encoded frames.
  double encode_frame_rate = encoded_frame_rate_tracker_.ComputeRate();
  // We assume that less than 1 FPS is not a trustworthy estimate - perhaps we
  // just started encoding for the first time or after a pause. Assuming frame
  // rate is at least 1 FPS is conservative to avoid too large increments.
  if (encode_frame_rate < 1.0)
    encode_frame_rate = 1.0;
  double target_frame_size_bytes =
      stats_.target_media_bitrate_bps / (8.0 * encode_frame_rate);
  // |stats_.target_media_bitrate_bps| is set in
  // SendStatisticsProxy::OnSetEncoderTargetRate.
  stats_.total_encoded_bytes_target += round(target_frame_size_bytes);
  if (codec_info) {
    UpdateEncoderFallbackStats(
        codec_info, encoded_image._encodedWidth * encoded_image._encodedHeight,
        simulcast_idx);
  }

  // A simulcast index outside the configured SSRCs cannot be attributed to a
  // substream; only the aggregate stats above are updated in that case.
  if (static_cast<size_t>(simulcast_idx) >= rtp_config_.ssrcs.size()) {
    RTC_LOG(LS_ERROR) << "Encoded image outside simulcast range ("
                      << simulcast_idx << " >= " << rtp_config_.ssrcs.size()
                      << ").";
    return;
  }
  uint32_t ssrc = rtp_config_.ssrcs[simulcast_idx];

  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;
  // Lazily create a per-simulcast-layer encode rate tracker.
  if (encoded_frame_rate_trackers_.count(simulcast_idx) == 0) {
    encoded_frame_rate_trackers_[simulcast_idx] =
        std::make_unique<rtc::RateTracker>(kBucketSizeMs, kBucketCount);
  }
  stats->encode_frame_rate =
      encoded_frame_rate_trackers_[simulcast_idx]->ComputeRate();
  stats->frames_encoded++;
  stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms -
                                 encoded_image.timing_.encode_start_ms;
  // Report resolution of top spatial layer in case of VP9 SVC.
  bool is_svc_low_spatial_layer =
      (codec_info && codec_info->codecType == kVideoCodecVP9)
          ? !codec_info->codecSpecific.VP9.end_of_picture
          : false;

  if (!stats->width || !stats->height || !is_svc_low_spatial_layer) {
    stats->width = encoded_image._encodedWidth;
    stats->height = encoded_image._encodedHeight;
    update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
  }

  uma_container_->key_frame_counter_.Add(encoded_image._frameType ==
                                         VideoFrameType::kVideoFrameKey);

  // qp_ == -1 means the encoder did not report a QP for this frame.
  if (encoded_image.qp_ != -1) {
    if (!stats->qp_sum)
      stats->qp_sum = 0;
    *stats->qp_sum += encoded_image.qp_;

    if (codec_info) {
      // Spatial index -1 is used as the "single stream" histogram bucket.
      if (codec_info->codecType == kVideoCodecVP8) {
        int spatial_idx = (rtp_config_.ssrcs.size() == 1) ? -1 : simulcast_idx;
        uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_);
      } else if (codec_info->codecType == kVideoCodecVP9) {
        int spatial_idx = encoded_image.SpatialIndex().value_or(-1);
        uma_container_->qp_counters_[spatial_idx].vp9.Add(encoded_image.qp_);
      } else if (codec_info->codecType == kVideoCodecH264) {
        int spatial_idx = (rtp_config_.ssrcs.size() == 1) ? -1 : simulcast_idx;
        uma_container_->qp_counters_[spatial_idx].h264.Add(encoded_image.qp_);
      }
    }
  }

  // If any of the simulcast streams have a huge frame, it should be counted
  // as a single difficult input frame.
  // https://w3c.github.io/webrtc-stats/#dom-rtcvideosenderstats-hugeframessent
  if (encoded_image.timing_.flags & VideoSendTiming::kTriggeredBySize) {
    ++stats->huge_frames_sent;
    if (!last_outlier_timestamp_ ||
        *last_outlier_timestamp_ < encoded_image.capture_time_ms_) {
      last_outlier_timestamp_.emplace(encoded_image.capture_time_ms_);
      ++stats_.huge_frames_sent;
    }
  }

  media_byte_rate_tracker_.AddSamples(encoded_image.size());

  // InsertEncodedFrame() returns true only for frames that should count
  // towards the encode rate (deduplicates per-layer retransmissions of the
  // same picture).
  if (uma_container_->InsertEncodedFrame(encoded_image, simulcast_idx)) {
    encoded_frame_rate_trackers_[simulcast_idx]->AddSamples(1);
    encoded_frame_rate_tracker_.AddSamples(1);
  }

  absl::optional<int> downscales =
      adaptation_limitations_.MaskedQualityCounts().resolution_adaptations;
  stats_.bw_limited_resolution |=
      (downscales.has_value() && downscales.value() > 0);

  if (downscales.has_value()) {
    uma_container_->quality_limited_frame_counter_.Add(downscales.value() > 0);
    if (downscales.value() > 0)
      uma_container_->quality_downscales_counter_.Add(downscales.value());
  }
}
1036
OnEncoderImplementationChanged(const std::string & implementation_name)1037 void SendStatisticsProxy::OnEncoderImplementationChanged(
1038 const std::string& implementation_name) {
1039 MutexLock lock(&mutex_);
1040 encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name,
1041 implementation_name};
1042 stats_.encoder_implementation_name = implementation_name;
1043 }
1044
GetInputFrameRate() const1045 int SendStatisticsProxy::GetInputFrameRate() const {
1046 MutexLock lock(&mutex_);
1047 return round(uma_container_->input_frame_rate_tracker_.ComputeRate());
1048 }
1049
GetSendFrameRate() const1050 int SendStatisticsProxy::GetSendFrameRate() const {
1051 MutexLock lock(&mutex_);
1052 return round(encoded_frame_rate_tracker_.ComputeRate());
1053 }
1054
OnIncomingFrame(int width,int height)1055 void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
1056 MutexLock lock(&mutex_);
1057 uma_container_->input_frame_rate_tracker_.AddSamples(1);
1058 uma_container_->input_fps_counter_.Add(1);
1059 uma_container_->input_width_counter_.Add(width);
1060 uma_container_->input_height_counter_.Add(height);
1061 if (adaptation_limitations_.MaskedCpuCounts()
1062 .resolution_adaptations.has_value()) {
1063 uma_container_->cpu_limited_frame_counter_.Add(
1064 stats_.cpu_limited_resolution);
1065 }
1066 if (encoded_frame_rate_tracker_.TotalSampleCount() == 0) {
1067 // Set start time now instead of when first key frame is encoded to avoid a
1068 // too high initial estimate.
1069 encoded_frame_rate_tracker_.AddSamples(0);
1070 }
1071 }
1072
OnFrameDropped(DropReason reason)1073 void SendStatisticsProxy::OnFrameDropped(DropReason reason) {
1074 MutexLock lock(&mutex_);
1075 switch (reason) {
1076 case DropReason::kSource:
1077 ++stats_.frames_dropped_by_capturer;
1078 break;
1079 case DropReason::kEncoderQueue:
1080 ++stats_.frames_dropped_by_encoder_queue;
1081 break;
1082 case DropReason::kEncoder:
1083 ++stats_.frames_dropped_by_encoder;
1084 break;
1085 case DropReason::kMediaOptimization:
1086 ++stats_.frames_dropped_by_rate_limiter;
1087 break;
1088 case DropReason::kCongestionWindow:
1089 ++stats_.frames_dropped_by_congestion_window;
1090 break;
1091 }
1092 }
1093
ClearAdaptationStats()1094 void SendStatisticsProxy::ClearAdaptationStats() {
1095 MutexLock lock(&mutex_);
1096 adaptation_limitations_.set_cpu_counts(VideoAdaptationCounters());
1097 adaptation_limitations_.set_quality_counts(VideoAdaptationCounters());
1098 UpdateAdaptationStats();
1099 }
1100
UpdateAdaptationSettings(VideoStreamEncoderObserver::AdaptationSettings cpu_settings,VideoStreamEncoderObserver::AdaptationSettings quality_settings)1101 void SendStatisticsProxy::UpdateAdaptationSettings(
1102 VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
1103 VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
1104 MutexLock lock(&mutex_);
1105 adaptation_limitations_.UpdateMaskingSettings(cpu_settings, quality_settings);
1106 SetAdaptTimer(adaptation_limitations_.MaskedCpuCounts(),
1107 &uma_container_->cpu_adapt_timer_);
1108 SetAdaptTimer(adaptation_limitations_.MaskedQualityCounts(),
1109 &uma_container_->quality_adapt_timer_);
1110 UpdateAdaptationStats();
1111 }
1112
// Called when the CPU or quality adaptation counters change. Note the masked
// quality counts are snapshotted *before* the new counters are applied so
// that TryUpdateInitialQualityResolutionAdaptUp can compare old vs. new.
void SendStatisticsProxy::OnAdaptationChanged(
    VideoAdaptationReason reason,
    const VideoAdaptationCounters& cpu_counters,
    const VideoAdaptationCounters& quality_counters) {
  MutexLock lock(&mutex_);

  MaskedAdaptationCounts receiver =
      adaptation_limitations_.MaskedQualityCounts();
  adaptation_limitations_.set_cpu_counts(cpu_counters);
  adaptation_limitations_.set_quality_counts(quality_counters);
  switch (reason) {
    case VideoAdaptationReason::kCpu:
      ++stats_.number_of_cpu_adapt_changes;
      break;
    case VideoAdaptationReason::kQuality:
      TryUpdateInitialQualityResolutionAdaptUp(
          receiver.resolution_adaptations,
          adaptation_limitations_.MaskedQualityCounts().resolution_adaptations);
      ++stats_.number_of_quality_adapt_changes;
      break;
  }
  UpdateAdaptationStats();
}
1136
// Re-derives the quality-limitation reason and the cpu/bw limited flags in
// |stats_| from the current (masked) adaptation counters, the bitrate
// allocator state and the encoder-internal scaler state. Called with
// |mutex_| held.
void SendStatisticsProxy::UpdateAdaptationStats() {
  auto cpu_counts = adaptation_limitations_.MaskedCpuCounts();
  auto quality_counts = adaptation_limitations_.MaskedQualityCounts();

  bool is_cpu_limited = cpu_counts.resolution_adaptations > 0 ||
                        cpu_counts.num_framerate_reductions > 0;
  bool is_bandwidth_limited = quality_counts.resolution_adaptations > 0 ||
                              quality_counts.num_framerate_reductions > 0 ||
                              bw_limited_layers_ || internal_encoder_scaler_;
  if (is_bandwidth_limited) {
    // We may be both CPU limited and bandwidth limited at the same time but
    // there is no way to express this in standardized stats. Heuristically,
    // bandwidth is more likely to be a limiting factor than CPU, and more
    // likely to vary over time, so only when we aren't bandwidth limited do we
    // want to know about our CPU being the bottleneck.
    quality_limitation_reason_tracker_.SetReason(
        QualityLimitationReason::kBandwidth);
  } else if (is_cpu_limited) {
    quality_limitation_reason_tracker_.SetReason(QualityLimitationReason::kCpu);
  } else {
    quality_limitation_reason_tracker_.SetReason(
        QualityLimitationReason::kNone);
  }

  stats_.cpu_limited_resolution = cpu_counts.resolution_adaptations > 0;
  stats_.cpu_limited_framerate = cpu_counts.num_framerate_reductions > 0;
  stats_.bw_limited_resolution = quality_counts.resolution_adaptations > 0;
  stats_.bw_limited_framerate = quality_counts.num_framerate_reductions > 0;
  // If bitrate allocator has disabled some layers frame-rate or resolution are
  // limited depending on the encoder configuration.
  if (bw_limited_layers_) {
    switch (content_type_) {
      case VideoEncoderConfig::ContentType::kRealtimeVideo: {
        stats_.bw_limited_resolution = true;
        break;
      }
      case VideoEncoderConfig::ContentType::kScreen: {
        stats_.bw_limited_framerate = true;
        break;
      }
    }
  }
  // An encoder-internal downscale also counts as a resolution limitation.
  if (internal_encoder_scaler_) {
    stats_.bw_limited_resolution = true;
  }

  stats_.quality_limitation_reason =
      quality_limitation_reason_tracker_.current_reason();

  // |stats_.quality_limitation_durations_ms| depends on the current time
  // when it is polled; it is updated in SendStatisticsProxy::GetStats().
}
1189
OnBitrateAllocationUpdated(const VideoCodec & codec,const VideoBitrateAllocation & allocation)1190 void SendStatisticsProxy::OnBitrateAllocationUpdated(
1191 const VideoCodec& codec,
1192 const VideoBitrateAllocation& allocation) {
1193 int num_spatial_layers = 0;
1194 for (int i = 0; i < kMaxSpatialLayers; i++) {
1195 if (codec.spatialLayers[i].active) {
1196 num_spatial_layers++;
1197 }
1198 }
1199 int num_simulcast_streams = 0;
1200 for (int i = 0; i < kMaxSimulcastStreams; i++) {
1201 if (codec.simulcastStream[i].active) {
1202 num_simulcast_streams++;
1203 }
1204 }
1205
1206 std::array<bool, kMaxSpatialLayers> spatial_layers;
1207 for (int i = 0; i < kMaxSpatialLayers; i++) {
1208 spatial_layers[i] = (allocation.GetSpatialLayerSum(i) > 0);
1209 }
1210
1211 MutexLock lock(&mutex_);
1212
1213 bw_limited_layers_ = allocation.is_bw_limited();
1214 UpdateAdaptationStats();
1215
1216 if (spatial_layers != last_spatial_layer_use_) {
1217 // If the number of spatial layers has changed, the resolution change is
1218 // not due to quality limitations, it is because the configuration
1219 // changed.
1220 if (last_num_spatial_layers_ == num_spatial_layers &&
1221 last_num_simulcast_streams_ == num_simulcast_streams) {
1222 ++stats_.quality_limitation_resolution_changes;
1223 }
1224 last_spatial_layer_use_ = spatial_layers;
1225 }
1226 last_num_spatial_layers_ = num_spatial_layers;
1227 last_num_simulcast_streams_ = num_simulcast_streams;
1228 }
1229
1230 // Informes observer if an internal encoder scaler has reduced video
1231 // resolution or not. |is_scaled| is a flag indicating if the video is scaled
1232 // down.
OnEncoderInternalScalerUpdate(bool is_scaled)1233 void SendStatisticsProxy::OnEncoderInternalScalerUpdate(bool is_scaled) {
1234 MutexLock lock(&mutex_);
1235 internal_encoder_scaler_ = is_scaled;
1236 UpdateAdaptationStats();
1237 }
1238
1239 // TODO(asapersson): Include fps changes.
OnInitialQualityResolutionAdaptDown()1240 void SendStatisticsProxy::OnInitialQualityResolutionAdaptDown() {
1241 MutexLock lock(&mutex_);
1242 ++uma_container_->initial_quality_changes_.down;
1243 }
1244
// If an initial quality downscale was previously recorded, counts a matching
// adapt-up when the quality downscale count decreased. The up-count is capped
// so it never exceeds the down-count. Called with |mutex_| held.
void SendStatisticsProxy::TryUpdateInitialQualityResolutionAdaptUp(
    absl::optional<int> old_quality_downscales,
    absl::optional<int> updated_quality_downscales) {
  if (uma_container_->initial_quality_changes_.down == 0)
    return;

  // value_or(-1) treats "adaptation now disabled" as an adapt-up from any
  // positive downscale count.
  if (old_quality_downscales.has_value() &&
      old_quality_downscales.value() > 0 &&
      updated_quality_downscales.value_or(-1) <
          old_quality_downscales.value()) {
    // Adapting up in quality.
    if (uma_container_->initial_quality_changes_.down >
        uma_container_->initial_quality_changes_.up) {
      ++uma_container_->initial_quality_changes_.up;
    }
  }
}
1262
SetAdaptTimer(const MaskedAdaptationCounts & counts,StatsTimer * timer)1263 void SendStatisticsProxy::SetAdaptTimer(const MaskedAdaptationCounts& counts,
1264 StatsTimer* timer) {
1265 if (counts.resolution_adaptations || counts.num_framerate_reductions) {
1266 // Adaptation enabled.
1267 if (!stats_.suspended)
1268 timer->Start(clock_->TimeInMilliseconds());
1269 return;
1270 }
1271 timer->Stop(clock_->TimeInMilliseconds());
1272 }
1273
RtcpPacketTypesCounterUpdated(uint32_t ssrc,const RtcpPacketTypeCounter & packet_counter)1274 void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
1275 uint32_t ssrc,
1276 const RtcpPacketTypeCounter& packet_counter) {
1277 MutexLock lock(&mutex_);
1278 VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
1279 if (!stats)
1280 return;
1281
1282 stats->rtcp_packet_type_counts = packet_counter;
1283 if (uma_container_->first_rtcp_stats_time_ms_ == -1)
1284 uma_container_->first_rtcp_stats_time_ms_ = clock_->TimeInMilliseconds();
1285 }
1286
StatisticsUpdated(const RtcpStatistics & statistics,uint32_t ssrc)1287 void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
1288 uint32_t ssrc) {
1289 MutexLock lock(&mutex_);
1290 VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
1291 if (!stats)
1292 return;
1293
1294 stats->rtcp_stats = statistics;
1295 uma_container_->report_block_stats_.Store(ssrc, statistics);
1296 }
1297
OnReportBlockDataUpdated(ReportBlockData report_block_data)1298 void SendStatisticsProxy::OnReportBlockDataUpdated(
1299 ReportBlockData report_block_data) {
1300 MutexLock lock(&mutex_);
1301 VideoSendStream::StreamStats* stats =
1302 GetStatsEntry(report_block_data.report_block().source_ssrc);
1303 if (!stats)
1304 return;
1305 stats->report_block_data = std::move(report_block_data);
1306 }
1307
// Stores updated RTP stream data counters for |ssrc| and feeds the per-type
// UMA byte counters. FlexFEC updates are ignored here because the same
// counters are reported again under the media SSRC.
void SendStatisticsProxy::DataCountersUpdated(
    const StreamDataCounters& counters,
    uint32_t ssrc) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  RTC_DCHECK(stats) << "DataCountersUpdated reported for unknown ssrc " << ssrc;

  if (stats->type == VideoSendStream::StreamStats::StreamType::kFlexfec) {
    // The same counters are reported for both the media ssrc and flexfec ssrc.
    // Bitrate stats are summed for all SSRCs. Use fec stats from media update.
    return;
  }

  stats->rtp_stats = counters;
  // The first RTP stats update marks the start of the UMA measurement window;
  // restart the adapt timers so they only cover time with traffic.
  if (uma_container_->first_rtp_stats_time_ms_ == -1) {
    int64_t now_ms = clock_->TimeInMilliseconds();
    uma_container_->first_rtp_stats_time_ms_ = now_ms;
    uma_container_->cpu_adapt_timer_.Restart(now_ms);
    uma_container_->quality_adapt_timer_.Restart(now_ms);
  }

  // Aggregate counters are updated for every (non-FlexFEC) SSRC...
  uma_container_->total_byte_counter_.Set(counters.transmitted.TotalBytes(),
                                          ssrc);
  uma_container_->padding_byte_counter_.Set(counters.transmitted.padding_bytes,
                                            ssrc);
  uma_container_->retransmit_byte_counter_.Set(
      counters.retransmitted.TotalBytes(), ssrc);
  uma_container_->fec_byte_counter_.Set(counters.fec.TotalBytes(), ssrc);
  // ...while media/RTX bytes are attributed per stream type.
  switch (stats->type) {
    case VideoSendStream::StreamStats::StreamType::kMedia:
      uma_container_->media_byte_counter_.Set(counters.MediaPayloadBytes(),
                                              ssrc);
      break;
    case VideoSendStream::StreamStats::StreamType::kRtx:
      uma_container_->rtx_byte_counter_.Set(counters.transmitted.TotalBytes(),
                                            ssrc);
      break;
    case VideoSendStream::StreamStats::StreamType::kFlexfec:
      break;
  }
}
1349
Notify(uint32_t total_bitrate_bps,uint32_t retransmit_bitrate_bps,uint32_t ssrc)1350 void SendStatisticsProxy::Notify(uint32_t total_bitrate_bps,
1351 uint32_t retransmit_bitrate_bps,
1352 uint32_t ssrc) {
1353 MutexLock lock(&mutex_);
1354 VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
1355 if (!stats)
1356 return;
1357
1358 stats->total_bitrate_bps = total_bitrate_bps;
1359 stats->retransmit_bitrate_bps = retransmit_bitrate_bps;
1360 }
1361
FrameCountUpdated(const FrameCounts & frame_counts,uint32_t ssrc)1362 void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts,
1363 uint32_t ssrc) {
1364 MutexLock lock(&mutex_);
1365 VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
1366 if (!stats)
1367 return;
1368
1369 stats->frame_counts = frame_counts;
1370 }
1371
SendSideDelayUpdated(int avg_delay_ms,int max_delay_ms,uint64_t total_delay_ms,uint32_t ssrc)1372 void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms,
1373 int max_delay_ms,
1374 uint64_t total_delay_ms,
1375 uint32_t ssrc) {
1376 MutexLock lock(&mutex_);
1377 VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
1378 if (!stats)
1379 return;
1380 stats->avg_delay_ms = avg_delay_ms;
1381 stats->max_delay_ms = max_delay_ms;
1382 stats->total_packet_send_delay_ms = total_delay_ms;
1383
1384 uma_container_->delay_counter_.Add(avg_delay_ms);
1385 uma_container_->max_delay_counter_.Add(max_delay_ms);
1386 }
1387
Start(int64_t now_ms)1388 void SendStatisticsProxy::StatsTimer::Start(int64_t now_ms) {
1389 if (start_ms == -1)
1390 start_ms = now_ms;
1391 }
1392
Stop(int64_t now_ms)1393 void SendStatisticsProxy::StatsTimer::Stop(int64_t now_ms) {
1394 if (start_ms != -1) {
1395 total_ms += now_ms - start_ms;
1396 start_ms = -1;
1397 }
1398 }
1399
Restart(int64_t now_ms)1400 void SendStatisticsProxy::StatsTimer::Restart(int64_t now_ms) {
1401 total_ms = 0;
1402 if (start_ms != -1)
1403 start_ms = now_ms;
1404 }
1405
Add(int sample)1406 void SendStatisticsProxy::SampleCounter::Add(int sample) {
1407 sum += sample;
1408 ++num_samples;
1409 }
1410
Avg(int64_t min_required_samples) const1411 int SendStatisticsProxy::SampleCounter::Avg(
1412 int64_t min_required_samples) const {
1413 if (num_samples < min_required_samples || num_samples == 0)
1414 return -1;
1415 return static_cast<int>((sum + (num_samples / 2)) / num_samples);
1416 }
1417
Add(bool sample)1418 void SendStatisticsProxy::BoolSampleCounter::Add(bool sample) {
1419 if (sample)
1420 ++sum;
1421 ++num_samples;
1422 }
1423
Add(bool sample,int64_t count)1424 void SendStatisticsProxy::BoolSampleCounter::Add(bool sample, int64_t count) {
1425 if (sample)
1426 sum += count;
1427 num_samples += count;
1428 }
Percent(int64_t min_required_samples) const1429 int SendStatisticsProxy::BoolSampleCounter::Percent(
1430 int64_t min_required_samples) const {
1431 return Fraction(min_required_samples, 100.0f);
1432 }
1433
Permille(int64_t min_required_samples) const1434 int SendStatisticsProxy::BoolSampleCounter::Permille(
1435 int64_t min_required_samples) const {
1436 return Fraction(min_required_samples, 1000.0f);
1437 }
1438
Fraction(int64_t min_required_samples,float multiplier) const1439 int SendStatisticsProxy::BoolSampleCounter::Fraction(
1440 int64_t min_required_samples,
1441 float multiplier) const {
1442 if (num_samples < min_required_samples || num_samples == 0)
1443 return -1;
1444 return static_cast<int>((sum * multiplier / num_samples) + 0.5f);
1445 }
1446
1447 SendStatisticsProxy::MaskedAdaptationCounts
MaskedCpuCounts() const1448 SendStatisticsProxy::Adaptations::MaskedCpuCounts() const {
1449 return Mask(cpu_counts_, cpu_settings_);
1450 }
1451
1452 SendStatisticsProxy::MaskedAdaptationCounts
MaskedQualityCounts() const1453 SendStatisticsProxy::Adaptations::MaskedQualityCounts() const {
1454 return Mask(quality_counts_, quality_settings_);
1455 }
1456
set_cpu_counts(const VideoAdaptationCounters & cpu_counts)1457 void SendStatisticsProxy::Adaptations::set_cpu_counts(
1458 const VideoAdaptationCounters& cpu_counts) {
1459 cpu_counts_ = cpu_counts;
1460 }
1461
set_quality_counts(const VideoAdaptationCounters & quality_counts)1462 void SendStatisticsProxy::Adaptations::set_quality_counts(
1463 const VideoAdaptationCounters& quality_counts) {
1464 quality_counts_ = quality_counts;
1465 }
1466
cpu_counts() const1467 VideoAdaptationCounters SendStatisticsProxy::Adaptations::cpu_counts() const {
1468 return cpu_counts_;
1469 }
1470
quality_counts() const1471 VideoAdaptationCounters SendStatisticsProxy::Adaptations::quality_counts()
1472 const {
1473 return quality_counts_;
1474 }
1475
UpdateMaskingSettings(VideoStreamEncoderObserver::AdaptationSettings cpu_settings,VideoStreamEncoderObserver::AdaptationSettings quality_settings)1476 void SendStatisticsProxy::Adaptations::UpdateMaskingSettings(
1477 VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
1478 VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
1479 cpu_settings_ = std::move(cpu_settings);
1480 quality_settings_ = std::move(quality_settings);
1481 }
1482
1483 SendStatisticsProxy::MaskedAdaptationCounts
Mask(const VideoAdaptationCounters & counters,const VideoStreamEncoderObserver::AdaptationSettings & settings) const1484 SendStatisticsProxy::Adaptations::Mask(
1485 const VideoAdaptationCounters& counters,
1486 const VideoStreamEncoderObserver::AdaptationSettings& settings) const {
1487 MaskedAdaptationCounts masked_counts;
1488 if (settings.resolution_scaling_enabled) {
1489 masked_counts.resolution_adaptations = counters.resolution_adaptations;
1490 }
1491 if (settings.framerate_scaling_enabled) {
1492 masked_counts.num_framerate_reductions = counters.fps_adaptations;
1493 }
1494 return masked_counts;
1495 }
1496
1497 } // namespace webrtc
1498