/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "video/send_statistics_proxy.h"

#include <algorithm>
#include <array>
#include <cmath>
#include <limits>
#include <utility>

#include "absl/strings/match.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_codec_type.h"
#include "api/video_codecs/video_codec.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/mod_ops.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/metrics.h"

namespace webrtc {
namespace {
const float kEncodeTimeWeigthFactor = 0.5f;
const size_t kMaxEncodedFrameMapSize = 150;
const int64_t kMaxEncodedFrameWindowMs = 800;
const uint32_t kMaxEncodedFrameTimestampDiff = 900000;  // 10 sec.
const int64_t kBucketSizeMs = 100;
const size_t kBucketCount = 10;
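// The rate trackers below use kBucketCount buckets of kBucketSizeMs each,
// i.e. rates are computed over a sliding window of one second.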

const char kVp8ForcedFallbackEncoderFieldTrial[] =
    "WebRTC-VP8-Forced-Fallback-Encoder-v2";
const char kVp8SwCodecName[] = "libvpx";

// Used by histograms. Values of entries should not be changed.
enum HistogramCodecType {
  kVideoUnknown = 0,
  kVideoVp8 = 1,
  kVideoVp9 = 2,
  kVideoH264 = 3,
  kVideoAv1 = 4,
  kVideoMax = 64,
};

const char* kRealtimePrefix = "WebRTC.Video.";
const char* kScreenPrefix = "WebRTC.Video.Screenshare.";

const char* GetUmaPrefix(VideoEncoderConfig::ContentType content_type) {
  switch (content_type) {
    case VideoEncoderConfig::ContentType::kRealtimeVideo:
      return kRealtimePrefix;
    case VideoEncoderConfig::ContentType::kScreen:
      return kScreenPrefix;
  }
  RTC_DCHECK_NOTREACHED();
  return nullptr;
}

HistogramCodecType PayloadNameToHistogramCodecType(
    const std::string& payload_name) {
  VideoCodecType codecType = PayloadStringToCodecType(payload_name);
  switch (codecType) {
    case kVideoCodecVP8:
      return kVideoVp8;
    case kVideoCodecVP9:
      return kVideoVp9;
    case kVideoCodecH264:
      return kVideoH264;
    case kVideoCodecAV1:
      return kVideoAv1;
    default:
      return kVideoUnknown;
  }
}

void UpdateCodecTypeHistogram(const std::string& payload_name) {
  RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.Encoder.CodecType",
                            PayloadNameToHistogramCodecType(payload_name),
                            kVideoMax);
}

bool IsForcedFallbackPossible(const CodecSpecificInfo* codec_info,
                              int simulcast_index) {
  return codec_info->codecType == kVideoCodecVP8 && simulcast_index == 0 &&
         (codec_info->codecSpecific.VP8.temporalIdx == 0 ||
          codec_info->codecSpecific.VP8.temporalIdx == kNoTemporalIdx);
}

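// Parses the field-trial group suffix "-<min_pixels>,<max_pixels>,<min_bps>"
// and returns <max_pixels> if the values are valid, otherwise nullopt.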
absl::optional<int> GetFallbackMaxPixels(const std::string& group) {
  if (group.empty())
    return absl::nullopt;

  int min_pixels;
  int max_pixels;
  int min_bps;
  if (sscanf(group.c_str(), "-%d,%d,%d", &min_pixels, &max_pixels, &min_bps) !=
      3) {
    return absl::optional<int>();
  }

  if (min_pixels <= 0 || max_pixels <= 0 || max_pixels < min_pixels)
    return absl::optional<int>();

  return absl::optional<int>(max_pixels);
}

absl::optional<int> GetFallbackMaxPixelsIfFieldTrialEnabled(
    const webrtc::FieldTrialsView& field_trials) {
  std::string group = field_trials.Lookup(kVp8ForcedFallbackEncoderFieldTrial);
  return (absl::StartsWith(group, "Enabled"))
             ? GetFallbackMaxPixels(group.substr(7))
             : absl::optional<int>();
}

absl::optional<int> GetFallbackMaxPixelsIfFieldTrialDisabled(
    const webrtc::FieldTrialsView& field_trials) {
  std::string group = field_trials.Lookup(kVp8ForcedFallbackEncoderFieldTrial);
  return (absl::StartsWith(group, "Disabled"))
             ? GetFallbackMaxPixels(group.substr(8))
             : absl::optional<int>();
}
}  // namespace

const int SendStatisticsProxy::kStatsTimeoutMs = 5000;

SendStatisticsProxy::SendStatisticsProxy(
    Clock* clock,
    const VideoSendStream::Config& config,
    VideoEncoderConfig::ContentType content_type,
    const FieldTrialsView& field_trials)
    : clock_(clock),
      payload_name_(config.rtp.payload_name),
      rtp_config_(config.rtp),
      fallback_max_pixels_(
          GetFallbackMaxPixelsIfFieldTrialEnabled(field_trials)),
      fallback_max_pixels_disabled_(
          GetFallbackMaxPixelsIfFieldTrialDisabled(field_trials)),
      content_type_(content_type),
      start_ms_(clock->TimeInMilliseconds()),
      encode_time_(kEncodeTimeWeigthFactor),
      quality_limitation_reason_tracker_(clock_),
      media_byte_rate_tracker_(kBucketSizeMs, kBucketCount),
      encoded_frame_rate_tracker_(kBucketSizeMs, kBucketCount),
      last_num_spatial_layers_(0),
      last_num_simulcast_streams_(0),
      last_spatial_layer_use_{},
      bw_limited_layers_(false),
      internal_encoder_scaler_(false),
      uma_container_(
          new UmaSamplesContainer(GetUmaPrefix(content_type_), stats_, clock)) {
}

SendStatisticsProxy::~SendStatisticsProxy() {
  MutexLock lock(&mutex_);
  uma_container_->UpdateHistograms(rtp_config_, stats_);

  int64_t elapsed_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
  RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.SendStreamLifetimeInSeconds",
                              elapsed_sec);

  if (elapsed_sec >= metrics::kMinRunTimeInSeconds)
    UpdateCodecTypeHistogram(payload_name_);
}

SendStatisticsProxy::FallbackEncoderInfo::FallbackEncoderInfo() = default;

SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
    const char* prefix,
    const VideoSendStream::Stats& stats,
    Clock* const clock)
    : uma_prefix_(prefix),
      clock_(clock),
      input_frame_rate_tracker_(100, 10u),
      input_fps_counter_(clock, nullptr, true),
      sent_fps_counter_(clock, nullptr, true),
      total_byte_counter_(clock, nullptr, true),
      media_byte_counter_(clock, nullptr, true),
      rtx_byte_counter_(clock, nullptr, true),
      padding_byte_counter_(clock, nullptr, true),
      retransmit_byte_counter_(clock, nullptr, true),
      fec_byte_counter_(clock, nullptr, true),
      first_rtcp_stats_time_ms_(-1),
      first_rtp_stats_time_ms_(-1),
      start_stats_(stats),
      num_streams_(0),
      num_pixels_highest_stream_(0) {
  InitializeBitrateCounters(stats);
  static_assert(
      kMaxEncodedFrameTimestampDiff < std::numeric_limits<uint32_t>::max() / 2,
      "has to be smaller than half range");
}

SendStatisticsProxy::UmaSamplesContainer::~UmaSamplesContainer() {}

void SendStatisticsProxy::UmaSamplesContainer::InitializeBitrateCounters(
    const VideoSendStream::Stats& stats) {
  for (const auto& it : stats.substreams) {
    uint32_t ssrc = it.first;
    total_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(),
                                ssrc);
    padding_byte_counter_.SetLast(
        it.second.rtp_stats.transmitted.padding_bytes, ssrc);
    retransmit_byte_counter_.SetLast(
        it.second.rtp_stats.retransmitted.TotalBytes(), ssrc);
    fec_byte_counter_.SetLast(it.second.rtp_stats.fec.TotalBytes(), ssrc);
    switch (it.second.type) {
      case VideoSendStream::StreamStats::StreamType::kMedia:
        media_byte_counter_.SetLast(it.second.rtp_stats.MediaPayloadBytes(),
                                    ssrc);
        break;
      case VideoSendStream::StreamStats::StreamType::kRtx:
        rtx_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(),
                                  ssrc);
        break;
      case VideoSendStream::StreamStats::StreamType::kFlexfec:
        break;
    }
  }
}

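// Drops map entries older than kMaxEncodedFrameWindowMs and, for each removed
// timestamp, records the maximum sent resolution and whether the number of
// simulcast streams was reduced due to bandwidth limitation.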
void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(int64_t now_ms) {
  while (!encoded_frames_.empty()) {
    auto it = encoded_frames_.begin();
    if (now_ms - it->second.send_ms < kMaxEncodedFrameWindowMs)
      break;

    // Use max per timestamp.
    sent_width_counter_.Add(it->second.max_width);
    sent_height_counter_.Add(it->second.max_height);

    // Check number of encoded streams per timestamp.
    if (num_streams_ > static_cast<size_t>(it->second.max_simulcast_idx)) {
      if (num_streams_ > 1) {
        int disabled_streams =
            static_cast<int>(num_streams_ - 1 - it->second.max_simulcast_idx);
        // Can be limited in resolution or framerate.
        uint32_t pixels = it->second.max_width * it->second.max_height;
        bool bw_limited_resolution =
            disabled_streams > 0 && pixels < num_pixels_highest_stream_;
        bw_limited_frame_counter_.Add(bw_limited_resolution);
        if (bw_limited_resolution) {
          bw_resolutions_disabled_counter_.Add(disabled_streams);
        }
      }
    }
    encoded_frames_.erase(it);
  }
}

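// Adds an encoded frame keyed by RTP timestamp. Returns true if this is the
// first frame seen for that timestamp (a new sent frame); otherwise only the
// per-timestamp max width/height/simulcast index is updated.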
bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
    const EncodedImage& encoded_frame,
    int simulcast_idx) {
  int64_t now_ms = clock_->TimeInMilliseconds();
  RemoveOld(now_ms);
  if (encoded_frames_.size() > kMaxEncodedFrameMapSize) {
    encoded_frames_.clear();
  }

  // Check for jump in timestamp.
  if (!encoded_frames_.empty()) {
    uint32_t oldest_timestamp = encoded_frames_.begin()->first;
    if (ForwardDiff(oldest_timestamp, encoded_frame.Timestamp()) >
        kMaxEncodedFrameTimestampDiff) {
      // Gap detected, clear frames to have a sequence where newest timestamp
      // is not too far away from oldest in order to distinguish old and new.
      encoded_frames_.clear();
    }
  }

  auto it = encoded_frames_.find(encoded_frame.Timestamp());
  if (it == encoded_frames_.end()) {
    // First frame with this timestamp.
    encoded_frames_.insert(
        std::make_pair(encoded_frame.Timestamp(),
                       Frame(now_ms, encoded_frame._encodedWidth,
                             encoded_frame._encodedHeight, simulcast_idx)));
    sent_fps_counter_.Add(1);
    return true;
  }

  it->second.max_width =
      std::max(it->second.max_width, encoded_frame._encodedWidth);
  it->second.max_height =
      std::max(it->second.max_height, encoded_frame._encodedHeight);
  it->second.max_simulcast_idx =
      std::max(it->second.max_simulcast_idx, simulcast_idx);
  return false;
}

void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
    const RtpConfig& rtp_config,
    const VideoSendStream::Stats& current_stats) {
  RTC_DCHECK(uma_prefix_ == kRealtimePrefix || uma_prefix_ == kScreenPrefix);
  const int kIndex = uma_prefix_ == kScreenPrefix ? 1 : 0;
  const int kMinRequiredPeriodicSamples = 6;
  char log_stream_buf[8 * 1024];
  rtc::SimpleStringBuilder log_stream(log_stream_buf);
  int in_width = input_width_counter_.Avg(kMinRequiredMetricsSamples);
  int in_height = input_height_counter_.Avg(kMinRequiredMetricsSamples);
  if (in_width != -1) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "InputWidthInPixels",
                                in_width);
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "InputHeightInPixels",
                                in_height);
    log_stream << uma_prefix_ << "InputWidthInPixels " << in_width << "\n"
               << uma_prefix_ << "InputHeightInPixels " << in_height << "\n";
  }
  AggregatedStats in_fps = input_fps_counter_.GetStats();
  if (in_fps.num_samples >= kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "InputFramesPerSecond",
                              in_fps.average);
    log_stream << uma_prefix_ << "InputFramesPerSecond " << in_fps.ToString()
               << "\n";
  }

  int sent_width = sent_width_counter_.Avg(kMinRequiredMetricsSamples);
  int sent_height = sent_height_counter_.Avg(kMinRequiredMetricsSamples);
  if (sent_width != -1) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "SentWidthInPixels",
                                sent_width);
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "SentHeightInPixels",
                                sent_height);
    log_stream << uma_prefix_ << "SentWidthInPixels " << sent_width << "\n"
               << uma_prefix_ << "SentHeightInPixels " << sent_height << "\n";
  }
  AggregatedStats sent_fps = sent_fps_counter_.GetStats();
  if (sent_fps.num_samples >= kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "SentFramesPerSecond",
                              sent_fps.average);
    log_stream << uma_prefix_ << "SentFramesPerSecond " << sent_fps.ToString()
               << "\n";
  }

  if (in_fps.num_samples > kMinRequiredPeriodicSamples &&
      sent_fps.num_samples >= kMinRequiredPeriodicSamples) {
    int in_fps_avg = in_fps.average;
    if (in_fps_avg > 0) {
      int sent_fps_avg = sent_fps.average;
      int sent_to_in_fps_ratio_percent =
          (100 * sent_fps_avg + in_fps_avg / 2) / in_fps_avg;
      // If reported period is small, it may happen that sent_fps is larger
      // than input_fps briefly on average. This should be treated as 100%
      // sent to input ratio.
      if (sent_to_in_fps_ratio_percent > 100)
        sent_to_in_fps_ratio_percent = 100;
      RTC_HISTOGRAMS_PERCENTAGE(kIndex,
                                uma_prefix_ + "SentToInputFpsRatioPercent",
                                sent_to_in_fps_ratio_percent);
      log_stream << uma_prefix_ << "SentToInputFpsRatioPercent "
                 << sent_to_in_fps_ratio_percent << "\n";
    }
  }

  int encode_ms = encode_time_counter_.Avg(kMinRequiredMetricsSamples);
  if (encode_ms != -1) {
    RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "EncodeTimeInMs",
                               encode_ms);
    log_stream << uma_prefix_ << "EncodeTimeInMs " << encode_ms << "\n";
  }
  int key_frames_permille =
      key_frame_counter_.Permille(kMinRequiredMetricsSamples);
  if (key_frames_permille != -1) {
    RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "KeyFramesSentInPermille",
                               key_frames_permille);
    log_stream << uma_prefix_ << "KeyFramesSentInPermille "
               << key_frames_permille << "\n";
  }
  int quality_limited =
      quality_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
  if (quality_limited != -1) {
    RTC_HISTOGRAMS_PERCENTAGE(kIndex,
                              uma_prefix_ + "QualityLimitedResolutionInPercent",
                              quality_limited);
    log_stream << uma_prefix_ << "QualityLimitedResolutionInPercent "
               << quality_limited << "\n";
  }
  int downscales = quality_downscales_counter_.Avg(kMinRequiredMetricsSamples);
  if (downscales != -1) {
    RTC_HISTOGRAMS_ENUMERATION(
        kIndex, uma_prefix_ + "QualityLimitedResolutionDownscales", downscales,
        20);
  }
  int cpu_limited =
      cpu_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
  if (cpu_limited != -1) {
    RTC_HISTOGRAMS_PERCENTAGE(
        kIndex, uma_prefix_ + "CpuLimitedResolutionInPercent", cpu_limited);
  }
  int bw_limited =
      bw_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
  if (bw_limited != -1) {
    RTC_HISTOGRAMS_PERCENTAGE(
        kIndex, uma_prefix_ + "BandwidthLimitedResolutionInPercent",
        bw_limited);
  }
  int num_disabled =
      bw_resolutions_disabled_counter_.Avg(kMinRequiredMetricsSamples);
  if (num_disabled != -1) {
    RTC_HISTOGRAMS_ENUMERATION(
        kIndex, uma_prefix_ + "BandwidthLimitedResolutionsDisabled",
        num_disabled, 10);
  }
  int delay_ms = delay_counter_.Avg(kMinRequiredMetricsSamples);
  if (delay_ms != -1)
    RTC_HISTOGRAMS_COUNTS_100000(kIndex, uma_prefix_ + "SendSideDelayInMs",
                                 delay_ms);

  int max_delay_ms = max_delay_counter_.Avg(kMinRequiredMetricsSamples);
  if (max_delay_ms != -1) {
    RTC_HISTOGRAMS_COUNTS_100000(kIndex, uma_prefix_ + "SendSideDelayMaxInMs",
                                 max_delay_ms);
  }

  for (const auto& it : qp_counters_) {
    int qp_vp8 = it.second.vp8.Avg(kMinRequiredMetricsSamples);
    if (qp_vp8 != -1) {
      int spatial_idx = it.first;
      if (spatial_idx == -1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8",
                                  qp_vp8);
      } else if (spatial_idx == 0) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S0",
                                  qp_vp8);
      } else if (spatial_idx == 1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S1",
                                  qp_vp8);
      } else if (spatial_idx == 2) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S2",
                                  qp_vp8);
      } else {
        RTC_LOG(LS_WARNING)
            << "QP stats not recorded for VP8 spatial idx " << spatial_idx;
      }
    }
    int qp_vp9 = it.second.vp9.Avg(kMinRequiredMetricsSamples);
    if (qp_vp9 != -1) {
      int spatial_idx = it.first;
      if (spatial_idx == -1) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9",
                                  qp_vp9);
      } else if (spatial_idx == 0) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S0",
                                  qp_vp9);
      } else if (spatial_idx == 1) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S1",
                                  qp_vp9);
      } else if (spatial_idx == 2) {
        RTC_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S2",
                                  qp_vp9);
      } else {
        RTC_LOG(LS_WARNING)
            << "QP stats not recorded for VP9 spatial layer " << spatial_idx;
      }
    }
    int qp_h264 = it.second.h264.Avg(kMinRequiredMetricsSamples);
    if (qp_h264 != -1) {
      int spatial_idx = it.first;
      if (spatial_idx == -1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264",
                                  qp_h264);
      } else if (spatial_idx == 0) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S0",
                                  qp_h264);
      } else if (spatial_idx == 1) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S1",
                                  qp_h264);
      } else if (spatial_idx == 2) {
        RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S2",
                                  qp_h264);
      } else {
        RTC_LOG(LS_WARNING)
            << "QP stats not recorded for H264 spatial idx " << spatial_idx;
      }
    }
  }

  if (first_rtp_stats_time_ms_ != -1) {
    quality_adapt_timer_.Stop(clock_->TimeInMilliseconds());
    int64_t elapsed_sec = quality_adapt_timer_.total_ms / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      int quality_changes = current_stats.number_of_quality_adapt_changes -
                            start_stats_.number_of_quality_adapt_changes;
      // Only base stats on changes during a call, discard initial changes.
      int initial_changes =
          initial_quality_changes_.down + initial_quality_changes_.up;
      if (initial_changes <= quality_changes)
        quality_changes -= initial_changes;
      RTC_HISTOGRAMS_COUNTS_100(kIndex,
                                uma_prefix_ + "AdaptChangesPerMinute.Quality",
                                quality_changes * 60 / elapsed_sec);
    }
    cpu_adapt_timer_.Stop(clock_->TimeInMilliseconds());
    elapsed_sec = cpu_adapt_timer_.total_ms / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      int cpu_changes = current_stats.number_of_cpu_adapt_changes -
                        start_stats_.number_of_cpu_adapt_changes;
      RTC_HISTOGRAMS_COUNTS_100(kIndex,
                                uma_prefix_ + "AdaptChangesPerMinute.Cpu",
                                cpu_changes * 60 / elapsed_sec);
    }
  }

  if (first_rtcp_stats_time_ms_ != -1) {
    int64_t elapsed_sec =
        (clock_->TimeInMilliseconds() - first_rtcp_stats_time_ms_) / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      int fraction_lost = report_block_stats_.FractionLostInPercent();
      if (fraction_lost != -1) {
        RTC_HISTOGRAMS_PERCENTAGE(
            kIndex, uma_prefix_ + "SentPacketsLostInPercent", fraction_lost);
        log_stream << uma_prefix_ << "SentPacketsLostInPercent "
                   << fraction_lost << "\n";
      }

      // The RTCP packet type counters, delivered via the
      // RtcpPacketTypeCounterObserver interface, are aggregates over the
      // entire life of the send stream and are not reset when switching
      // content type. For the purpose of these statistics though, we want new
      // counts when switching since we switch histogram name. On every reset
      // of the UmaSamplesContainer, we save the initial state of the counters,
      // so that we can calculate the delta here and aggregate over all ssrcs.
      RtcpPacketTypeCounter counters;
      for (uint32_t ssrc : rtp_config.ssrcs) {
        auto kv = current_stats.substreams.find(ssrc);
        if (kv == current_stats.substreams.end())
          continue;

        RtcpPacketTypeCounter stream_counters =
            kv->second.rtcp_packet_type_counts;
        kv = start_stats_.substreams.find(ssrc);
        if (kv != start_stats_.substreams.end())
          stream_counters.Subtract(kv->second.rtcp_packet_type_counts);

        counters.Add(stream_counters);
      }
      RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                  uma_prefix_ + "NackPacketsReceivedPerMinute",
                                  counters.nack_packets * 60 / elapsed_sec);
      RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                  uma_prefix_ + "FirPacketsReceivedPerMinute",
                                  counters.fir_packets * 60 / elapsed_sec);
      RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                  uma_prefix_ + "PliPacketsReceivedPerMinute",
                                  counters.pli_packets * 60 / elapsed_sec);
      if (counters.nack_requests > 0) {
        RTC_HISTOGRAMS_PERCENTAGE(
            kIndex, uma_prefix_ + "UniqueNackRequestsReceivedInPercent",
            counters.UniqueNackRequestsInPercent());
      }
    }
  }

  if (first_rtp_stats_time_ms_ != -1) {
    int64_t elapsed_sec =
        (clock_->TimeInMilliseconds() - first_rtp_stats_time_ms_) / 1000;
    if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
      RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "NumberOfPauseEvents",
                                target_rate_updates_.pause_resume_events);
      log_stream << uma_prefix_ << "NumberOfPauseEvents "
                 << target_rate_updates_.pause_resume_events << "\n";

      int paused_time_percent =
          paused_time_counter_.Percent(metrics::kMinRunTimeInSeconds * 1000);
      if (paused_time_percent != -1) {
        RTC_HISTOGRAMS_PERCENTAGE(kIndex, uma_prefix_ + "PausedTimeInPercent",
                                  paused_time_percent);
        log_stream << uma_prefix_ << "PausedTimeInPercent "
                   << paused_time_percent << "\n";
      }
    }
  }

  if (fallback_info_.is_possible) {
    // Double interval since there is some time before fallback may occur.
    const int kMinRunTimeMs = 2 * metrics::kMinRunTimeInSeconds * 1000;
    int64_t elapsed_ms = fallback_info_.elapsed_ms;
    int fallback_time_percent = fallback_active_counter_.Percent(kMinRunTimeMs);
    if (fallback_time_percent != -1 && elapsed_ms >= kMinRunTimeMs) {
      RTC_HISTOGRAMS_PERCENTAGE(
          kIndex, uma_prefix_ + "Encoder.ForcedSwFallbackTimeInPercent.Vp8",
          fallback_time_percent);
      RTC_HISTOGRAMS_COUNTS_100(
          kIndex, uma_prefix_ + "Encoder.ForcedSwFallbackChangesPerMinute.Vp8",
          fallback_info_.on_off_events * 60 / (elapsed_ms / 1000));
    }
  }

  AggregatedStats total_bytes_per_sec = total_byte_counter_.GetStats();
  if (total_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "BitrateSentInKbps",
                                total_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "BitrateSentInBps "
               << total_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  AggregatedStats media_bytes_per_sec = media_byte_counter_.GetStats();
  if (media_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "MediaBitrateSentInKbps",
                                media_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "MediaBitrateSentInBps "
               << media_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  AggregatedStats padding_bytes_per_sec = padding_byte_counter_.GetStats();
  if (padding_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                uma_prefix_ + "PaddingBitrateSentInKbps",
                                padding_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "PaddingBitrateSentInBps "
               << padding_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  AggregatedStats retransmit_bytes_per_sec =
      retransmit_byte_counter_.GetStats();
  if (retransmit_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
    RTC_HISTOGRAMS_COUNTS_10000(kIndex,
                                uma_prefix_ + "RetransmittedBitrateSentInKbps",
                                retransmit_bytes_per_sec.average * 8 / 1000);
    log_stream << uma_prefix_ << "RetransmittedBitrateSentInBps "
               << retransmit_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
  }
  if (!rtp_config.rtx.ssrcs.empty()) {
    AggregatedStats rtx_bytes_per_sec = rtx_byte_counter_.GetStats();
    int rtx_bytes_per_sec_avg = -1;
    if (rtx_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
      rtx_bytes_per_sec_avg = rtx_bytes_per_sec.average;
      log_stream << uma_prefix_ << "RtxBitrateSentInBps "
                 << rtx_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
    } else if (total_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
      rtx_bytes_per_sec_avg = 0;  // RTX enabled but no RTX data sent, record 0.
    }
    if (rtx_bytes_per_sec_avg != -1) {
      RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "RtxBitrateSentInKbps",
                                  rtx_bytes_per_sec_avg * 8 / 1000);
    }
  }
  if (rtp_config.flexfec.payload_type != -1 ||
      rtp_config.ulpfec.red_payload_type != -1) {
    AggregatedStats fec_bytes_per_sec = fec_byte_counter_.GetStats();
    if (fec_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
      RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "FecBitrateSentInKbps",
                                  fec_bytes_per_sec.average * 8 / 1000);
      log_stream << uma_prefix_ << "FecBitrateSentInBps "
                 << fec_bytes_per_sec.ToStringWithMultiplier(8) << "\n";
    }
  }
  log_stream << "Frames encoded " << current_stats.frames_encoded << "\n"
             << uma_prefix_ << "DroppedFrames.Capturer "
             << current_stats.frames_dropped_by_capturer << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Capturer",
                             current_stats.frames_dropped_by_capturer);
  log_stream << uma_prefix_ << "DroppedFrames.EncoderQueue "
             << current_stats.frames_dropped_by_encoder_queue << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.EncoderQueue",
                             current_stats.frames_dropped_by_encoder_queue);
  log_stream << uma_prefix_ << "DroppedFrames.Encoder "
             << current_stats.frames_dropped_by_encoder << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Encoder",
                             current_stats.frames_dropped_by_encoder);
  log_stream << uma_prefix_ << "DroppedFrames.Ratelimiter "
             << current_stats.frames_dropped_by_rate_limiter << "\n";
  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Ratelimiter",
                             current_stats.frames_dropped_by_rate_limiter);
  log_stream << uma_prefix_ << "DroppedFrames.CongestionWindow "
             << current_stats.frames_dropped_by_congestion_window;

  RTC_LOG(LS_INFO) << log_stream.str();
}

void SendStatisticsProxy::OnEncoderReconfigured(
    const VideoEncoderConfig& config,
    const std::vector<VideoStream>& streams) {
  // Called on VideoStreamEncoder's encoder_queue_.
  MutexLock lock(&mutex_);

  if (content_type_ != config.content_type) {
    uma_container_->UpdateHistograms(rtp_config_, stats_);
    uma_container_.reset(new UmaSamplesContainer(
        GetUmaPrefix(config.content_type), stats_, clock_));
    content_type_ = config.content_type;
  }
  uma_container_->encoded_frames_.clear();
  uma_container_->num_streams_ = streams.size();
  uma_container_->num_pixels_highest_stream_ =
      streams.empty() ? 0 : (streams.back().width * streams.back().height);
}

void SendStatisticsProxy::OnEncodedFrameTimeMeasured(int encode_time_ms,
                                                     int encode_usage_percent) {
  RTC_DCHECK_GE(encode_time_ms, 0);
  MutexLock lock(&mutex_);
  uma_container_->encode_time_counter_.Add(encode_time_ms);
  encode_time_.Apply(1.0f, encode_time_ms);
  stats_.avg_encode_time_ms = std::round(encode_time_.filtered());
  stats_.total_encode_time_ms += encode_time_ms;
  stats_.encode_usage_percent = encode_usage_percent;
}

void SendStatisticsProxy::OnSuspendChange(bool is_suspended) {
  int64_t now_ms = clock_->TimeInMilliseconds();
  MutexLock lock(&mutex_);
  stats_.suspended = is_suspended;
  if (is_suspended) {
    // Pause framerate (add min pause time since there may be frames/packets
    // that are not yet sent).
    const int64_t kMinMs = 500;
    uma_container_->input_fps_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->sent_fps_counter_.ProcessAndPauseForDuration(kMinMs);
    // Pause bitrate stats.
    uma_container_->total_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->media_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->rtx_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->padding_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->retransmit_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    uma_container_->fec_byte_counter_.ProcessAndPauseForDuration(kMinMs);
    // Stop adaptation stats.
    uma_container_->cpu_adapt_timer_.Stop(now_ms);
    uma_container_->quality_adapt_timer_.Stop(now_ms);
  } else {
    // Start adaptation stats if scaling is enabled.
    if (adaptation_limitations_.MaskedCpuCounts()
            .resolution_adaptations.has_value())
      uma_container_->cpu_adapt_timer_.Start(now_ms);
    if (adaptation_limitations_.MaskedQualityCounts()
            .resolution_adaptations.has_value())
      uma_container_->quality_adapt_timer_.Start(now_ms);
    // Stop pause explicitly for stats that may be zero/not updated for some
    // time.
    uma_container_->rtx_byte_counter_.ProcessAndStopPause();
    uma_container_->padding_byte_counter_.ProcessAndStopPause();
    uma_container_->retransmit_byte_counter_.ProcessAndStopPause();
    uma_container_->fec_byte_counter_.ProcessAndStopPause();
  }
}

VideoSendStream::Stats SendStatisticsProxy::GetStats() {
  MutexLock lock(&mutex_);
  PurgeOldStats();
  stats_.input_frame_rate =
      uma_container_->input_frame_rate_tracker_.ComputeRate();
  stats_.frames =
      uma_container_->input_frame_rate_tracker_.TotalSampleCount();
  stats_.content_type =
      content_type_ == VideoEncoderConfig::ContentType::kRealtimeVideo
          ? VideoContentType::UNSPECIFIED
          : VideoContentType::SCREENSHARE;
  stats_.encode_frame_rate = round(encoded_frame_rate_tracker_.ComputeRate());
  stats_.media_bitrate_bps = media_byte_rate_tracker_.ComputeRate() * 8;
  stats_.quality_limitation_durations_ms =
      quality_limitation_reason_tracker_.DurationsMs();

  for (auto& substream : stats_.substreams) {
    uint32_t ssrc = substream.first;
    if (encoded_frame_rate_trackers_.count(ssrc) > 0) {
      substream.second.encode_frame_rate =
          encoded_frame_rate_trackers_[ssrc]->ComputeRate();
    }
  }
  return stats_;
}

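// Resets the reported width/height for substreams whose resolution has not
// been updated within kStatsTimeoutMs, so stale resolutions are not reported.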
void SendStatisticsProxy::PurgeOldStats() {
  int64_t old_stats_ms = clock_->TimeInMilliseconds() - kStatsTimeoutMs;
  for (std::map<uint32_t, VideoSendStream::StreamStats>::iterator it =
           stats_.substreams.begin();
       it != stats_.substreams.end(); ++it) {
    uint32_t ssrc = it->first;
    if (update_times_[ssrc].resolution_update_ms <= old_stats_ms) {
      it->second.width = 0;
      it->second.height = 0;
    }
  }
}

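// Returns the StreamStats entry for `ssrc`, creating one if the ssrc belongs
// to this stream (media, RTX or FlexFEC). Returns nullptr for unknown ssrcs.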
VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry(
    uint32_t ssrc) {
  std::map<uint32_t, VideoSendStream::StreamStats>::iterator it =
      stats_.substreams.find(ssrc);
  if (it != stats_.substreams.end())
    return &it->second;

  bool is_media = rtp_config_.IsMediaSsrc(ssrc);
  bool is_flexfec = rtp_config_.flexfec.payload_type != -1 &&
                    ssrc == rtp_config_.flexfec.ssrc;
  bool is_rtx = rtp_config_.IsRtxSsrc(ssrc);
  if (!is_media && !is_flexfec && !is_rtx)
    return nullptr;

  // Insert new entry and return ptr.
  VideoSendStream::StreamStats* entry = &stats_.substreams[ssrc];
  if (is_media) {
    entry->type = VideoSendStream::StreamStats::StreamType::kMedia;
  } else if (is_rtx) {
    entry->type = VideoSendStream::StreamStats::StreamType::kRtx;
  } else if (is_flexfec) {
    entry->type = VideoSendStream::StreamStats::StreamType::kFlexfec;
  } else {
    RTC_DCHECK_NOTREACHED();
  }
  switch (entry->type) {
    case VideoSendStream::StreamStats::StreamType::kMedia:
      break;
    case VideoSendStream::StreamStats::StreamType::kRtx:
      entry->referenced_media_ssrc =
          rtp_config_.GetMediaSsrcAssociatedWithRtxSsrc(ssrc);
      break;
    case VideoSendStream::StreamStats::StreamType::kFlexfec:
      entry->referenced_media_ssrc =
          rtp_config_.GetMediaSsrcAssociatedWithFlexfecSsrc(ssrc);
      break;
  }

  return entry;
}

void SendStatisticsProxy::OnInactiveSsrc(uint32_t ssrc) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;

  stats->total_bitrate_bps = 0;
  stats->retransmit_bitrate_bps = 0;
  stats->height = 0;
  stats->width = 0;
}

void SendStatisticsProxy::OnSetEncoderTargetRate(uint32_t bitrate_bps) {
  MutexLock lock(&mutex_);
  if (uma_container_->target_rate_updates_.last_ms == -1 && bitrate_bps == 0)
    return;  // Start on first non-zero bitrate, may initially be zero.

  int64_t now = clock_->TimeInMilliseconds();
  if (uma_container_->target_rate_updates_.last_ms != -1) {
    bool was_paused = stats_.target_media_bitrate_bps == 0;
    int64_t diff_ms = now - uma_container_->target_rate_updates_.last_ms;
    uma_container_->paused_time_counter_.Add(was_paused, diff_ms);

    // Use last to not include update when stream is stopped and video
    // disabled.
    if (uma_container_->target_rate_updates_.last_paused_or_resumed)
      ++uma_container_->target_rate_updates_.pause_resume_events;

    // Check if video is paused/resumed.
    uma_container_->target_rate_updates_.last_paused_or_resumed =
        (bitrate_bps == 0) != was_paused;
  }
  uma_container_->target_rate_updates_.last_ms = now;

  stats_.target_media_bitrate_bps = bitrate_bps;
}

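// Tracks forced software (libvpx) fallback for VP8 on the lowest simulcast
// stream: accumulates the time spent in fallback and counts on/off
// transitions, driven by reported encoder implementation changes.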
void SendStatisticsProxy::UpdateEncoderFallbackStats(
    const CodecSpecificInfo* codec_info,
    int pixels,
    int simulcast_index) {
  UpdateFallbackDisabledStats(codec_info, pixels, simulcast_index);

  if (!fallback_max_pixels_ || !uma_container_->fallback_info_.is_possible) {
    return;
  }

  if (!IsForcedFallbackPossible(codec_info, simulcast_index)) {
    uma_container_->fallback_info_.is_possible = false;
    return;
  }

  FallbackEncoderInfo* fallback_info = &uma_container_->fallback_info_;

  const int64_t now_ms = clock_->TimeInMilliseconds();
  bool is_active = fallback_info->is_active;
  if (encoder_changed_) {
    // Implementation changed.
    const bool last_was_vp8_software =
        encoder_changed_->previous_encoder_implementation == kVp8SwCodecName;
    is_active = encoder_changed_->new_encoder_implementation == kVp8SwCodecName;
    encoder_changed_.reset();
    if (!is_active && !last_was_vp8_software) {
      // First or not a VP8 SW change, update stats on next call.
      return;
    }
    if (is_active && (pixels > *fallback_max_pixels_)) {
      // Pixels should not be above `fallback_max_pixels_`. If above skip to
      // avoid fallbacks due to failure.
      fallback_info->is_possible = false;
      return;
    }
    stats_.has_entered_low_resolution = true;
    ++fallback_info->on_off_events;
  }

  if (fallback_info->last_update_ms) {
    int64_t diff_ms = now_ms - *(fallback_info->last_update_ms);
    // If the time diff since last update is greater than `max_frame_diff_ms`,
    // video is considered paused/muted and the change is not included.
    if (diff_ms < fallback_info->max_frame_diff_ms) {
      uma_container_->fallback_active_counter_.Add(fallback_info->is_active,
                                                   diff_ms);
      fallback_info->elapsed_ms += diff_ms;
    }
  }
  fallback_info->is_active = is_active;
  fallback_info->last_update_ms.emplace(now_ms);
}

void SendStatisticsProxy::UpdateFallbackDisabledStats(
    const CodecSpecificInfo* codec_info,
    int pixels,
    int simulcast_index) {
  if (!fallback_max_pixels_disabled_ ||
      !uma_container_->fallback_info_disabled_.is_possible ||
      stats_.has_entered_low_resolution) {
    return;
  }

  if (!IsForcedFallbackPossible(codec_info, simulcast_index) ||
      stats_.encoder_implementation_name == kVp8SwCodecName) {
    uma_container_->fallback_info_disabled_.is_possible = false;
    return;
  }

  if (pixels <= *fallback_max_pixels_disabled_ ||
      uma_container_->fallback_info_disabled_.min_pixel_limit_reached) {
    stats_.has_entered_low_resolution = true;
  }
}

void SendStatisticsProxy::OnMinPixelLimitReached() {
  MutexLock lock(&mutex_);
  uma_container_->fallback_info_disabled_.min_pixel_limit_reached = true;
}

void SendStatisticsProxy::OnSendEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_info) {
  // Simulcast is used for VP8, H264 and Generic.
  int simulcast_idx =
      (codec_info && (codec_info->codecType == kVideoCodecVP8 ||
                      codec_info->codecType == kVideoCodecH264 ||
                      codec_info->codecType == kVideoCodecGeneric))
          ? encoded_image.SpatialIndex().value_or(0)
          : 0;

  MutexLock lock(&mutex_);
  ++stats_.frames_encoded;
  // The current encode frame rate is based on previously encoded frames.
  double encode_frame_rate = encoded_frame_rate_tracker_.ComputeRate();
  // We assume that less than 1 FPS is not a trustworthy estimate - perhaps we
  // just started encoding for the first time or after a pause. Assuming frame
  // rate is at least 1 FPS is conservative to avoid too large increments.
  if (encode_frame_rate < 1.0)
    encode_frame_rate = 1.0;
  double target_frame_size_bytes =
      stats_.target_media_bitrate_bps / (8.0 * encode_frame_rate);
  // `stats_.target_media_bitrate_bps` is set in
  // SendStatisticsProxy::OnSetEncoderTargetRate.
  stats_.total_encoded_bytes_target += round(target_frame_size_bytes);
  if (codec_info) {
    UpdateEncoderFallbackStats(
        codec_info, encoded_image._encodedWidth * encoded_image._encodedHeight,
        simulcast_idx);
  }

  if (static_cast<size_t>(simulcast_idx) >= rtp_config_.ssrcs.size()) {
    RTC_LOG(LS_ERROR) << "Encoded image outside simulcast range ("
                      << simulcast_idx << " >= " << rtp_config_.ssrcs.size()
                      << ").";
    return;
  }
  uint32_t ssrc = rtp_config_.ssrcs[simulcast_idx];

  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;

  if (encoded_frame_rate_trackers_.count(ssrc) == 0) {
    encoded_frame_rate_trackers_[ssrc] =
        std::make_unique<rtc::RateTracker>(kBucketSizeMs, kBucketCount);
  }

  stats->frames_encoded++;
  stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms -
                                 encoded_image.timing_.encode_start_ms;
  // Report resolution of the top spatial layer.
  bool is_top_spatial_layer =
      codec_info == nullptr || codec_info->end_of_picture;

  if (!stats->width || !stats->height || is_top_spatial_layer) {
    stats->width = encoded_image._encodedWidth;
    stats->height = encoded_image._encodedHeight;
    update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
  }

  uma_container_->key_frame_counter_.Add(encoded_image._frameType ==
                                         VideoFrameType::kVideoFrameKey);

  if (encoded_image.qp_ != -1) {
    if (!stats->qp_sum)
      stats->qp_sum = 0;
    *stats->qp_sum += encoded_image.qp_;

    if (codec_info) {
      if (codec_info->codecType == kVideoCodecVP8) {
        int spatial_idx = (rtp_config_.ssrcs.size() == 1) ? -1 : simulcast_idx;
        uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_);
      } else if (codec_info->codecType == kVideoCodecVP9) {
        int spatial_idx = encoded_image.SpatialIndex().value_or(-1);
        uma_container_->qp_counters_[spatial_idx].vp9.Add(encoded_image.qp_);
      } else if (codec_info->codecType == kVideoCodecH264) {
        int spatial_idx = (rtp_config_.ssrcs.size() == 1) ? -1 : simulcast_idx;
        uma_container_->qp_counters_[spatial_idx].h264.Add(encoded_image.qp_);
      }
    }
  }

  // If any of the simulcast streams have a huge frame, it should be counted
  // as a single difficult input frame.
  // https://w3c.github.io/webrtc-stats/#dom-rtcvideosenderstats-hugeframessent
  if (encoded_image.timing_.flags & VideoSendTiming::kTriggeredBySize) {
    ++stats->huge_frames_sent;
    if (!last_outlier_timestamp_ ||
        *last_outlier_timestamp_ < encoded_image.capture_time_ms_) {
      last_outlier_timestamp_.emplace(encoded_image.capture_time_ms_);
      ++stats_.huge_frames_sent;
    }
  }

  media_byte_rate_tracker_.AddSamples(encoded_image.size());

  if (uma_container_->InsertEncodedFrame(encoded_image, simulcast_idx)) {
    // First frame seen with this timestamp, track overall fps.
    encoded_frame_rate_tracker_.AddSamples(1);
  }
  // is_top_spatial_layer pertains only to SVC, will always be true for
  // simulcast.
  if (is_top_spatial_layer)
    encoded_frame_rate_trackers_[ssrc]->AddSamples(1);

  absl::optional<int> downscales =
      adaptation_limitations_.MaskedQualityCounts().resolution_adaptations;
  stats_.bw_limited_resolution |=
      (downscales.has_value() && downscales.value() > 0);

  if (downscales.has_value()) {
    uma_container_->quality_limited_frame_counter_.Add(downscales.value() > 0);
    if (downscales.value() > 0)
      uma_container_->quality_downscales_counter_.Add(downscales.value());
  }
}

void SendStatisticsProxy::OnEncoderImplementationChanged(
    EncoderImplementation implementation) {
  MutexLock lock(&mutex_);
  encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name,
                                        implementation.name};
  stats_.encoder_implementation_name = implementation.name;
  stats_.power_efficient_encoder = implementation.is_hardware_accelerated;
}

int SendStatisticsProxy::GetInputFrameRate() const {
  MutexLock lock(&mutex_);
  return round(uma_container_->input_frame_rate_tracker_.ComputeRate());
}

int SendStatisticsProxy::GetSendFrameRate() const {
  MutexLock lock(&mutex_);
  return round(encoded_frame_rate_tracker_.ComputeRate());
}

void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
  MutexLock lock(&mutex_);
  uma_container_->input_frame_rate_tracker_.AddSamples(1);
  uma_container_->input_fps_counter_.Add(1);
  uma_container_->input_width_counter_.Add(width);
  uma_container_->input_height_counter_.Add(height);
  if (adaptation_limitations_.MaskedCpuCounts()
          .resolution_adaptations.has_value()) {
    uma_container_->cpu_limited_frame_counter_.Add(
        stats_.cpu_limited_resolution);
  }
  if (encoded_frame_rate_tracker_.TotalSampleCount() == 0) {
    // Set start time now instead of when first key frame is encoded to avoid a
    // too high initial estimate.
    encoded_frame_rate_tracker_.AddSamples(0);
  }
}

void SendStatisticsProxy::OnFrameDropped(DropReason reason) {
  MutexLock lock(&mutex_);
  switch (reason) {
    case DropReason::kSource:
      ++stats_.frames_dropped_by_capturer;
      break;
    case DropReason::kEncoderQueue:
      ++stats_.frames_dropped_by_encoder_queue;
      break;
    case DropReason::kEncoder:
      ++stats_.frames_dropped_by_encoder;
      break;
    case DropReason::kMediaOptimization:
      ++stats_.frames_dropped_by_rate_limiter;
      break;
    case DropReason::kCongestionWindow:
      ++stats_.frames_dropped_by_congestion_window;
      break;
  }
}

void SendStatisticsProxy::ClearAdaptationStats() {
  MutexLock lock(&mutex_);
  adaptation_limitations_.set_cpu_counts(VideoAdaptationCounters());
  adaptation_limitations_.set_quality_counts(VideoAdaptationCounters());
  UpdateAdaptationStats();
}

void SendStatisticsProxy::UpdateAdaptationSettings(
    VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
    VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
  MutexLock lock(&mutex_);
  adaptation_limitations_.UpdateMaskingSettings(cpu_settings, quality_settings);
  SetAdaptTimer(adaptation_limitations_.MaskedCpuCounts(),
                &uma_container_->cpu_adapt_timer_);
  SetAdaptTimer(adaptation_limitations_.MaskedQualityCounts(),
                &uma_container_->quality_adapt_timer_);
  UpdateAdaptationStats();
}

void SendStatisticsProxy::OnAdaptationChanged(
    VideoAdaptationReason reason,
    const VideoAdaptationCounters& cpu_counters,
    const VideoAdaptationCounters& quality_counters) {
  MutexLock lock(&mutex_);

  MaskedAdaptationCounts receiver =
      adaptation_limitations_.MaskedQualityCounts();
  adaptation_limitations_.set_cpu_counts(cpu_counters);
  adaptation_limitations_.set_quality_counts(quality_counters);
  switch (reason) {
    case VideoAdaptationReason::kCpu:
      ++stats_.number_of_cpu_adapt_changes;
      break;
    case VideoAdaptationReason::kQuality:
      TryUpdateInitialQualityResolutionAdaptUp(
          receiver.resolution_adaptations,
          adaptation_limitations_.MaskedQualityCounts().resolution_adaptations);
      ++stats_.number_of_quality_adapt_changes;
      break;
  }
  UpdateAdaptationStats();
}

void SendStatisticsProxy::UpdateAdaptationStats() {
  auto cpu_counts = adaptation_limitations_.MaskedCpuCounts();
  auto quality_counts = adaptation_limitations_.MaskedQualityCounts();

  bool is_cpu_limited = cpu_counts.resolution_adaptations > 0 ||
                        cpu_counts.num_framerate_reductions > 0;
  bool is_bandwidth_limited = quality_counts.resolution_adaptations > 0 ||
                              quality_counts.num_framerate_reductions > 0 ||
                              bw_limited_layers_ || internal_encoder_scaler_;
  if (is_bandwidth_limited) {
    // We may be both CPU limited and bandwidth limited at the same time but
    // there is no way to express this in standardized stats. Heuristically,
    // bandwidth is more likely to be a limiting factor than CPU, and more
    // likely to vary over time, so only when we aren't bandwidth limited do we
    // want to know about our CPU being the bottleneck.
    quality_limitation_reason_tracker_.SetReason(
        QualityLimitationReason::kBandwidth);
  } else if (is_cpu_limited) {
    quality_limitation_reason_tracker_.SetReason(QualityLimitationReason::kCpu);
  } else {
    quality_limitation_reason_tracker_.SetReason(
        QualityLimitationReason::kNone);
  }

  stats_.cpu_limited_resolution = cpu_counts.resolution_adaptations > 0;
  stats_.cpu_limited_framerate = cpu_counts.num_framerate_reductions > 0;
  stats_.bw_limited_resolution = quality_counts.resolution_adaptations > 0;
  stats_.bw_limited_framerate = quality_counts.num_framerate_reductions > 0;
  // If bitrate allocator has disabled some layers frame-rate or resolution are
  // limited depending on the encoder configuration.
  if (bw_limited_layers_) {
    switch (content_type_) {
      case VideoEncoderConfig::ContentType::kRealtimeVideo: {
        stats_.bw_limited_resolution = true;
        break;
      }
      case VideoEncoderConfig::ContentType::kScreen: {
        stats_.bw_limited_framerate = true;
        break;
      }
    }
  }
  if (internal_encoder_scaler_) {
    stats_.bw_limited_resolution = true;
  }

  stats_.quality_limitation_reason =
      quality_limitation_reason_tracker_.current_reason();

  // `stats_.quality_limitation_durations_ms` depends on the current time
  // when it is polled; it is updated in SendStatisticsProxy::GetStats().
}

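// A change in which spatial layers receive a non-zero allocation, while the
// configured number of spatial layers and simulcast streams stays the same,
// is counted as a quality-limited resolution change.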
void SendStatisticsProxy::OnBitrateAllocationUpdated(
    const VideoCodec& codec,
    const VideoBitrateAllocation& allocation) {
  int num_spatial_layers = 0;
  for (int i = 0; i < kMaxSpatialLayers; i++) {
    if (codec.spatialLayers[i].active) {
      num_spatial_layers++;
    }
  }
  int num_simulcast_streams = 0;
  for (int i = 0; i < kMaxSimulcastStreams; i++) {
    if (codec.simulcastStream[i].active) {
      num_simulcast_streams++;
    }
  }

  std::array<bool, kMaxSpatialLayers> spatial_layers;
  for (int i = 0; i < kMaxSpatialLayers; i++) {
    spatial_layers[i] = (allocation.GetSpatialLayerSum(i) > 0);
  }

  MutexLock lock(&mutex_);

  bw_limited_layers_ = allocation.is_bw_limited();
  UpdateAdaptationStats();

  if (spatial_layers != last_spatial_layer_use_) {
    // If the number of spatial layers has changed, the resolution change is
    // not due to quality limitations, it is because the configuration
    // changed.
    if (last_num_spatial_layers_ == num_spatial_layers &&
        last_num_simulcast_streams_ == num_simulcast_streams) {
      ++stats_.quality_limitation_resolution_changes;
    }
    last_spatial_layer_use_ = spatial_layers;
  }
  last_num_spatial_layers_ = num_spatial_layers;
  last_num_simulcast_streams_ = num_simulcast_streams;
}

// Informs the observer whether an internal encoder scaler has reduced video
// resolution or not. `is_scaled` is a flag indicating if the video is scaled
// down.
void SendStatisticsProxy::OnEncoderInternalScalerUpdate(bool is_scaled) {
  MutexLock lock(&mutex_);
  internal_encoder_scaler_ = is_scaled;
  UpdateAdaptationStats();
}

// TODO(asapersson): Include fps changes.
void SendStatisticsProxy::OnInitialQualityResolutionAdaptDown() {
  MutexLock lock(&mutex_);
  ++uma_container_->initial_quality_changes_.down;
}

void SendStatisticsProxy::TryUpdateInitialQualityResolutionAdaptUp(
    absl::optional<int> old_quality_downscales,
    absl::optional<int> updated_quality_downscales) {
  if (uma_container_->initial_quality_changes_.down == 0)
    return;

  if (old_quality_downscales.has_value() &&
      old_quality_downscales.value() > 0 &&
      updated_quality_downscales.value_or(-1) <
          old_quality_downscales.value()) {
    // Adapting up in quality.
    if (uma_container_->initial_quality_changes_.down >
        uma_container_->initial_quality_changes_.up) {
      ++uma_container_->initial_quality_changes_.up;
    }
  }
}

void SendStatisticsProxy::SetAdaptTimer(const MaskedAdaptationCounts& counts,
                                        StatsTimer* timer) {
  if (counts.resolution_adaptations || counts.num_framerate_reductions) {
    // Adaptation enabled.
    if (!stats_.suspended)
      timer->Start(clock_->TimeInMilliseconds());
    return;
  }
  timer->Stop(clock_->TimeInMilliseconds());
}

void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
    uint32_t ssrc,
    const RtcpPacketTypeCounter& packet_counter) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;

  stats->rtcp_packet_type_counts = packet_counter;
  if (uma_container_->first_rtcp_stats_time_ms_ == -1)
    uma_container_->first_rtcp_stats_time_ms_ = clock_->TimeInMilliseconds();
}

void SendStatisticsProxy::OnReportBlockDataUpdated(
    ReportBlockData report_block_data) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats =
      GetStatsEntry(report_block_data.report_block().source_ssrc);
  if (!stats)
    return;
  const RTCPReportBlock& report_block = report_block_data.report_block();
  uma_container_->report_block_stats_.Store(
      /*ssrc=*/report_block.source_ssrc,
      /*packets_lost=*/report_block.packets_lost,
      /*extended_highest_sequence_number=*/
      report_block.extended_highest_sequence_number);

  stats->report_block_data = std::move(report_block_data);
}

void SendStatisticsProxy::DataCountersUpdated(
    const StreamDataCounters& counters,
    uint32_t ssrc) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  RTC_DCHECK(stats) << "DataCountersUpdated reported for unknown ssrc " << ssrc;

  if (stats->type == VideoSendStream::StreamStats::StreamType::kFlexfec) {
    // The same counters are reported for both the media ssrc and flexfec ssrc.
    // Bitrate stats are summed for all SSRCs. Use fec stats from media update.
    return;
  }

  stats->rtp_stats = counters;
  if (uma_container_->first_rtp_stats_time_ms_ == -1) {
    int64_t now_ms = clock_->TimeInMilliseconds();
    uma_container_->first_rtp_stats_time_ms_ = now_ms;
    uma_container_->cpu_adapt_timer_.Restart(now_ms);
    uma_container_->quality_adapt_timer_.Restart(now_ms);
  }

  uma_container_->total_byte_counter_.Set(counters.transmitted.TotalBytes(),
                                          ssrc);
  uma_container_->padding_byte_counter_.Set(counters.transmitted.padding_bytes,
                                            ssrc);
  uma_container_->retransmit_byte_counter_.Set(
      counters.retransmitted.TotalBytes(), ssrc);
  uma_container_->fec_byte_counter_.Set(counters.fec.TotalBytes(), ssrc);
  switch (stats->type) {
    case VideoSendStream::StreamStats::StreamType::kMedia:
      uma_container_->media_byte_counter_.Set(counters.MediaPayloadBytes(),
                                              ssrc);
      break;
    case VideoSendStream::StreamStats::StreamType::kRtx:
      uma_container_->rtx_byte_counter_.Set(counters.transmitted.TotalBytes(),
                                            ssrc);
      break;
    case VideoSendStream::StreamStats::StreamType::kFlexfec:
      break;
  }
}

void SendStatisticsProxy::Notify(uint32_t total_bitrate_bps,
                                 uint32_t retransmit_bitrate_bps,
                                 uint32_t ssrc) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;

  stats->total_bitrate_bps = total_bitrate_bps;
  stats->retransmit_bitrate_bps = retransmit_bitrate_bps;
}

void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts,
                                            uint32_t ssrc) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;

  stats->frame_counts = frame_counts;
}

void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms,
                                               int max_delay_ms,
                                               uint32_t ssrc) {
  MutexLock lock(&mutex_);
  VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
  if (!stats)
    return;
  stats->avg_delay_ms = avg_delay_ms;
  stats->max_delay_ms = max_delay_ms;

  uma_container_->delay_counter_.Add(avg_delay_ms);
  uma_container_->max_delay_counter_.Add(max_delay_ms);
}

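// StatsTimer accumulates the total time a condition (e.g. adaptation enabled)
// has been active: Start()/Stop() may be called repeatedly and total_ms sums
// the active periods; Restart() clears the total.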
void SendStatisticsProxy::StatsTimer::Start(int64_t now_ms) {
  if (start_ms == -1)
    start_ms = now_ms;
}

void SendStatisticsProxy::StatsTimer::Stop(int64_t now_ms) {
  if (start_ms != -1) {
    total_ms += now_ms - start_ms;
    start_ms = -1;
  }
}

void SendStatisticsProxy::StatsTimer::Restart(int64_t now_ms) {
  total_ms = 0;
  if (start_ms != -1)
    start_ms = now_ms;
}

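// SampleCounter accumulates integer samples; Avg() returns the mean rounded to
// the nearest integer, or -1 if fewer than `min_required_samples` were added.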
void SendStatisticsProxy::SampleCounter::Add(int sample) {
  sum += sample;
  ++num_samples;
}

int SendStatisticsProxy::SampleCounter::Avg(
    int64_t min_required_samples) const {
  if (num_samples < min_required_samples || num_samples == 0)
    return -1;
  return static_cast<int>((sum + (num_samples / 2)) / num_samples);
}

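// BoolSampleCounter counts how often a (possibly weighted) boolean condition
// was true; Percent()/Permille() return the fraction of true samples, or -1
// if fewer than `min_required_samples` samples were added.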
void SendStatisticsProxy::BoolSampleCounter::Add(bool sample) {
  if (sample)
    ++sum;
  ++num_samples;
}

void SendStatisticsProxy::BoolSampleCounter::Add(bool sample, int64_t count) {
  if (sample)
    sum += count;
  num_samples += count;
}

int SendStatisticsProxy::BoolSampleCounter::Percent(
    int64_t min_required_samples) const {
  return Fraction(min_required_samples, 100.0f);
}

int SendStatisticsProxy::BoolSampleCounter::Permille(
    int64_t min_required_samples) const {
  return Fraction(min_required_samples, 1000.0f);
}

int SendStatisticsProxy::BoolSampleCounter::Fraction(
    int64_t min_required_samples,
    float multiplier) const {
  if (num_samples < min_required_samples || num_samples == 0)
    return -1;
  return static_cast<int>((sum * multiplier / num_samples) + 0.5f);
}

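// The Masked*Counts() accessors apply the current masking settings: adaptation
// counts are only exposed for dimensions (resolution or framerate) where
// scaling is enabled.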
SendStatisticsProxy::MaskedAdaptationCounts
SendStatisticsProxy::Adaptations::MaskedCpuCounts() const {
  return Mask(cpu_counts_, cpu_settings_);
}

SendStatisticsProxy::MaskedAdaptationCounts
SendStatisticsProxy::Adaptations::MaskedQualityCounts() const {
  return Mask(quality_counts_, quality_settings_);
}

void SendStatisticsProxy::Adaptations::set_cpu_counts(
    const VideoAdaptationCounters& cpu_counts) {
  cpu_counts_ = cpu_counts;
}

void SendStatisticsProxy::Adaptations::set_quality_counts(
    const VideoAdaptationCounters& quality_counts) {
  quality_counts_ = quality_counts;
}

VideoAdaptationCounters SendStatisticsProxy::Adaptations::cpu_counts() const {
  return cpu_counts_;
}

VideoAdaptationCounters SendStatisticsProxy::Adaptations::quality_counts()
    const {
  return quality_counts_;
}

void SendStatisticsProxy::Adaptations::UpdateMaskingSettings(
    VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
    VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
  cpu_settings_ = std::move(cpu_settings);
  quality_settings_ = std::move(quality_settings);
}

SendStatisticsProxy::MaskedAdaptationCounts
SendStatisticsProxy::Adaptations::Mask(
    const VideoAdaptationCounters& counters,
    const VideoStreamEncoderObserver::AdaptationSettings& settings) const {
  MaskedAdaptationCounts masked_counts;
  if (settings.resolution_scaling_enabled) {
    masked_counts.resolution_adaptations = counters.resolution_adaptations;
  }
  if (settings.framerate_scaling_enabled) {
    masked_counts.num_framerate_reductions = counters.fps_adaptations;
  }
  return masked_counts;
}

}  // namespace webrtc