/*
 * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"

#include <algorithm>
#include <memory>
#include <utility>

#include "api/array_view.h"
#include "api/units/time_delta.h"
#include "api/video/i420_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/cpu_time.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"

namespace webrtc {
namespace webrtc_pc_e2e {
namespace {

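// kMaxActiveComparisons bounds how many comparisons may wait in the queue
// before new comparisons are enqueued without the frames themselves (treated
// as CPU overload), and kFreezeThresholdMs is the extra inter-frame delay,
// on top of the running average (together with a 3x-average check), after
// which a gap between rendered frames is counted as a freeze. Both are used
// in AddComparison() and ProcessComparison() below.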
constexpr int kMaxActiveComparisons = 10;
constexpr int kFreezeThresholdMs = 150;
constexpr int kMicrosPerSecond = 1000000;
constexpr int kBitsInByte = 8;

void LogFrameCounters(const std::string& name, const FrameCounters& counters) {
  RTC_LOG(INFO) << "[" << name << "] Captured : " << counters.captured;
  RTC_LOG(INFO) << "[" << name << "] Pre encoded : " << counters.pre_encoded;
  RTC_LOG(INFO) << "[" << name << "] Encoded : " << counters.encoded;
  RTC_LOG(INFO) << "[" << name << "] Received : " << counters.received;
  RTC_LOG(INFO) << "[" << name << "] Decoded : " << counters.decoded;
  RTC_LOG(INFO) << "[" << name << "] Rendered : " << counters.rendered;
  RTC_LOG(INFO) << "[" << name << "] Dropped : " << counters.dropped;
}

void LogStreamInternalStats(const std::string& name, const StreamStats& stats) {
  RTC_LOG(INFO) << "[" << name
                << "] Dropped by encoder : " << stats.dropped_by_encoder;
  RTC_LOG(INFO) << "[" << name << "] Dropped before encoder : "
                << stats.dropped_before_encoder;
}

template <typename T>
absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
  auto it = map.find(key);
  if (it == map.end()) {
    return absl::nullopt;
  }
  return it->second;
}

}  // namespace

void RateCounter::AddEvent(Timestamp event_time) {
  if (event_first_time_.IsMinusInfinity()) {
    event_first_time_ = event_time;
  }
  event_last_time_ = event_time;
  event_count_++;
}

double RateCounter::GetEventsPerSecond() const {
  RTC_DCHECK(!IsEmpty());
  // Divide by the elapsed time in microseconds and multiply by
  // kMicrosPerSecond to correctly handle cases where there were so few events
  // that the elapsed time is less than 1 second. Microseconds can be used
  // here because Timestamp has microsecond resolution.
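  // For example (illustrative numbers only): 3 events whose first and last
  // timestamps are 500'000 us apart yield 3 / 500'000 * 1'000'000 = 6 events
  // per second.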
  return static_cast<double>(event_count_) /
         (event_last_time_ - event_first_time_).us() * kMicrosPerSecond;
}

std::string StatsKey::ToString() const {
  rtc::StringBuilder out;
  out << stream_label << "_" << sender << "_" << receiver;
  return out.str();
}

bool operator<(const StatsKey& a, const StatsKey& b) {
  if (a.stream_label != b.stream_label) {
    return a.stream_label < b.stream_label;
  }
  if (a.sender != b.sender) {
    return a.sender < b.sender;
  }
  return a.receiver < b.receiver;
}

bool operator==(const StatsKey& a, const StatsKey& b) {
  return a.stream_label == b.stream_label && a.sender == b.sender &&
         a.receiver == b.receiver;
}

std::string InternalStatsKey::ToString() const {
  rtc::StringBuilder out;
  out << "stream=" << stream << "_sender=" << sender
      << "_receiver=" << receiver;
  return out.str();
}

bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
  if (a.stream != b.stream) {
    return a.stream < b.stream;
  }
  if (a.sender != b.sender) {
    return a.sender < b.sender;
  }
  return a.receiver < b.receiver;
}

bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
  return a.stream == b.stream && a.sender == b.sender &&
         a.receiver == b.receiver;
}

DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
    webrtc::Clock* clock,
    DefaultVideoQualityAnalyzerOptions options)
    : options_(options), clock_(clock) {}
DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
    bool heavy_metrics_computation_enabled,
    size_t max_frames_in_flight_per_stream_count)
    : clock_(Clock::GetRealTimeClock()) {
  options_.heavy_metrics_computation_enabled =
      heavy_metrics_computation_enabled;
  options_.max_frames_in_flight_per_stream_count =
      max_frames_in_flight_per_stream_count;
}
DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() {
  Stop();
}

void DefaultVideoQualityAnalyzer::Start(
    std::string test_case_name,
    rtc::ArrayView<const std::string> peer_names,
    int max_threads_count) {
  test_label_ = std::move(test_case_name);
  peers_ = std::make_unique<NamesCollection>(peer_names);
  for (int i = 0; i < max_threads_count; i++) {
    auto thread = std::make_unique<rtc::PlatformThread>(
        &DefaultVideoQualityAnalyzer::ProcessComparisonsThread, this,
        ("DefaultVideoQualityAnalyzerWorker-" + std::to_string(i)).data(),
        rtc::ThreadPriority::kNormalPriority);
    thread->Start();
    thread_pool_.push_back(std::move(thread));
  }
  {
    MutexLock lock(&lock_);
    RTC_CHECK(start_time_.IsMinusInfinity());

    state_ = State::kActive;
    start_time_ = Now();
  }
  StartMeasuringCpuProcessTime();
}

uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
    absl::string_view peer_name,
    const std::string& stream_label,
    const webrtc::VideoFrame& frame) {
  // |next_frame_id_| is atomic, so we don't need a lock here.
  uint16_t frame_id = next_frame_id_++;
  Timestamp start_time = Timestamp::MinusInfinity();
  size_t peer_index = peers_->index(peer_name);
  size_t stream_index;
  {
    MutexLock lock(&lock_);
    // Create a local copy of |start_time_| to access it under
    // |comparison_lock_| without holding |lock_|.
    start_time = start_time_;
    stream_index = streams_.AddIfAbsent(stream_label);
  }
  {
    // Ensure that stats for this stream exist.
    MutexLock lock(&comparison_lock_);
    for (size_t i = 0; i < peers_->size(); ++i) {
      if (i == peer_index) {
        continue;
      }
      InternalStatsKey stats_key(stream_index, peer_index, i);
      if (stream_stats_.find(stats_key) == stream_stats_.end()) {
        stream_stats_.insert({stats_key, StreamStats()});
        // Assume that the first freeze happened before the first frame of the
        // stream was captured. This way the time before the first freeze is
        // counted as time between freezes.
        stream_last_freeze_end_time_.insert({stats_key, start_time});
      } else {
        // When we see a |stream_label| for the first time we need to create a
        // stream stats object for it and set up some state, but we have to do
        // it only once and it covers all receivers, so on the next frame of
        // the same |stream_label| we can be sure that it's already done and
        // we don't need to scan through all peers again.
        break;
      }
    }
  }
  {
    MutexLock lock(&lock_);
    stream_to_sender_[stream_index] = peer_index;
    frame_counters_.captured++;
    for (size_t i = 0; i < peers_->size(); ++i) {
      if (i != peer_index) {
        InternalStatsKey key(stream_index, peer_index, i);
        stream_frame_counters_[key].captured++;
      }
    }

    auto state_it = stream_states_.find(stream_index);
    if (state_it == stream_states_.end()) {
      stream_states_.emplace(stream_index,
                             StreamState(peer_index, peers_->size()));
    }
    StreamState* state = &stream_states_.at(stream_index);
    state->PushBack(frame_id);
    // Update frames in flight info.
    auto it = captured_frames_in_flight_.find(frame_id);
    if (it != captured_frames_in_flight_.end()) {
      // If we overflowed uint16_t and hit a previous frame id while that
      // frame is still in flight, it means that this stream hasn't been
      // rendered for a long time and we need to process the existing frame
      // as dropped.
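      // For reference, frame ids are shared across streams and wrap around
      // after 65536 captured frames, e.g. roughly 36 minutes of a single
      // 30 fps stream.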
      for (size_t i = 0; i < peers_->size(); ++i) {
        if (i == peer_index) {
          continue;
        }

        uint16_t oldest_frame_id = state->PopFront(i);
        RTC_DCHECK_EQ(frame_id, oldest_frame_id);
        frame_counters_.dropped++;
        InternalStatsKey key(stream_index, peer_index, i);
        stream_frame_counters_.at(key).dropped++;

        MutexLock lock1(&comparison_lock_);
        analyzer_stats_.frames_in_flight_left_count.AddSample(
            captured_frames_in_flight_.size());
        AddComparison(InternalStatsKey(stream_index, peer_index, i),
                      it->second.frame(), absl::nullopt, true,
                      it->second.GetStatsForPeer(i));
      }

      captured_frames_in_flight_.erase(it);
    }
    captured_frames_in_flight_.emplace(
        frame_id,
        FrameInFlight(stream_index, frame,
                      /*captured_time=*/Now(), peer_index, peers_->size()));
    // Set frame id on local copy of the frame
    captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id);

    // Update history stream<->frame mapping
    for (auto it = stream_to_frame_id_history_.begin();
         it != stream_to_frame_id_history_.end(); ++it) {
      it->second.erase(frame_id);
    }
    stream_to_frame_id_history_[stream_index].insert(frame_id);

    // If the state has too many frames in flight, remove the oldest queued
    // frame to avoid using too much memory.
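    // Only the frame's pixel data is released below (via
    // FrameInFlight::RemoveFrame()); the bookkeeping entry stays in
    // |captured_frames_in_flight_| so that later events for this frame id can
    // still be matched.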
    if (state->GetAliveFramesCount() >
        options_.max_frames_in_flight_per_stream_count) {
      uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
      auto it = captured_frames_in_flight_.find(frame_id_to_remove);
      RTC_CHECK(it != captured_frames_in_flight_.end())
          << "Frame with ID " << frame_id_to_remove
          << " is expected to be in flight, but hasn't been found in "
          << "|captured_frames_in_flight_|";
      bool is_removed = it->second.RemoveFrame();
      RTC_DCHECK(is_removed)
          << "Invalid stream state: alive frame is removed already";
    }
  }
  return frame_id;
}

void DefaultVideoQualityAnalyzer::OnFramePreEncode(
    absl::string_view peer_name,
    const webrtc::VideoFrame& frame) {
  MutexLock lock(&lock_);
  auto it = captured_frames_in_flight_.find(frame.id());
  RTC_DCHECK(it != captured_frames_in_flight_.end())
      << "Frame id=" << frame.id() << " not found";
  frame_counters_.pre_encoded++;
  size_t peer_index = peers_->index(peer_name);
  for (size_t i = 0; i < peers_->size(); ++i) {
    if (i != peer_index) {
      InternalStatsKey key(it->second.stream(), peer_index, i);
      stream_frame_counters_.at(key).pre_encoded++;
    }
  }
  it->second.SetPreEncodeTime(Now());
}

void DefaultVideoQualityAnalyzer::OnFrameEncoded(
    absl::string_view peer_name,
    uint16_t frame_id,
    const webrtc::EncodedImage& encoded_image,
    const EncoderStats& stats) {
  MutexLock lock(&lock_);
  auto it = captured_frames_in_flight_.find(frame_id);
  RTC_DCHECK(it != captured_frames_in_flight_.end());
  // For SVC we can receive multiple encoded images for one frame, so to cover
  // all cases we have to pick the last encode time.
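  // For example, a 3-layer SVC frame may trigger this callback once per
  // layer; FrameInFlight then keeps the last encode time and image size and
  // sums the per-layer target bitrates (see FrameInFlight::OnFrameEncoded
  // below).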
  if (!it->second.HasEncodedTime()) {
    // Increase counters only when we see this frame for the first time.
    frame_counters_.encoded++;
    size_t peer_index = peers_->index(peer_name);
    for (size_t i = 0; i < peers_->size(); ++i) {
      if (i != peer_index) {
        InternalStatsKey key(it->second.stream(), peer_index, i);
        stream_frame_counters_.at(key).encoded++;
      }
    }
  }
  it->second.OnFrameEncoded(Now(), encoded_image.size(),
                            stats.target_encode_bitrate);
}

void DefaultVideoQualityAnalyzer::OnFrameDropped(
    absl::string_view peer_name,
    webrtc::EncodedImageCallback::DropReason reason) {
  // Here we do nothing, because we will see this drop on the renderer side.
}

void DefaultVideoQualityAnalyzer::OnFramePreDecode(
    absl::string_view peer_name,
    uint16_t frame_id,
    const webrtc::EncodedImage& input_image) {
  MutexLock lock(&lock_);
  size_t peer_index = peers_->index(peer_name);

  auto it = captured_frames_in_flight_.find(frame_id);
  if (it == captured_frames_in_flight_.end() ||
      it->second.HasReceivedTime(peer_index)) {
    // This means the frame was pre-decoded before, so we can skip it. It may
    // happen when we have multiple simulcast streams in one track and receive
    // the same picture from two different streams, because the SFU can't
    // reliably correlate two simulcast streams and starts relaying the second
    // stream from the same frame it has just relayed for the first stream.
    return;
  }

  frame_counters_.received++;
  InternalStatsKey key(it->second.stream(),
                       stream_to_sender_.at(it->second.stream()), peer_index);
  stream_frame_counters_.at(key).received++;
  // Determine the time of the last received packet of this video frame.
  RTC_DCHECK(!input_image.PacketInfos().empty());
  int64_t last_receive_time =
      std::max_element(input_image.PacketInfos().cbegin(),
                       input_image.PacketInfos().cend(),
                       [](const RtpPacketInfo& a, const RtpPacketInfo& b) {
                         return a.receive_time_ms() < b.receive_time_ms();
                       })
          ->receive_time_ms();
  it->second.OnFramePreDecode(
      peer_index,
      /*received_time=*/Timestamp::Millis(last_receive_time),
      /*decode_start_time=*/Now());
}

void DefaultVideoQualityAnalyzer::OnFrameDecoded(
    absl::string_view peer_name,
    const webrtc::VideoFrame& frame,
    const DecoderStats& stats) {
  MutexLock lock(&lock_);
  size_t peer_index = peers_->index(peer_name);

  auto it = captured_frames_in_flight_.find(frame.id());
  if (it == captured_frames_in_flight_.end() ||
      it->second.HasDecodeEndTime(peer_index)) {
    // This means the frame was decoded before, so we can skip it. It may
    // happen when we have multiple simulcast streams in one track and receive
    // the same picture from two different streams, because the SFU can't
    // reliably correlate two simulcast streams and starts relaying the second
    // stream from the same frame it has just relayed for the first stream.
    return;
  }
  frame_counters_.decoded++;
  InternalStatsKey key(it->second.stream(),
                       stream_to_sender_.at(it->second.stream()), peer_index);
  stream_frame_counters_.at(key).decoded++;
  it->second.SetDecodeEndTime(peer_index, Now());
}

void DefaultVideoQualityAnalyzer::OnFrameRendered(
    absl::string_view peer_name,
    const webrtc::VideoFrame& frame) {
  MutexLock lock(&lock_);
  size_t peer_index = peers_->index(peer_name);

  auto frame_it = captured_frames_in_flight_.find(frame.id());
  if (frame_it == captured_frames_in_flight_.end() ||
      frame_it->second.HasRenderedTime(peer_index)) {
    // This means the frame was rendered before, so we can skip it. It may
    // happen when we have multiple simulcast streams in one track and receive
    // the same picture from two different streams, because the SFU can't
    // reliably correlate two simulcast streams and starts relaying the second
    // stream from the same frame it has just relayed for the first stream.
    return;
  }

  // Find corresponding captured frame.
  FrameInFlight* frame_in_flight = &frame_it->second;
  absl::optional<VideoFrame> captured_frame = frame_in_flight->frame();

  const size_t stream_index = frame_in_flight->stream();
  StreamState* state = &stream_states_.at(stream_index);
  const InternalStatsKey stats_key(stream_index, state->owner(), peer_index);

  // Update frame counters.
  frame_counters_.rendered++;
  stream_frame_counters_.at(stats_key).rendered++;

  // Update current frame stats.
  frame_in_flight->OnFrameRendered(peer_index, Now(), frame.width(),
                                   frame.height());

  // After receiving the frame we need to check whether any frames were
  // dropped between this one and the last one rendered for this video stream.
  int dropped_count = 0;
  while (!state->IsEmpty(peer_index) &&
         state->Front(peer_index) != frame.id()) {
    dropped_count++;
    uint16_t dropped_frame_id = state->PopFront(peer_index);
    // Frame with id |dropped_frame_id| was dropped. We need:
    // 1. Update global and stream frame counters
    // 2. Extract corresponding frame from |captured_frames_in_flight_|
    // 3. Send extracted frame to comparison with dropped=true
    // 4. Cleanup dropped frame
    frame_counters_.dropped++;
    stream_frame_counters_.at(stats_key).dropped++;

    auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
    RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
    absl::optional<VideoFrame> dropped_frame = dropped_frame_it->second.frame();
    dropped_frame_it->second.MarkDropped(peer_index);

    {
      MutexLock lock1(&comparison_lock_);
      analyzer_stats_.frames_in_flight_left_count.AddSample(
          captured_frames_in_flight_.size());
      AddComparison(stats_key, dropped_frame, absl::nullopt, true,
                    dropped_frame_it->second.GetStatsForPeer(peer_index));
    }

    if (dropped_frame_it->second.HaveAllPeersReceived()) {
      captured_frames_in_flight_.erase(dropped_frame_it);
    }
  }
  RTC_DCHECK(!state->IsEmpty(peer_index));
  state->PopFront(peer_index);

  if (state->last_rendered_frame_time(peer_index)) {
    frame_in_flight->SetPrevFrameRenderedTime(
        peer_index, state->last_rendered_frame_time(peer_index).value());
  }
  state->SetLastRenderedFrameTime(peer_index,
                                  frame_in_flight->rendered_time(peer_index));
  {
    MutexLock cr(&comparison_lock_);
    stream_stats_[stats_key].skipped_between_rendered.AddSample(dropped_count);
  }

  {
    MutexLock lock(&comparison_lock_);
    analyzer_stats_.frames_in_flight_left_count.AddSample(
        captured_frames_in_flight_.size());
    AddComparison(stats_key, captured_frame, frame, false,
                  frame_in_flight->GetStatsForPeer(peer_index));
  }

  if (frame_it->second.HaveAllPeersReceived()) {
    captured_frames_in_flight_.erase(frame_it);
  }
}

void DefaultVideoQualityAnalyzer::OnEncoderError(
    absl::string_view peer_name,
    const webrtc::VideoFrame& frame,
    int32_t error_code) {
  RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id()
                    << ", code=" << error_code;
}

void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
                                                 uint16_t frame_id,
                                                 int32_t error_code) {
  RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id
                    << ", code=" << error_code;
}

void DefaultVideoQualityAnalyzer::Stop() {
  StopMeasuringCpuProcessTime();
  {
    MutexLock lock(&lock_);
    if (state_ == State::kStopped) {
      return;
    }
    state_ = State::kStopped;
  }
  comparison_available_event_.Set();
  for (auto& thread : thread_pool_) {
    thread->Stop();
  }
  // PlatformThread has to be deleted on the same thread where it was created.
  thread_pool_.clear();

  // Perform the final metrics update. At this point the analyzer is stopped
  // and no one holds any locks.
  {
    // Time between freezes.
    // Count time since the last freeze to the end of the call as time
    // between freezes.
    MutexLock lock1(&lock_);
    MutexLock lock2(&comparison_lock_);
    for (auto& state_entry : stream_states_) {
      const size_t stream_index = state_entry.first;
      const StreamState& stream_state = state_entry.second;
      for (size_t i = 0; i < peers_->size(); ++i) {
        if (i == static_cast<size_t>(stream_state.owner())) {
          continue;
        }

        InternalStatsKey stats_key(stream_index, stream_state.owner(), i);

        // If there are no freezes in the call we have to report
        // time_between_freezes_ms as the call duration; in that case
        // |stream_last_freeze_end_time_| for this stream will be
        // |start_time_|. If there was a freeze, then we need to add the time
        // from the last freeze end to the last rendered frame as time between
        // freezes.
        if (stream_state.last_rendered_frame_time(i)) {
          stream_stats_[stats_key].time_between_freezes_ms.AddSample(
              stream_state.last_rendered_frame_time(i).value().ms() -
              stream_last_freeze_end_time_.at(stats_key).ms());
        }
      }
    }
    analyzer_stats_.frames_in_flight_left_count.AddSample(
        captured_frames_in_flight_.size());
  }
  ReportResults();
}

std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
  MutexLock lock1(&lock_);
  auto it = captured_frames_in_flight_.find(frame_id);
  if (it != captured_frames_in_flight_.end()) {
    return streams_.name(it->second.stream());
  }
  for (auto hist_it = stream_to_frame_id_history_.begin();
       hist_it != stream_to_frame_id_history_.end(); ++hist_it) {
    auto hist_set_it = hist_it->second.find(frame_id);
    if (hist_set_it != hist_it->second.end()) {
      return streams_.name(hist_it->first);
    }
  }
  RTC_CHECK(false) << "Unknown frame_id=" << frame_id;
}

std::set<StatsKey> DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const {
  MutexLock lock1(&lock_);
  MutexLock lock2(&comparison_lock_);
  std::set<StatsKey> out;
  for (auto& item : stream_stats_) {
    RTC_LOG(INFO) << item.first.ToString() << " ==> "
                  << ToStatsKey(item.first).ToString();
    out.insert(ToStatsKey(item.first));
  }
  return out;
}

const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
  MutexLock lock(&lock_);
  return frame_counters_;
}

std::map<StatsKey, FrameCounters>
DefaultVideoQualityAnalyzer::GetPerStreamCounters() const {
  MutexLock lock(&lock_);
  std::map<StatsKey, FrameCounters> out;
  for (auto& item : stream_frame_counters_) {
    out.emplace(ToStatsKey(item.first), item.second);
  }
  return out;
}

std::map<StatsKey, StreamStats> DefaultVideoQualityAnalyzer::GetStats() const {
  MutexLock lock1(&lock_);
  MutexLock lock2(&comparison_lock_);
  std::map<StatsKey, StreamStats> out;
  for (auto& item : stream_stats_) {
    out.emplace(ToStatsKey(item.first), item.second);
  }
  return out;
}

AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
  MutexLock lock(&comparison_lock_);
  return analyzer_stats_;
}

void DefaultVideoQualityAnalyzer::AddComparison(
    InternalStatsKey stats_key,
    absl::optional<VideoFrame> captured,
    absl::optional<VideoFrame> rendered,
    bool dropped,
    FrameStats frame_stats) {
  StartExcludingCpuThreadTime();
  analyzer_stats_.comparisons_queue_size.AddSample(comparisons_.size());
  // If there are too many computations waiting in the queue, we don't pass
  // the frames themselves, to make future computations lighter.
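  // Such frame-less comparisons still update counters and the frame-stats
  // based metrics in ProcessComparison(), but PSNR/SSIM are skipped for them
  // and they are reported as cpu_overloaded_comparisons_done.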
  if (comparisons_.size() >= kMaxActiveComparisons) {
    comparisons_.emplace_back(std::move(stats_key), absl::nullopt,
                              absl::nullopt, dropped, std::move(frame_stats),
                              OverloadReason::kCpu);
  } else {
    OverloadReason overload_reason = OverloadReason::kNone;
    if (!captured && !dropped) {
      overload_reason = OverloadReason::kMemory;
    }
    comparisons_.emplace_back(std::move(stats_key), std::move(captured),
                              std::move(rendered), dropped,
                              std::move(frame_stats), overload_reason);
  }
  comparison_available_event_.Set();
  StopExcludingCpuThreadTime();
}

void DefaultVideoQualityAnalyzer::ProcessComparisonsThread(void* obj) {
  static_cast<DefaultVideoQualityAnalyzer*>(obj)->ProcessComparisons();
}

void DefaultVideoQualityAnalyzer::ProcessComparisons() {
  while (true) {
    // Try to pick next comparison to perform from the queue.
    absl::optional<FrameComparison> comparison = absl::nullopt;
    {
      MutexLock lock(&comparison_lock_);
      if (!comparisons_.empty()) {
        comparison = comparisons_.front();
        comparisons_.pop_front();
        if (!comparisons_.empty()) {
          comparison_available_event_.Set();
        }
      }
    }
    if (!comparison) {
      bool more_frames_expected;
      {
        // If there are no comparisons and state is stopped =>
        // no more frames expected.
        MutexLock lock(&lock_);
        more_frames_expected = state_ != State::kStopped;
      }
      if (!more_frames_expected) {
        comparison_available_event_.Set();
        return;
      }
      comparison_available_event_.Wait(1000);
      continue;
    }

    StartExcludingCpuThreadTime();
    ProcessComparison(comparison.value());
    StopExcludingCpuThreadTime();
  }
}

void DefaultVideoQualityAnalyzer::ProcessComparison(
    const FrameComparison& comparison) {
  // Perform expensive psnr and ssim calculations while not holding lock.
  double psnr = -1.0;
  double ssim = -1.0;
  if (options_.heavy_metrics_computation_enabled && comparison.captured &&
      !comparison.dropped) {
    psnr = I420PSNR(&*comparison.captured, &*comparison.rendered);
    ssim = I420SSIM(&*comparison.captured, &*comparison.rendered);
  }

  const FrameStats& frame_stats = comparison.frame_stats;

  MutexLock lock(&comparison_lock_);
  auto stats_it = stream_stats_.find(comparison.stats_key);
  RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString();
  StreamStats* stats = &stats_it->second;
  analyzer_stats_.comparisons_done++;
  if (comparison.overload_reason == OverloadReason::kCpu) {
    analyzer_stats_.cpu_overloaded_comparisons_done++;
  } else if (comparison.overload_reason == OverloadReason::kMemory) {
    analyzer_stats_.memory_overloaded_comparisons_done++;
  }
  if (psnr > 0) {
    stats->psnr.AddSample(psnr);
  }
  if (ssim > 0) {
    stats->ssim.AddSample(ssim);
  }
  if (frame_stats.encoded_time.IsFinite()) {
    stats->encode_time_ms.AddSample(
        (frame_stats.encoded_time - frame_stats.pre_encode_time).ms());
    stats->encode_frame_rate.AddEvent(frame_stats.encoded_time);
    stats->total_encoded_images_payload += frame_stats.encoded_image_size;
    stats->target_encode_bitrate.AddSample(frame_stats.target_encode_bitrate);
  } else {
    if (frame_stats.pre_encode_time.IsFinite()) {
      stats->dropped_by_encoder++;
    } else {
      stats->dropped_before_encoder++;
    }
  }
  // The following stats can be calculated only if the frame was received on
  // the remote side.
  if (!comparison.dropped) {
    stats->resolution_of_rendered_frame.AddSample(
        *comparison.frame_stats.rendered_frame_width *
        *comparison.frame_stats.rendered_frame_height);
    stats->transport_time_ms.AddSample(
        (frame_stats.decode_start_time - frame_stats.encoded_time).ms());
    stats->total_delay_incl_transport_ms.AddSample(
        (frame_stats.rendered_time - frame_stats.captured_time).ms());
    stats->decode_time_ms.AddSample(
        (frame_stats.decode_end_time - frame_stats.decode_start_time).ms());
    stats->receive_to_render_time_ms.AddSample(
        (frame_stats.rendered_time - frame_stats.received_time).ms());

    if (frame_stats.prev_frame_rendered_time.IsFinite()) {
      TimeDelta time_between_rendered_frames =
          frame_stats.rendered_time - frame_stats.prev_frame_rendered_time;
      stats->time_between_rendered_frames_ms.AddSample(
          time_between_rendered_frames.ms());
      double average_time_between_rendered_frames_ms =
          stats->time_between_rendered_frames_ms.GetAverage();
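      // A gap counts as a freeze when it exceeds both the average inter-frame
      // delay plus kFreezeThresholdMs and 3x the average delay. For example,
      // with a 33 ms average a gap has to be longer than
      // max(150 + 33, 99) = 183 ms to be counted as a freeze.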
      if (time_between_rendered_frames.ms() >
          std::max(kFreezeThresholdMs + average_time_between_rendered_frames_ms,
                   3 * average_time_between_rendered_frames_ms)) {
        stats->freeze_time_ms.AddSample(time_between_rendered_frames.ms());
        auto freeze_end_it =
            stream_last_freeze_end_time_.find(comparison.stats_key);
        RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end());
        stats->time_between_freezes_ms.AddSample(
            (frame_stats.prev_frame_rendered_time - freeze_end_it->second)
                .ms());
        freeze_end_it->second = frame_stats.rendered_time;
      }
    }
  }
}

void DefaultVideoQualityAnalyzer::ReportResults() {
  using ::webrtc::test::ImproveDirection;

  MutexLock lock1(&lock_);
  MutexLock lock2(&comparison_lock_);
  for (auto& item : stream_stats_) {
    ReportResults(GetTestCaseName(StatsKeyToMetricName(ToStatsKey(item.first))),
                  item.second, stream_frame_counters_.at(item.first));
  }
  test::PrintResult("cpu_usage", "", test_label_.c_str(), GetCpuUsagePercent(),
                    "%", false, ImproveDirection::kSmallerIsBetter);
  LogFrameCounters("Global", frame_counters_);
  for (auto& item : stream_stats_) {
    LogFrameCounters(ToStatsKey(item.first).ToString(),
                     stream_frame_counters_.at(item.first));
    LogStreamInternalStats(ToStatsKey(item.first).ToString(), item.second);
  }
  if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) {
    RTC_LOG(INFO) << "comparisons_queue_size min="
                  << analyzer_stats_.comparisons_queue_size.GetMin()
                  << "; max=" << analyzer_stats_.comparisons_queue_size.GetMax()
                  << "; 99%="
                  << analyzer_stats_.comparisons_queue_size.GetPercentile(0.99);
  }
  RTC_LOG(INFO) << "comparisons_done=" << analyzer_stats_.comparisons_done;
  RTC_LOG(INFO) << "cpu_overloaded_comparisons_done="
                << analyzer_stats_.cpu_overloaded_comparisons_done;
  RTC_LOG(INFO) << "memory_overloaded_comparisons_done="
                << analyzer_stats_.memory_overloaded_comparisons_done;
}

void DefaultVideoQualityAnalyzer::ReportResults(
    const std::string& test_case_name,
    const StreamStats& stats,
    const FrameCounters& frame_counters) {
  using ::webrtc::test::ImproveDirection;
  TimeDelta test_duration = Now() - start_time_;

  double sum_squared_interframe_delays_secs = 0;
  Timestamp video_start_time = Timestamp::PlusInfinity();
  Timestamp video_end_time = Timestamp::MinusInfinity();
  for (const SamplesStatsCounter::StatsSample& sample :
       stats.time_between_rendered_frames_ms.GetTimedSamples()) {
    double interframe_delay_ms = sample.value;
    const double interframe_delays_secs = interframe_delay_ms / 1000.0;
    // The sum of squared inter-frame delays is used to calculate the harmonic
    // frame rate metric. The metric aims to reflect the overall experience
    // related to smoothness of video playback and includes both freezes and
    // pauses.
    sum_squared_interframe_delays_secs +=
        interframe_delays_secs * interframe_delays_secs;
    if (sample.time < video_start_time) {
      video_start_time = sample.time;
    }
    if (sample.time > video_end_time) {
      video_end_time = sample.time;
    }
  }
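  // Harmonic framerate is the video duration divided by the sum of squared
  // inter-frame delays. With N uniform intervals of d seconds the sum of
  // squares is N * d^2 and the duration is roughly N * d, so the metric
  // reduces to about 1/d (the nominal frame rate); long freezes reduce it
  // quadratically.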
  double harmonic_framerate_fps = 0;
  TimeDelta video_duration = video_end_time - video_start_time;
  if (sum_squared_interframe_delays_secs > 0.0 && video_duration.IsFinite()) {
    harmonic_framerate_fps = static_cast<double>(video_duration.us()) /
                             static_cast<double>(kMicrosPerSecond) /
                             sum_squared_interframe_delays_secs;
  }

  ReportResult("psnr", test_case_name, stats.psnr, "dB",
               ImproveDirection::kBiggerIsBetter);
  ReportResult("ssim", test_case_name, stats.ssim, "unitless",
               ImproveDirection::kBiggerIsBetter);
  ReportResult("transport_time", test_case_name, stats.transport_time_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  ReportResult("total_delay_incl_transport", test_case_name,
               stats.total_delay_incl_transport_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  ReportResult("time_between_rendered_frames", test_case_name,
               stats.time_between_rendered_frames_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  test::PrintResult("harmonic_framerate", "", test_case_name,
                    harmonic_framerate_fps, "Hz", /*important=*/false,
                    ImproveDirection::kBiggerIsBetter);
  test::PrintResult("encode_frame_rate", "", test_case_name,
                    stats.encode_frame_rate.IsEmpty()
                        ? 0
                        : stats.encode_frame_rate.GetEventsPerSecond(),
                    "Hz", /*important=*/false,
                    ImproveDirection::kBiggerIsBetter);
  ReportResult("encode_time", test_case_name, stats.encode_time_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  ReportResult("time_between_freezes", test_case_name,
               stats.time_between_freezes_ms, "ms",
               ImproveDirection::kBiggerIsBetter);
  ReportResult("freeze_time_ms", test_case_name, stats.freeze_time_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  ReportResult("pixels_per_frame", test_case_name,
               stats.resolution_of_rendered_frame, "count",
               ImproveDirection::kBiggerIsBetter);
  test::PrintResult("min_psnr", "", test_case_name,
                    stats.psnr.IsEmpty() ? 0 : stats.psnr.GetMin(), "dB",
                    /*important=*/false, ImproveDirection::kBiggerIsBetter);
  ReportResult("decode_time", test_case_name, stats.decode_time_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  ReportResult("receive_to_render_time", test_case_name,
               stats.receive_to_render_time_ms, "ms",
               ImproveDirection::kSmallerIsBetter);
  test::PrintResult("dropped_frames", "", test_case_name,
                    frame_counters.dropped, "count",
                    /*important=*/false, ImproveDirection::kSmallerIsBetter);
  test::PrintResult("frames_in_flight", "", test_case_name,
                    frame_counters.captured - frame_counters.rendered -
                        frame_counters.dropped,
                    "count",
                    /*important=*/false, ImproveDirection::kSmallerIsBetter);
  ReportResult("max_skipped", test_case_name, stats.skipped_between_rendered,
               "count", ImproveDirection::kSmallerIsBetter);
  ReportResult("target_encode_bitrate", test_case_name,
               stats.target_encode_bitrate / kBitsInByte, "bytesPerSecond",
               ImproveDirection::kNone);
  test::PrintResult(
      "actual_encode_bitrate", "", test_case_name,
      static_cast<double>(stats.total_encoded_images_payload) /
          static_cast<double>(test_duration.us()) * kMicrosPerSecond,
      "bytesPerSecond", /*important=*/false, ImproveDirection::kNone);
}

void DefaultVideoQualityAnalyzer::ReportResult(
    const std::string& metric_name,
    const std::string& test_case_name,
    const SamplesStatsCounter& counter,
    const std::string& unit,
    webrtc::test::ImproveDirection improve_direction) {
  test::PrintResult(metric_name, /*modifier=*/"", test_case_name, counter, unit,
                    /*important=*/false, improve_direction);
}

std::string DefaultVideoQualityAnalyzer::GetTestCaseName(
    const std::string& stream_label) const {
  return test_label_ + "/" + stream_label;
}

Timestamp DefaultVideoQualityAnalyzer::Now() {
  return clock_->CurrentTime();
}

StatsKey DefaultVideoQualityAnalyzer::ToStatsKey(
    const InternalStatsKey& key) const {
  return StatsKey(streams_.name(key.stream), peers_->name(key.sender),
                  peers_->name(key.receiver));
}

std::string DefaultVideoQualityAnalyzer::StatsKeyToMetricName(
    const StatsKey& key) {
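  // In a 1:1 call the stream label alone identifies the direction, so the
  // shorter name is used; with more than two peers the full key is used,
  // which StatsKey::ToString() renders as
  // "<stream_label>_<sender>_<receiver>" (e.g. a hypothetical
  // "alice-video_alice_bob").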
  if (peers_->size() <= 2) {
    return key.stream_label;
  }
  return key.ToString();
}

void DefaultVideoQualityAnalyzer::StartMeasuringCpuProcessTime() {
  MutexLock lock(&cpu_measurement_lock_);
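  // The counters are accumulated as differences: subtracting here and adding
  // back in StopMeasuringCpuProcessTime() leaves |cpu_time_| and
  // |wallclock_time_| holding the time spent between the paired calls.
  // StartExcludingCpuThreadTime()/StopExcludingCpuThreadTime() use the same
  // trick with opposite signs to subtract this analyzer's own worker-thread
  // CPU time from the measurement.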
  cpu_time_ -= rtc::GetProcessCpuTimeNanos();
  wallclock_time_ -= rtc::SystemTimeNanos();
}

void DefaultVideoQualityAnalyzer::StopMeasuringCpuProcessTime() {
  MutexLock lock(&cpu_measurement_lock_);
  cpu_time_ += rtc::GetProcessCpuTimeNanos();
  wallclock_time_ += rtc::SystemTimeNanos();
}

void DefaultVideoQualityAnalyzer::StartExcludingCpuThreadTime() {
  MutexLock lock(&cpu_measurement_lock_);
  cpu_time_ += rtc::GetThreadCpuTimeNanos();
}

void DefaultVideoQualityAnalyzer::StopExcludingCpuThreadTime() {
  MutexLock lock(&cpu_measurement_lock_);
  cpu_time_ -= rtc::GetThreadCpuTimeNanos();
}

double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
  MutexLock lock(&cpu_measurement_lock_);
  return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
}

DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison(
    InternalStatsKey stats_key,
    absl::optional<VideoFrame> captured,
    absl::optional<VideoFrame> rendered,
    bool dropped,
    FrameStats frame_stats,
    OverloadReason overload_reason)
    : stats_key(std::move(stats_key)),
      captured(std::move(captured)),
      rendered(std::move(rendered)),
      dropped(dropped),
      frame_stats(std::move(frame_stats)),
      overload_reason(overload_reason) {}

uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) {
  absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer);
  RTC_DCHECK(frame_id.has_value());

  // If the owner's (alive) frame queue is longer than all the others, then
  // also pop a frame from it, because that frame has been received by all
  // receivers.
  size_t owner_size = frame_ids_.size(owner_);
  size_t other_size = 0;
  for (size_t i = 0; i < frame_ids_.readers_count(); ++i) {
    size_t cur_size = frame_ids_.size(i);
    if (i != owner_ && cur_size > other_size) {
      other_size = cur_size;
    }
  }
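  // For example, with two receivers whose queues now have sizes 2 and 3 while
  // the owner's queue has 4 entries, the frame popped above has already been
  // handled by every receiver, so the owner's copy of it can be popped too.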
  if (owner_size > other_size) {
    absl::optional<uint16_t> alive_frame_id = frame_ids_.PopFront(owner_);
    RTC_DCHECK(alive_frame_id.has_value());
    RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
  }

  return frame_id.value();
}

uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() {
  absl::optional<uint16_t> frame_id = frame_ids_.PopFront(owner_);
  RTC_DCHECK(frame_id.has_value());
  return frame_id.value();
}

void DefaultVideoQualityAnalyzer::StreamState::SetLastRenderedFrameTime(
    size_t peer,
    Timestamp time) {
  auto it = last_rendered_frame_time_.find(peer);
  if (it == last_rendered_frame_time_.end()) {
    last_rendered_frame_time_.insert({peer, time});
  } else {
    it->second = time;
  }
}

absl::optional<Timestamp>
DefaultVideoQualityAnalyzer::StreamState::last_rendered_frame_time(
    size_t peer) const {
  return MaybeGetValue(last_rendered_frame_time_, peer);
}

bool DefaultVideoQualityAnalyzer::FrameInFlight::RemoveFrame() {
  if (!frame_) {
    return false;
  }
  frame_ = absl::nullopt;
  return true;
}

void DefaultVideoQualityAnalyzer::FrameInFlight::SetFrameId(uint16_t id) {
  if (frame_) {
    frame_->set_id(id);
  }
}

std::vector<size_t>
DefaultVideoQualityAnalyzer::FrameInFlight::GetPeersWhichDidntReceive() const {
  std::vector<size_t> out;
  for (size_t i = 0; i < peers_count_; ++i) {
    auto it = receiver_stats_.find(i);
    if (i != owner_ && it != receiver_stats_.end() &&
        it->second.rendered_time.IsInfinite()) {
      out.push_back(i);
    }
  }
  return out;
}

bool DefaultVideoQualityAnalyzer::FrameInFlight::HaveAllPeersReceived() const {
  for (size_t i = 0; i < peers_count_; ++i) {
    if (i == owner_) {
      continue;
    }

    auto it = receiver_stats_.find(i);
    if (it == receiver_stats_.end()) {
      return false;
    }

    if (!it->second.dropped && it->second.rendered_time.IsInfinite()) {
      return false;
    }
  }
  return true;
}

void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameEncoded(
    webrtc::Timestamp time,
    int64_t encoded_image_size,
    uint32_t target_encode_bitrate) {
  encoded_time_ = time;
  encoded_image_size_ = encoded_image_size;
  target_encode_bitrate_ += target_encode_bitrate;
}

void DefaultVideoQualityAnalyzer::FrameInFlight::OnFramePreDecode(
    size_t peer,
    webrtc::Timestamp received_time,
    webrtc::Timestamp decode_start_time) {
  receiver_stats_[peer].received_time = received_time;
  receiver_stats_[peer].decode_start_time = decode_start_time;
}

bool DefaultVideoQualityAnalyzer::FrameInFlight::HasReceivedTime(
    size_t peer) const {
  auto it = receiver_stats_.find(peer);
  if (it == receiver_stats_.end()) {
    return false;
  }
  return it->second.received_time.IsFinite();
}

bool DefaultVideoQualityAnalyzer::FrameInFlight::HasDecodeEndTime(
    size_t peer) const {
  auto it = receiver_stats_.find(peer);
  if (it == receiver_stats_.end()) {
    return false;
  }
  return it->second.decode_end_time.IsFinite();
}

void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameRendered(
    size_t peer,
    webrtc::Timestamp time,
    int width,
    int height) {
  receiver_stats_[peer].rendered_time = time;
  receiver_stats_[peer].rendered_frame_width = width;
  receiver_stats_[peer].rendered_frame_height = height;
}

bool DefaultVideoQualityAnalyzer::FrameInFlight::HasRenderedTime(
    size_t peer) const {
  auto it = receiver_stats_.find(peer);
  if (it == receiver_stats_.end()) {
    return false;
  }
  return it->second.rendered_time.IsFinite();
}

DefaultVideoQualityAnalyzer::FrameStats
DefaultVideoQualityAnalyzer::FrameInFlight::GetStatsForPeer(size_t peer) const {
  FrameStats stats(captured_time_);
  stats.pre_encode_time = pre_encode_time_;
  stats.encoded_time = encoded_time_;
  stats.target_encode_bitrate = target_encode_bitrate_;
  stats.encoded_image_size = encoded_image_size_;

  absl::optional<ReceiverFrameStats> receiver_stats =
      MaybeGetValue<ReceiverFrameStats>(receiver_stats_, peer);
  if (receiver_stats.has_value()) {
    stats.received_time = receiver_stats->received_time;
    stats.decode_start_time = receiver_stats->decode_start_time;
    stats.decode_end_time = receiver_stats->decode_end_time;
    stats.rendered_time = receiver_stats->rendered_time;
    stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time;
    stats.rendered_frame_width = receiver_stats->rendered_frame_width;
    stats.rendered_frame_height = receiver_stats->rendered_frame_height;
  }
  return stats;
}

size_t DefaultVideoQualityAnalyzer::NamesCollection::AddIfAbsent(
    absl::string_view name) {
  auto it = index_.find(name);
  if (it != index_.end()) {
    return it->second;
  }
  size_t out = names_.size();
  size_t old_capacity = names_.capacity();
  names_.emplace_back(name);
  size_t new_capacity = names_.capacity();

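  // Rebuilding is needed below presumably because |index_| keys reference the
  // storage of |names_|, which a reallocation would invalidate (an assumption
  // based on this rebuild logic; see the header for the exact type).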
  if (old_capacity == new_capacity) {
    index_.emplace(names_[out], out);
  } else {
    // Reallocation happened in the vector, so we need to rebuild |index_|
    index_.clear();
    for (size_t i = 0; i < names_.size(); ++i) {
      index_.emplace(names_[i], i);
    }
  }
  return out;
}

}  // namespace webrtc_pc_e2e
}  // namespace webrtc