/*
 *  Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "test/pc/e2e/cross_media_metrics_reporter.h"

#include <utility>
#include <vector>

#include "api/stats/rtc_stats.h"
#include "api/stats/rtcstats_objects.h"
#include "api/units/timestamp.h"
#include "rtc_base/event.h"
#include "system_wrappers/include/field_trial.h"

namespace webrtc {
namespace webrtc_pc_e2e {

void CrossMediaMetricsReporter::Start(
    absl::string_view test_case_name,
    const TrackIdStreamInfoMap* reporter_helper) {
  test_case_name_ = std::string(test_case_name);
  reporter_helper_ = reporter_helper;
}

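// Called on every stats poll: groups inbound RTP streams by their sync group
// label and records how far audio playout is ahead of video (or vice versa).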
void CrossMediaMetricsReporter::OnStatsReports(
    absl::string_view pc_label,
    const rtc::scoped_refptr<const RTCStatsReport>& report) {
  auto inbound_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
  std::map<absl::string_view, std::vector<const RTCInboundRTPStreamStats*>>
      sync_group_stats;
  for (const auto& stat : inbound_stats) {
    auto media_source_stat =
        report->GetAs<RTCMediaStreamTrackStats>(*stat->track_id);
    if (stat->estimated_playout_timestamp.ValueOrDefault(0.) > 0 &&
        media_source_stat->track_identifier.is_defined()) {
      sync_group_stats[reporter_helper_->GetSyncGroupLabelFromTrackId(
                           *media_source_stat->track_identifier)]
          .push_back(stat);
    }
  }

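  // stats_info_ is written here and read in StopAndReportResults(), so all
  // access to it goes through the mutex.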
  MutexLock lock(&mutex_);
  for (const auto& pair : sync_group_stats) {
    // If there are fewer than two streams, it is not a sync group.
    if (pair.second.size() < 2) {
      continue;
    }
    auto sync_group = std::string(pair.first);
    const RTCInboundRTPStreamStats* audio_stat = pair.second[0];
    const RTCInboundRTPStreamStats* video_stat = pair.second[1];

    RTC_CHECK(pair.second.size() == 2 && audio_stat->kind.is_defined() &&
              video_stat->kind.is_defined() &&
              *audio_stat->kind != *video_stat->kind)
        << "Sync group should consist of one audio and one video stream.";

    if (*audio_stat->kind == RTCMediaStreamTrackKind::kVideo) {
      std::swap(audio_stat, video_stat);
    }
    // Stream labels of a sync group are the same for all polls, so we need to
    // add them only once.
    if (stats_info_.find(sync_group) == stats_info_.end()) {
      auto audio_source_stat =
          report->GetAs<RTCMediaStreamTrackStats>(*audio_stat->track_id);
      auto video_source_stat =
          report->GetAs<RTCMediaStreamTrackStats>(*video_stat->track_id);
      // *_source_stat->track_identifier is always defined here because we
      // checked it while grouping stats.
      stats_info_[sync_group].audio_stream_label =
          std::string(reporter_helper_->GetStreamLabelFromTrackId(
              *audio_source_stat->track_identifier));
      stats_info_[sync_group].video_stream_label =
          std::string(reporter_helper_->GetStreamLabelFromTrackId(
              *video_source_stat->track_identifier));
    }

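    // The playout-timestamp difference is treated as milliseconds: a positive
    // value means audio playout is ahead of video, a negative one means video
    // is ahead.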
    double audio_video_playout_diff = *audio_stat->estimated_playout_timestamp -
                                      *video_stat->estimated_playout_timestamp;
    if (audio_video_playout_diff > 0) {
      stats_info_[sync_group].audio_ahead_ms.AddSample(
          audio_video_playout_diff);
      stats_info_[sync_group].video_ahead_ms.AddSample(0);
    } else {
      stats_info_[sync_group].audio_ahead_ms.AddSample(0);
      stats_info_[sync_group].video_ahead_ms.AddSample(
          std::abs(audio_video_playout_diff));
    }
  }
}

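// Reports the accumulated audio/video drift samples for every sync group
// observed during the call.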
void CrossMediaMetricsReporter::StopAndReportResults() {
  MutexLock lock(&mutex_);
  for (const auto& pair : stats_info_) {
    const std::string& sync_group = pair.first;
    ReportResult("audio_ahead_ms",
                 GetTestCaseName(pair.second.audio_stream_label, sync_group),
                 pair.second.audio_ahead_ms, "ms",
                 webrtc::test::ImproveDirection::kSmallerIsBetter);
    ReportResult("video_ahead_ms",
                 GetTestCaseName(pair.second.video_stream_label, sync_group),
                 pair.second.video_ahead_ms, "ms",
                 webrtc::test::ImproveDirection::kSmallerIsBetter);
  }
}

void CrossMediaMetricsReporter::ReportResult(
    const std::string& metric_name,
    const std::string& test_case_name,
    const SamplesStatsCounter& counter,
    const std::string& unit,
    webrtc::test::ImproveDirection improve_direction) {
  test::PrintResult(metric_name, /*modifier=*/"", test_case_name, counter, unit,
                    /*important=*/false, improve_direction);
}

std::string CrossMediaMetricsReporter::GetTestCaseName(
    const std::string& stream_label,
    const std::string& sync_group) const {
  return test_case_name_ + "/" + sync_group + "_" + stream_label;
}

}  // namespace webrtc_pc_e2e
}  // namespace webrtc