1 /*
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10 #include "video/video_quality_test.h"
11
12 #include <stdio.h>
13
14 #if defined(WEBRTC_WIN)
15 #include <conio.h>
16 #endif
17
18 #include <algorithm>
19 #include <deque>
20 #include <map>
21 #include <memory>
22 #include <string>
23 #include <vector>
24
25 #include "api/fec_controller_override.h"
26 #include "api/rtc_event_log_output_file.h"
27 #include "api/task_queue/default_task_queue_factory.h"
28 #include "api/task_queue/task_queue_base.h"
29 #include "api/test/create_frame_generator.h"
30 #include "api/video/builtin_video_bitrate_allocator_factory.h"
31 #include "api/video_codecs/video_encoder.h"
32 #include "call/fake_network_pipe.h"
33 #include "call/simulated_network.h"
34 #include "media/base/media_constants.h"
35 #include "media/engine/adm_helpers.h"
36 #include "media/engine/encoder_simulcast_proxy.h"
37 #include "media/engine/fake_video_codec_factory.h"
38 #include "media/engine/internal_encoder_factory.h"
39 #include "media/engine/webrtc_video_engine.h"
40 #include "modules/audio_device/include/audio_device.h"
41 #include "modules/audio_mixer/audio_mixer_impl.h"
42 #include "modules/video_coding/codecs/h264/include/h264.h"
43 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
44 #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
45 #include "modules/video_coding/codecs/vp8/include/vp8.h"
46 #include "modules/video_coding/codecs/vp9/include/vp9.h"
47 #include "modules/video_coding/utility/ivf_file_writer.h"
48 #include "rtc_base/strings/string_builder.h"
49 #include "rtc_base/task_queue_for_test.h"
50 #include "test/platform_video_capturer.h"
51 #include "test/testsupport/file_utils.h"
52 #include "test/video_renderer.h"
53 #include "video/frame_dumping_decoder.h"
54 #ifdef WEBRTC_WIN
55 #include "modules/audio_device/include/audio_device_factory.h"
56 #endif
57
58 namespace webrtc {
59
60 namespace {
61 enum : int { // The first valid value is 1.
62 kAbsSendTimeExtensionId = 1,
63 kGenericFrameDescriptorExtensionId00,
64 kGenericFrameDescriptorExtensionId01,
65 kTransportSequenceNumberExtensionId,
66 kVideoContentTypeExtensionId,
67 kVideoTimingExtensionId,
68 };
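// The remaining enumerators take the sequential values 2 through 6; they are
// used below as RTP header extension IDs when configuring the streams.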
69
70 constexpr char kSyncGroup[] = "av_sync";
71 constexpr int kOpusMinBitrateBps = 6000;
72 constexpr int kOpusBitrateFbBps = 32000;
73 constexpr int kFramesSentInQuickTest = 1;
74 constexpr uint32_t kThumbnailSendSsrcStart = 0xE0000;
75 constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000;
76
77 constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax;
78
79 const VideoEncoder::Capabilities kCapabilities(false);
80
81 std::pair<uint32_t, uint32_t> GetMinMaxBitratesBps(const VideoCodec& codec,
82 size_t spatial_idx) {
83 uint32_t min_bitrate = codec.minBitrate;
84 uint32_t max_bitrate = codec.maxBitrate;
85 if (spatial_idx < codec.numberOfSimulcastStreams) {
86 min_bitrate =
87 std::max(min_bitrate, codec.simulcastStream[spatial_idx].minBitrate);
88 max_bitrate =
89 std::min(max_bitrate, codec.simulcastStream[spatial_idx].maxBitrate);
90 }
91 if (codec.codecType == VideoCodecType::kVideoCodecVP9 &&
92 spatial_idx < codec.VP9().numberOfSpatialLayers) {
93 min_bitrate =
94 std::max(min_bitrate, codec.spatialLayers[spatial_idx].minBitrate);
95 max_bitrate =
96 std::min(max_bitrate, codec.spatialLayers[spatial_idx].maxBitrate);
97 }
98 max_bitrate = std::max(max_bitrate, min_bitrate);
99 return {min_bitrate * 1000, max_bitrate * 1000};
100 }
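// Example usage (hypothetical values): for a codec with a 30-2000 kbps range
// whose simulcast stream 1 is limited to 300-1200 kbps,
// GetMinMaxBitratesBps(codec, 1) returns {300000, 1200000}:
//   uint32_t min_bps;
//   uint32_t max_bps;
//   std::tie(min_bps, max_bps) = GetMinMaxBitratesBps(codec, 1);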
101
102 class VideoStreamFactory
103 : public VideoEncoderConfig::VideoStreamFactoryInterface {
104 public:
105 explicit VideoStreamFactory(const std::vector<VideoStream>& streams)
106 : streams_(streams) {}
107
108 private:
109 std::vector<VideoStream> CreateEncoderStreams(
110 int width,
111 int height,
112 const VideoEncoderConfig& encoder_config) override {
113 // The highest layer must match the incoming resolution.
114 std::vector<VideoStream> streams = streams_;
115 streams[streams_.size() - 1].height = height;
116 streams[streams_.size() - 1].width = width;
117
118 streams[0].bitrate_priority = encoder_config.bitrate_priority;
119 return streams;
120 }
121
122 std::vector<VideoStream> streams_;
123 };
124
125 // This wrapper provides two features needed by the video quality tests:
126 // 1. Invoke VideoAnalyzer callbacks before and after encoding each frame.
127 // 2. Write the encoded frames to file, one file per simulcast layer.
128 class QualityTestVideoEncoder : public VideoEncoder,
129 private EncodedImageCallback {
130 public:
131 QualityTestVideoEncoder(std::unique_ptr<VideoEncoder> encoder,
132 VideoAnalyzer* analyzer,
133 std::vector<FileWrapper> files,
134 double overshoot_factor)
135 : encoder_(std::move(encoder)),
136 overshoot_factor_(overshoot_factor),
137 analyzer_(analyzer) {
138 for (FileWrapper& file : files) {
139 writers_.push_back(
140 IvfFileWriter::Wrap(std::move(file), /* byte_limit= */ 100000000));
141 }
142 }
143
144 // Implement VideoEncoder
145 void SetFecControllerOverride(
146 FecControllerOverride* fec_controller_override) {
147 // Ignored.
148 }
149
150 int32_t InitEncode(const VideoCodec* codec_settings,
151 const Settings& settings) override {
152 codec_settings_ = *codec_settings;
153 return encoder_->InitEncode(codec_settings, settings);
154 }
155
156 int32_t RegisterEncodeCompleteCallback(
157 EncodedImageCallback* callback) override {
158 callback_ = callback;
159 return encoder_->RegisterEncodeCompleteCallback(this);
160 }
161
162 int32_t Release() override { return encoder_->Release(); }
163
164 int32_t Encode(const VideoFrame& frame,
165 const std::vector<VideoFrameType>* frame_types) {
166 if (analyzer_) {
167 analyzer_->PreEncodeOnFrame(frame);
168 }
169 return encoder_->Encode(frame, frame_types);
170 }
171
172 void SetRates(const RateControlParameters& parameters) override {
173 RTC_DCHECK_GT(overshoot_factor_, 0.0);
174 if (overshoot_factor_ == 1.0) {
175 encoder_->SetRates(parameters);
176 return;
177 }
178
179 // Simulate the encoder overshooting the target bitrate by configuring the
180 // actual encoder too high. Take care not to adjust past the limits of the
181 // config, otherwise encoders may crash on a DCHECK.
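// For example (hypothetical numbers): with overshoot_factor_ = 1.2 and a
// spatial layer targeted at 500 kbps whose allowed range is 100-550 kbps,
// 1.2 * 500 = 600 kbps exceeds the max, so the factor is clamped to
// 550 / 500 = 1.1 before scaling the per-layer allocations below.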
182 VideoBitrateAllocation overshot_allocation;
183 for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
184 const uint32_t spatial_layer_bitrate_bps =
185 parameters.bitrate.GetSpatialLayerSum(si);
186 if (spatial_layer_bitrate_bps == 0) {
187 continue;
188 }
189
190 uint32_t min_bitrate_bps;
191 uint32_t max_bitrate_bps;
192 std::tie(min_bitrate_bps, max_bitrate_bps) =
193 GetMinMaxBitratesBps(codec_settings_, si);
194 double overshoot_factor = overshoot_factor_;
195 const uint32_t corrected_bitrate = rtc::checked_cast<uint32_t>(
196 overshoot_factor * spatial_layer_bitrate_bps);
197 if (corrected_bitrate < min_bitrate_bps) {
198 overshoot_factor = static_cast<double>(min_bitrate_bps) / spatial_layer_bitrate_bps;
199 } else if (corrected_bitrate > max_bitrate_bps) {
200 overshoot_factor = static_cast<double>(max_bitrate_bps) / spatial_layer_bitrate_bps;
201 }
202
203 for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
204 if (parameters.bitrate.HasBitrate(si, ti)) {
205 overshot_allocation.SetBitrate(
206 si, ti,
207 rtc::checked_cast<uint32_t>(
208 overshoot_factor * parameters.bitrate.GetBitrate(si, ti)));
209 }
210 }
211 }
212
213 return encoder_->SetRates(
214 RateControlParameters(overshot_allocation, parameters.framerate_fps,
215 parameters.bandwidth_allocation));
216 }
217
218 void OnPacketLossRateUpdate(float packet_loss_rate) override {
219 encoder_->OnPacketLossRateUpdate(packet_loss_rate);
220 }
221
222 void OnRttUpdate(int64_t rtt_ms) override { encoder_->OnRttUpdate(rtt_ms); }
223
224 void OnLossNotification(const LossNotification& loss_notification) override {
225 encoder_->OnLossNotification(loss_notification);
226 }
227
228 EncoderInfo GetEncoderInfo() const override {
229 EncoderInfo info = encoder_->GetEncoderInfo();
230 if (overshoot_factor_ != 1.0) {
231 // We're simulating a bad encoder, so don't forward the trusted rate
232 // controller setting from e.g. libvpx.
233 info.has_trusted_rate_controller = false;
234 }
235 return info;
236 }
237
238 private:
239 // Implement EncodedImageCallback
240 Result OnEncodedImage(const EncodedImage& encoded_image,
241 const CodecSpecificInfo* codec_specific_info,
242 const RTPFragmentationHeader* fragmentation) override {
243 if (codec_specific_info) {
244 int simulcast_index;
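// VP9 sends its spatial layers on a single stream, so everything maps to
// index 0; for simulcast codecs the encoded image's spatial index is used
// as the simulcast index.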
245 if (codec_specific_info->codecType == kVideoCodecVP9) {
246 simulcast_index = 0;
247 } else {
248 simulcast_index = encoded_image.SpatialIndex().value_or(0);
249 }
250 RTC_DCHECK_GE(simulcast_index, 0);
251 if (analyzer_) {
252 analyzer_->PostEncodeOnFrame(simulcast_index,
253 encoded_image.Timestamp());
254 }
255 if (static_cast<size_t>(simulcast_index) < writers_.size()) {
256 writers_[simulcast_index]->WriteFrame(encoded_image,
257 codec_specific_info->codecType);
258 }
259 }
260
261 return callback_->OnEncodedImage(encoded_image, codec_specific_info,
262 fragmentation);
263 }
264
265 void OnDroppedFrame(DropReason reason) override {
266 callback_->OnDroppedFrame(reason);
267 }
268
269 const std::unique_ptr<VideoEncoder> encoder_;
270 const double overshoot_factor_;
271 VideoAnalyzer* const analyzer_;
272 std::vector<std::unique_ptr<IvfFileWriter>> writers_;
273 EncodedImageCallback* callback_ = nullptr;
274 VideoCodec codec_settings_;
275 };
276
277 #if defined(WEBRTC_WIN) && !defined(WINUWP)
278 void PressEnterToContinue(TaskQueueBase* task_queue) {
279 puts(">> Press ENTER to continue...");
280
281 while (!_kbhit() || _getch() != '\r') {
282 // Drive the message loop for the thread running the task_queue
283 SendTask(RTC_FROM_HERE, task_queue, [&]() {
284 MSG msg;
285 if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
286 TranslateMessage(&msg);
287 DispatchMessage(&msg);
288 }
289 });
290 }
291 }
292 #else
293 void PressEnterToContinue(TaskQueueBase* /*task_queue*/) {
294 puts(">> Press ENTER to continue...");
295 while (getc(stdin) != '\n' && !feof(stdin))
296 ; // NOLINT
297 }
298 #endif
299
300 } // namespace
301
302 std::unique_ptr<VideoDecoder> VideoQualityTest::CreateVideoDecoder(
303 const SdpVideoFormat& format) {
304 std::unique_ptr<VideoDecoder> decoder;
305 if (format.name == "multiplex") {
306 decoder = std::make_unique<MultiplexDecoderAdapter>(
307 decoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName));
308 } else if (format.name == "FakeCodec") {
309 decoder = webrtc::FakeVideoDecoderFactory::CreateVideoDecoder();
310 } else {
311 decoder = decoder_factory_->CreateVideoDecoder(format);
312 }
313 if (!params_.logging.encoded_frame_base_path.empty()) {
314 rtc::StringBuilder str;
315 str << receive_logs_++;
316 std::string path =
317 params_.logging.encoded_frame_base_path + "." + str.str() + ".recv.ivf";
318 decoder = CreateFrameDumpingDecoderWrapper(
319 std::move(decoder), FileWrapper::OpenWriteOnly(path));
320 }
321 return decoder;
322 }
323
324 std::unique_ptr<VideoEncoder> VideoQualityTest::CreateVideoEncoder(
325 const SdpVideoFormat& format,
326 VideoAnalyzer* analyzer) {
327 std::unique_ptr<VideoEncoder> encoder;
328 if (format.name == "VP8") {
329 encoder =
330 std::make_unique<EncoderSimulcastProxy>(encoder_factory_.get(), format);
331 } else if (format.name == "multiplex") {
332 encoder = std::make_unique<MultiplexEncoderAdapter>(
333 encoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName));
334 } else if (format.name == "FakeCodec") {
335 encoder = webrtc::FakeVideoEncoderFactory::CreateVideoEncoder();
336 } else {
337 encoder = encoder_factory_->CreateVideoEncoder(format);
338 }
339
340 std::vector<FileWrapper> encoded_frame_dump_files;
341 if (!params_.logging.encoded_frame_base_path.empty()) {
342 char ss_buf[100];
343 rtc::SimpleStringBuilder sb(ss_buf);
344 sb << send_logs_++;
345 std::string prefix =
346 params_.logging.encoded_frame_base_path + "." + sb.str() + ".send.";
347 encoded_frame_dump_files.push_back(
348 FileWrapper::OpenWriteOnly(prefix + "1.ivf"));
349 encoded_frame_dump_files.push_back(
350 FileWrapper::OpenWriteOnly(prefix + "2.ivf"));
351 encoded_frame_dump_files.push_back(
352 FileWrapper::OpenWriteOnly(prefix + "3.ivf"));
353 }
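// With the naming above, the first send stream (send_logs_ == 0) dumps its
// simulcast layers to "<encoded_frame_base_path>.0.send.1.ivf", ".2.ivf"
// and ".3.ivf".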
354
355 double overshoot_factor = 1.0;
356 // Match format to either of the streams in dual-stream mode in order to get
357 // the overshoot factor. This is not very robust but we can't know for sure
358 // which stream this encoder is meant for, from within the factory.
359 if (format ==
360 SdpVideoFormat(params_.video[0].codec, params_.video[0].sdp_params)) {
361 overshoot_factor = params_.video[0].encoder_overshoot_factor;
362 } else if (format == SdpVideoFormat(params_.video[1].codec,
363 params_.video[1].sdp_params)) {
364 overshoot_factor = params_.video[1].encoder_overshoot_factor;
365 }
366 if (overshoot_factor == 0.0) {
367 // If params were zero-initialized, set to 1.0 instead.
368 overshoot_factor = 1.0;
369 }
370
371 if (analyzer || !encoded_frame_dump_files.empty() || overshoot_factor > 1.0) {
372 encoder = std::make_unique<QualityTestVideoEncoder>(
373 std::move(encoder), analyzer, std::move(encoded_frame_dump_files),
374 overshoot_factor);
375 }
376
377 return encoder;
378 }
379
380 VideoQualityTest::VideoQualityTest(
381 std::unique_ptr<InjectionComponents> injection_components)
382 : clock_(Clock::GetRealTimeClock()),
383 task_queue_factory_(CreateDefaultTaskQueueFactory()),
384 rtc_event_log_factory_(task_queue_factory_.get()),
385 video_decoder_factory_([this](const SdpVideoFormat& format) {
386 return this->CreateVideoDecoder(format);
387 }),
388 video_encoder_factory_([this](const SdpVideoFormat& format) {
389 return this->CreateVideoEncoder(format, nullptr);
390 }),
391 video_encoder_factory_with_analyzer_(
392 [this](const SdpVideoFormat& format) {
393 return this->CreateVideoEncoder(format, analyzer_.get());
394 }),
395 video_bitrate_allocator_factory_(
396 CreateBuiltinVideoBitrateAllocatorFactory()),
397 receive_logs_(0),
398 send_logs_(0),
399 injection_components_(std::move(injection_components)),
400 num_video_streams_(0) {
401 if (injection_components_ == nullptr) {
402 injection_components_ = std::make_unique<InjectionComponents>();
403 }
404 if (injection_components_->video_decoder_factory != nullptr) {
405 decoder_factory_ = std::move(injection_components_->video_decoder_factory);
406 } else {
407 decoder_factory_ = std::make_unique<InternalDecoderFactory>();
408 }
409 if (injection_components_->video_encoder_factory != nullptr) {
410 encoder_factory_ = std::move(injection_components_->video_encoder_factory);
411 } else {
412 encoder_factory_ = std::make_unique<InternalEncoderFactory>();
413 }
414
415 payload_type_map_ = test::CallTest::payload_type_map_;
416 RTC_DCHECK(payload_type_map_.find(kPayloadTypeH264) ==
417 payload_type_map_.end());
418 RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP8) ==
419 payload_type_map_.end());
420 RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP9) ==
421 payload_type_map_.end());
422 RTC_DCHECK(payload_type_map_.find(kPayloadTypeGeneric) ==
423 payload_type_map_.end());
424 payload_type_map_[kPayloadTypeH264] = webrtc::MediaType::VIDEO;
425 payload_type_map_[kPayloadTypeVP8] = webrtc::MediaType::VIDEO;
426 payload_type_map_[kPayloadTypeVP9] = webrtc::MediaType::VIDEO;
427 payload_type_map_[kPayloadTypeGeneric] = webrtc::MediaType::VIDEO;
428
429 fec_controller_factory_ =
430 std::move(injection_components_->fec_controller_factory);
431 network_state_predictor_factory_ =
432 std::move(injection_components_->network_state_predictor_factory);
433 network_controller_factory_ =
434 std::move(injection_components_->network_controller_factory);
435 }
436
437 VideoQualityTest::InjectionComponents::InjectionComponents() = default;
438
439 VideoQualityTest::InjectionComponents::~InjectionComponents() = default;
440
441 void VideoQualityTest::TestBody() {}
442
443 std::string VideoQualityTest::GenerateGraphTitle() const {
444 rtc::StringBuilder ss;
445 ss << params_.video[0].codec;
446 ss << " (" << params_.video[0].target_bitrate_bps / 1000 << "kbps";
447 ss << ", " << params_.video[0].fps << " FPS";
448 if (params_.screenshare[0].scroll_duration)
449 ss << ", " << params_.screenshare[0].scroll_duration << "s scroll";
450 if (params_.ss[0].streams.size() > 1)
451 ss << ", Stream #" << params_.ss[0].selected_stream;
452 if (params_.ss[0].num_spatial_layers > 1)
453 ss << ", Layer #" << params_.ss[0].selected_sl;
454 ss << ")";
455 return ss.Release();
456 }
457
458 void VideoQualityTest::CheckParamsAndInjectionComponents() {
459 if (injection_components_ == nullptr) {
460 injection_components_ = std::make_unique<InjectionComponents>();
461 }
462 if (!params_.config && injection_components_->sender_network == nullptr &&
463 injection_components_->receiver_network == nullptr) {
464 params_.config = BuiltInNetworkBehaviorConfig();
465 }
466 RTC_CHECK(
467 (params_.config && injection_components_->sender_network == nullptr &&
468 injection_components_->receiver_network == nullptr) ||
469 (!params_.config && injection_components_->sender_network != nullptr &&
470 injection_components_->receiver_network != nullptr));
471 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
472 // Iterate over primary and secondary video streams.
473 if (!params_.video[video_idx].enabled)
474 return;
475 // Add a default stream if none is specified.
476 if (params_.ss[video_idx].streams.empty())
477 params_.ss[video_idx].streams.push_back(
478 VideoQualityTest::DefaultVideoStream(params_, video_idx));
479 if (params_.ss[video_idx].num_spatial_layers == 0)
480 params_.ss[video_idx].num_spatial_layers = 1;
481
482 if (params_.config) {
483 if (params_.config->loss_percent != 0 ||
484 params_.config->queue_length_packets != 0) {
485 // Since LayerFilteringTransport changes the sequence numbers, we can't
486 // use that feature with packet loss, since the NACK request would end up
487 // retransmitting the wrong packets.
488 RTC_CHECK(params_.ss[video_idx].selected_sl == -1 ||
489 params_.ss[video_idx].selected_sl ==
490 params_.ss[video_idx].num_spatial_layers - 1);
491 RTC_CHECK(params_.video[video_idx].selected_tl == -1 ||
492 params_.video[video_idx].selected_tl ==
493 params_.video[video_idx].num_temporal_layers - 1);
494 }
495 }
496
497 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as
498 // it does in some parts of the code?
499 RTC_CHECK_GE(params_.video[video_idx].max_bitrate_bps,
500 params_.video[video_idx].target_bitrate_bps);
501 RTC_CHECK_GE(params_.video[video_idx].target_bitrate_bps,
502 params_.video[video_idx].min_bitrate_bps);
503 int selected_stream = params_.ss[video_idx].selected_stream;
504 if (params_.video[video_idx].selected_tl > -1) {
505 RTC_CHECK_LT(selected_stream, params_.ss[video_idx].streams.size())
506 << "Can not use --selected_tl when --selected_stream is all streams";
507 int stream_tl = params_.ss[video_idx]
508 .streams[selected_stream]
509 .num_temporal_layers.value_or(1);
510 RTC_CHECK_LT(params_.video[video_idx].selected_tl, stream_tl);
511 }
512 RTC_CHECK_LE(params_.ss[video_idx].selected_stream,
513 params_.ss[video_idx].streams.size());
514 for (const VideoStream& stream : params_.ss[video_idx].streams) {
515 RTC_CHECK_GE(stream.min_bitrate_bps, 0);
516 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
517 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
518 }
519 // TODO(ivica): Should we check if the sum of all streams/layers is equal to
520 // the total bitrate? We have to update them anyway in case the bitrate
521 // estimator changes the total bitrate.
522 RTC_CHECK_GE(params_.ss[video_idx].num_spatial_layers, 1);
523 RTC_CHECK_LE(params_.ss[video_idx].selected_sl,
524 params_.ss[video_idx].num_spatial_layers);
525 RTC_CHECK(
526 params_.ss[video_idx].spatial_layers.empty() ||
527 params_.ss[video_idx].spatial_layers.size() ==
528 static_cast<size_t>(params_.ss[video_idx].num_spatial_layers));
529 if (params_.video[video_idx].codec == "VP8") {
530 RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1);
531 } else if (params_.video[video_idx].codec == "VP9") {
532 RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 1);
533 }
534 RTC_CHECK_GE(params_.call.num_thumbnails, 0);
535 if (params_.call.num_thumbnails > 0) {
536 RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1);
537 RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 3);
538 RTC_CHECK_EQ(params_.video[video_idx].num_temporal_layers, 3);
539 RTC_CHECK_EQ(params_.video[video_idx].codec, "VP8");
540 }
541 // Dual streams with FEC not supported in tests yet.
542 RTC_CHECK(!params_.video[video_idx].flexfec || num_video_streams_ == 1);
543 RTC_CHECK(!params_.video[video_idx].ulpfec || num_video_streams_ == 1);
544 }
545 }
546
547 // Static.
548 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
549 // Parse comma separated nonnegative integers, where some elements may be
550 // empty. The empty values are replaced with -1.
551 // E.g. "10,20,,30,40" --> {10, 20, -1, 30, 40}
552 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
553 std::vector<int> result;
554 if (str.empty())
555 return result;
556
557 const char* p = str.c_str();
558 int value = -1;
559 int pos;
560 while (*p) {
561 if (*p == ',') {
562 result.push_back(value);
563 value = -1;
564 ++p;
565 continue;
566 }
567 RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
568 << "Unexpected non-number value.";
569 p += pos;
570 }
571 result.push_back(value);
572 return result;
573 }
574
575 // Static.
576 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params,
577 size_t video_idx) {
578 VideoStream stream;
579 stream.width = params.video[video_idx].width;
580 stream.height = params.video[video_idx].height;
581 stream.max_framerate = params.video[video_idx].fps;
582 stream.min_bitrate_bps = params.video[video_idx].min_bitrate_bps;
583 stream.target_bitrate_bps = params.video[video_idx].target_bitrate_bps;
584 stream.max_bitrate_bps = params.video[video_idx].max_bitrate_bps;
585 stream.max_qp = kDefaultMaxQp;
586 stream.num_temporal_layers = params.video[video_idx].num_temporal_layers;
587 stream.active = true;
588 return stream;
589 }
590
591 // Static.
592 VideoStream VideoQualityTest::DefaultThumbnailStream() {
593 VideoStream stream;
594 stream.width = 320;
595 stream.height = 180;
596 stream.max_framerate = 7;
597 stream.min_bitrate_bps = 7500;
598 stream.target_bitrate_bps = 37500;
599 stream.max_bitrate_bps = 50000;
600 stream.max_qp = kDefaultMaxQp;
601 return stream;
602 }
603
604 // Static.
605 void VideoQualityTest::FillScalabilitySettings(
606 Params* params,
607 size_t video_idx,
608 const std::vector<std::string>& stream_descriptors,
609 int num_streams,
610 size_t selected_stream,
611 int num_spatial_layers,
612 int selected_sl,
613 InterLayerPredMode inter_layer_pred,
614 const std::vector<std::string>& sl_descriptors) {
615 if (params->ss[video_idx].streams.empty() &&
616 params->ss[video_idx].infer_streams) {
617 webrtc::VideoEncoderConfig encoder_config;
618 encoder_config.codec_type =
619 PayloadStringToCodecType(params->video[video_idx].codec);
620 encoder_config.content_type =
621 params->screenshare[video_idx].enabled
622 ? webrtc::VideoEncoderConfig::ContentType::kScreen
623 : webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
624 encoder_config.max_bitrate_bps = params->video[video_idx].max_bitrate_bps;
625 encoder_config.min_transmit_bitrate_bps =
626 params->video[video_idx].min_transmit_bps;
627 encoder_config.number_of_streams = num_streams;
628 encoder_config.spatial_layers = params->ss[video_idx].spatial_layers;
629 encoder_config.simulcast_layers = std::vector<VideoStream>(num_streams);
630 encoder_config.video_stream_factory =
631 new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
632 params->video[video_idx].codec, kDefaultMaxQp,
633 params->screenshare[video_idx].enabled, true);
634 params->ss[video_idx].streams =
635 encoder_config.video_stream_factory->CreateEncoderStreams(
636 static_cast<int>(params->video[video_idx].width),
637 static_cast<int>(params->video[video_idx].height), encoder_config);
638 } else {
639 // Read VideoStream and SpatialLayer elements from a list of comma separated
640 // lists. To use a default value for an element, use -1 or leave empty.
641 // Validity checks performed in CheckParamsAndInjectionComponents.
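// A stream descriptor lists, in order: width, height, max_framerate,
// min_bitrate_bps, target_bitrate_bps, max_bitrate_bps, and optionally
// max_qp and num_temporal_layers, e.g. (hypothetical values)
// "640,360,30,150000,400000,600000".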
642 RTC_CHECK(params->ss[video_idx].streams.empty());
643 for (const auto& descriptor : stream_descriptors) {
644 if (descriptor.empty())
645 continue;
646 VideoStream stream =
647 VideoQualityTest::DefaultVideoStream(*params, video_idx);
648 std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
649 if (v[0] != -1)
650 stream.width = static_cast<size_t>(v[0]);
651 if (v[1] != -1)
652 stream.height = static_cast<size_t>(v[1]);
653 if (v[2] != -1)
654 stream.max_framerate = v[2];
655 if (v[3] != -1)
656 stream.min_bitrate_bps = v[3];
657 if (v[4] != -1)
658 stream.target_bitrate_bps = v[4];
659 if (v[5] != -1)
660 stream.max_bitrate_bps = v[5];
661 if (v.size() > 6 && v[6] != -1)
662 stream.max_qp = v[6];
663 if (v.size() > 7 && v[7] != -1) {
664 stream.num_temporal_layers = v[7];
665 } else {
666 // Automatic TL thresholds for more than two layers not supported.
667 RTC_CHECK_LE(params->video[video_idx].num_temporal_layers, 2);
668 }
669 params->ss[video_idx].streams.push_back(stream);
670 }
671 }
672
673 params->ss[video_idx].num_spatial_layers = std::max(1, num_spatial_layers);
674 params->ss[video_idx].selected_stream = selected_stream;
675
676 params->ss[video_idx].selected_sl = selected_sl;
677 params->ss[video_idx].inter_layer_pred = inter_layer_pred;
678 RTC_CHECK(params->ss[video_idx].spatial_layers.empty());
679 for (const auto& descriptor : sl_descriptors) {
680 if (descriptor.empty())
681 continue;
682 std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
683 RTC_CHECK_EQ(v.size(), 8);
684
685 SpatialLayer layer = {0};
686 layer.width = v[0];
687 layer.height = v[1];
688 layer.maxFramerate = v[2];
689 layer.numberOfTemporalLayers = v[3];
690 layer.maxBitrate = v[4];
691 layer.minBitrate = v[5];
692 layer.targetBitrate = v[6];
693 layer.qpMax = v[7];
694 layer.active = true;
695
696 params->ss[video_idx].spatial_layers.push_back(layer);
697 }
698 }
699
700 void VideoQualityTest::SetupVideo(Transport* send_transport,
701 Transport* recv_transport) {
702 size_t total_streams_used = 0;
703 video_receive_configs_.clear();
704 video_send_configs_.clear();
705 video_encoder_configs_.clear();
706 bool decode_all_receive_streams = true;
707 size_t num_video_substreams = params_.ss[0].streams.size();
708 RTC_CHECK(num_video_streams_ > 0);
709 video_encoder_configs_.resize(num_video_streams_);
710 std::string generic_codec_name;
711 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
712 video_send_configs_.push_back(VideoSendStream::Config(send_transport));
713 video_encoder_configs_.push_back(VideoEncoderConfig());
714 num_video_substreams = params_.ss[video_idx].streams.size();
715 RTC_CHECK_GT(num_video_substreams, 0);
716 for (size_t i = 0; i < num_video_substreams; ++i)
717 video_send_configs_[video_idx].rtp.ssrcs.push_back(
718 kVideoSendSsrcs[total_streams_used + i]);
719
720 int payload_type;
721 if (params_.video[video_idx].codec == "H264") {
722 payload_type = kPayloadTypeH264;
723 } else if (params_.video[video_idx].codec == "VP8") {
724 payload_type = kPayloadTypeVP8;
725 } else if (params_.video[video_idx].codec == "VP9") {
726 payload_type = kPayloadTypeVP9;
727 } else if (params_.video[video_idx].codec == "multiplex") {
728 payload_type = kPayloadTypeVP9;
729 } else if (params_.video[video_idx].codec == "FakeCodec") {
730 payload_type = kFakeVideoSendPayloadType;
731 } else {
732 RTC_CHECK(generic_codec_name.empty() ||
733 generic_codec_name == params_.video[video_idx].codec)
734 << "Supplying multiple generic codecs is unsupported.";
735 RTC_LOG(LS_INFO) << "Treating codec " << params_.video[video_idx].codec
736 << " as generic.";
737 payload_type = kPayloadTypeGeneric;
738 generic_codec_name = params_.video[video_idx].codec;
739 }
740 video_send_configs_[video_idx].encoder_settings.encoder_factory =
741 (video_idx == 0) ? &video_encoder_factory_with_analyzer_
742 : &video_encoder_factory_;
743 video_send_configs_[video_idx].encoder_settings.bitrate_allocator_factory =
744 video_bitrate_allocator_factory_.get();
745
746 video_send_configs_[video_idx].rtp.payload_name =
747 params_.video[video_idx].codec;
748 video_send_configs_[video_idx].rtp.payload_type = payload_type;
749 video_send_configs_[video_idx].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
750 video_send_configs_[video_idx].rtp.rtx.payload_type = kSendRtxPayloadType;
751 for (size_t i = 0; i < num_video_substreams; ++i) {
752 video_send_configs_[video_idx].rtp.rtx.ssrcs.push_back(
753 kSendRtxSsrcs[i + total_streams_used]);
754 }
755 video_send_configs_[video_idx].rtp.extensions.clear();
756 if (params_.call.send_side_bwe) {
757 video_send_configs_[video_idx].rtp.extensions.emplace_back(
758 RtpExtension::kTransportSequenceNumberUri,
759 kTransportSequenceNumberExtensionId);
760 } else {
761 video_send_configs_[video_idx].rtp.extensions.emplace_back(
762 RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId);
763 }
764
765 if (params_.call.generic_descriptor) {
766 video_send_configs_[video_idx].rtp.extensions.emplace_back(
767 RtpExtension::kGenericFrameDescriptorUri00,
768 kGenericFrameDescriptorExtensionId00);
769 }
770
771 video_send_configs_[video_idx].rtp.extensions.emplace_back(
772 RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId);
773 video_send_configs_[video_idx].rtp.extensions.emplace_back(
774 RtpExtension::kVideoTimingUri, kVideoTimingExtensionId);
775
776 video_encoder_configs_[video_idx].video_format.name =
777 params_.video[video_idx].codec;
778
779 video_encoder_configs_[video_idx].video_format.parameters =
780 params_.video[video_idx].sdp_params;
781
782 video_encoder_configs_[video_idx].codec_type =
783 PayloadStringToCodecType(params_.video[video_idx].codec);
784
785 video_encoder_configs_[video_idx].min_transmit_bitrate_bps =
786 params_.video[video_idx].min_transmit_bps;
787
788 video_send_configs_[video_idx].suspend_below_min_bitrate =
789 params_.video[video_idx].suspend_below_min_bitrate;
790
791 video_encoder_configs_[video_idx].number_of_streams =
792 params_.ss[video_idx].streams.size();
793 video_encoder_configs_[video_idx].max_bitrate_bps = 0;
794 for (size_t i = 0; i < params_.ss[video_idx].streams.size(); ++i) {
795 video_encoder_configs_[video_idx].max_bitrate_bps +=
796 params_.ss[video_idx].streams[i].max_bitrate_bps;
797 }
798 video_encoder_configs_[video_idx].simulcast_layers =
799 std::vector<VideoStream>(params_.ss[video_idx].streams.size());
800 if (!params_.ss[video_idx].infer_streams) {
801 video_encoder_configs_[video_idx].simulcast_layers =
802 params_.ss[video_idx].streams;
803 }
804 video_encoder_configs_[video_idx].video_stream_factory =
805 new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
806 params_.video[video_idx].codec,
807 params_.ss[video_idx].streams[0].max_qp,
808 params_.screenshare[video_idx].enabled, true);
809
810 video_encoder_configs_[video_idx].spatial_layers =
811 params_.ss[video_idx].spatial_layers;
812 decode_all_receive_streams = params_.ss[video_idx].selected_stream ==
813 params_.ss[video_idx].streams.size();
814 absl::optional<int> decode_sub_stream;
815 if (!decode_all_receive_streams)
816 decode_sub_stream = params_.ss[video_idx].selected_stream;
817 CreateMatchingVideoReceiveConfigs(
818 video_send_configs_[video_idx], recv_transport,
819 params_.call.send_side_bwe, &video_decoder_factory_, decode_sub_stream,
820 true, kNackRtpHistoryMs);
821
822 if (params_.screenshare[video_idx].enabled) {
823 // Fill out codec settings.
824 video_encoder_configs_[video_idx].content_type =
825 VideoEncoderConfig::ContentType::kScreen;
826 degradation_preference_ = DegradationPreference::MAINTAIN_RESOLUTION;
827 if (params_.video[video_idx].codec == "VP8") {
828 VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
829 vp8_settings.denoisingOn = false;
830 vp8_settings.frameDroppingOn = false;
831 vp8_settings.numberOfTemporalLayers = static_cast<unsigned char>(
832 params_.video[video_idx].num_temporal_layers);
833 video_encoder_configs_[video_idx].encoder_specific_settings =
834 new rtc::RefCountedObject<
835 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
836 } else if (params_.video[video_idx].codec == "VP9") {
837 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
838 vp9_settings.denoisingOn = false;
839 vp9_settings.frameDroppingOn = false;
840 vp9_settings.automaticResizeOn = false;
841 vp9_settings.numberOfTemporalLayers = static_cast<unsigned char>(
842 params_.video[video_idx].num_temporal_layers);
843 vp9_settings.numberOfSpatialLayers = static_cast<unsigned char>(
844 params_.ss[video_idx].num_spatial_layers);
845 vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred;
846 // High FPS vp9 screenshare requires flexible mode.
847 if (params_.ss[video_idx].num_spatial_layers > 1) {
848 vp9_settings.flexibleMode = true;
849 }
850 video_encoder_configs_[video_idx].encoder_specific_settings =
851 new rtc::RefCountedObject<
852 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
853 }
854 } else if (params_.ss[video_idx].num_spatial_layers > 1) {
855 // In SVC mode without screenshare, we still need to set codec specifics.
856 RTC_CHECK(params_.video[video_idx].codec == "VP9");
857 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
858 vp9_settings.numberOfTemporalLayers = static_cast<unsigned char>(
859 params_.video[video_idx].num_temporal_layers);
860 vp9_settings.numberOfSpatialLayers =
861 static_cast<unsigned char>(params_.ss[video_idx].num_spatial_layers);
862 vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred;
863 vp9_settings.automaticResizeOn = false;
864 video_encoder_configs_[video_idx].encoder_specific_settings =
865 new rtc::RefCountedObject<
866 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
867 } else if (params_.video[video_idx].automatic_scaling) {
868 if (params_.video[video_idx].codec == "VP8") {
869 VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
870 vp8_settings.automaticResizeOn = true;
871 video_encoder_configs_[video_idx].encoder_specific_settings =
872 new rtc::RefCountedObject<
873 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
874 } else if (params_.video[video_idx].codec == "VP9") {
875 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
876 // Only enable quality scaler for single spatial layer.
877 vp9_settings.automaticResizeOn =
878 params_.ss[video_idx].num_spatial_layers == 1;
879 video_encoder_configs_[video_idx].encoder_specific_settings =
880 new rtc::RefCountedObject<
881 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
882 } else if (params_.video[video_idx].codec == "H264") {
883 // Quality scaling is always on for H.264.
884 } else if (params_.video[video_idx].codec == cricket::kAv1CodecName) {
885 // TODO(bugs.webrtc.org/11404): Propagate the flag to
886 // aom_codec_enc_cfg_t::rc_resize_mode in Av1 encoder wrapper.
887 // Until then do nothing, especially do not crash.
888 } else {
889 RTC_NOTREACHED() << "Automatic scaling not supported for codec "
890 << params_.video[video_idx].codec << ", stream "
891 << video_idx;
892 }
893 } else {
894 // Default mode. Single spatial layer, no automatic scaling.
895 if (params_.video[video_idx].codec == "VP8") {
896 VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
897 vp8_settings.automaticResizeOn = false;
898 video_encoder_configs_[video_idx].encoder_specific_settings =
899 new rtc::RefCountedObject<
900 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
901 } else if (params_.video[video_idx].codec == "VP9") {
902 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
903 vp9_settings.automaticResizeOn = false;
904 video_encoder_configs_[video_idx].encoder_specific_settings =
905 new rtc::RefCountedObject<
906 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
907 } else if (params_.video[video_idx].codec == "H264") {
908 VideoCodecH264 h264_settings = VideoEncoder::GetDefaultH264Settings();
909 video_encoder_configs_[video_idx].encoder_specific_settings =
910 new rtc::RefCountedObject<
911 VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
912 }
913 }
914 total_streams_used += num_video_substreams;
915 }
916
917 // FEC is currently supported only in single video stream mode.
918 if (params_.video[0].flexfec) {
919 if (decode_all_receive_streams) {
920 SetSendFecConfig(GetVideoSendConfig()->rtp.ssrcs);
921 } else {
922 SetSendFecConfig({kVideoSendSsrcs[params_.ss[0].selected_stream]});
923 }
924
925 CreateMatchingFecConfig(recv_transport, *GetVideoSendConfig());
926 GetFlexFecConfig()->transport_cc = params_.call.send_side_bwe;
927 if (params_.call.send_side_bwe) {
928 GetFlexFecConfig()->rtp_header_extensions.push_back(
929 RtpExtension(RtpExtension::kTransportSequenceNumberUri,
930 kTransportSequenceNumberExtensionId));
931 } else {
932 GetFlexFecConfig()->rtp_header_extensions.push_back(
933 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
934 }
935 }
936
937 if (params_.video[0].ulpfec) {
938 SetSendUlpFecConfig(GetVideoSendConfig());
939 if (decode_all_receive_streams) {
940 for (auto& receive_config : video_receive_configs_) {
941 SetReceiveUlpFecConfig(&receive_config);
942 }
943 } else {
944 SetReceiveUlpFecConfig(
945 &video_receive_configs_[params_.ss[0].selected_stream]);
946 }
947 }
948 }
949
950 void VideoQualityTest::SetupThumbnails(Transport* send_transport,
951 Transport* recv_transport) {
952 for (int i = 0; i < params_.call.num_thumbnails; ++i) {
953 // Thumbnails will be sent in the other direction: from receiver_call to
954 // sender_call.
955 VideoSendStream::Config thumbnail_send_config(recv_transport);
956 thumbnail_send_config.rtp.ssrcs.push_back(kThumbnailSendSsrcStart + i);
957 // TODO(nisse): Could use a simpler VP8-only encoder factory.
958 thumbnail_send_config.encoder_settings.encoder_factory =
959 &video_encoder_factory_;
960 thumbnail_send_config.encoder_settings.bitrate_allocator_factory =
961 video_bitrate_allocator_factory_.get();
962 thumbnail_send_config.rtp.payload_name = params_.video[0].codec;
963 thumbnail_send_config.rtp.payload_type = kPayloadTypeVP8;
964 thumbnail_send_config.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
965 thumbnail_send_config.rtp.rtx.payload_type = kSendRtxPayloadType;
966 thumbnail_send_config.rtp.rtx.ssrcs.push_back(kThumbnailRtxSsrcStart + i);
967 thumbnail_send_config.rtp.extensions.clear();
968 if (params_.call.send_side_bwe) {
969 thumbnail_send_config.rtp.extensions.push_back(
970 RtpExtension(RtpExtension::kTransportSequenceNumberUri,
971 kTransportSequenceNumberExtensionId));
972 } else {
973 thumbnail_send_config.rtp.extensions.push_back(
974 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
975 }
976
977 VideoEncoderConfig thumbnail_encoder_config;
978 thumbnail_encoder_config.codec_type = kVideoCodecVP8;
979 thumbnail_encoder_config.video_format.name = "VP8";
980 thumbnail_encoder_config.min_transmit_bitrate_bps = 7500;
981 thumbnail_send_config.suspend_below_min_bitrate =
982 params_.video[0].suspend_below_min_bitrate;
983 thumbnail_encoder_config.number_of_streams = 1;
984 thumbnail_encoder_config.max_bitrate_bps = 50000;
985 std::vector<VideoStream> streams{params_.ss[0].streams[0]};
986 thumbnail_encoder_config.video_stream_factory =
987 new rtc::RefCountedObject<VideoStreamFactory>(streams);
988 thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers;
989
990 thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy());
991 thumbnail_send_configs_.push_back(thumbnail_send_config.Copy());
992
993 AddMatchingVideoReceiveConfigs(
994 &thumbnail_receive_configs_, thumbnail_send_config, send_transport,
995 params_.call.send_side_bwe, &video_decoder_factory_, absl::nullopt,
996 false, kNackRtpHistoryMs);
997 }
998 for (size_t i = 0; i < thumbnail_send_configs_.size(); ++i) {
999 thumbnail_send_streams_.push_back(receiver_call_->CreateVideoSendStream(
1000 thumbnail_send_configs_[i].Copy(),
1001 thumbnail_encoder_configs_[i].Copy()));
1002 }
1003 for (size_t i = 0; i < thumbnail_receive_configs_.size(); ++i) {
1004 thumbnail_receive_streams_.push_back(sender_call_->CreateVideoReceiveStream(
1005 thumbnail_receive_configs_[i].Copy()));
1006 }
1007 }
1008
1009 void VideoQualityTest::DestroyThumbnailStreams() {
1010 for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_) {
1011 receiver_call_->DestroyVideoSendStream(thumbnail_send_stream);
1012 }
1013 thumbnail_send_streams_.clear();
1014 for (VideoReceiveStream* thumbnail_receive_stream :
1015 thumbnail_receive_streams_) {
1016 sender_call_->DestroyVideoReceiveStream(thumbnail_receive_stream);
1017 }
1019 thumbnail_receive_streams_.clear();
1020 for (std::unique_ptr<rtc::VideoSourceInterface<VideoFrame>>& video_capturer :
1021 thumbnail_capturers_) {
1022 video_capturer.reset();
1023 }
1024 }
1025
1026 void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) {
1027 VideoStream thumbnail = DefaultThumbnailStream();
1028 for (size_t i = 0; i < num_thumbnail_streams; ++i) {
1029 auto frame_generator_capturer =
1030 std::make_unique<test::FrameGeneratorCapturer>(
1031 clock_,
1032 test::CreateSquareFrameGenerator(static_cast<int>(thumbnail.width),
1033 static_cast<int>(thumbnail.height),
1034 absl::nullopt, absl::nullopt),
1035 thumbnail.max_framerate, *task_queue_factory_);
1036 EXPECT_TRUE(frame_generator_capturer->Init());
1037 thumbnail_capturers_.push_back(std::move(frame_generator_capturer));
1038 }
1039 }
1040
1041 std::unique_ptr<test::FrameGeneratorInterface>
1042 VideoQualityTest::CreateFrameGenerator(size_t video_idx) {
1043 // Setup frame generator.
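// 1850x1110 matches the resolution of the bundled screenshare test clips
// (e.g. web_screenshot_1850_1110.yuv) referenced below.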
1044 const size_t kWidth = 1850;
1045 const size_t kHeight = 1110;
1046 std::unique_ptr<test::FrameGeneratorInterface> frame_generator;
1047 if (params_.screenshare[video_idx].generate_slides) {
1048 frame_generator = test::CreateSlideFrameGenerator(
1049 kWidth, kHeight,
1050 params_.screenshare[video_idx].slide_change_interval *
1051 params_.video[video_idx].fps);
1052 } else {
1053 std::vector<std::string> slides = params_.screenshare[video_idx].slides;
1054 if (slides.empty()) {
1055 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
1056 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
1057 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
1058 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
1059 }
1060 if (params_.screenshare[video_idx].scroll_duration == 0) {
1061 // Cycle image every slide_change_interval seconds.
1062 frame_generator = test::CreateFromYuvFileFrameGenerator(
1063 slides, kWidth, kHeight,
1064 params_.screenshare[video_idx].slide_change_interval *
1065 params_.video[video_idx].fps);
1066 } else {
1067 RTC_CHECK_LE(params_.video[video_idx].width, kWidth);
1068 RTC_CHECK_LE(params_.video[video_idx].height, kHeight);
1069 RTC_CHECK_GT(params_.screenshare[video_idx].slide_change_interval, 0);
1070 const int kPauseDurationMs =
1071 (params_.screenshare[video_idx].slide_change_interval -
1072 params_.screenshare[video_idx].scroll_duration) *
1073 1000;
1074 RTC_CHECK_LE(params_.screenshare[video_idx].scroll_duration,
1075 params_.screenshare[video_idx].slide_change_interval);
1076
1077 frame_generator = test::CreateScrollingInputFromYuvFilesFrameGenerator(
1078 clock_, slides, kWidth, kHeight, params_.video[video_idx].width,
1079 params_.video[video_idx].height,
1080 params_.screenshare[video_idx].scroll_duration * 1000,
1081 kPauseDurationMs);
1082 }
1083 }
1084 return frame_generator;
1085 }
1086
1087 void VideoQualityTest::CreateCapturers() {
1088 RTC_DCHECK(video_sources_.empty());
1089 video_sources_.resize(num_video_streams_);
1090 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
1091 std::unique_ptr<test::FrameGeneratorInterface> frame_generator;
1092 if (params_.screenshare[video_idx].enabled) {
1093 frame_generator = CreateFrameGenerator(video_idx);
1094 } else if (params_.video[video_idx].clip_path == "Generator") {
1095 frame_generator = test::CreateSquareFrameGenerator(
1096 static_cast<int>(params_.video[video_idx].width),
1097 static_cast<int>(params_.video[video_idx].height), absl::nullopt,
1098 absl::nullopt);
1099 } else if (params_.video[video_idx].clip_path == "GeneratorI420A") {
1100 frame_generator = test::CreateSquareFrameGenerator(
1101 static_cast<int>(params_.video[video_idx].width),
1102 static_cast<int>(params_.video[video_idx].height),
1103 test::FrameGeneratorInterface::OutputType::kI420A, absl::nullopt);
1104 } else if (params_.video[video_idx].clip_path == "GeneratorI010") {
1105 frame_generator = test::CreateSquareFrameGenerator(
1106 static_cast<int>(params_.video[video_idx].width),
1107 static_cast<int>(params_.video[video_idx].height),
1108 test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt);
1109 } else if (params_.video[video_idx].clip_path.empty()) {
1110 video_sources_[video_idx] = test::CreateVideoCapturer(
1111 params_.video[video_idx].width, params_.video[video_idx].height,
1112 params_.video[video_idx].fps,
1113 params_.video[video_idx].capture_device_index);
1114 if (video_sources_[video_idx]) {
1115 continue;
1116 } else {
1117 // Failed to get actual camera, use chroma generator as backup.
1118 frame_generator = test::CreateSquareFrameGenerator(
1119 static_cast<int>(params_.video[video_idx].width),
1120 static_cast<int>(params_.video[video_idx].height), absl::nullopt,
1121 absl::nullopt);
1122 }
1123 } else {
1124 frame_generator = test::CreateFromYuvFileFrameGenerator(
1125 {params_.video[video_idx].clip_path}, params_.video[video_idx].width,
1126 params_.video[video_idx].height, 1);
1127 ASSERT_TRUE(frame_generator) << "Could not create capturer for "
1128 << params_.video[video_idx].clip_path
1129 << ".yuv. Is this file present?";
1130 }
1131 ASSERT_TRUE(frame_generator);
1132 auto frame_generator_capturer =
1133 std::make_unique<test::FrameGeneratorCapturer>(
1134 clock_, std::move(frame_generator), params_.video[video_idx].fps,
1135 *task_queue_factory_);
1136 EXPECT_TRUE(frame_generator_capturer->Init());
1137 video_sources_[video_idx] = std::move(frame_generator_capturer);
1138 }
1139 }
1140
1141 void VideoQualityTest::StartAudioStreams() {
1142 audio_send_stream_->Start();
1143 for (AudioReceiveStream* audio_recv_stream : audio_receive_streams_)
1144 audio_recv_stream->Start();
1145 }
1146
1147 void VideoQualityTest::StartThumbnails() {
1148 for (VideoSendStream* send_stream : thumbnail_send_streams_)
1149 send_stream->Start();
1150 for (VideoReceiveStream* receive_stream : thumbnail_receive_streams_)
1151 receive_stream->Start();
1152 }
1153
1154 void VideoQualityTest::StopThumbnails() {
1155 for (VideoReceiveStream* receive_stream : thumbnail_receive_streams_)
1156 receive_stream->Stop();
1157 for (VideoSendStream* send_stream : thumbnail_send_streams_)
1158 send_stream->Stop();
1159 }
1160
1161 std::unique_ptr<test::LayerFilteringTransport>
1162 VideoQualityTest::CreateSendTransport() {
1163 std::unique_ptr<NetworkBehaviorInterface> network_behavior = nullptr;
1164 if (injection_components_->sender_network == nullptr) {
1165 network_behavior = std::make_unique<SimulatedNetwork>(*params_.config);
1166 } else {
1167 network_behavior = std::move(injection_components_->sender_network);
1168 }
1169 return std::make_unique<test::LayerFilteringTransport>(
1170 task_queue(),
1171 std::make_unique<FakeNetworkPipe>(clock_, std::move(network_behavior)),
1172 sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
1173 params_.video[0].selected_tl, params_.ss[0].selected_sl,
1174 payload_type_map_, kVideoSendSsrcs[0],
1175 static_cast<uint32_t>(kVideoSendSsrcs[0] + params_.ss[0].streams.size() -
1176 1));
1177 }
1178
1179 std::unique_ptr<test::DirectTransport>
1180 VideoQualityTest::CreateReceiveTransport() {
1181 std::unique_ptr<NetworkBehaviorInterface> network_behavior = nullptr;
1182 if (injection_components_->receiver_network == nullptr) {
1183 network_behavior = std::make_unique<SimulatedNetwork>(*params_.config);
1184 } else {
1185 network_behavior = std::move(injection_components_->receiver_network);
1186 }
1187 return std::make_unique<test::DirectTransport>(
1188 task_queue(),
1189 std::make_unique<FakeNetworkPipe>(clock_, std::move(network_behavior)),
1190 receiver_call_.get(), payload_type_map_);
1191 }
1192
1193 void VideoQualityTest::RunWithAnalyzer(const Params& params) {
1194 num_video_streams_ = params.call.dual_video ? 2 : 1;
1195 std::unique_ptr<test::LayerFilteringTransport> send_transport;
1196 std::unique_ptr<test::DirectTransport> recv_transport;
1197 FILE* graph_data_output_file = nullptr;
1198
1199 params_ = params;
1200 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
1201 // differentiate between the analyzer and the renderer case.
1202 CheckParamsAndInjectionComponents();
1203
1204 if (!params_.analyzer.graph_data_output_filename.empty()) {
1205 graph_data_output_file =
1206 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w");
1207 RTC_CHECK(graph_data_output_file)
1208 << "Can't open the file " << params_.analyzer.graph_data_output_filename
1209 << "!";
1210 }
1211
1212 if (!params.logging.rtc_event_log_name.empty()) {
1213 send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
1214 RtcEventLog::EncodingType::Legacy);
1215 recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
1216 RtcEventLog::EncodingType::Legacy);
1217 std::unique_ptr<RtcEventLogOutputFile> send_output(
1218 std::make_unique<RtcEventLogOutputFile>(
1219 params.logging.rtc_event_log_name + "_send",
1220 RtcEventLog::kUnlimitedOutput));
1221 std::unique_ptr<RtcEventLogOutputFile> recv_output(
1222 std::make_unique<RtcEventLogOutputFile>(
1223 params.logging.rtc_event_log_name + "_recv",
1224 RtcEventLog::kUnlimitedOutput));
1225 bool event_log_started =
1226 send_event_log_->StartLogging(std::move(send_output),
1227 RtcEventLog::kImmediateOutput) &&
1228 recv_event_log_->StartLogging(std::move(recv_output),
1229 RtcEventLog::kImmediateOutput);
1230 RTC_DCHECK(event_log_started);
1231 } else {
1232 send_event_log_ = std::make_unique<RtcEventLogNull>();
1233 recv_event_log_ = std::make_unique<RtcEventLogNull>();
1234 }
1235
1236 SendTask(RTC_FROM_HERE, task_queue(),
1237 [this, &params, &send_transport, &recv_transport]() {
1238 Call::Config send_call_config(send_event_log_.get());
1239 Call::Config recv_call_config(recv_event_log_.get());
1240 send_call_config.bitrate_config = params.call.call_bitrate_config;
1241 recv_call_config.bitrate_config = params.call.call_bitrate_config;
1242 if (params_.audio.enabled)
1243 InitializeAudioDevice(&send_call_config, &recv_call_config,
1244 params_.audio.use_real_adm);
1245
1246 CreateCalls(send_call_config, recv_call_config);
1247 send_transport = CreateSendTransport();
1248 recv_transport = CreateReceiveTransport();
1249 });
1250
1251 std::string graph_title = params_.analyzer.graph_title;
1252 if (graph_title.empty())
1253 graph_title = VideoQualityTest::GenerateGraphTitle();
1254 bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest");
1255 analyzer_ = std::make_unique<VideoAnalyzer>(
1256 send_transport.get(), params_.analyzer.test_label,
1257 params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold,
1258 is_quick_test_enabled
1259 ? kFramesSentInQuickTest
1260 : params_.analyzer.test_durations_secs * params_.video[0].fps,
1261 is_quick_test_enabled
1262 ? TimeDelta::Millis(1)
1263 : TimeDelta::Seconds(params_.analyzer.test_durations_secs),
1264 graph_data_output_file, graph_title,
1265 kVideoSendSsrcs[params_.ss[0].selected_stream],
1266 kSendRtxSsrcs[params_.ss[0].selected_stream],
1267 static_cast<size_t>(params_.ss[0].selected_stream),
1268 params.ss[0].selected_sl, params_.video[0].selected_tl,
1269 is_quick_test_enabled, clock_, params_.logging.rtp_dump_name,
1270 task_queue());
1271
1272 SendTask(RTC_FROM_HERE, task_queue(), [&]() {
1273 analyzer_->SetCall(sender_call_.get());
1274 analyzer_->SetReceiver(receiver_call_->Receiver());
1275 send_transport->SetReceiver(analyzer_.get());
1276 recv_transport->SetReceiver(sender_call_->Receiver());
1277
1278 SetupVideo(analyzer_.get(), recv_transport.get());
1279 SetupThumbnails(analyzer_.get(), recv_transport.get());
1280 video_receive_configs_[params_.ss[0].selected_stream].renderer =
1281 analyzer_.get();
1282
1283 CreateFlexfecStreams();
1284 CreateVideoStreams();
1285 analyzer_->SetSendStream(video_send_streams_[0]);
1286 analyzer_->SetReceiveStream(
1287 video_receive_streams_[params_.ss[0].selected_stream]);
1288
1289 GetVideoSendStream()->SetSource(analyzer_->OutputInterface(),
1290 degradation_preference_);
1291 SetupThumbnailCapturers(params_.call.num_thumbnails);
1292 for (size_t i = 0; i < thumbnail_send_streams_.size(); ++i) {
1293 thumbnail_send_streams_[i]->SetSource(thumbnail_capturers_[i].get(),
1294 degradation_preference_);
1295 }
1296
1297 CreateCapturers();
1298
1299 analyzer_->SetSource(video_sources_[0].get(), true);
1300
1301 for (size_t video_idx = 1; video_idx < num_video_streams_; ++video_idx) {
1302 video_send_streams_[video_idx]->SetSource(video_sources_[video_idx].get(),
1303 degradation_preference_);
1304 }
1305
1306 if (params_.audio.enabled) {
1307 SetupAudio(send_transport.get());
1308 StartAudioStreams();
1309 analyzer_->SetAudioReceiveStream(audio_receive_streams_[0]);
1310 }
1311 StartVideoStreams();
1312 StartThumbnails();
1313 analyzer_->StartMeasuringCpuProcessTime();
1314 });
1315
1316 analyzer_->Wait();
1317
1318 SendTask(RTC_FROM_HERE, task_queue(), [&]() {
1319 StopThumbnails();
1320 Stop();
1321
1322 DestroyStreams();
1323 DestroyThumbnailStreams();
1324
1325 if (graph_data_output_file)
1326 fclose(graph_data_output_file);
1327
1328 send_transport.reset();
1329 recv_transport.reset();
1330
1331 DestroyCalls();
1332 });
1333 analyzer_ = nullptr;
1334 }
1335
1336 rtc::scoped_refptr<AudioDeviceModule> VideoQualityTest::CreateAudioDevice() {
1337 #ifdef WEBRTC_WIN
1338 RTC_LOG(INFO) << "Using latest version of ADM on Windows";
1339 // We must initialize the COM library on a thread before calling any of the
1340 // library functions. All COM functions in the ADM will return
1341 // CO_E_NOTINITIALIZED otherwise. The legacy ADM for Windows used internal
1342 // COM initialization, but the new ADM requires COM to be initialized
1343 // externally.
1344 com_initializer_ = std::make_unique<webrtc_win::ScopedCOMInitializer>(
1345 webrtc_win::ScopedCOMInitializer::kMTA);
1346 RTC_CHECK(com_initializer_->Succeeded());
1347 RTC_CHECK(webrtc_win::core_audio_utility::IsSupported());
1348 RTC_CHECK(webrtc_win::core_audio_utility::IsMMCSSSupported());
1349 return CreateWindowsCoreAudioAudioDeviceModule(task_queue_factory_.get());
1350 #else
1351 // Use legacy factory method on all platforms except Windows.
1352 return AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio,
1353 task_queue_factory_.get());
1354 #endif
1355 }
1356
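// Creates a real or fake ADM and installs AudioState objects, built around the
// same ADM, mixer and audio processing, into both the send and receive call
// configs.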
1357 void VideoQualityTest::InitializeAudioDevice(Call::Config* send_call_config,
1358 Call::Config* recv_call_config,
1359 bool use_real_adm) {
1360 rtc::scoped_refptr<AudioDeviceModule> audio_device;
1361 if (use_real_adm) {
1362 // Run the test with a real ADM (using the default audio devices) if the
1363 // user has explicitly set the --audio and --use_real_adm command-line flags.
1364 audio_device = CreateAudioDevice();
1365 } else {
1366 // By default, create a test ADM which fakes audio.
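// The fake device captures pulsed noise and discards playout, both at 48 kHz,
// running at real-time speed.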
1367 audio_device = TestAudioDeviceModule::Create(
1368 task_queue_factory_.get(),
1369 TestAudioDeviceModule::CreatePulsedNoiseCapturer(32000, 48000),
1370 TestAudioDeviceModule::CreateDiscardRenderer(48000), 1.f);
1371 }
1372 RTC_CHECK(audio_device);
1373
1374 AudioState::Config audio_state_config;
1375 audio_state_config.audio_mixer = AudioMixerImpl::Create();
1376 audio_state_config.audio_processing = AudioProcessingBuilder().Create();
1377 audio_state_config.audio_device_module = audio_device;
1378 send_call_config->audio_state = AudioState::Create(audio_state_config);
1379 recv_call_config->audio_state = AudioState::Create(audio_state_config);
1380 if (use_real_adm) {
1381 // The real ADM requires extra initialization: selecting default devices,
1382 // configuring the number of channels, etc. The helper also calls
1383 // AudioDeviceModule::Init().
1384 webrtc::adm_helpers::Init(audio_device.get());
1385 } else {
1386 audio_device->Init();
1387 }
1388 // Always initialize the ADM before injecting a valid audio transport.
1389 RTC_CHECK(audio_device->RegisterAudioCallback(
1390 send_call_config->audio_state->audio_transport()) == 0);
1391 }
1392
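// Configures the audio send stream to use Opus at 48 kHz stereo, with DTX
// controlled by params_.audio.dtx. When send-side BWE is enabled, the
// transport sequence number extension, transport-cc feedback, ANA and the Opus
// min/max bitrates are turned on as well.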
1393 void VideoQualityTest::SetupAudio(Transport* transport) {
1394 AudioSendStream::Config audio_send_config(transport);
1395 audio_send_config.rtp.ssrc = kAudioSendSsrc;
1396
1397 // Add an extension to enable audio send-side BWE and allow audio bitrate
1398 // adaptation.
1399 audio_send_config.rtp.extensions.clear();
1400 audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
1401 kAudioSendPayloadType,
1402 {"OPUS",
1403 48000,
1404 2,
1405 {{"usedtx", (params_.audio.dtx ? "1" : "0")}, {"stereo", "1"}}});
1406
1407 if (params_.call.send_side_bwe) {
1408 audio_send_config.rtp.extensions.push_back(
1409 webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri,
1410 kTransportSequenceNumberExtensionId));
1411 audio_send_config.min_bitrate_bps = kOpusMinBitrateBps;
1412 audio_send_config.max_bitrate_bps = kOpusBitrateFbBps;
1413 audio_send_config.send_codec_spec->transport_cc_enabled = true;
1414 // Only allow ANA when send-side BWE is enabled.
1415 audio_send_config.audio_network_adaptor_config = params_.audio.ana_config;
1416 }
1417 audio_send_config.encoder_factory = audio_encoder_factory_;
1418 SetAudioConfig(audio_send_config);
1419
1420 std::string sync_group;
1421 if (params_.video[0].enabled && params_.audio.sync_video)
1422 sync_group = kSyncGroup;
1423
1424 CreateMatchingAudioConfigs(transport, sync_group);
1425 CreateAudioStreams();
1426 }
1427
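// Interactive counterpart of RunWithAnalyzer: instead of scoring quality, the
// received streams are looped back into on-screen renderers (plus a local
// preview of the capturer) and the test runs until the user presses Enter.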
1428 void VideoQualityTest::RunWithRenderers(const Params& params) {
1429 RTC_LOG(INFO) << __FUNCTION__;
1430 num_video_streams_ = params.call.dual_video ? 2 : 1;
1431 std::unique_ptr<test::LayerFilteringTransport> send_transport;
1432 std::unique_ptr<test::DirectTransport> recv_transport;
1433 std::unique_ptr<test::VideoRenderer> local_preview;
1434 std::vector<std::unique_ptr<test::VideoRenderer>> loopback_renderers;
1435
1436 if (!params.logging.rtc_event_log_name.empty()) {
1437 send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
1438 RtcEventLog::EncodingType::Legacy);
1439 recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
1440 RtcEventLog::EncodingType::Legacy);
1441 std::unique_ptr<RtcEventLogOutputFile> send_output(
1442 std::make_unique<RtcEventLogOutputFile>(
1443 params.logging.rtc_event_log_name + "_send",
1444 RtcEventLog::kUnlimitedOutput));
1445 std::unique_ptr<RtcEventLogOutputFile> recv_output(
1446 std::make_unique<RtcEventLogOutputFile>(
1447 params.logging.rtc_event_log_name + "_recv",
1448 RtcEventLog::kUnlimitedOutput));
1449 bool event_log_started =
1450 send_event_log_->StartLogging(std::move(send_output),
1451 /*output_period_ms=*/5000) &&
1452 recv_event_log_->StartLogging(std::move(recv_output),
1453 /*output_period_ms=*/5000);
1454 RTC_DCHECK(event_log_started);
1455 } else {
1456 send_event_log_ = std::make_unique<RtcEventLogNull>();
1457 recv_event_log_ = std::make_unique<RtcEventLogNull>();
1458 }
1459
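// As in RunWithAnalyzer, all setup runs on the test task queue. Here the two
// transports simply loop packets directly between the sender and receiver
// calls; no analyzer is inserted into the path.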
1460 SendTask(RTC_FROM_HERE, task_queue(), [&]() {
1461 params_ = params;
1462 CheckParamsAndInjectionComponents();
1463
1464 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
1465 // match the full stack tests.
1466 Call::Config send_call_config(send_event_log_.get());
1467 send_call_config.bitrate_config = params_.call.call_bitrate_config;
1468 Call::Config recv_call_config(recv_event_log_.get());
1469
1470 if (params_.audio.enabled)
1471 InitializeAudioDevice(&send_call_config, &recv_call_config,
1472 params_.audio.use_real_adm);
1473
1474 CreateCalls(send_call_config, recv_call_config);
1475
1476 // TODO(minyue): consider whether this is a good transport even for
1477 // audio-only calls.
1478 send_transport = CreateSendTransport();
1479
1480 recv_transport = CreateReceiveTransport();
1481
1482 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
1483 // least share as much code as possible. That way this test would also match
1484 // the full stack tests better.
1485 send_transport->SetReceiver(receiver_call_->Receiver());
1486 recv_transport->SetReceiver(sender_call_->Receiver());
1487
1488 if (params_.video[0].enabled) {
1489 // Create video renderers.
1490 SetupVideo(send_transport.get(), recv_transport.get());
1491 size_t num_streams_processed = 0;
1492 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
1493 const size_t selected_stream_id = params_.ss[video_idx].selected_stream;
1494 const size_t num_streams = params_.ss[video_idx].streams.size();
1495 if (selected_stream_id == num_streams) {
1496 for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) {
1497 rtc::StringBuilder oss;
1498 oss << "Loopback Video #" << video_idx << " - Stream #"
1499 << static_cast<int>(stream_id);
1500 loopback_renderers.emplace_back(test::VideoRenderer::Create(
1501 oss.str().c_str(),
1502 params_.ss[video_idx].streams[stream_id].width,
1503 params_.ss[video_idx].streams[stream_id].height));
1504 video_receive_configs_[stream_id + num_streams_processed].renderer =
1505 loopback_renderers.back().get();
1506 if (params_.audio.enabled && params_.audio.sync_video)
1507 video_receive_configs_[stream_id + num_streams_processed]
1508 .sync_group = kSyncGroup;
1509 }
1510 } else {
1511 rtc::StringBuilder oss;
1512 oss << "Loopback Video #" << video_idx;
1513 loopback_renderers.emplace_back(test::VideoRenderer::Create(
1514 oss.str().c_str(),
1515 params_.ss[video_idx].streams[selected_stream_id].width,
1516 params_.ss[video_idx].streams[selected_stream_id].height));
1517 video_receive_configs_[selected_stream_id + num_streams_processed]
1518 .renderer = loopback_renderers.back().get();
1519 if (params_.audio.enabled && params_.audio.sync_video)
1520 video_receive_configs_[num_streams_processed + selected_stream_id]
1521 .sync_group = kSyncGroup;
1522 }
1523 num_streams_processed += num_streams;
1524 }
1525 CreateFlexfecStreams();
1526 CreateVideoStreams();
1527
1528 CreateCapturers();
1529 if (params_.video[0].enabled) {
1530 // Create local preview
1531 local_preview.reset(test::VideoRenderer::Create(
1532 "Local Preview", params_.video[0].width, params_.video[0].height));
1533
1534 video_sources_[0]->AddOrUpdateSink(local_preview.get(),
1535 rtc::VideoSinkWants());
1536 }
1537 ConnectVideoSourcesToStreams();
1538 }
1539
1540 if (params_.audio.enabled) {
1541 SetupAudio(send_transport.get());
1542 }
1543
1544 Start();
1545 });
1546
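// Keep the call running until the user presses Enter.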
1547 PressEnterToContinue(task_queue());
1548
1549 SendTask(RTC_FROM_HERE, task_queue(), [&]() {
1550 Stop();
1551 DestroyStreams();
1552
1553 send_transport.reset();
1554 recv_transport.reset();
1555
1556 local_preview.reset();
1557 loopback_renderers.clear();
1558
1559 DestroyCalls();
1560 });
1561 }
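// Usage sketch (not part of this file, for orientation only): a caller fills
// in Params and picks one of the two entry points. Field names follow
// video_quality_test.h; the concrete values below are illustrative assumptions.
//
//   VideoQualityTest::Params params;
//   params.video[0].enabled = true;
//   params.video[0].width = 640;
//   params.video[0].height = 480;
//   params.video[0].fps = 30;
//   params.video[0].codec = "VP8";
//   params.analyzer.test_label = "example";
//   params.analyzer.test_durations_secs = 10;
//   VideoQualityTest test(/*injection_components=*/nullptr);
//   test.RunWithAnalyzer(params);  // Or RunWithRenderers(params) for a UI run.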
1562
1563 } // namespace webrtc
1564