1 /*
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10 #include <algorithm> // max
11 #include <memory>
12 #include <vector>
13
14 #include "absl/algorithm/container.h"
15 #include "api/task_queue/default_task_queue_factory.h"
16 #include "api/task_queue/task_queue_base.h"
17 #include "api/test/simulated_network.h"
18 #include "api/video/builtin_video_bitrate_allocator_factory.h"
19 #include "api/video/encoded_image.h"
20 #include "api/video/video_bitrate_allocation.h"
21 #include "api/video_codecs/video_encoder.h"
22 #include "call/call.h"
23 #include "call/fake_network_pipe.h"
24 #include "call/rtp_transport_controller_send.h"
25 #include "call/simulated_network.h"
26 #include "call/video_send_stream.h"
27 #include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
28 #include "modules/rtp_rtcp/source/rtcp_sender.h"
29 #include "modules/rtp_rtcp/source/rtp_header_extensions.h"
30 #include "modules/rtp_rtcp/source/rtp_packet.h"
31 #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
32 #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h"
33 #include "modules/video_coding/codecs/vp8/include/vp8.h"
34 #include "modules/video_coding/codecs/vp9/include/vp9.h"
35 #include "rtc_base/checks.h"
36 #include "rtc_base/event.h"
37 #include "rtc_base/experiments/alr_experiment.h"
38 #include "rtc_base/logging.h"
39 #include "rtc_base/platform_thread.h"
40 #include "rtc_base/rate_limiter.h"
41 #include "rtc_base/synchronization/mutex.h"
42 #include "rtc_base/synchronization/sequence_checker.h"
43 #include "rtc_base/task_queue_for_test.h"
44 #include "rtc_base/task_utils/to_queued_task.h"
45 #include "rtc_base/time_utils.h"
46 #include "rtc_base/unique_id_generator.h"
47 #include "system_wrappers/include/sleep.h"
48 #include "test/call_test.h"
49 #include "test/configurable_frame_size_encoder.h"
50 #include "test/fake_encoder.h"
51 #include "test/fake_texture_frame.h"
52 #include "test/field_trial.h"
53 #include "test/frame_forwarder.h"
54 #include "test/frame_generator_capturer.h"
55 #include "test/frame_utils.h"
56 #include "test/gmock.h"
57 #include "test/gtest.h"
58 #include "test/null_transport.h"
59 #include "test/rtcp_packet_parser.h"
60 #include "test/rtp_header_parser.h"
61 #include "test/testsupport/perf_test.h"
62 #include "test/video_encoder_proxy_factory.h"
63 #include "video/send_statistics_proxy.h"
64 #include "video/transport_adapter.h"
65 #include "video/video_send_stream.h"
66
67 namespace webrtc {
68 namespace test {
69 class VideoSendStreamPeer {
70 public:
  explicit VideoSendStreamPeer(webrtc::VideoSendStream* base_class_stream)
72 : internal_stream_(
73 static_cast<internal::VideoSendStream*>(base_class_stream)) {}
  absl::optional<float> GetPacingFactorOverride() const {
75 return internal_stream_->GetPacingFactorOverride();
76 }
77
78 private:
79 internal::VideoSendStream const* const internal_stream_;
80 };
81 } // namespace test
82
83 namespace {
84 enum : int { // The first valid value is 1.
85 kAbsSendTimeExtensionId = 1,
86 kTimestampOffsetExtensionId,
87 kTransportSequenceNumberExtensionId,
88 kVideoContentTypeExtensionId,
89 kVideoRotationExtensionId,
90 kVideoTimingExtensionId,
91 };
92
93 constexpr int64_t kRtcpIntervalMs = 1000;
94
95 enum VideoFormat {
96 kGeneric,
97 kVP8,
98 };
99
VideoFrame CreateVideoFrame(int width, int height, int64_t timestamp_ms) {
101 return webrtc::VideoFrame::Builder()
102 .set_video_frame_buffer(I420Buffer::Create(width, height))
103 .set_rotation(webrtc::kVideoRotation_0)
104 .set_timestamp_ms(timestamp_ms)
105 .build();
106 }
107 } // namespace
108
109 class VideoSendStreamTest : public test::CallTest {
110 public:
  VideoSendStreamTest() {
112 RegisterRtpExtension(RtpExtension(RtpExtension::kTransportSequenceNumberUri,
113 kTransportSequenceNumberExtensionId));
114 }
115
116 protected:
117 void TestNackRetransmission(uint32_t retransmit_ssrc,
118 uint8_t retransmit_payload_type);
119 void TestPacketFragmentationSize(VideoFormat format, bool with_fec);
120
121 void TestVp9NonFlexMode(uint8_t num_temporal_layers,
122 uint8_t num_spatial_layers);
123
124 void TestRequestSourceRotateVideo(bool support_orientation_ext);
125 };
126
TEST_F(VideoSendStreamTest, CanStartStartedStream) {
128 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
129 CreateSenderCall();
130
131 test::NullTransport transport;
132 CreateSendConfig(1, 0, 0, &transport);
133 CreateVideoStreams();
134 GetVideoSendStream()->Start();
135 GetVideoSendStream()->Start();
136 DestroyStreams();
137 DestroyCalls();
138 });
139 }
140
TEST_F(VideoSendStreamTest, CanStopStoppedStream) {
142 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
143 CreateSenderCall();
144
145 test::NullTransport transport;
146 CreateSendConfig(1, 0, 0, &transport);
147 CreateVideoStreams();
148 GetVideoSendStream()->Stop();
149 GetVideoSendStream()->Stop();
150 DestroyStreams();
151 DestroyCalls();
152 });
153 }
154
TEST_F(VideoSendStreamTest, SupportsCName) {
156 static std::string kCName = "PjQatC14dGfbVwGPUOA9IH7RlsFDbWl4AhXEiDsBizo=";
157 class CNameObserver : public test::SendTest {
158 public:
159 CNameObserver() : SendTest(kDefaultTimeoutMs) {}
160
161 private:
162 Action OnSendRtcp(const uint8_t* packet, size_t length) override {
163 test::RtcpPacketParser parser;
164 EXPECT_TRUE(parser.Parse(packet, length));
165 if (parser.sdes()->num_packets() > 0) {
166 EXPECT_EQ(1u, parser.sdes()->chunks().size());
167 EXPECT_EQ(kCName, parser.sdes()->chunks()[0].cname);
168
169 observation_complete_.Set();
170 }
171
172 return SEND_PACKET;
173 }
174
175 void ModifyVideoConfigs(
176 VideoSendStream::Config* send_config,
177 std::vector<VideoReceiveStream::Config>* receive_configs,
178 VideoEncoderConfig* encoder_config) override {
179 send_config->rtp.c_name = kCName;
180 }
181
182 void PerformTest() override {
183 EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP with CNAME.";
184 }
185 } test;
186
187 RunBaseTest(&test);
188 }
189
TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
191 class AbsoluteSendTimeObserver : public test::SendTest {
192 public:
193 AbsoluteSendTimeObserver() : SendTest(kDefaultTimeoutMs) {
194 extensions_.Register<AbsoluteSendTime>(kAbsSendTimeExtensionId);
195 }
196
197 Action OnSendRtp(const uint8_t* packet, size_t length) override {
198 RtpPacket rtp_packet(&extensions_);
199 EXPECT_TRUE(rtp_packet.Parse(packet, length));
200
201 uint32_t abs_send_time = 0;
202 EXPECT_FALSE(rtp_packet.HasExtension<TransmissionOffset>());
203 EXPECT_TRUE(rtp_packet.GetExtension<AbsoluteSendTime>(&abs_send_time));
204 if (abs_send_time != 0) {
        // Wait for at least one packet with a non-zero send time. The send
        // time is a 24-bit value derived from the system clock, and it is
        // valid for a packet to have a zero send time. To tell that from an
        // unpopulated value we'll wait for a packet with non-zero send time.
209 observation_complete_.Set();
210 } else {
211 RTC_LOG(LS_WARNING)
212 << "Got a packet with zero absoluteSendTime, waiting"
213 " for another packet...";
214 }
215
216 return SEND_PACKET;
217 }
218
219 void ModifyVideoConfigs(
220 VideoSendStream::Config* send_config,
221 std::vector<VideoReceiveStream::Config>* receive_configs,
222 VideoEncoderConfig* encoder_config) override {
223 send_config->rtp.extensions.clear();
224 send_config->rtp.extensions.push_back(
225 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
226 }
227
228 void PerformTest() override {
229 EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
230 }
231
232 private:
233 RtpHeaderExtensionMap extensions_;
234 } test;
235
236 RunBaseTest(&test);
237 }
238
TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
240 static const int kEncodeDelayMs = 5;
241 class TransmissionTimeOffsetObserver : public test::SendTest {
242 public:
243 TransmissionTimeOffsetObserver()
244 : SendTest(kDefaultTimeoutMs), encoder_factory_([]() {
245 return std::make_unique<test::DelayedEncoder>(
246 Clock::GetRealTimeClock(), kEncodeDelayMs);
247 }) {
248 extensions_.Register<TransmissionOffset>(kTimestampOffsetExtensionId);
249 }
250
251 private:
252 Action OnSendRtp(const uint8_t* packet, size_t length) override {
253 RtpPacket rtp_packet(&extensions_);
254 EXPECT_TRUE(rtp_packet.Parse(packet, length));
255
256 int32_t toffset = 0;
257 EXPECT_TRUE(rtp_packet.GetExtension<TransmissionOffset>(&toffset));
258 EXPECT_FALSE(rtp_packet.HasExtension<AbsoluteSendTime>());
259 EXPECT_GT(toffset, 0);
260 observation_complete_.Set();
261
262 return SEND_PACKET;
263 }
264
265 void ModifyVideoConfigs(
266 VideoSendStream::Config* send_config,
267 std::vector<VideoReceiveStream::Config>* receive_configs,
268 VideoEncoderConfig* encoder_config) override {
269 send_config->encoder_settings.encoder_factory = &encoder_factory_;
270 send_config->rtp.extensions.clear();
271 send_config->rtp.extensions.push_back(RtpExtension(
272 RtpExtension::kTimestampOffsetUri, kTimestampOffsetExtensionId));
273 }
274
275 void PerformTest() override {
276 EXPECT_TRUE(Wait()) << "Timed out while waiting for a single RTP packet.";
277 }
278
279 test::FunctionVideoEncoderFactory encoder_factory_;
280 RtpHeaderExtensionMap extensions_;
281 } test;
282
283 RunBaseTest(&test);
284 }
285
TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
287 static const uint8_t kExtensionId = kTransportSequenceNumberExtensionId;
288 class TransportWideSequenceNumberObserver : public test::SendTest {
289 public:
290 TransportWideSequenceNumberObserver()
291 : SendTest(kDefaultTimeoutMs), encoder_factory_([]() {
292 return std::make_unique<test::FakeEncoder>(
293 Clock::GetRealTimeClock());
294 }) {
295 extensions_.Register<TransportSequenceNumber>(kExtensionId);
296 }
297
298 private:
299 Action OnSendRtp(const uint8_t* packet, size_t length) override {
300 RtpPacket rtp_packet(&extensions_);
301 EXPECT_TRUE(rtp_packet.Parse(packet, length));
302
303 EXPECT_TRUE(rtp_packet.HasExtension<TransportSequenceNumber>());
304 EXPECT_FALSE(rtp_packet.HasExtension<TransmissionOffset>());
305 EXPECT_FALSE(rtp_packet.HasExtension<AbsoluteSendTime>());
306
307 observation_complete_.Set();
308
309 return SEND_PACKET;
310 }
311
312 void ModifyVideoConfigs(
313 VideoSendStream::Config* send_config,
314 std::vector<VideoReceiveStream::Config>* receive_configs,
315 VideoEncoderConfig* encoder_config) override {
316 send_config->encoder_settings.encoder_factory = &encoder_factory_;
317 }
318
319 void PerformTest() override {
320 EXPECT_TRUE(Wait()) << "Timed out while waiting for a single RTP packet.";
321 }
322
323 test::FunctionVideoEncoderFactory encoder_factory_;
324 RtpHeaderExtensionMap extensions_;
325 } test;
326
327 RunBaseTest(&test);
328 }
329
TEST_F(VideoSendStreamTest, SupportsVideoRotation) {
331 class VideoRotationObserver : public test::SendTest {
332 public:
333 VideoRotationObserver() : SendTest(kDefaultTimeoutMs) {
334 extensions_.Register<VideoOrientation>(kVideoRotationExtensionId);
335 }
336
337 Action OnSendRtp(const uint8_t* packet, size_t length) override {
338 RtpPacket rtp_packet(&extensions_);
339 EXPECT_TRUE(rtp_packet.Parse(packet, length));
340 // Only the last packet of the frame is required to have the extension.
341 if (!rtp_packet.Marker())
342 return SEND_PACKET;
343 EXPECT_EQ(rtp_packet.GetExtension<VideoOrientation>(), kVideoRotation_90);
344 observation_complete_.Set();
345 return SEND_PACKET;
346 }
347
348 void ModifyVideoConfigs(
349 VideoSendStream::Config* send_config,
350 std::vector<VideoReceiveStream::Config>* receive_configs,
351 VideoEncoderConfig* encoder_config) override {
352 send_config->rtp.extensions.clear();
353 send_config->rtp.extensions.push_back(RtpExtension(
354 RtpExtension::kVideoRotationUri, kVideoRotationExtensionId));
355 }
356
357 void OnFrameGeneratorCapturerCreated(
358 test::FrameGeneratorCapturer* frame_generator_capturer) override {
359 frame_generator_capturer->SetFakeRotation(kVideoRotation_90);
360 }
361
362 void PerformTest() override {
363 EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
364 }
365
366 private:
367 RtpHeaderExtensionMap extensions_;
368 } test;
369
370 RunBaseTest(&test);
371 }
372
TEST_F(VideoSendStreamTest, SupportsVideoContentType) {
374 class VideoContentTypeObserver : public test::SendTest {
375 public:
376 VideoContentTypeObserver()
377 : SendTest(kDefaultTimeoutMs), first_frame_sent_(false) {
378 extensions_.Register<VideoContentTypeExtension>(
379 kVideoContentTypeExtensionId);
380 }
381
382 Action OnSendRtp(const uint8_t* packet, size_t length) override {
383 RtpPacket rtp_packet(&extensions_);
384 EXPECT_TRUE(rtp_packet.Parse(packet, length));
385 // Only the last packet of the key-frame must have extension.
386 if (!rtp_packet.Marker() || first_frame_sent_)
387 return SEND_PACKET;
388 // First marker bit seen means that the first frame is sent.
389 first_frame_sent_ = true;
390 VideoContentType type;
391 EXPECT_TRUE(rtp_packet.GetExtension<VideoContentTypeExtension>(&type));
392 EXPECT_TRUE(videocontenttypehelpers::IsScreenshare(type));
393 observation_complete_.Set();
394 return SEND_PACKET;
395 }
396
397 void ModifyVideoConfigs(
398 VideoSendStream::Config* send_config,
399 std::vector<VideoReceiveStream::Config>* receive_configs,
400 VideoEncoderConfig* encoder_config) override {
401 send_config->rtp.extensions.clear();
402 send_config->rtp.extensions.push_back(RtpExtension(
403 RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
404 encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
405 }
406
407 void PerformTest() override {
408 EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
409 }
410
411 private:
412 bool first_frame_sent_;
413 RtpHeaderExtensionMap extensions_;
414 } test;
415
416 RunBaseTest(&test);
417 }
418
TEST_F(VideoSendStreamTest, SupportsVideoTimingFrames) {
420 class VideoTimingObserver : public test::SendTest {
421 public:
422 VideoTimingObserver()
423 : SendTest(kDefaultTimeoutMs), first_frame_sent_(false) {
424 extensions_.Register<VideoTimingExtension>(kVideoTimingExtensionId);
425 }
426
427 Action OnSendRtp(const uint8_t* packet, size_t length) override {
428 RtpPacket rtp_packet(&extensions_);
429 EXPECT_TRUE(rtp_packet.Parse(packet, length));
430 // Only the last packet of the frame must have extension.
431 // Also don't check packets of the second frame if they happen to get
432 // through before the test terminates.
433 if (!rtp_packet.Marker() || first_frame_sent_)
434 return SEND_PACKET;
435 EXPECT_TRUE(rtp_packet.HasExtension<VideoTimingExtension>());
436 observation_complete_.Set();
437 first_frame_sent_ = true;
438 return SEND_PACKET;
439 }
440
441 void ModifyVideoConfigs(
442 VideoSendStream::Config* send_config,
443 std::vector<VideoReceiveStream::Config>* receive_configs,
444 VideoEncoderConfig* encoder_config) override {
445 send_config->rtp.extensions.clear();
446 send_config->rtp.extensions.push_back(
447 RtpExtension(RtpExtension::kVideoTimingUri, kVideoTimingExtensionId));
448 }
449
450 void PerformTest() override {
451 EXPECT_TRUE(Wait()) << "Timed out while waiting for timing frames.";
452 }
453
454 private:
455 RtpHeaderExtensionMap extensions_;
456 bool first_frame_sent_;
457 } test;
458
459 RunBaseTest(&test);
460 }
461
462 class FakeReceiveStatistics : public ReceiveStatisticsProvider {
463 public:
  FakeReceiveStatistics(uint32_t send_ssrc,
465 uint32_t last_sequence_number,
466 uint32_t cumulative_lost,
467 uint8_t fraction_lost) {
468 stat_.SetMediaSsrc(send_ssrc);
469 stat_.SetExtHighestSeqNum(last_sequence_number);
470 stat_.SetCumulativeLost(cumulative_lost);
471 stat_.SetFractionLost(fraction_lost);
472 }
473
  std::vector<rtcp::ReportBlock> RtcpReportBlocks(size_t max_blocks) override {
475 EXPECT_GE(max_blocks, 1u);
476 return {stat_};
477 }
478
479 private:
480 rtcp::ReportBlock stat_;
481 };
482
483 class UlpfecObserver : public test::EndToEndTest {
484 public:
485 // Some of the test cases are expected to time out.
486 // Use a shorter timeout window than the default one for those.
487 static constexpr int kReducedTimeoutMs = 10000;
488
  UlpfecObserver(bool header_extensions_enabled,
490 bool use_nack,
491 bool expect_red,
492 bool expect_ulpfec,
493 const std::string& codec,
494 VideoEncoderFactory* encoder_factory)
495 : EndToEndTest(expect_ulpfec ? VideoSendStreamTest::kDefaultTimeoutMs
496 : kReducedTimeoutMs),
497 encoder_factory_(encoder_factory),
498 payload_name_(codec),
499 use_nack_(use_nack),
500 expect_red_(expect_red),
501 expect_ulpfec_(expect_ulpfec),
502 sent_media_(false),
503 sent_ulpfec_(false),
504 header_extensions_enabled_(header_extensions_enabled) {
505 extensions_.Register<AbsoluteSendTime>(kAbsSendTimeExtensionId);
506 extensions_.Register<TransportSequenceNumber>(
507 kTransportSequenceNumberExtensionId);
508 }
509
510 private:
  Action OnSendRtp(const uint8_t* packet, size_t length) override {
512 RtpPacket rtp_packet(&extensions_);
513 EXPECT_TRUE(rtp_packet.Parse(packet, length));
514
515 int encapsulated_payload_type = -1;
516 if (rtp_packet.PayloadType() == VideoSendStreamTest::kRedPayloadType) {
517 EXPECT_TRUE(expect_red_);
518 encapsulated_payload_type = rtp_packet.payload()[0];
519 if (encapsulated_payload_type !=
520 VideoSendStreamTest::kFakeVideoSendPayloadType) {
521 EXPECT_EQ(VideoSendStreamTest::kUlpfecPayloadType,
522 encapsulated_payload_type);
523 }
524 } else {
525 EXPECT_EQ(VideoSendStreamTest::kFakeVideoSendPayloadType,
526 rtp_packet.PayloadType());
527 if (rtp_packet.payload_size() > 0) {
528 // Not padding-only, media received outside of RED.
529 EXPECT_FALSE(expect_red_);
530 sent_media_ = true;
531 }
532 }
533
534 if (header_extensions_enabled_) {
535 uint32_t abs_send_time;
536 EXPECT_TRUE(rtp_packet.GetExtension<AbsoluteSendTime>(&abs_send_time));
537 uint16_t transport_seq_num;
538 EXPECT_TRUE(
539 rtp_packet.GetExtension<TransportSequenceNumber>(&transport_seq_num));
540 if (!first_packet_) {
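        // Note: AbsoluteSendTime is a 24-bit 6.18 fixed-point timestamp, so it
        // wraps roughly every 64 seconds; comparing against half of the 24-bit
        // space distinguishes a wrap from a genuine decrease.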
541 uint32_t kHalf24BitsSpace = 0xFFFFFF / 2;
542 if (abs_send_time <= kHalf24BitsSpace &&
543 prev_abs_send_time_ > kHalf24BitsSpace) {
544 // 24 bits wrap.
545 EXPECT_GT(prev_abs_send_time_, abs_send_time);
546 } else {
547 EXPECT_GE(abs_send_time, prev_abs_send_time_);
548 }
549
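        // Transport-wide sequence numbers are 16 bits; unsigned subtraction
        // handles wrap-around, and consecutive packets should differ by 1.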
550 uint16_t seq_num_diff = transport_seq_num - prev_transport_seq_num_;
551 EXPECT_EQ(1, seq_num_diff);
552 }
553 first_packet_ = false;
554 prev_abs_send_time_ = abs_send_time;
555 prev_transport_seq_num_ = transport_seq_num;
556 }
557
558 if (encapsulated_payload_type != -1) {
559 if (encapsulated_payload_type ==
560 VideoSendStreamTest::kUlpfecPayloadType) {
561 EXPECT_TRUE(expect_ulpfec_);
562 sent_ulpfec_ = true;
563 } else {
564 sent_media_ = true;
565 }
566 }
567
568 if (sent_media_ && sent_ulpfec_) {
569 observation_complete_.Set();
570 }
571
572 return SEND_PACKET;
573 }
574
  std::unique_ptr<test::PacketTransport> CreateSendTransport(
576 TaskQueueBase* task_queue,
577 Call* sender_call) override {
578 // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
579 // Configure some network delay.
580 const int kNetworkDelayMs = 100;
581 BuiltInNetworkBehaviorConfig config;
582 config.loss_percent = 5;
583 config.queue_delay_ms = kNetworkDelayMs;
584 return std::make_unique<test::PacketTransport>(
585 task_queue, sender_call, this, test::PacketTransport::kSender,
586 VideoSendStreamTest::payload_type_map_,
587 std::make_unique<FakeNetworkPipe>(
588 Clock::GetRealTimeClock(),
589 std::make_unique<SimulatedNetwork>(config)));
590 }
591
  void ModifyVideoConfigs(
593 VideoSendStream::Config* send_config,
594 std::vector<VideoReceiveStream::Config>* receive_configs,
595 VideoEncoderConfig* encoder_config) override {
596 if (use_nack_) {
597 send_config->rtp.nack.rtp_history_ms =
598 (*receive_configs)[0].rtp.nack.rtp_history_ms =
599 VideoSendStreamTest::kNackRtpHistoryMs;
600 }
601 send_config->encoder_settings.encoder_factory = encoder_factory_;
602 send_config->rtp.payload_name = payload_name_;
603 send_config->rtp.ulpfec.red_payload_type =
604 VideoSendStreamTest::kRedPayloadType;
605 send_config->rtp.ulpfec.ulpfec_payload_type =
606 VideoSendStreamTest::kUlpfecPayloadType;
607 if (!header_extensions_enabled_) {
608 send_config->rtp.extensions.clear();
609 } else {
610 send_config->rtp.extensions.push_back(
611 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
612 }
613 (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
614 encoder_config->codec_type = PayloadStringToCodecType(payload_name_);
615 (*receive_configs)[0].rtp.red_payload_type =
616 send_config->rtp.ulpfec.red_payload_type;
617 (*receive_configs)[0].rtp.ulpfec_payload_type =
618 send_config->rtp.ulpfec.ulpfec_payload_type;
619 }
620
  void PerformTest() override {
622 EXPECT_EQ(expect_ulpfec_, Wait())
623 << "Timed out waiting for ULPFEC and/or media packets.";
624 }
625
626 VideoEncoderFactory* encoder_factory_;
627 RtpHeaderExtensionMap extensions_;
628 const std::string payload_name_;
629 const bool use_nack_;
630 const bool expect_red_;
631 const bool expect_ulpfec_;
632 bool sent_media_;
633 bool sent_ulpfec_;
634 const bool header_extensions_enabled_;
635 bool first_packet_ = true;
636 uint32_t prev_abs_send_time_ = 0;
637 uint16_t prev_transport_seq_num_ = 0;
638 };
639
TEST_F(VideoSendStreamTest, SupportsUlpfecWithExtensions) {
641 test::FunctionVideoEncoderFactory encoder_factory(
642 []() { return VP8Encoder::Create(); });
643 UlpfecObserver test(true, false, true, true, "VP8", &encoder_factory);
644 RunBaseTest(&test);
645 }
646
TEST_F(VideoSendStreamTest, SupportsUlpfecWithoutExtensions) {
648 test::FunctionVideoEncoderFactory encoder_factory(
649 []() { return VP8Encoder::Create(); });
650 UlpfecObserver test(false, false, true, true, "VP8", &encoder_factory);
651 RunBaseTest(&test);
652 }
653
654 class VideoSendStreamWithoutUlpfecTest : public test::CallTest {
655 protected:
  VideoSendStreamWithoutUlpfecTest()
657 : field_trial_("WebRTC-DisableUlpFecExperiment/Enabled/") {}
658
659 test::ScopedFieldTrials field_trial_;
660 };
661
TEST_F(VideoSendStreamWithoutUlpfecTest, NoUlpfecIfDisabledThroughFieldTrial) {
663 test::FunctionVideoEncoderFactory encoder_factory(
664 []() { return VP8Encoder::Create(); });
665 UlpfecObserver test(false, false, false, false, "VP8", &encoder_factory);
666 RunBaseTest(&test);
667 }
668
// The FEC scheme used is not efficient for H264, so we should not use RED/FEC
// when NACK is enabled: lost packets still have to be re-requested, so FEC
// effectively wastes bandwidth while the receiver waits for retransmissions to
// determine that the received state is actually decodable.
TEST_F(VideoSendStreamTest, DoesNotUtilizeUlpfecForH264WithNackEnabled) {
674 test::FunctionVideoEncoderFactory encoder_factory([]() {
675 return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
676 });
677 UlpfecObserver test(false, true, false, false, "H264", &encoder_factory);
678 RunBaseTest(&test);
679 }
680
681 // Without retransmissions FEC for H264 is fine.
TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForH264WithoutNackEnabled) {
683 test::FunctionVideoEncoderFactory encoder_factory([]() {
684 return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
685 });
686 UlpfecObserver test(false, false, true, true, "H264", &encoder_factory);
687 RunBaseTest(&test);
688 }
689
TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForVp8WithNackEnabled) {
691 test::FunctionVideoEncoderFactory encoder_factory(
692 []() { return VP8Encoder::Create(); });
693 UlpfecObserver test(false, true, true, true, "VP8", &encoder_factory);
694 RunBaseTest(&test);
695 }
696
697 #if defined(RTC_ENABLE_VP9)
TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForVp9WithNackEnabled) {
699 test::FunctionVideoEncoderFactory encoder_factory(
700 []() { return VP9Encoder::Create(); });
701 UlpfecObserver test(false, true, true, true, "VP9", &encoder_factory);
702 RunBaseTest(&test);
703 }
704 #endif // defined(RTC_ENABLE_VP9)
705
TEST_F(VideoSendStreamTest, SupportsUlpfecWithMultithreadedH264) {
707 std::unique_ptr<TaskQueueFactory> task_queue_factory =
708 CreateDefaultTaskQueueFactory();
709 test::FunctionVideoEncoderFactory encoder_factory([&]() {
710 return std::make_unique<test::MultithreadedFakeH264Encoder>(
711 Clock::GetRealTimeClock(), task_queue_factory.get());
712 });
713 UlpfecObserver test(false, false, true, true, "H264", &encoder_factory);
714 RunBaseTest(&test);
715 }
716
717 // TODO(brandtr): Move these FlexFEC tests when we have created
718 // FlexfecSendStream.
719 class FlexfecObserver : public test::EndToEndTest {
720 public:
  FlexfecObserver(bool header_extensions_enabled,
722 bool use_nack,
723 const std::string& codec,
724 VideoEncoderFactory* encoder_factory,
725 size_t num_video_streams)
726 : EndToEndTest(VideoSendStreamTest::kDefaultTimeoutMs),
727 encoder_factory_(encoder_factory),
728 payload_name_(codec),
729 use_nack_(use_nack),
730 sent_media_(false),
731 sent_flexfec_(false),
732 header_extensions_enabled_(header_extensions_enabled),
733 num_video_streams_(num_video_streams) {
734 extensions_.Register<AbsoluteSendTime>(kAbsSendTimeExtensionId);
735 extensions_.Register<TransmissionOffset>(kTimestampOffsetExtensionId);
736 extensions_.Register<TransportSequenceNumber>(
737 kTransportSequenceNumberExtensionId);
738 }
739
  size_t GetNumFlexfecStreams() const override { return 1; }
  size_t GetNumVideoStreams() const override { return num_video_streams_; }
742
743 private:
  Action OnSendRtp(const uint8_t* packet, size_t length) override {
745 RtpPacket rtp_packet(&extensions_);
746 EXPECT_TRUE(rtp_packet.Parse(packet, length));
747
748 if (rtp_packet.PayloadType() == VideoSendStreamTest::kFlexfecPayloadType) {
749 EXPECT_EQ(VideoSendStreamTest::kFlexfecSendSsrc, rtp_packet.Ssrc());
750 sent_flexfec_ = true;
751 } else {
752 EXPECT_EQ(VideoSendStreamTest::kFakeVideoSendPayloadType,
753 rtp_packet.PayloadType());
754 EXPECT_THAT(::testing::make_tuple(VideoSendStreamTest::kVideoSendSsrcs,
755 num_video_streams_),
756 ::testing::Contains(rtp_packet.Ssrc()));
757 sent_media_ = true;
758 }
759
760 if (header_extensions_enabled_) {
761 EXPECT_TRUE(rtp_packet.HasExtension<AbsoluteSendTime>());
762 EXPECT_TRUE(rtp_packet.HasExtension<TransmissionOffset>());
763 EXPECT_TRUE(rtp_packet.HasExtension<TransportSequenceNumber>());
764 }
765
766 if (sent_media_ && sent_flexfec_) {
767 observation_complete_.Set();
768 }
769
770 return SEND_PACKET;
771 }
772
  std::unique_ptr<test::PacketTransport> CreateSendTransport(
774 TaskQueueBase* task_queue,
775 Call* sender_call) override {
776 // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
777 // Therefore we need some network delay.
778 const int kNetworkDelayMs = 100;
779 BuiltInNetworkBehaviorConfig config;
780 config.loss_percent = 5;
781 config.queue_delay_ms = kNetworkDelayMs;
782 return std::make_unique<test::PacketTransport>(
783 task_queue, sender_call, this, test::PacketTransport::kSender,
784 VideoSendStreamTest::payload_type_map_,
785 std::make_unique<FakeNetworkPipe>(
786 Clock::GetRealTimeClock(),
787 std::make_unique<SimulatedNetwork>(config)));
788 }
789
  std::unique_ptr<test::PacketTransport> CreateReceiveTransport(
791 TaskQueueBase* task_queue) override {
792 // We need the RTT to be >200 ms to send FEC and the network delay for the
793 // send transport is 100 ms, so add 100 ms (but no loss) on the return link.
794 BuiltInNetworkBehaviorConfig config;
795 config.loss_percent = 0;
796 config.queue_delay_ms = 100;
797 return std::make_unique<test::PacketTransport>(
798 task_queue, nullptr, this, test::PacketTransport::kReceiver,
799 VideoSendStreamTest::payload_type_map_,
800 std::make_unique<FakeNetworkPipe>(
801 Clock::GetRealTimeClock(),
802 std::make_unique<SimulatedNetwork>(config)));
803 }
804
  void ModifyVideoConfigs(
806 VideoSendStream::Config* send_config,
807 std::vector<VideoReceiveStream::Config>* receive_configs,
808 VideoEncoderConfig* encoder_config) override {
809 if (use_nack_) {
810 send_config->rtp.nack.rtp_history_ms =
811 (*receive_configs)[0].rtp.nack.rtp_history_ms =
812 VideoSendStreamTest::kNackRtpHistoryMs;
813 }
814 send_config->encoder_settings.encoder_factory = encoder_factory_;
815 send_config->rtp.payload_name = payload_name_;
816 if (header_extensions_enabled_) {
817 send_config->rtp.extensions.push_back(
818 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
819 send_config->rtp.extensions.push_back(RtpExtension(
820 RtpExtension::kTimestampOffsetUri, kTimestampOffsetExtensionId));
821 } else {
822 send_config->rtp.extensions.clear();
823 }
824 (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
825 encoder_config->codec_type = PayloadStringToCodecType(payload_name_);
826 }
827
  void PerformTest() override {
829 EXPECT_TRUE(Wait())
830 << "Timed out waiting for FlexFEC and/or media packets.";
831 }
832
833 VideoEncoderFactory* encoder_factory_;
834 RtpHeaderExtensionMap extensions_;
835 const std::string payload_name_;
836 const bool use_nack_;
837 bool sent_media_;
838 bool sent_flexfec_;
839 const bool header_extensions_enabled_;
840 const size_t num_video_streams_;
841 };
842
TEST_F(VideoSendStreamTest, SupportsFlexfecVp8) {
844 test::FunctionVideoEncoderFactory encoder_factory(
845 []() { return VP8Encoder::Create(); });
846 FlexfecObserver test(false, false, "VP8", &encoder_factory, 1);
847 RunBaseTest(&test);
848 }
849
TEST_F(VideoSendStreamTest, SupportsFlexfecSimulcastVp8) {
851 test::FunctionVideoEncoderFactory encoder_factory(
852 []() { return VP8Encoder::Create(); });
853 FlexfecObserver test(false, false, "VP8", &encoder_factory, 2);
854 RunBaseTest(&test);
855 }
856
TEST_F(VideoSendStreamTest, SupportsFlexfecWithNackVp8) {
858 test::FunctionVideoEncoderFactory encoder_factory(
859 []() { return VP8Encoder::Create(); });
860 FlexfecObserver test(false, true, "VP8", &encoder_factory, 1);
861 RunBaseTest(&test);
862 }
863
TEST_F(VideoSendStreamTest, SupportsFlexfecWithRtpExtensionsVp8) {
865 test::FunctionVideoEncoderFactory encoder_factory(
866 []() { return VP8Encoder::Create(); });
867 FlexfecObserver test(true, false, "VP8", &encoder_factory, 1);
868 RunBaseTest(&test);
869 }
870
871 #if defined(RTC_ENABLE_VP9)
TEST_F(VideoSendStreamTest, SupportsFlexfecVp9) {
873 test::FunctionVideoEncoderFactory encoder_factory(
874 []() { return VP9Encoder::Create(); });
875 FlexfecObserver test(false, false, "VP9", &encoder_factory, 1);
876 RunBaseTest(&test);
877 }
878
TEST_F(VideoSendStreamTest, SupportsFlexfecWithNackVp9) {
880 test::FunctionVideoEncoderFactory encoder_factory(
881 []() { return VP9Encoder::Create(); });
882 FlexfecObserver test(false, true, "VP9", &encoder_factory, 1);
883 RunBaseTest(&test);
884 }
885 #endif // defined(RTC_ENABLE_VP9)
886
TEST_F(VideoSendStreamTest, SupportsFlexfecH264) {
888 test::FunctionVideoEncoderFactory encoder_factory([]() {
889 return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
890 });
891 FlexfecObserver test(false, false, "H264", &encoder_factory, 1);
892 RunBaseTest(&test);
893 }
894
TEST_F(VideoSendStreamTest, SupportsFlexfecWithNackH264) {
896 test::FunctionVideoEncoderFactory encoder_factory([]() {
897 return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
898 });
899 FlexfecObserver test(false, true, "H264", &encoder_factory, 1);
900 RunBaseTest(&test);
901 }
902
TEST_F(VideoSendStreamTest, SupportsFlexfecWithMultithreadedH264) {
904 std::unique_ptr<TaskQueueFactory> task_queue_factory =
905 CreateDefaultTaskQueueFactory();
906 test::FunctionVideoEncoderFactory encoder_factory([&]() {
907 return std::make_unique<test::MultithreadedFakeH264Encoder>(
908 Clock::GetRealTimeClock(), task_queue_factory.get());
909 });
910
911 FlexfecObserver test(false, false, "H264", &encoder_factory, 1);
912 RunBaseTest(&test);
913 }
914
void VideoSendStreamTest::TestNackRetransmission(
916 uint32_t retransmit_ssrc,
917 uint8_t retransmit_payload_type) {
918 class NackObserver : public test::SendTest {
919 public:
920 explicit NackObserver(uint32_t retransmit_ssrc,
921 uint8_t retransmit_payload_type)
922 : SendTest(kDefaultTimeoutMs),
923 send_count_(0),
924 retransmit_count_(0),
925 retransmit_ssrc_(retransmit_ssrc),
926 retransmit_payload_type_(retransmit_payload_type) {}
927
928 private:
929 Action OnSendRtp(const uint8_t* packet, size_t length) override {
930 RtpPacket rtp_packet;
931 EXPECT_TRUE(rtp_packet.Parse(packet, length));
932
933 // NACK packets two times at some arbitrary points.
934 const int kNackedPacketsAtOnceCount = 3;
935 const int kRetransmitTarget = kNackedPacketsAtOnceCount * 2;
936
937 // Skip padding packets because they will never be retransmitted.
938 if (rtp_packet.payload_size() == 0) {
939 return SEND_PACKET;
940 }
941
942 ++send_count_;
943
944 // NACK packets at arbitrary points.
945 if (send_count_ == 5 || send_count_ == 25) {
946 nacked_sequence_numbers_.insert(
947 nacked_sequence_numbers_.end(),
948 non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount,
949 non_padding_sequence_numbers_.end());
950
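        // Impersonate the receiver: construct a minimal RTCP sender and NACK
        // the selected sequence numbers back to the stream under test.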
951 RtpRtcpInterface::Configuration config;
952 config.clock = Clock::GetRealTimeClock();
953 config.outgoing_transport = transport_adapter_.get();
954 config.rtcp_report_interval_ms = kRtcpIntervalMs;
955 config.local_media_ssrc = kReceiverLocalVideoSsrc;
956 RTCPSender rtcp_sender(config);
957
958 rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
959 rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
960
961 RTCPSender::FeedbackState feedback_state;
962
963 EXPECT_EQ(0, rtcp_sender.SendRTCP(
964 feedback_state, kRtcpNack,
965 static_cast<int>(nacked_sequence_numbers_.size()),
966 &nacked_sequence_numbers_.front()));
967 }
968
969 uint16_t sequence_number = rtp_packet.SequenceNumber();
970 if (rtp_packet.Ssrc() == retransmit_ssrc_ &&
971 retransmit_ssrc_ != kVideoSendSsrcs[0]) {
972 // Not kVideoSendSsrcs[0], assume correct RTX packet. Extract sequence
973 // number.
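        // Per RFC 4588, an RTX packet carries the original packet's sequence
        // number in the first two bytes of its payload.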
974 const uint8_t* rtx_header = rtp_packet.payload().data();
975 sequence_number = (rtx_header[0] << 8) + rtx_header[1];
976 }
977
978 auto found = absl::c_find(nacked_sequence_numbers_, sequence_number);
979 if (found != nacked_sequence_numbers_.end()) {
980 nacked_sequence_numbers_.erase(found);
981
982 if (++retransmit_count_ == kRetransmitTarget) {
983 EXPECT_EQ(retransmit_ssrc_, rtp_packet.Ssrc());
984 EXPECT_EQ(retransmit_payload_type_, rtp_packet.PayloadType());
985 observation_complete_.Set();
986 }
987 } else {
988 non_padding_sequence_numbers_.push_back(sequence_number);
989 }
990
991 return SEND_PACKET;
992 }
993
994 void ModifyVideoConfigs(
995 VideoSendStream::Config* send_config,
996 std::vector<VideoReceiveStream::Config>* receive_configs,
997 VideoEncoderConfig* encoder_config) override {
998 transport_adapter_.reset(
999 new internal::TransportAdapter(send_config->send_transport));
1000 transport_adapter_->Enable();
1001 send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
1002 send_config->rtp.rtx.payload_type = retransmit_payload_type_;
1003 if (retransmit_ssrc_ != kVideoSendSsrcs[0])
1004 send_config->rtp.rtx.ssrcs.push_back(retransmit_ssrc_);
1005 }
1006
1007 void PerformTest() override {
1008 EXPECT_TRUE(Wait()) << "Timed out while waiting for NACK retransmission.";
1009 }
1010
1011 std::unique_ptr<internal::TransportAdapter> transport_adapter_;
1012 int send_count_;
1013 int retransmit_count_;
1014 const uint32_t retransmit_ssrc_;
1015 const uint8_t retransmit_payload_type_;
1016 std::vector<uint16_t> nacked_sequence_numbers_;
1017 std::vector<uint16_t> non_padding_sequence_numbers_;
1018 } test(retransmit_ssrc, retransmit_payload_type);
1019
1020 RunBaseTest(&test);
1021 }
1022
TEST_F(VideoSendStreamTest, RetransmitsNack) {
1024 // Normal NACKs should use the send SSRC.
1025 TestNackRetransmission(kVideoSendSsrcs[0], kFakeVideoSendPayloadType);
1026 }
1027
TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) {
1029 // NACKs over RTX should use a separate SSRC.
1030 TestNackRetransmission(kSendRtxSsrcs[0], kSendRtxPayloadType);
1031 }
1032
void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
1034 bool with_fec) {
1035 // Use a fake encoder to output a frame of every size in the range [90, 290],
1036 // for each size making sure that the exact number of payload bytes received
1037 // is correct and that packets are fragmented to respect max packet size.
1038 static const size_t kMaxPacketSize = 128;
1039 static const size_t start = 90;
1040 static const size_t stop = 290;
1041
1042 // Observer that verifies that the expected number of packets and bytes
1043 // arrive for each frame size, from start_size to stop_size.
1044 class FrameFragmentationTest : public test::SendTest {
1045 public:
1046 FrameFragmentationTest(size_t max_packet_size,
1047 size_t start_size,
1048 size_t stop_size,
1049 bool test_generic_packetization,
1050 bool use_fec)
1051 : SendTest(kLongTimeoutMs),
1052 encoder_(stop),
1053 encoder_factory_(&encoder_),
1054 max_packet_size_(max_packet_size),
1055 stop_size_(stop_size),
1056 test_generic_packetization_(test_generic_packetization),
1057 use_fec_(use_fec),
1058 packet_count_(0),
1059 packets_lost_(0),
1060 last_packet_count_(0),
1061 last_packets_lost_(0),
1062 accumulated_size_(0),
1063 accumulated_payload_(0),
1064 fec_packet_received_(false),
1065 current_size_rtp_(start_size),
1066 current_size_frame_(static_cast<int>(start_size)) {
1067 // Fragmentation required, this test doesn't make sense without it.
1068 encoder_.SetFrameSize(start_size);
1069 RTC_DCHECK_GT(stop_size, max_packet_size);
1070 if (!test_generic_packetization_)
1071 encoder_.SetCodecType(kVideoCodecVP8);
1072 }
1073
1074 private:
1075 Action OnSendRtp(const uint8_t* packet, size_t size) override {
1076 size_t length = size;
1077 RtpPacket rtp_packet;
1078 EXPECT_TRUE(rtp_packet.Parse(packet, length));
1079
1080 EXPECT_LE(length, max_packet_size_);
1081
1082 if (use_fec_ && rtp_packet.payload_size() > 0) {
1083 uint8_t payload_type = rtp_packet.payload()[0];
1084 bool is_fec = rtp_packet.PayloadType() == kRedPayloadType &&
1085 payload_type == kUlpfecPayloadType;
1086 if (is_fec) {
1087 fec_packet_received_ = true;
1088 return SEND_PACKET;
1089 }
1090 }
1091
1092 accumulated_size_ += length;
1093
1094 if (use_fec_)
1095 TriggerLossReport(rtp_packet);
1096
1097 if (test_generic_packetization_) {
1098 size_t overhead = rtp_packet.headers_size() + rtp_packet.padding_size();
1099 // Only remove payload header and RED header if the packet actually
1100 // contains payload.
1101 if (length > overhead) {
1102 overhead += (1 /* Generic header */);
1103 if (use_fec_)
1104 overhead += 1; // RED for FEC header.
1105 }
1106 EXPECT_GE(length, overhead);
1107 accumulated_payload_ += length - overhead;
1108 }
1109
1110 // Marker bit set indicates last packet of a frame.
1111 if (rtp_packet.Marker()) {
1112 if (use_fec_ && accumulated_payload_ == current_size_rtp_ - 1) {
1113 // With FEC enabled, frame size is incremented asynchronously, so
1114 // "old" frames one byte too small may arrive. Accept, but don't
1115 // increase expected frame size.
1116 accumulated_size_ = 0;
1117 accumulated_payload_ = 0;
1118 return SEND_PACKET;
1119 }
1120
1121 EXPECT_GE(accumulated_size_, current_size_rtp_);
1122 if (test_generic_packetization_) {
1123 EXPECT_EQ(current_size_rtp_, accumulated_payload_);
1124 }
1125
1126 // Last packet of frame; reset counters.
1127 accumulated_size_ = 0;
1128 accumulated_payload_ = 0;
1129 if (current_size_rtp_ == stop_size_) {
1130 // Done! (Don't increase size again, might arrive more @ stop_size).
1131 observation_complete_.Set();
1132 } else {
1133 // Increase next expected frame size. If testing with FEC, make sure
1134 // a FEC packet has been received for this frame size before
1135 // proceeding, to make sure that redundancy packets don't exceed
1136 // size limit.
1137 if (!use_fec_) {
1138 ++current_size_rtp_;
1139 } else if (fec_packet_received_) {
1140 fec_packet_received_ = false;
1141 ++current_size_rtp_;
1142
1143 MutexLock lock(&mutex_);
1144 ++current_size_frame_;
1145 }
1146 }
1147 }
1148
1149 return SEND_PACKET;
1150 }
1151
1152 void TriggerLossReport(const RtpPacket& rtp_packet) {
1153 // Send lossy receive reports to trigger FEC enabling.
1154 const int kLossPercent = 5;
1155 if (++packet_count_ % (100 / kLossPercent) == 0) {
1156 packets_lost_++;
1157 int loss_delta = packets_lost_ - last_packets_lost_;
1158 int packets_delta = packet_count_ - last_packet_count_;
1159 last_packet_count_ = packet_count_;
1160 last_packets_lost_ = packets_lost_;
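        // RFC 3550 reports fraction lost as an 8-bit fixed-point value, so
        // scale the loss seen in this interval to the 0-255 range.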
1161 uint8_t loss_ratio =
1162 static_cast<uint8_t>(loss_delta * 255 / packets_delta);
1163 FakeReceiveStatistics lossy_receive_stats(
1164 kVideoSendSsrcs[0], rtp_packet.SequenceNumber(),
1165 packets_lost_, // Cumulative lost.
1166 loss_ratio); // Loss percent.
1167 RtpRtcpInterface::Configuration config;
1168 config.clock = Clock::GetRealTimeClock();
1169 config.receive_statistics = &lossy_receive_stats;
1170 config.outgoing_transport = transport_adapter_.get();
1171 config.rtcp_report_interval_ms = kRtcpIntervalMs;
1172 config.local_media_ssrc = kVideoSendSsrcs[0];
1173 RTCPSender rtcp_sender(config);
1174
1175 rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
1176 rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
1177
1178 RTCPSender::FeedbackState feedback_state;
1179
1180 EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
1181 }
1182 }
1183
1184 void UpdateConfiguration() {
1185 MutexLock lock(&mutex_);
1186 // Increase frame size for next encoded frame, in the context of the
1187 // encoder thread.
1188 if (!use_fec_ && current_size_frame_ < static_cast<int32_t>(stop_size_)) {
1189 ++current_size_frame_;
1190 }
1191 encoder_.SetFrameSize(static_cast<size_t>(current_size_frame_));
1192 }
1193 void ModifySenderBitrateConfig(
1194 BitrateConstraints* bitrate_config) override {
1195 const int kMinBitrateBps = 300000;
1196 bitrate_config->min_bitrate_bps = kMinBitrateBps;
1197 }
1198
1199 void ModifyVideoConfigs(
1200 VideoSendStream::Config* send_config,
1201 std::vector<VideoReceiveStream::Config>* receive_configs,
1202 VideoEncoderConfig* encoder_config) override {
1203 transport_adapter_.reset(
1204 new internal::TransportAdapter(send_config->send_transport));
1205 transport_adapter_->Enable();
1206 if (use_fec_) {
1207 send_config->rtp.ulpfec.red_payload_type = kRedPayloadType;
1208 send_config->rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
1209 }
1210
1211 if (!test_generic_packetization_)
1212 send_config->rtp.payload_name = "VP8";
1213
1214 send_config->encoder_settings.encoder_factory = &encoder_factory_;
1215 send_config->rtp.max_packet_size = kMaxPacketSize;
1216 encoder_.RegisterPostEncodeCallback([this]() { UpdateConfiguration(); });
1217
1218 // Make sure there is at least one extension header, to make the RTP
1219 // header larger than the base length of 12 bytes.
1220 EXPECT_FALSE(send_config->rtp.extensions.empty());
1221
      // Setting the content type to screen share disables frame dropping,
      // which makes this test easier.
1223 EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
1224 encoder_config->simulcast_layers[0].num_temporal_layers = 2;
1225 encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
1226 }
1227
1228 void PerformTest() override {
1229 EXPECT_TRUE(Wait()) << "Timed out while observing incoming RTP packets.";
1230 }
1231
1232 std::unique_ptr<internal::TransportAdapter> transport_adapter_;
1233 test::ConfigurableFrameSizeEncoder encoder_;
1234 test::VideoEncoderProxyFactory encoder_factory_;
1235
1236 const size_t max_packet_size_;
1237 const size_t stop_size_;
1238 const bool test_generic_packetization_;
1239 const bool use_fec_;
1240
1241 uint32_t packet_count_;
1242 uint32_t packets_lost_;
1243 uint32_t last_packet_count_;
1244 uint32_t last_packets_lost_;
1245 size_t accumulated_size_;
1246 size_t accumulated_payload_;
1247 bool fec_packet_received_;
1248
1249 size_t current_size_rtp_;
1250 Mutex mutex_;
1251 int current_size_frame_ RTC_GUARDED_BY(mutex_);
1252 };
1253
1254 // Don't auto increment if FEC is used; continue sending frame size until
1255 // a FEC packet has been received.
1256 FrameFragmentationTest test(kMaxPacketSize, start, stop, format == kGeneric,
1257 with_fec);
1258
1259 RunBaseTest(&test);
1260 }
1261
1262 // TODO(sprang): Is there any way of speeding up these tests?
TEST_F(VideoSendStreamTest, FragmentsGenericAccordingToMaxPacketSize) {
1264 TestPacketFragmentationSize(kGeneric, false);
1265 }
1266
TEST_F(VideoSendStreamTest, FragmentsGenericAccordingToMaxPacketSizeWithFec) {
1268 TestPacketFragmentationSize(kGeneric, true);
1269 }
1270
TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSize) {
1272 TestPacketFragmentationSize(kVP8, false);
1273 }
1274
TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSizeWithFec) {
1276 TestPacketFragmentationSize(kVP8, true);
1277 }
1278
1279 // The test will go through a number of phases.
1280 // 1. Start sending packets.
1281 // 2. As soon as the RTP stream has been detected, signal a low REMB value to
1282 // suspend the stream.
1283 // 3. Wait until |kSuspendTimeFrames| have been captured without seeing any RTP
1284 // packets.
1285 // 4. Signal a high REMB and then wait for the RTP stream to start again.
1286 // When the stream is detected again, and the stats show that the stream
1287 // is no longer suspended, the test ends.
TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
1289 static const int kSuspendTimeFrames = 60; // Suspend for 2 seconds @ 30 fps.
1290
1291 class RembObserver : public test::SendTest {
1292 public:
1293 class CaptureObserver : public rtc::VideoSinkInterface<VideoFrame> {
1294 public:
1295 explicit CaptureObserver(RembObserver* remb_observer)
1296 : remb_observer_(remb_observer) {}
1297
1298 void OnFrame(const VideoFrame&) {
1299 MutexLock lock(&remb_observer_->mutex_);
1300 if (remb_observer_->test_state_ == kDuringSuspend &&
1301 ++remb_observer_->suspended_frame_count_ > kSuspendTimeFrames) {
1302 VideoSendStream::Stats stats = remb_observer_->stream_->GetStats();
1303 EXPECT_TRUE(stats.suspended);
1304 remb_observer_->SendRtcpFeedback(remb_observer_->high_remb_bps_);
1305 remb_observer_->test_state_ = kWaitingForPacket;
1306 }
1307 }
1308
1309 private:
1310 RembObserver* const remb_observer_;
1311 };
1312
1313 RembObserver()
1314 : SendTest(kDefaultTimeoutMs),
1315 clock_(Clock::GetRealTimeClock()),
1316 capture_observer_(this),
1317 stream_(nullptr),
1318 test_state_(kBeforeSuspend),
1319 rtp_count_(0),
1320 last_sequence_number_(0),
1321 suspended_frame_count_(0),
1322 low_remb_bps_(0),
1323 high_remb_bps_(0) {}
1324
1325 private:
1326 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1327 MutexLock lock(&mutex_);
1328 ++rtp_count_;
1329 RtpPacket rtp_packet;
1330 EXPECT_TRUE(rtp_packet.Parse(packet, length));
1331 last_sequence_number_ = rtp_packet.SequenceNumber();
1332
1333 if (test_state_ == kBeforeSuspend) {
1334 // The stream has started. Try to suspend it.
1335 SendRtcpFeedback(low_remb_bps_);
1336 test_state_ = kDuringSuspend;
1337 } else if (test_state_ == kDuringSuspend) {
1338 if (rtp_packet.padding_size() == 0) {
1339 // Received non-padding packet during suspension period. Reset the
1340 // counter.
1341 suspended_frame_count_ = 0;
1342 }
1343 SendRtcpFeedback(0); // REMB is only sent if value is > 0.
1344 } else if (test_state_ == kWaitingForPacket) {
1345 if (rtp_packet.padding_size() == 0) {
1346 // Non-padding packet observed. Test is almost complete. Will just
1347 // have to wait for the stats to change.
1348 test_state_ = kWaitingForStats;
1349 }
1350 SendRtcpFeedback(0); // REMB is only sent if value is > 0.
1351 } else if (test_state_ == kWaitingForStats) {
1352 VideoSendStream::Stats stats = stream_->GetStats();
1353 if (stats.suspended == false) {
1354 // Stats flipped to false. Test is complete.
1355 observation_complete_.Set();
1356 }
1357 SendRtcpFeedback(0); // REMB is only sent if value is > 0.
1358 }
1359
1360 return SEND_PACKET;
1361 }
1362
1363 void set_low_remb_bps(int value) {
1364 MutexLock lock(&mutex_);
1365 low_remb_bps_ = value;
1366 }
1367
1368 void set_high_remb_bps(int value) {
1369 MutexLock lock(&mutex_);
1370 high_remb_bps_ = value;
1371 }
1372
1373 void OnVideoStreamsCreated(
1374 VideoSendStream* send_stream,
1375 const std::vector<VideoReceiveStream*>& receive_streams) override {
1376 stream_ = send_stream;
1377 }
1378
1379 void OnFrameGeneratorCapturerCreated(
1380 test::FrameGeneratorCapturer* frame_generator_capturer) override {
1381 frame_generator_capturer->AddOrUpdateSink(&capture_observer_,
1382 rtc::VideoSinkWants());
1383 }
1384
1385 void ModifyVideoConfigs(
1386 VideoSendStream::Config* send_config,
1387 std::vector<VideoReceiveStream::Config>* receive_configs,
1388 VideoEncoderConfig* encoder_config) override {
1389 RTC_DCHECK_EQ(1, encoder_config->number_of_streams);
1390 transport_adapter_.reset(
1391 new internal::TransportAdapter(send_config->send_transport));
1392 transport_adapter_->Enable();
1393 send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
1394 send_config->suspend_below_min_bitrate = true;
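      // Pick REMB values safely below and above the stream's minimum bitrate
      // (plus a hysteresis window) so suspension is entered and exited
      // reliably.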
1395 int min_bitrate_bps =
1396 test::DefaultVideoStreamFactory::kDefaultMinBitratePerStream[0];
1397 set_low_remb_bps(min_bitrate_bps - 10000);
1398 int threshold_window = std::max(min_bitrate_bps / 10, 20000);
1399 ASSERT_GT(encoder_config->max_bitrate_bps,
1400 min_bitrate_bps + threshold_window + 5000);
1401 set_high_remb_bps(min_bitrate_bps + threshold_window + 5000);
1402 }
1403
1404 void PerformTest() override {
1405 EXPECT_TRUE(Wait()) << "Timed out during suspend-below-min-bitrate test.";
1406 }
1407
1408 enum TestState {
1409 kBeforeSuspend,
1410 kDuringSuspend,
1411 kWaitingForPacket,
1412 kWaitingForStats
1413 };
1414
1415 virtual void SendRtcpFeedback(int remb_value)
1416 RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
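      // Fake a receiver report, optionally carrying a REMB, so the sender's
      // bandwidth estimate can be pushed below or above the suspend threshold.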
1417 FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0],
1418 last_sequence_number_, rtp_count_, 0);
1419 RtpRtcpInterface::Configuration config;
1420 config.clock = clock_;
1421 config.receive_statistics = &receive_stats;
1422 config.outgoing_transport = transport_adapter_.get();
1423 config.rtcp_report_interval_ms = kRtcpIntervalMs;
1424 config.local_media_ssrc = kVideoSendSsrcs[0];
1425 RTCPSender rtcp_sender(config);
1426
1427 rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
1428 rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
1429 if (remb_value > 0) {
1430 rtcp_sender.SetRemb(remb_value, std::vector<uint32_t>());
1431 }
1432 RTCPSender::FeedbackState feedback_state;
1433 EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
1434 }
1435
1436 std::unique_ptr<internal::TransportAdapter> transport_adapter_;
1437 Clock* const clock_;
1438 CaptureObserver capture_observer_;
1439 VideoSendStream* stream_;
1440
1441 Mutex mutex_;
1442 TestState test_state_ RTC_GUARDED_BY(mutex_);
1443 int rtp_count_ RTC_GUARDED_BY(mutex_);
1444 int last_sequence_number_ RTC_GUARDED_BY(mutex_);
1445 int suspended_frame_count_ RTC_GUARDED_BY(mutex_);
1446 int low_remb_bps_ RTC_GUARDED_BY(mutex_);
1447 int high_remb_bps_ RTC_GUARDED_BY(mutex_);
1448 } test;
1449
1450 RunBaseTest(&test);
1451 }
1452
// This test verifies that padding stops being sent after a while if the camera
// stops producing video frames, and that padding resumes if the camera
// restarts.
TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
1456 class NoPaddingWhenVideoIsMuted : public test::SendTest {
1457 public:
1458 NoPaddingWhenVideoIsMuted()
1459 : SendTest(kDefaultTimeoutMs),
1460 clock_(Clock::GetRealTimeClock()),
1461 capturer_(nullptr) {}
1462
1463 private:
1464 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1465 MutexLock lock(&mutex_);
1466 last_packet_time_ms_ = clock_->TimeInMilliseconds();
1467
1468 RtpPacket rtp_packet;
1469 rtp_packet.Parse(packet, length);
1470 const bool only_padding = rtp_packet.payload_size() == 0;
1471
1472 if (test_state_ == kBeforeStopCapture) {
1473 // Packets are flowing, stop camera.
1474 capturer_->Stop();
1475 test_state_ = kWaitingForPadding;
1476 } else if (test_state_ == kWaitingForPadding && only_padding) {
1477 // We're still getting padding, after stopping camera.
1478 test_state_ = kWaitingForNoPackets;
1479 } else if (test_state_ == kWaitingForMediaAfterCameraRestart &&
1480 !only_padding) {
1481 // Media packets are flowing again, stop camera a second time.
1482 capturer_->Stop();
1483 test_state_ = kWaitingForPaddingAfterCameraStopsAgain;
1484 } else if (test_state_ == kWaitingForPaddingAfterCameraStopsAgain &&
1485 only_padding) {
1486 // Padding is still flowing, test ok.
1487 observation_complete_.Set();
1488 }
1489 return SEND_PACKET;
1490 }
1491
1492 Action OnSendRtcp(const uint8_t* packet, size_t length) override {
1493 MutexLock lock(&mutex_);
1494 const int kNoPacketsThresholdMs = 2000;
1495 if (test_state_ == kWaitingForNoPackets &&
1496 (last_packet_time_ms_ &&
1497 clock_->TimeInMilliseconds() - last_packet_time_ms_.value() >
1498 kNoPacketsThresholdMs)) {
1499 // No packets seen for |kNoPacketsThresholdMs|, restart camera.
1500 capturer_->Start();
1501 test_state_ = kWaitingForMediaAfterCameraRestart;
1502 }
1503 return SEND_PACKET;
1504 }
1505
1506 void ModifyVideoConfigs(
1507 VideoSendStream::Config* send_config,
1508 std::vector<VideoReceiveStream::Config>* receive_configs,
1509 VideoEncoderConfig* encoder_config) override {
1510 // Make sure padding is sent if encoder is not producing media.
1511 encoder_config->min_transmit_bitrate_bps = 50000;
1512 }
1513
1514 void OnFrameGeneratorCapturerCreated(
1515 test::FrameGeneratorCapturer* frame_generator_capturer) override {
1516 MutexLock lock(&mutex_);
1517 capturer_ = frame_generator_capturer;
1518 }
1519
1520 void PerformTest() override {
1521 EXPECT_TRUE(Wait())
1522 << "Timed out while waiting for RTP packets to stop being sent.";
1523 }
1524
1525 enum TestState {
1526 kBeforeStopCapture,
1527 kWaitingForPadding,
1528 kWaitingForNoPackets,
1529 kWaitingForMediaAfterCameraRestart,
1530 kWaitingForPaddingAfterCameraStopsAgain
1531 };
1532
1533 TestState test_state_ = kBeforeStopCapture;
1534 Clock* const clock_;
1535 Mutex mutex_;
1536 absl::optional<int64_t> last_packet_time_ms_ RTC_GUARDED_BY(mutex_);
1537 test::FrameGeneratorCapturer* capturer_ RTC_GUARDED_BY(mutex_);
1538 } test;
1539
1540 RunBaseTest(&test);
1541 }
1542
1543 TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) {
1544 const int kCapacityKbps = 10000; // 10 Mbps
1545 class PaddingIsPrimarilyRetransmissions : public test::EndToEndTest {
1546 public:
1547 PaddingIsPrimarilyRetransmissions()
1548 : EndToEndTest(kDefaultTimeoutMs),
1549 clock_(Clock::GetRealTimeClock()),
1550 padding_length_(0),
1551 total_length_(0),
1552 call_(nullptr) {}
1553
1554 private:
1555 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
1556 call_ = sender_call;
1557 }
1558
1559 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1560 MutexLock lock(&mutex_);
1561
1562 RtpPacket rtp_packet;
1563 rtp_packet.Parse(packet, length);
1564 padding_length_ += rtp_packet.padding_size();
1565 total_length_ += length;
1566 return SEND_PACKET;
1567 }
1568
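  // Simulates a lossy 10 Mbps link (10% loss, 50 ms queue delay) so that
  // retransmissions are generated; the test then verifies that padding stays
  // a small fraction of the total bytes sent.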
1569 std::unique_ptr<test::PacketTransport> CreateSendTransport(
1570 TaskQueueBase* task_queue,
1571 Call* sender_call) override {
1572 const int kNetworkDelayMs = 50;
1573 BuiltInNetworkBehaviorConfig config;
1574 config.loss_percent = 10;
1575 config.link_capacity_kbps = kCapacityKbps;
1576 config.queue_delay_ms = kNetworkDelayMs;
1577 return std::make_unique<test::PacketTransport>(
1578 task_queue, sender_call, this, test::PacketTransport::kSender,
1579 payload_type_map_,
1580 std::make_unique<FakeNetworkPipe>(
1581 Clock::GetRealTimeClock(),
1582 std::make_unique<SimulatedNetwork>(config)));
1583 }
1584
1585 void ModifyVideoConfigs(
1586 VideoSendStream::Config* send_config,
1587 std::vector<VideoReceiveStream::Config>* receive_configs,
1588 VideoEncoderConfig* encoder_config) override {
1589 // Turn on RTX.
1590 send_config->rtp.rtx.payload_type = kFakeVideoSendPayloadType;
1591 send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
1592 }
1593
1594 void PerformTest() override {
1595 // TODO(isheriff): Some platforms do not ramp up as expected to full
1596 // capacity due to packet scheduling delays. Fix that before getting
1597 // rid of this.
1598 SleepMs(5000);
1599 {
1600 MutexLock lock(&mutex_);
1601 // Expect padding to be a small percentage of total bytes sent.
1602 EXPECT_LT(padding_length_, .1 * total_length_);
1603 }
1604 }
1605
1606 Mutex mutex_;
1607 Clock* const clock_;
1608 size_t padding_length_ RTC_GUARDED_BY(mutex_);
1609 size_t total_length_ RTC_GUARDED_BY(mutex_);
1610 Call* call_;
1611 } test;
1612
1613 RunBaseTest(&test);
1614 }
1615
1616 // This test first observes "high" bitrate use, at which point it sends a REMB
1617 // to indicate that the bitrate should be lowered significantly. The test then
1618 // observes the bitrate sinking well below the min-transmit-bitrate threshold,
1619 // verifying that the min-transmit bitrate respects the incoming REMB.
1620 //
1621 // Note that the test starts at the "high" bitrate and does not ramp up to a
1622 // "higher" bitrate, since no receiver report block or REMB is sent initially.
1623 TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
1624 static const int kMinTransmitBitrateBps = 400000;
1625 static const int kHighBitrateBps = 150000;
1626 static const int kRembBitrateBps = 80000;
1627 static const int kRembRespectedBitrateBps = 100000;
1628 class BitrateObserver : public test::SendTest {
1629 public:
1630 explicit BitrateObserver(TaskQueueBase* task_queue)
1631 : SendTest(kDefaultTimeoutMs),
1632 task_queue_(task_queue),
1633 retranmission_rate_limiter_(Clock::GetRealTimeClock(), 1000),
1634 stream_(nullptr),
1635 bitrate_capped_(false) {}
1636
1637 ~BitrateObserver() override {
1638 // Make sure we free |rtp_rtcp_| in the same context as we constructed it.
1639 SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; });
1640 }
1641
1642 private:
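  // Inspects the send stream stats for every outgoing packet. Once the "high"
  // bitrate is exceeded, a much lower REMB is injected via |rtp_rtcp_|; the
  // test completes when the observed bitrate drops below
  // kRembRespectedBitrateBps.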
1643 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1644 if (RtpHeaderParser::IsRtcp(packet, length))
1645 return DROP_PACKET;
1646
1647 RtpPacket rtp_packet;
1648 if (!rtp_packet.Parse(packet, length))
1649 return DROP_PACKET;
1650 RTC_DCHECK(stream_);
1651 VideoSendStream::Stats stats = stream_->GetStats();
1652 if (!stats.substreams.empty()) {
1653 EXPECT_EQ(1u, stats.substreams.size());
1654 int total_bitrate_bps =
1655 stats.substreams.begin()->second.total_bitrate_bps;
1656 test::PrintResult("bitrate_stats_", "min_transmit_bitrate_low_remb",
1657 "bitrate_bps", static_cast<size_t>(total_bitrate_bps),
1658 "bps", false);
1659 if (total_bitrate_bps > kHighBitrateBps) {
1660 rtp_rtcp_->SetRemb(kRembBitrateBps, {rtp_packet.Ssrc()});
1661 rtp_rtcp_->Process();
1662 bitrate_capped_ = true;
1663 } else if (bitrate_capped_ &&
1664 total_bitrate_bps < kRembRespectedBitrateBps) {
1665 observation_complete_.Set();
1666 }
1667 }
1668 // Packets don't have to be delivered since the test is the receiver.
1669 return DROP_PACKET;
1670 }
1671
1672 void OnVideoStreamsCreated(
1673 VideoSendStream* send_stream,
1674 const std::vector<VideoReceiveStream*>& receive_streams) override {
1675 stream_ = send_stream;
1676 RtpRtcpInterface::Configuration config;
1677 config.clock = Clock::GetRealTimeClock();
1678 config.outgoing_transport = feedback_transport_.get();
1679 config.retransmission_rate_limiter = &retranmission_rate_limiter_;
1680 rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config);
1681 rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
1682 }
1683
1684 void ModifyVideoConfigs(
1685 VideoSendStream::Config* send_config,
1686 std::vector<VideoReceiveStream::Config>* receive_configs,
1687 VideoEncoderConfig* encoder_config) override {
1688 feedback_transport_.reset(
1689 new internal::TransportAdapter(send_config->send_transport));
1690 feedback_transport_->Enable();
1691 encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps;
1692 }
1693
1694 void PerformTest() override {
1695 EXPECT_TRUE(Wait())
1696 << "Timeout while waiting for low bitrate stats after REMB.";
1697 }
1698
1699 TaskQueueBase* const task_queue_;
1700 std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
1701 std::unique_ptr<internal::TransportAdapter> feedback_transport_;
1702 RateLimiter retranmission_rate_limiter_;
1703 VideoSendStream* stream_;
1704 bool bitrate_capped_;
1705 } test(task_queue());
1706
1707 RunBaseTest(&test);
1708 }
1709
1710 TEST_F(VideoSendStreamTest, ChangingNetworkRoute) {
1711 static const int kStartBitrateBps = 300000;
1712 static const int kNewMaxBitrateBps = 1234567;
1713 static const uint8_t kExtensionId = kTransportSequenceNumberExtensionId;
1714 class ChangingNetworkRouteTest : public test::EndToEndTest {
1715 public:
1716 explicit ChangingNetworkRouteTest(TaskQueueBase* task_queue)
1717 : EndToEndTest(test::CallTest::kDefaultTimeoutMs),
1718 task_queue_(task_queue),
1719 call_(nullptr) {
1720 module_process_thread_.Detach();
1721 task_queue_thread_.Detach();
1722 extensions_.Register<TransportSequenceNumber>(kExtensionId);
1723 }
1724
1725 ~ChangingNetworkRouteTest() {
1726 // Block until all already posted tasks run to avoid 'use after free'
1727 // when such a task accesses |this|.
1728 SendTask(RTC_FROM_HERE, task_queue_, [] {});
1729 }
1730
1731 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
1732 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1733 RTC_DCHECK(!call_);
1734 call_ = sender_call;
1735 }
1736
1737 void ModifyVideoConfigs(
1738 VideoSendStream::Config* send_config,
1739 std::vector<VideoReceiveStream::Config>* receive_configs,
1740 VideoEncoderConfig* encoder_config) override {
1741 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1742 send_config->rtp.extensions.clear();
1743 send_config->rtp.extensions.push_back(RtpExtension(
1744 RtpExtension::kTransportSequenceNumberUri, kExtensionId));
1745 (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
1746 (*receive_configs)[0].rtp.transport_cc = true;
1747 }
1748
1749 void ModifyAudioConfigs(
1750 AudioSendStream::Config* send_config,
1751 std::vector<AudioReceiveStream::Config>* receive_configs) override {
1752 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1753 send_config->rtp.extensions.clear();
1754 send_config->rtp.extensions.push_back(RtpExtension(
1755 RtpExtension::kTransportSequenceNumberUri, kExtensionId));
1756 (*receive_configs)[0].rtp.extensions.clear();
1757 (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
1758 (*receive_configs)[0].rtp.transport_cc = true;
1759 }
1760
1761 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1762 RTC_DCHECK_RUN_ON(&module_process_thread_);
1763 task_queue_->PostTask(ToQueuedTask([this]() {
1764 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1765 if (!call_)
1766 return;
1767 Call::Stats stats = call_->GetStats();
1768 if (stats.send_bandwidth_bps > kStartBitrateBps)
1769 observation_complete_.Set();
1770 }));
1771 return SEND_PACKET;
1772 }
1773
1774 void OnStreamsStopped() override {
1775 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1776 call_ = nullptr;
1777 }
1778
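  // First announces a connected route and a start bitrate, waits for the
  // estimate to exceed that start bitrate, then raises the max bitrate and
  // switches to a new route while verifying that the estimate does not drop
  // below the start bitrate.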
1779 void PerformTest() override {
1780 rtc::NetworkRoute new_route;
1781 new_route.connected = true;
1782 new_route.local = rtc::RouteEndpoint::CreateWithNetworkId(10);
1783 new_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20);
1784 BitrateConstraints bitrate_config;
1785
1786 SendTask(RTC_FROM_HERE, task_queue_,
1787 [this, &new_route, &bitrate_config]() {
1788 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1789 call_->GetTransportControllerSend()->OnNetworkRouteChanged(
1790 "transport", new_route);
1791 bitrate_config.start_bitrate_bps = kStartBitrateBps;
1792 call_->GetTransportControllerSend()->SetSdpBitrateParameters(
1793 bitrate_config);
1794 });
1795
1796 EXPECT_TRUE(Wait())
1797 << "Timed out while waiting for start bitrate to be exceeded.";
1798
1799 SendTask(
1800 RTC_FROM_HERE, task_queue_, [this, &new_route, &bitrate_config]() {
1801 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1802 bitrate_config.start_bitrate_bps = -1;
1803 bitrate_config.max_bitrate_bps = kNewMaxBitrateBps;
1804 call_->GetTransportControllerSend()->SetSdpBitrateParameters(
1805 bitrate_config);
1806 // TODO(holmer): We should set the last sent packet id here and
1807 // verify that we correctly ignore any packet loss reported prior to
1808 // that id.
1809 new_route.local = rtc::RouteEndpoint::CreateWithNetworkId(
1810 new_route.local.network_id() + 1);
1811 call_->GetTransportControllerSend()->OnNetworkRouteChanged(
1812 "transport", new_route);
1813 EXPECT_GE(call_->GetStats().send_bandwidth_bps, kStartBitrateBps);
1814 });
1815 }
1816
1817 private:
1818 webrtc::SequenceChecker module_process_thread_;
1819 webrtc::SequenceChecker task_queue_thread_;
1820 TaskQueueBase* const task_queue_;
1821 RtpHeaderExtensionMap extensions_;
1822 Call* call_ RTC_GUARDED_BY(task_queue_thread_);
1823 } test(task_queue());
1824
1825 RunBaseTest(&test);
1826 }
1827
1828 // Test that if specified, relay cap is lifted on transition to direct
1829 // connection.
1830 TEST_F(VideoSendStreamTest, RelayToDirectRoute) {
1831 static const int kStartBitrateBps = 300000;
1832 static const int kRelayBandwidthCapBps = 800000;
1833 static const int kMinPacketsToSend = 100;
1834 webrtc::test::ScopedFieldTrials field_trials(
1835 std::string(field_trial::GetFieldTrialString()) +
1836 "WebRTC-Bwe-NetworkRouteConstraints/relay_cap:" +
1837 std::to_string(kRelayBandwidthCapBps) + "bps/");
1838
1839 class RelayToDirectRouteTest : public test::EndToEndTest {
1840 public:
1841 explicit RelayToDirectRouteTest(TaskQueueBase* task_queue)
1842 : EndToEndTest(test::CallTest::kDefaultTimeoutMs),
1843 task_queue_(task_queue),
1844 call_(nullptr),
1845 packets_sent_(0),
1846 relayed_phase_(true) {
1847 module_process_thread_.Detach();
1848 task_queue_thread_.Detach();
1849 }
1850
1851 ~RelayToDirectRouteTest() {
1852 // Block until all already posted tasks run to avoid 'use after free'
1853 // when such a task accesses |this|.
1854 SendTask(RTC_FROM_HERE, task_queue_, [] {});
1855 }
1856
1857 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
1858 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1859 RTC_DCHECK(!call_);
1860 call_ = sender_call;
1861 }
1862
1863 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1864 RTC_DCHECK_RUN_ON(&module_process_thread_);
1865 task_queue_->PostTask(ToQueuedTask([this]() {
1866 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1867 if (!call_)
1868 return;
1869 bool had_time_to_exceed_cap_in_relayed_phase =
1870 relayed_phase_ && ++packets_sent_ > kMinPacketsToSend;
1871 bool did_exceed_cap =
1872 call_->GetStats().send_bandwidth_bps > kRelayBandwidthCapBps;
1873 if (did_exceed_cap || had_time_to_exceed_cap_in_relayed_phase)
1874 observation_complete_.Set();
1875 }));
1876 return SEND_PACKET;
1877 }
1878
1879 void OnStreamsStopped() override {
1880 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1881 call_ = nullptr;
1882 }
1883
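  // Starts on a TURN (relayed) route and verifies that the estimate stays at
  // or below the relay cap, then switches to a direct route and waits for the
  // estimate to grow past the cap.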
1884 void PerformTest() override {
1885 rtc::NetworkRoute route;
1886 route.connected = true;
1887 route.local = rtc::RouteEndpoint::CreateWithNetworkId(10);
1888 route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20);
1889
1890 SendTask(RTC_FROM_HERE, task_queue_, [this, &route]() {
1891 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1892 relayed_phase_ = true;
1893 route.remote = route.remote.CreateWithTurn(true);
1894 call_->GetTransportControllerSend()->OnNetworkRouteChanged("transport",
1895 route);
1896 BitrateConstraints bitrate_config;
1897 bitrate_config.start_bitrate_bps = kStartBitrateBps;
1898
1899 call_->GetTransportControllerSend()->SetSdpBitrateParameters(
1900 bitrate_config);
1901 });
1902
1903 EXPECT_TRUE(Wait())
1904 << "Timeout waiting for sufficient packets sent count.";
1905
1906 SendTask(RTC_FROM_HERE, task_queue_, [this, &route]() {
1907 RTC_DCHECK_RUN_ON(&task_queue_thread_);
1908 EXPECT_LE(call_->GetStats().send_bandwidth_bps, kRelayBandwidthCapBps);
1909
1910 route.remote = route.remote.CreateWithTurn(false);
1911 call_->GetTransportControllerSend()->OnNetworkRouteChanged("transport",
1912 route);
1913 relayed_phase_ = false;
1914 observation_complete_.Reset();
1915 });
1916
1917 EXPECT_TRUE(Wait())
1918 << "Timeout while waiting for bandwidth to outgrow relay cap.";
1919 }
1920
1921 private:
1922 webrtc::SequenceChecker module_process_thread_;
1923 webrtc::SequenceChecker task_queue_thread_;
1924 TaskQueueBase* const task_queue_;
1925 Call* call_ RTC_GUARDED_BY(task_queue_thread_);
1926 int packets_sent_ RTC_GUARDED_BY(task_queue_thread_);
1927 bool relayed_phase_ RTC_GUARDED_BY(task_queue_thread_);
1928 } test(task_queue());
1929
1930 RunBaseTest(&test);
1931 }
1932
1933 TEST_F(VideoSendStreamTest, ChangingTransportOverhead) {
1934 class ChangingTransportOverheadTest : public test::EndToEndTest {
1935 public:
1936 explicit ChangingTransportOverheadTest(TaskQueueBase* task_queue)
1937 : EndToEndTest(test::CallTest::kDefaultTimeoutMs),
1938 task_queue_(task_queue),
1939 call_(nullptr),
1940 packets_sent_(0),
1941 transport_overhead_(0) {}
1942
1943 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
1944 call_ = sender_call;
1945 }
1946
1947 Action OnSendRtp(const uint8_t* packet, size_t length) override {
1948 EXPECT_LE(length, kMaxRtpPacketSize);
1949 MutexLock lock(&lock_);
1950 if (++packets_sent_ < 100)
1951 return SEND_PACKET;
1952 observation_complete_.Set();
1953 return SEND_PACKET;
1954 }
1955
1956 void ModifyVideoConfigs(
1957 VideoSendStream::Config* send_config,
1958 std::vector<VideoReceiveStream::Config>* receive_configs,
1959 VideoEncoderConfig* encoder_config) override {
1960 send_config->rtp.max_packet_size = kMaxRtpPacketSize;
1961 }
1962
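  // Applies increasing transport overheads (100 and then 500 bytes) and
  // verifies that RTP packets keep flowing while never exceeding
  // kMaxRtpPacketSize.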
1963 void PerformTest() override {
1964 SendTask(RTC_FROM_HERE, task_queue_, [this]() {
1965 transport_overhead_ = 100;
1966 call_->GetTransportControllerSend()->OnTransportOverheadChanged(
1967 transport_overhead_);
1968 });
1969
1970 EXPECT_TRUE(Wait());
1971
1972 {
1973 MutexLock lock(&lock_);
1974 packets_sent_ = 0;
1975 }
1976
1977 SendTask(RTC_FROM_HERE, task_queue_, [this]() {
1978 transport_overhead_ = 500;
1979 call_->GetTransportControllerSend()->OnTransportOverheadChanged(
1980 transport_overhead_);
1981 });
1982
1983 EXPECT_TRUE(Wait());
1984 }
1985
1986 private:
1987 TaskQueueBase* const task_queue_;
1988 Call* call_;
1989 Mutex lock_;
1990 int packets_sent_ RTC_GUARDED_BY(lock_);
1991 int transport_overhead_;
1992 const size_t kMaxRtpPacketSize = 1000;
1993 } test(task_queue());
1994
1995 RunBaseTest(&test);
1996 }
1997
1998 // The test class takes as arguments a switch selecting whether a content type
1999 // switch should occur and a function pointer used to reset the send stream.
2000 // This is necessary since the content type of a VideoSendStream cannot be
2001 // changed; the stream has to be recreated. Stopping and recreating the stream
2002 // can only be done on the main thread in VideoSendStreamTest (not BaseTest).
2003 template <typename T>
2004 class MaxPaddingSetTest : public test::SendTest {
2005 public:
2006 static const uint32_t kMinTransmitBitrateBps = 400000;
2007 static const uint32_t kActualEncodeBitrateBps = 40000;
2008 static const uint32_t kMinPacketsToSend = 50;
2009
2010 MaxPaddingSetTest(bool test_switch_content_type,
2011 T* stream_reset_fun,
2012 TaskQueueBase* task_queue)
2013 : SendTest(test::CallTest::kDefaultTimeoutMs),
2014 running_without_padding_(test_switch_content_type),
2015 stream_resetter_(stream_reset_fun),
2016 task_queue_(task_queue) {
2017 RTC_DCHECK(stream_resetter_);
2018 module_process_thread_.Detach();
2019 task_queue_thread_.Detach();
2020 }
2021
2022 ~MaxPaddingSetTest() {
2023 // Block until all already posted tasks run to avoid 'use after free'
2024 // when such a task accesses |this|.
2025 SendTask(RTC_FROM_HERE, task_queue_, [] {});
2026 }
2027
2028 void ModifyVideoConfigs(
2029 VideoSendStream::Config* send_config,
2030 std::vector<VideoReceiveStream::Config>* receive_configs,
2031 VideoEncoderConfig* encoder_config) override {
2032 RTC_DCHECK_RUN_ON(&task_queue_thread_);
2033 RTC_DCHECK_EQ(1, encoder_config->number_of_streams);
2034 if (running_without_padding_) {
2035 encoder_config->min_transmit_bitrate_bps = 0;
2036 encoder_config->content_type =
2037 VideoEncoderConfig::ContentType::kRealtimeVideo;
2038 } else {
2039 encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps;
2040 encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
2041 }
2042 send_stream_config_ = send_config->Copy();
2043 encoder_config_ = encoder_config->Copy();
2044 }
2045
2046 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
2047 RTC_DCHECK_RUN_ON(&task_queue_thread_);
2048 RTC_DCHECK(task_queue_->IsCurrent());
2049 RTC_DCHECK(!call_);
2050 RTC_DCHECK(sender_call);
2051 call_ = sender_call;
2052 }
2053
2054 // Called on the pacer thread.
2055 Action OnSendRtp(const uint8_t* packet, size_t length) override {
2056 RTC_DCHECK_RUN_ON(&module_process_thread_);
2057
2058 // Check the stats on the correct thread and signal the 'complete' flag
2059 // once we detect that we're done.
2060
2061 task_queue_->PostTask(ToQueuedTask([this]() {
2062 RTC_DCHECK_RUN_ON(&task_queue_thread_);
2063 // In case we get a callback during teardown.
2064 // When this happens, OnStreamsStopped() has been called already,
2065 // |call_| is null and the streams are being torn down.
2066 if (!call_)
2067 return;
2068
2069 ++packets_sent_;
2070
2071 Call::Stats stats = call_->GetStats();
2072 if (running_without_padding_) {
2073 EXPECT_EQ(0, stats.max_padding_bitrate_bps);
2074
2075 // Wait until at least kMinPacketsToSend packets have been sent, so
2076 // that we have reliable data.
2077 if (packets_sent_ < kMinPacketsToSend)
2078 return;
2079
2080 // We've sent kMinPacketsToSend packets with default configuration,
2081 // switch to enabling screen content and setting min transmit bitrate.
2082 // Note that we need to recreate the stream if changing content type.
2083 packets_sent_ = 0;
2084
2085 encoder_config_.min_transmit_bitrate_bps = kMinTransmitBitrateBps;
2086 encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
2087
2088 running_without_padding_ = false;
2089 (*stream_resetter_)(send_stream_config_, encoder_config_);
2090 } else {
2091 // Make sure the pacer has been configured with a min transmit bitrate.
2092 if (stats.max_padding_bitrate_bps > 0) {
2093 observation_complete_.Set();
2094 }
2095 }
2096 }));
2097
2098 return SEND_PACKET;
2099 }
2100
2101 // Called on |task_queue_|
2102 void OnStreamsStopped() override {
2103 RTC_DCHECK_RUN_ON(&task_queue_thread_);
2104 RTC_DCHECK(task_queue_->IsCurrent());
2105 call_ = nullptr;
2106 }
2107
2108 void PerformTest() override {
2109 ASSERT_TRUE(Wait()) << "Timed out waiting for a valid padding bitrate.";
2110 }
2111
2112 private:
2113 webrtc::SequenceChecker task_queue_thread_;
2114 Call* call_ RTC_GUARDED_BY(task_queue_thread_) = nullptr;
2115 VideoSendStream::Config send_stream_config_{nullptr};
2116 VideoEncoderConfig encoder_config_;
2117 webrtc::SequenceChecker module_process_thread_;
2118 uint32_t packets_sent_ RTC_GUARDED_BY(task_queue_thread_) = 0;
2119 bool running_without_padding_ RTC_GUARDED_BY(task_queue_thread_);
2120 T* const stream_resetter_;
2121 TaskQueueBase* const task_queue_;
2122 };
2123
2124 TEST_F(VideoSendStreamTest, RespectsMinTransmitBitrate) {
2125 auto reset_fun = [](const VideoSendStream::Config& send_stream_config,
2126 const VideoEncoderConfig& encoder_config) {};
2127 MaxPaddingSetTest<decltype(reset_fun)> test(false, &reset_fun, task_queue());
2128 RunBaseTest(&test);
2129 }
2130
2131 TEST_F(VideoSendStreamTest, RespectsMinTransmitBitrateAfterContentSwitch) {
2132 // Function for removing and recreating the send stream with a new config.
2133 auto reset_fun = [this](const VideoSendStream::Config& send_stream_config,
2134 const VideoEncoderConfig& encoder_config) {
2135 RTC_DCHECK(task_queue()->IsCurrent());
2136 Stop();
2137 DestroyVideoSendStreams();
2138 SetVideoSendConfig(send_stream_config);
2139 SetVideoEncoderConfig(encoder_config);
2140 CreateVideoSendStreams();
2141 SetVideoDegradation(DegradationPreference::MAINTAIN_RESOLUTION);
2142 Start();
2143 };
2144 MaxPaddingSetTest<decltype(reset_fun)> test(true, &reset_fun, task_queue());
2145 RunBaseTest(&test);
2146 }
2147
2148 // This test verifies that new frame sizes reconfigure the encoder even though
2149 // the stream is not (yet) sending. The purpose is to permit encoding as quickly
2150 // as possible once we start sending. The frames being input are likely from the
2151 // same source that will be sent later, which simply means that we are ready
2152 // earlier.
2153 TEST_F(VideoSendStreamTest,
2154 EncoderReconfigureOnResolutionChangeWhenNotSending) {
2155 class EncoderObserver : public test::FakeEncoder {
2156 public:
2157 EncoderObserver()
2158 : FakeEncoder(Clock::GetRealTimeClock()),
2159 number_of_initializations_(0),
2160 last_initialized_frame_width_(0),
2161 last_initialized_frame_height_(0) {}
2162
2163 void WaitForResolution(int width, int height) {
2164 {
2165 MutexLock lock(&mutex_);
2166 if (last_initialized_frame_width_ == width &&
2167 last_initialized_frame_height_ == height) {
2168 return;
2169 }
2170 }
2171 EXPECT_TRUE(
2172 init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
2173 {
2174 MutexLock lock(&mutex_);
2175 EXPECT_EQ(width, last_initialized_frame_width_);
2176 EXPECT_EQ(height, last_initialized_frame_height_);
2177 }
2178 }
2179
2180 private:
2181 int32_t InitEncode(const VideoCodec* config,
2182 const Settings& settings) override {
2183 MutexLock lock(&mutex_);
2184 last_initialized_frame_width_ = config->width;
2185 last_initialized_frame_height_ = config->height;
2186 ++number_of_initializations_;
2187 init_encode_called_.Set();
2188 return FakeEncoder::InitEncode(config, settings);
2189 }
2190
2191 int32_t Encode(const VideoFrame& input_image,
2192 const std::vector<VideoFrameType>* frame_types) override {
2193 ADD_FAILURE()
2194 << "Unexpected Encode call since the send stream is not started";
2195 return 0;
2196 }
2197
2198 Mutex mutex_;
2199 rtc::Event init_encode_called_;
2200 size_t number_of_initializations_ RTC_GUARDED_BY(&mutex_);
2201 int last_initialized_frame_width_ RTC_GUARDED_BY(&mutex_);
2202 int last_initialized_frame_height_ RTC_GUARDED_BY(&mutex_);
2203 };
2204
2205 test::NullTransport transport;
2206 EncoderObserver encoder;
2207 test::VideoEncoderProxyFactory encoder_factory(&encoder);
2208
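  // Note that the send stream is created but never started, so incoming frames
  // should only trigger encoder (re)initialization, never Encode() calls.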
2209 SendTask(RTC_FROM_HERE, task_queue(), [this, &transport, &encoder_factory]() {
2210 CreateSenderCall();
2211 CreateSendConfig(1, 0, 0, &transport);
2212 GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory;
2213 CreateVideoStreams();
2214 CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
2215 kDefaultHeight);
2216 frame_generator_capturer_->Start();
2217 });
2218
2219 encoder.WaitForResolution(kDefaultWidth, kDefaultHeight);
2220
2221 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2222 frame_generator_capturer_->ChangeResolution(kDefaultWidth * 2,
2223 kDefaultHeight * 2);
2224 });
2225
2226 encoder.WaitForResolution(kDefaultWidth * 2, kDefaultHeight * 2);
2227
2228 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2229 DestroyStreams();
2230 DestroyCalls();
2231 });
2232 }
2233
2234 TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
2235 class StartBitrateObserver : public test::FakeEncoder {
2236 public:
2237 StartBitrateObserver()
2238 : FakeEncoder(Clock::GetRealTimeClock()), start_bitrate_kbps_(0) {}
2239 int32_t InitEncode(const VideoCodec* config,
2240 const Settings& settings) override {
2241 MutexLock lock(&mutex_);
2242 start_bitrate_kbps_ = config->startBitrate;
2243 start_bitrate_changed_.Set();
2244 return FakeEncoder::InitEncode(config, settings);
2245 }
2246
2247 void SetRates(const RateControlParameters& parameters) override {
2248 MutexLock lock(&mutex_);
2249 start_bitrate_kbps_ = parameters.bitrate.get_sum_kbps();
2250 start_bitrate_changed_.Set();
2251 FakeEncoder::SetRates(parameters);
2252 }
2253
2254 int GetStartBitrateKbps() const {
2255 MutexLock lock(&mutex_);
2256 return start_bitrate_kbps_;
2257 }
2258
2259 bool WaitForStartBitrate() {
2260 return start_bitrate_changed_.Wait(
2261 VideoSendStreamTest::kDefaultTimeoutMs);
2262 }
2263
2264 private:
2265 mutable Mutex mutex_;
2266 rtc::Event start_bitrate_changed_;
2267 int start_bitrate_kbps_ RTC_GUARDED_BY(mutex_);
2268 };
2269
2270 CreateSenderCall();
2271
2272 test::NullTransport transport;
2273 CreateSendConfig(1, 0, 0, &transport);
2274
2275 BitrateConstraints bitrate_config;
2276 bitrate_config.start_bitrate_bps =
2277 2 * GetVideoEncoderConfig()->max_bitrate_bps;
2278 sender_call_->GetTransportControllerSend()->SetSdpBitrateParameters(
2279 bitrate_config);
2280
2281 StartBitrateObserver encoder;
2282 test::VideoEncoderProxyFactory encoder_factory(&encoder);
2283 GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory;
2284
2285 CreateVideoStreams();
2286
2287 // Start capturing and encoding frames to force encoder reconfiguration.
2288 CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
2289 kDefaultHeight);
2290 frame_generator_capturer_->Start();
2291
2292 EXPECT_TRUE(encoder.WaitForStartBitrate());
2293 EXPECT_EQ(GetVideoEncoderConfig()->max_bitrate_bps / 1000,
2294 encoder.GetStartBitrateKbps());
2295
2296 GetVideoEncoderConfig()->max_bitrate_bps =
2297 2 * bitrate_config.start_bitrate_bps;
2298 GetVideoSendStream()->ReconfigureVideoEncoder(
2299 GetVideoEncoderConfig()->Copy());
2300
2301 // New bitrate should be reconfigured above the previous max. As there's no
2302 // network connection this shouldn't be flaky, as no bitrate should've been
2303 // reported in between.
2304 EXPECT_TRUE(encoder.WaitForStartBitrate());
2305 EXPECT_EQ(bitrate_config.start_bitrate_bps / 1000,
2306 encoder.GetStartBitrateKbps());
2307
2308 DestroyStreams();
2309 }
2310
2311 class StartStopBitrateObserver : public test::FakeEncoder {
2312 public:
2313 StartStopBitrateObserver() : FakeEncoder(Clock::GetRealTimeClock()) {}
2314 int32_t InitEncode(const VideoCodec* config,
2315 const Settings& settings) override {
2316 MutexLock lock(&mutex_);
2317 encoder_init_.Set();
2318 return FakeEncoder::InitEncode(config, settings);
2319 }
2320
2321 void SetRates(const RateControlParameters& parameters) override {
2322 MutexLock lock(&mutex_);
2323 bitrate_kbps_ = parameters.bitrate.get_sum_kbps();
2324 bitrate_changed_.Set();
2325 FakeEncoder::SetRates(parameters);
2326 }
2327
2328 bool WaitForEncoderInit() {
2329 return encoder_init_.Wait(VideoSendStreamTest::kDefaultTimeoutMs);
2330 }
2331
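  // Waits until SetRates() reports a bitrate matching the requested condition
  // (non-zero or zero); returns false if the wait times out first.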
2332 bool WaitBitrateChanged(bool non_zero) {
2333 do {
2334 absl::optional<int> bitrate_kbps;
2335 {
2336 MutexLock lock(&mutex_);
2337 bitrate_kbps = bitrate_kbps_;
2338 }
2339 if (!bitrate_kbps)
2340 continue;
2341
2342 if ((non_zero && *bitrate_kbps > 0) ||
2343 (!non_zero && *bitrate_kbps == 0)) {
2344 return true;
2345 }
2346 } while (bitrate_changed_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
2347 return false;
2348 }
2349
2350 private:
2351 Mutex mutex_;
2352 rtc::Event encoder_init_;
2353 rtc::Event bitrate_changed_;
2354 absl::optional<int> bitrate_kbps_ RTC_GUARDED_BY(mutex_);
2355 };
2356
2357 // This test verifies that if the encoder uses an internal source,
2358 // VideoSendStream::Stop triggers VideoEncoder::SetRates to be called with a
2359 // zero bitrate, and that VideoSendStream::Start results in a non-zero bitrate
2360 // being set again.
2361 TEST_F(VideoSendStreamTest, VideoSendStreamStopSetEncoderRateToZero) {
2362 test::NullTransport transport;
2363 StartStopBitrateObserver encoder;
2364 test::VideoEncoderProxyFactory encoder_factory(&encoder);
2365 encoder_factory.SetHasInternalSource(true);
2366 test::FrameForwarder forwarder;
2367
2368 SendTask(RTC_FROM_HERE, task_queue(),
2369 [this, &transport, &encoder_factory, &forwarder]() {
2370 CreateSenderCall();
2371 CreateSendConfig(1, 0, 0, &transport);
2372
2373 sender_call_->SignalChannelNetworkState(MediaType::VIDEO,
2374 kNetworkUp);
2375 GetVideoSendConfig()->encoder_settings.encoder_factory =
2376 &encoder_factory;
2377
2378 CreateVideoStreams();
2379 // Inject a frame, to force encoder creation.
2380 GetVideoSendStream()->Start();
2381 GetVideoSendStream()->SetSource(&forwarder,
2382 DegradationPreference::DISABLED);
2383 forwarder.IncomingCapturedFrame(CreateVideoFrame(640, 480, 4));
2384 });
2385
2386 EXPECT_TRUE(encoder.WaitForEncoderInit());
2387
2388 SendTask(RTC_FROM_HERE, task_queue(),
2389 [this]() { GetVideoSendStream()->Start(); });
2390 EXPECT_TRUE(encoder.WaitBitrateChanged(true));
2391
2392 SendTask(RTC_FROM_HERE, task_queue(),
2393 [this]() { GetVideoSendStream()->Stop(); });
2394 EXPECT_TRUE(encoder.WaitBitrateChanged(false));
2395
2396 SendTask(RTC_FROM_HERE, task_queue(),
2397 [this]() { GetVideoSendStream()->Start(); });
2398 EXPECT_TRUE(encoder.WaitBitrateChanged(true));
2399
2400 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2401 DestroyStreams();
2402 DestroyCalls();
2403 });
2404 }
2405
2406 // Tests that when the encoder uses an internal source, the VideoEncoder will
2407 // be updated with a new bitrate when turning the VideoSendStream on/off with
2408 // VideoSendStream::UpdateActiveSimulcastLayers, and when the VideoStreamEncoder
2409 // is reconfigured with new active layers.
2410 TEST_F(VideoSendStreamTest, VideoSendStreamUpdateActiveSimulcastLayers) {
2411 test::NullTransport transport;
2412 StartStopBitrateObserver encoder;
2413 test::VideoEncoderProxyFactory encoder_factory(&encoder);
2414 encoder_factory.SetHasInternalSource(true);
2415 test::FrameForwarder forwarder;
2416
2417 SendTask(RTC_FROM_HERE, task_queue(),
2418 [this, &transport, &encoder_factory, &forwarder]() {
2419 CreateSenderCall();
2420 // Create two simulcast streams.
2421 CreateSendConfig(2, 0, 0, &transport);
2422
2423 sender_call_->SignalChannelNetworkState(MediaType::VIDEO,
2424 kNetworkUp);
2425 GetVideoSendConfig()->encoder_settings.encoder_factory =
2426 &encoder_factory;
2427
2428 CreateVideoStreams();
2429
2430 // Inject a frame, to force encoder creation.
2431 GetVideoSendStream()->Start();
2432 GetVideoSendStream()->SetSource(&forwarder,
2433 DegradationPreference::DISABLED);
2434 forwarder.IncomingCapturedFrame(CreateVideoFrame(640, 480, 4));
2435 });
2436
2437 EXPECT_TRUE(encoder.WaitForEncoderInit());
2438
2439 // When we turn on the simulcast layers it will update the BitrateAllocator,
2440 // which in turn updates the VideoEncoder's bitrate.
2441 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2442 GetVideoSendStream()->UpdateActiveSimulcastLayers({true, true});
2443 });
2444 EXPECT_TRUE(encoder.WaitBitrateChanged(true));
2445
2446 GetVideoEncoderConfig()->simulcast_layers[0].active = true;
2447 GetVideoEncoderConfig()->simulcast_layers[1].active = false;
2448 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2449 GetVideoSendStream()->ReconfigureVideoEncoder(
2450 GetVideoEncoderConfig()->Copy());
2451 });
2452 // TODO(bugs.webrtc.org/8807): Currently we require a hard reconfiguration to
2453 // update the VideoBitrateAllocator and BitrateAllocator of which layers are
2454 // active. Once the change is made for a "soft" reconfiguration we can remove
2455 // the expectation for an encoder init. We can also test that bitrate changes
2456 // when just updating individual active layers, which should change the
2457 // bitrate set to the video encoder.
2458 EXPECT_TRUE(encoder.WaitForEncoderInit());
2459 EXPECT_TRUE(encoder.WaitBitrateChanged(true));
2460
2461 // Turning off both simulcast layers should trigger a bitrate change of 0.
2462 GetVideoEncoderConfig()->simulcast_layers[0].active = false;
2463 GetVideoEncoderConfig()->simulcast_layers[1].active = false;
2464 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2465 GetVideoSendStream()->UpdateActiveSimulcastLayers({false, false});
2466 });
2467 EXPECT_TRUE(encoder.WaitBitrateChanged(false));
2468
2469 SendTask(RTC_FROM_HERE, task_queue(), [this]() {
2470 DestroyStreams();
2471 DestroyCalls();
2472 });
2473 }
2474
2475 TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
2476 class EncoderStateObserver : public test::SendTest, public VideoEncoder {
2477 public:
2478 explicit EncoderStateObserver(TaskQueueBase* task_queue)
2479 : SendTest(kDefaultTimeoutMs),
2480 task_queue_(task_queue),
2481 stream_(nullptr),
2482 initialized_(false),
2483 callback_registered_(false),
2484 num_releases_(0),
2485 released_(false),
2486 encoder_factory_(this) {}
2487
2488 bool IsReleased() RTC_LOCKS_EXCLUDED(mutex_) {
2489 MutexLock lock(&mutex_);
2490 return released_;
2491 }
2492
2493 bool IsReadyForEncode() RTC_LOCKS_EXCLUDED(mutex_) {
2494 MutexLock lock(&mutex_);
2495 return IsReadyForEncodeLocked();
2496 }
2497
2498 size_t num_releases() RTC_LOCKS_EXCLUDED(mutex_) {
2499 MutexLock lock(&mutex_);
2500 return num_releases_;
2501 }
2502
2503 private:
2504 bool IsReadyForEncodeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
2505 return initialized_ && callback_registered_;
2506 }
2507
2508 void SetFecControllerOverride(
2509 FecControllerOverride* fec_controller_override) override {
2510 // Ignored.
2511 }
2512
2513 int32_t InitEncode(const VideoCodec* codecSettings,
2514 const Settings& settings) override
2515 RTC_LOCKS_EXCLUDED(mutex_) {
2516 MutexLock lock(&mutex_);
2517 EXPECT_FALSE(initialized_);
2518 initialized_ = true;
2519 released_ = false;
2520 return 0;
2521 }
2522
2523 int32_t Encode(const VideoFrame& inputImage,
2524 const std::vector<VideoFrameType>* frame_types) override {
2525 EXPECT_TRUE(IsReadyForEncode());
2526
2527 observation_complete_.Set();
2528 return 0;
2529 }
2530
2531 int32_t RegisterEncodeCompleteCallback(
2532 EncodedImageCallback* callback) override RTC_LOCKS_EXCLUDED(mutex_) {
2533 MutexLock lock(&mutex_);
2534 EXPECT_TRUE(initialized_);
2535 callback_registered_ = true;
2536 return 0;
2537 }
2538
2539 int32_t Release() override RTC_LOCKS_EXCLUDED(mutex_) {
2540 MutexLock lock(&mutex_);
2541 EXPECT_TRUE(IsReadyForEncodeLocked());
2542 EXPECT_FALSE(released_);
2543 initialized_ = false;
2544 callback_registered_ = false;
2545 released_ = true;
2546 ++num_releases_;
2547 return 0;
2548 }
2549
2550 void SetRates(const RateControlParameters& parameters) override {
2551 EXPECT_TRUE(IsReadyForEncode());
2552 }
2553
2554 void OnVideoStreamsCreated(
2555 VideoSendStream* send_stream,
2556 const std::vector<VideoReceiveStream*>& receive_streams) override {
2557 stream_ = send_stream;
2558 }
2559
2560 void ModifyVideoConfigs(
2561 VideoSendStream::Config* send_config,
2562 std::vector<VideoReceiveStream::Config>* receive_configs,
2563 VideoEncoderConfig* encoder_config) override {
2564 send_config->encoder_settings.encoder_factory = &encoder_factory_;
2565 encoder_config_ = encoder_config->Copy();
2566 }
2567
2568 void PerformTest() override {
2569 EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
2570
2571 SendTask(RTC_FROM_HERE, task_queue_, [this]() {
2572 EXPECT_EQ(0u, num_releases());
2573 stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
2574 EXPECT_EQ(0u, num_releases());
2575 stream_->Stop();
2576 // Encoder should not be released before destroying the VideoSendStream.
2577 EXPECT_FALSE(IsReleased());
2578 EXPECT_TRUE(IsReadyForEncode());
2579 stream_->Start();
2580 });
2581
2582 // Sanity check, make sure we still encode frames with this encoder.
2583 EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
2584 }
2585
2586 TaskQueueBase* const task_queue_;
2587 Mutex mutex_;
2588 VideoSendStream* stream_;
2589 bool initialized_ RTC_GUARDED_BY(mutex_);
2590 bool callback_registered_ RTC_GUARDED_BY(mutex_);
2591 size_t num_releases_ RTC_GUARDED_BY(mutex_);
2592 bool released_ RTC_GUARDED_BY(mutex_);
2593 test::VideoEncoderProxyFactory encoder_factory_;
2594 VideoEncoderConfig encoder_config_;
2595 } test_encoder(task_queue());
2596
2597 RunBaseTest(&test_encoder);
2598
2599 EXPECT_TRUE(test_encoder.IsReleased());
2600 EXPECT_EQ(1u, test_encoder.num_releases());
2601 }
2602
2603 static const size_t kVideoCodecConfigObserverNumberOfTemporalLayers = 3;
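// Verifies that codec-specific settings (here, the number of temporal layers)
// propagate from the VideoEncoderConfig to the VideoCodec handed to the
// encoder, both on initial configuration and after ReconfigureVideoEncoder.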
2604 template <typename T>
2605 class VideoCodecConfigObserver : public test::SendTest,
2606 public test::FakeEncoder {
2607 public:
2608 VideoCodecConfigObserver(VideoCodecType video_codec_type,
2609 const char* codec_name)
2610 : SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
2611 FakeEncoder(Clock::GetRealTimeClock()),
2612 video_codec_type_(video_codec_type),
2613 codec_name_(codec_name),
2614 num_initializations_(0),
2615 stream_(nullptr),
2616 encoder_factory_(this) {
2617 InitCodecSpecifics();
2618 }
2619
2620 private:
2621 void ModifyVideoConfigs(
2622 VideoSendStream::Config* send_config,
2623 std::vector<VideoReceiveStream::Config>* receive_configs,
2624 VideoEncoderConfig* encoder_config) override {
2625 send_config->encoder_settings.encoder_factory = &encoder_factory_;
2626 send_config->rtp.payload_name = codec_name_;
2627
2628 encoder_config->codec_type = video_codec_type_;
2629 encoder_config->encoder_specific_settings = GetEncoderSpecificSettings();
2630 EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
2631 encoder_config->simulcast_layers[0].num_temporal_layers =
2632 kVideoCodecConfigObserverNumberOfTemporalLayers;
2633 encoder_config_ = encoder_config->Copy();
2634 }
2635
2636 void OnVideoStreamsCreated(
2637 VideoSendStream* send_stream,
2638 const std::vector<VideoReceiveStream*>& receive_streams) override {
2639 stream_ = send_stream;
2640 }
2641
2642 int32_t InitEncode(const VideoCodec* config,
2643 const Settings& settings) override {
2644 EXPECT_EQ(video_codec_type_, config->codecType);
2645 VerifyCodecSpecifics(*config);
2646 ++num_initializations_;
2647 init_encode_event_.Set();
2648 return FakeEncoder::InitEncode(config, settings);
2649 }
2650
2651 void InitCodecSpecifics();
2652 void VerifyCodecSpecifics(const VideoCodec& config) const;
2653 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
2654 GetEncoderSpecificSettings() const;
2655
2656 void PerformTest() override {
2657 EXPECT_TRUE(
2658 init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
2659 ASSERT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
2660
2661 // Change encoder settings to actually trigger reconfiguration.
2662 encoder_settings_.frameDroppingOn = !encoder_settings_.frameDroppingOn;
2663 encoder_config_.encoder_specific_settings = GetEncoderSpecificSettings();
2664 stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
2665 ASSERT_TRUE(
2666 init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
2667 EXPECT_EQ(2u, num_initializations_)
2668 << "ReconfigureVideoEncoder did not reinitialize the encoder with "
2669 "new encoder settings.";
2670 }
2671
2672 int32_t Encode(const VideoFrame& input_image,
2673 const std::vector<VideoFrameType>* frame_types) override {
2674 // Silently skip the encode, FakeEncoder::Encode doesn't produce VP8.
2675 return 0;
2676 }
2677
2678 T encoder_settings_;
2679 const VideoCodecType video_codec_type_;
2680 const char* const codec_name_;
2681 rtc::Event init_encode_event_;
2682 size_t num_initializations_;
2683 VideoSendStream* stream_;
2684 test::VideoEncoderProxyFactory encoder_factory_;
2685 VideoEncoderConfig encoder_config_;
2686 };
2687
2688 template <>
2689 void VideoCodecConfigObserver<VideoCodecH264>::InitCodecSpecifics() {
2690 encoder_settings_ = VideoEncoder::GetDefaultH264Settings();
2691 }
2692
2693 template <>
2694 void VideoCodecConfigObserver<VideoCodecH264>::VerifyCodecSpecifics(
2695 const VideoCodec& config) const {
2696 // Check that the number of temporal layers has propagated properly to
2697 // VideoCodec.
2698 EXPECT_EQ(kVideoCodecConfigObserverNumberOfTemporalLayers,
2699 config.H264().numberOfTemporalLayers);
2700
2701 for (unsigned char i = 0; i < config.numberOfSimulcastStreams; ++i) {
2702 EXPECT_EQ(kVideoCodecConfigObserverNumberOfTemporalLayers,
2703 config.simulcastStream[i].numberOfTemporalLayers);
2704 }
2705
2706 // Build the expected settings with the temporal layer count that should have
2707 // been applied when reconfiguring the encoder (unlike the stored settings).
2708 VideoCodecH264 encoder_settings = encoder_settings_;
2709 encoder_settings.numberOfTemporalLayers =
2710 kVideoCodecConfigObserverNumberOfTemporalLayers;
2711 EXPECT_EQ(
2712 0, memcmp(&config.H264(), &encoder_settings, sizeof(encoder_settings_)));
2713 }
2714
2715 template <>
2716 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
2717 VideoCodecConfigObserver<VideoCodecH264>::GetEncoderSpecificSettings() const {
2718 return new rtc::RefCountedObject<
2719 VideoEncoderConfig::H264EncoderSpecificSettings>(encoder_settings_);
2720 }
2721
2722 template <>
2723 void VideoCodecConfigObserver<VideoCodecVP8>::InitCodecSpecifics() {
2724 encoder_settings_ = VideoEncoder::GetDefaultVp8Settings();
2725 }
2726
2727 template <>
2728 void VideoCodecConfigObserver<VideoCodecVP8>::VerifyCodecSpecifics(
2729 const VideoCodec& config) const {
2730 // Check that the number of temporal layers has propagated properly to
2731 // VideoCodec.
2732 EXPECT_EQ(kVideoCodecConfigObserverNumberOfTemporalLayers,
2733 config.VP8().numberOfTemporalLayers);
2734
2735 for (unsigned char i = 0; i < config.numberOfSimulcastStreams; ++i) {
2736 EXPECT_EQ(kVideoCodecConfigObserverNumberOfTemporalLayers,
2737 config.simulcastStream[i].numberOfTemporalLayers);
2738 }
2739
2740 // Build the expected settings with the temporal layer count that should have
2741 // been applied when reconfiguring the encoder (unlike the stored settings).
2742 VideoCodecVP8 encoder_settings = encoder_settings_;
2743 encoder_settings.numberOfTemporalLayers =
2744 kVideoCodecConfigObserverNumberOfTemporalLayers;
2745 EXPECT_EQ(
2746 0, memcmp(&config.VP8(), &encoder_settings, sizeof(encoder_settings_)));
2747 }
2748
2749 template <>
2750 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
2751 VideoCodecConfigObserver<VideoCodecVP8>::GetEncoderSpecificSettings() const {
2752 return new rtc::RefCountedObject<
2753 VideoEncoderConfig::Vp8EncoderSpecificSettings>(encoder_settings_);
2754 }
2755
2756 template <>
2757 void VideoCodecConfigObserver<VideoCodecVP9>::InitCodecSpecifics() {
2758 encoder_settings_ = VideoEncoder::GetDefaultVp9Settings();
2759 }
2760
2761 template <>
2762 void VideoCodecConfigObserver<VideoCodecVP9>::VerifyCodecSpecifics(
2763 const VideoCodec& config) const {
2764 // Check that the number of temporal layers has propagated properly to
2765 // VideoCodec.
2766 EXPECT_EQ(kVideoCodecConfigObserverNumberOfTemporalLayers,
2767 config.VP9().numberOfTemporalLayers);
2768
2769 for (unsigned char i = 0; i < config.numberOfSimulcastStreams; ++i) {
2770 EXPECT_EQ(kVideoCodecConfigObserverNumberOfTemporalLayers,
2771 config.simulcastStream[i].numberOfTemporalLayers);
2772 }
2773
2774 // Build the expected settings with the temporal layer count that should have
2775 // been applied when reconfiguring the encoder (unlike the stored settings).
2776 VideoCodecVP9 encoder_settings = encoder_settings_;
2777 encoder_settings.numberOfTemporalLayers =
2778 kVideoCodecConfigObserverNumberOfTemporalLayers;
2779 EXPECT_EQ(
2780 0, memcmp(&(config.VP9()), &encoder_settings, sizeof(encoder_settings_)));
2781 }
2782
2783 template <>
2784 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
2785 VideoCodecConfigObserver<VideoCodecVP9>::GetEncoderSpecificSettings() const {
2786 return new rtc::RefCountedObject<
2787 VideoEncoderConfig::Vp9EncoderSpecificSettings>(encoder_settings_);
2788 }
2789
2790 TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) {
2791 VideoCodecConfigObserver<VideoCodecVP8> test(kVideoCodecVP8, "VP8");
2792 RunBaseTest(&test);
2793 }
2794
2795 TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) {
2796 VideoCodecConfigObserver<VideoCodecVP9> test(kVideoCodecVP9, "VP9");
2797 RunBaseTest(&test);
2798 }
2799
2800 // Fails on MSAN: https://bugs.chromium.org/p/webrtc/issues/detail?id=11376.
2801 #if defined(MEMORY_SANITIZER)
2802 #define MAYBE_EncoderSetupPropagatesH264Config \
2803 DISABLED_EncoderSetupPropagatesH264Config
2804 #else
2805 #define MAYBE_EncoderSetupPropagatesH264Config EncoderSetupPropagatesH264Config
2806 #endif
2807 TEST_F(VideoSendStreamTest, MAYBE_EncoderSetupPropagatesH264Config) {
2808 VideoCodecConfigObserver<VideoCodecH264> test(kVideoCodecH264, "H264");
2809 RunBaseTest(&test);
2810 }
2811
2812 TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
2813 class RtcpSenderReportTest : public test::SendTest {
2814 public:
2815 RtcpSenderReportTest()
2816 : SendTest(kDefaultTimeoutMs),
2817 rtp_packets_sent_(0),
2818 media_bytes_sent_(0) {}
2819
2820 private:
2821 Action OnSendRtp(const uint8_t* packet, size_t length) override {
2822 MutexLock lock(&mutex_);
2823 RtpPacket rtp_packet;
2824 EXPECT_TRUE(rtp_packet.Parse(packet, length));
2825 ++rtp_packets_sent_;
2826 media_bytes_sent_ += rtp_packet.payload_size();
2827 return SEND_PACKET;
2828 }
2829
2830 Action OnSendRtcp(const uint8_t* packet, size_t length) override {
2831 MutexLock lock(&mutex_);
2832 test::RtcpPacketParser parser;
2833 EXPECT_TRUE(parser.Parse(packet, length));
2834
2835 if (parser.sender_report()->num_packets() > 0) {
2836 // Only compare sent media bytes if the sender packet count matches the
2837 // number of sent RTP packets (a new RTP packet could be sent before
2838 // the RTCP packet).
2839 if (parser.sender_report()->sender_octet_count() > 0 &&
2840 parser.sender_report()->sender_packet_count() ==
2841 rtp_packets_sent_) {
2842 EXPECT_EQ(media_bytes_sent_,
2843 parser.sender_report()->sender_octet_count());
2844 observation_complete_.Set();
2845 }
2846 }
2847
2848 return SEND_PACKET;
2849 }
2850
2851 void PerformTest() override {
2852 EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP sender report.";
2853 }
2854
2855 Mutex mutex_;
2856 size_t rtp_packets_sent_ RTC_GUARDED_BY(&mutex_);
2857 size_t media_bytes_sent_ RTC_GUARDED_BY(&mutex_);
2858 } test;
2859
2860 RunBaseTest(&test);
2861 }
2862
2863 TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
2864 static const int kScreencastMaxTargetBitrateDeltaKbps = 1;
2865
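  // Stream factory that lowers the target bitrate of the single stream to
  // kScreencastMaxTargetBitrateDeltaKbps below its max bitrate, so InitEncode
  // can later verify the same delta on the simulcast stream.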
2866 class VideoStreamFactory
2867 : public VideoEncoderConfig::VideoStreamFactoryInterface {
2868 public:
2869 VideoStreamFactory() {}
2870
2871 private:
2872 std::vector<VideoStream> CreateEncoderStreams(
2873 int width,
2874 int height,
2875 const VideoEncoderConfig& encoder_config) override {
2876 std::vector<VideoStream> streams =
2877 test::CreateVideoStreams(width, height, encoder_config);
2878 RTC_CHECK_GT(streams[0].max_bitrate_bps,
2879 kScreencastMaxTargetBitrateDeltaKbps);
2880 streams[0].target_bitrate_bps =
2881 streams[0].max_bitrate_bps -
2882 kScreencastMaxTargetBitrateDeltaKbps * 1000;
2883 return streams;
2884 }
2885 };
2886
2887 class ScreencastTargetBitrateTest : public test::SendTest,
2888 public test::FakeEncoder {
2889 public:
2890 ScreencastTargetBitrateTest()
2891 : SendTest(kDefaultTimeoutMs),
2892 test::FakeEncoder(Clock::GetRealTimeClock()),
2893 encoder_factory_(this) {}
2894
2895 private:
2896 int32_t InitEncode(const VideoCodec* config,
2897 const Settings& settings) override {
2898 EXPECT_EQ(config->numberOfSimulcastStreams, 1);
2899 EXPECT_EQ(static_cast<unsigned int>(kScreencastMaxTargetBitrateDeltaKbps),
2900 config->simulcastStream[0].maxBitrate -
2901 config->simulcastStream[0].targetBitrate);
2902 observation_complete_.Set();
2903 return test::FakeEncoder::InitEncode(config, settings);
2904 }
2905
2906 void ModifyVideoConfigs(
2907 VideoSendStream::Config* send_config,
2908 std::vector<VideoReceiveStream::Config>* receive_configs,
2909 VideoEncoderConfig* encoder_config) override {
2910 send_config->encoder_settings.encoder_factory = &encoder_factory_;
2911 EXPECT_EQ(1u, encoder_config->number_of_streams);
2912 encoder_config->video_stream_factory =
2913 new rtc::RefCountedObject<VideoStreamFactory>();
2914 EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
2915 encoder_config->simulcast_layers[0].num_temporal_layers = 2;
2916 encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
2917 }
2918
2919 void PerformTest() override {
2920 EXPECT_TRUE(Wait())
2921 << "Timed out while waiting for the encoder to be initialized.";
2922 }
2923 test::VideoEncoderProxyFactory encoder_factory_;
2924 } test;
2925
2926 RunBaseTest(&test);
2927 }
2928
2929 TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
2930 // These values are chosen to be "kind of odd" so that they are not
2931 // accidentally matched against default values.
2932 static const int kMinBitrateKbps = 137;
2933 static const int kStartBitrateKbps = 345;
2934 static const int kLowerMaxBitrateKbps = 312;
2935 static const int kMaxBitrateKbps = 413;
2936 static const int kIncreasedStartBitrateKbps = 451;
2937 static const int kIncreasedMaxBitrateKbps = 597;
2938 // If these field trials are on, we get lower bitrates than expected by this
2939 // test, due to the packetization overhead and encoder pushback.
2940 webrtc::test::ScopedFieldTrials field_trials(
2941 std::string(field_trial::GetFieldTrialString()) +
2942 "WebRTC-SubtractPacketizationOverhead/Disabled/"
2943 "WebRTC-VideoRateControl/bitrate_adjuster:false/");
2944
2945 class EncoderBitrateThresholdObserver : public test::SendTest,
2946 public VideoBitrateAllocatorFactory,
2947 public test::FakeEncoder {
2948 public:
2949 explicit EncoderBitrateThresholdObserver(TaskQueueBase* task_queue)
2950 : SendTest(kDefaultTimeoutMs),
2951 FakeEncoder(Clock::GetRealTimeClock()),
2952 task_queue_(task_queue),
2953 target_bitrate_(0),
2954 num_rate_allocator_creations_(0),
2955 num_encoder_initializations_(0),
2956 call_(nullptr),
2957 send_stream_(nullptr),
2958 encoder_factory_(this),
2959 bitrate_allocator_factory_(
2960 CreateBuiltinVideoBitrateAllocatorFactory()) {}
2961
2962 private:
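  // Each rate allocator creation corresponds to one (re)configuration of the
  // encoder; the expected min/start/max bitrates differ per configuration and
  // are checked against the constants defined above.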
2963 std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
2964 const VideoCodec& codec) override {
2965 EXPECT_GE(codec.startBitrate, codec.minBitrate);
2966 EXPECT_LE(codec.startBitrate, codec.maxBitrate);
2967 if (num_rate_allocator_creations_ == 0) {
2968 EXPECT_EQ(static_cast<unsigned int>(kMinBitrateKbps), codec.minBitrate);
2969 EXPECT_EQ(static_cast<unsigned int>(kStartBitrateKbps),
2970 codec.startBitrate);
2971 EXPECT_EQ(static_cast<unsigned int>(kMaxBitrateKbps), codec.maxBitrate);
2972 } else if (num_rate_allocator_creations_ == 1) {
2973 EXPECT_EQ(static_cast<unsigned int>(kLowerMaxBitrateKbps),
2974 codec.maxBitrate);
2975 // The start bitrate should be kept (-1) and capped to the max bitrate.
2976 // Since this is not an end-to-end call no receiver should have been
2977 // returning a REMB that could lower this estimate.
2978 EXPECT_EQ(codec.startBitrate, codec.maxBitrate);
2979 } else if (num_rate_allocator_creations_ == 2) {
2980 EXPECT_EQ(static_cast<unsigned int>(kIncreasedMaxBitrateKbps),
2981 codec.maxBitrate);
2982 // The start bitrate will be whatever rate the bitrate controller has
2983 // currently configured, but within the configured min and max bitrates.
2984 }
2985 ++num_rate_allocator_creations_;
2986 create_rate_allocator_event_.Set();
2987
2988 return bitrate_allocator_factory_->CreateVideoBitrateAllocator(codec);
2989 }
2990
2991 int32_t InitEncode(const VideoCodec* codecSettings,
2992 const Settings& settings) override {
2993 EXPECT_EQ(0, num_encoder_initializations_);
2994 EXPECT_EQ(static_cast<unsigned int>(kMinBitrateKbps),
2995 codecSettings->minBitrate);
2996 EXPECT_EQ(static_cast<unsigned int>(kStartBitrateKbps),
2997 codecSettings->startBitrate);
2998 EXPECT_EQ(static_cast<unsigned int>(kMaxBitrateKbps),
2999 codecSettings->maxBitrate);
3000
3001 ++num_encoder_initializations_;
3002
3003 observation_complete_.Set();
3004 init_encode_event_.Set();
3005
3006 return FakeEncoder::InitEncode(codecSettings, settings);
3007 }
3008
3009 void SetRates(const RateControlParameters& parameters) override {
3010 {
3011 MutexLock lock(&mutex_);
3012 if (target_bitrate_ == parameters.bitrate.get_sum_kbps()) {
3013 FakeEncoder::SetRates(parameters);
3014 return;
3015 }
3016 target_bitrate_ = parameters.bitrate.get_sum_kbps();
3017 }
3018 bitrate_changed_event_.Set();
3019 FakeEncoder::SetRates(parameters);
3020 }
3021
3022 void WaitForSetRates(uint32_t expected_bitrate) {
3023 // Wait for the expected rate to be set. In some cases there can be
3024 // more than one update pending, in which case we keep waiting
3025 // until the correct value has been observed.
3026 const int64_t start_time = rtc::TimeMillis();
3027 do {
3028 MutexLock lock(&mutex_);
3029 if (target_bitrate_ == expected_bitrate) {
3030 return;
3031 }
3032 } while (bitrate_changed_event_.Wait(
3033 std::max(int64_t{1}, VideoSendStreamTest::kDefaultTimeoutMs -
3034 (rtc::TimeMillis() - start_time))));
3035 MutexLock lock(&mutex_);
3036 EXPECT_EQ(target_bitrate_, expected_bitrate)
3037 << "Timed out while waiting encoder rate to be set.";
3038 }
3039
3040 void ModifySenderBitrateConfig(
3041 BitrateConstraints* bitrate_config) override {
3042 bitrate_config->min_bitrate_bps = kMinBitrateKbps * 1000;
3043 bitrate_config->start_bitrate_bps = kStartBitrateKbps * 1000;
3044 bitrate_config->max_bitrate_bps = kMaxBitrateKbps * 1000;
3045 }
3046
3047 void ModifyVideoConfigs(
3048 VideoSendStream::Config* send_config,
3049 std::vector<VideoReceiveStream::Config>* receive_configs,
3050 VideoEncoderConfig* encoder_config) override {
3051 send_config->encoder_settings.encoder_factory = &encoder_factory_;
3052 send_config->encoder_settings.bitrate_allocator_factory = this;
3053 // Set bitrates lower/higher than min/max to make sure they are properly
3054 // capped.
3055 encoder_config->max_bitrate_bps = kMaxBitrateKbps * 1000;
3056 EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
3057 encoder_config->simulcast_layers[0].min_bitrate_bps =
3058 kMinBitrateKbps * 1000;
3059 encoder_config_ = encoder_config->Copy();
3060 }
3061
3062 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
3063 call_ = sender_call;
3064 }
3065
3066 void OnVideoStreamsCreated(
3067 VideoSendStream* send_stream,
3068 const std::vector<VideoReceiveStream*>& receive_streams) override {
3069 send_stream_ = send_stream;
3070 }
3071
3072 void PerformTest() override {
3073 ASSERT_TRUE(create_rate_allocator_event_.Wait(
3074 VideoSendStreamTest::kDefaultTimeoutMs))
3075 << "Timed out while waiting for rate allocator to be created.";
3076 ASSERT_TRUE(
3077 init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs))
3078 << "Timed out while waiting for encoder to be configured.";
3079 WaitForSetRates(kStartBitrateKbps);
3080 BitrateConstraints bitrate_config;
3081 bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
3082 bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
3083 SendTask(RTC_FROM_HERE, task_queue_, [this, &bitrate_config]() {
3084 call_->GetTransportControllerSend()->SetSdpBitrateParameters(
3085 bitrate_config);
3086 });
3087 // Encoder rate is capped by EncoderConfig max_bitrate_bps.
3088 WaitForSetRates(kMaxBitrateKbps);
3089 encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
3090 send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
3091 ASSERT_TRUE(create_rate_allocator_event_.Wait(
3092 VideoSendStreamTest::kDefaultTimeoutMs));
3093 EXPECT_EQ(2, num_rate_allocator_creations_)
3094 << "Rate allocator should have been recreated.";
3095
3096 WaitForSetRates(kLowerMaxBitrateKbps);
3097 EXPECT_EQ(1, num_encoder_initializations_);
3098
3099 encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
3100 send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
3101 ASSERT_TRUE(create_rate_allocator_event_.Wait(
3102 VideoSendStreamTest::kDefaultTimeoutMs));
3103 EXPECT_EQ(3, num_rate_allocator_creations_)
3104 << "Rate allocator should have been recreated.";
3105
3106 // Expected target bitrate is the start bitrate set in the call to
3107 // call_->GetTransportControllerSend()->SetSdpBitrateParameters.
3108 WaitForSetRates(kIncreasedStartBitrateKbps);
3109 EXPECT_EQ(1, num_encoder_initializations_);
3110 }
3111
3112 TaskQueueBase* const task_queue_;
3113 rtc::Event create_rate_allocator_event_;
3114 rtc::Event init_encode_event_;
3115 rtc::Event bitrate_changed_event_;
3116 Mutex mutex_;
3117 uint32_t target_bitrate_ RTC_GUARDED_BY(&mutex_);
3118
3119 int num_rate_allocator_creations_;
3120 int num_encoder_initializations_;
3121 webrtc::Call* call_;
3122 webrtc::VideoSendStream* send_stream_;
3123 test::VideoEncoderProxyFactory encoder_factory_;
3124 std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
3125 webrtc::VideoEncoderConfig encoder_config_;
3126 } test(task_queue());
3127
3128 RunBaseTest(&test);
3129 }
3130
3131 TEST_F(VideoSendStreamTest, ReportsSentResolution) {
3132 static const size_t kNumStreams = 3;
3133 // Unusual resolutions to make sure that they are the ones being reported.
3134 static const struct {
3135 int width;
3136 int height;
3137 } kEncodedResolution[kNumStreams] = {{241, 181}, {300, 121}, {121, 221}};
3138 class ScreencastTargetBitrateTest : public test::SendTest,
3139 public test::FakeEncoder {
3140 public:
3141 ScreencastTargetBitrateTest()
3142 : SendTest(kDefaultTimeoutMs),
3143 test::FakeEncoder(Clock::GetRealTimeClock()),
3144 send_stream_(nullptr),
3145 encoder_factory_(this) {}
3146
3147 private:
3148 int32_t Encode(const VideoFrame& input_image,
3149 const std::vector<VideoFrameType>* frame_types) override {
3150 CodecSpecificInfo specifics;
3151 specifics.codecType = kVideoCodecGeneric;
3152
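// Produce a small dummy payload and deliver it once per simulcast stream,
// tagging each copy with that stream's configured resolution and spatial
// index so that the per-SSRC stats can be checked in PerformTest().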
3153 uint8_t buffer[16] = {0};
3154 EncodedImage encoded(buffer, sizeof(buffer), sizeof(buffer));
3155 encoded.SetTimestamp(input_image.timestamp());
3156 encoded.capture_time_ms_ = input_image.render_time_ms();
3157
3158 for (size_t i = 0; i < kNumStreams; ++i) {
3159 encoded._frameType = (*frame_types)[i];
3160 encoded._encodedWidth = kEncodedResolution[i].width;
3161 encoded._encodedHeight = kEncodedResolution[i].height;
3162 encoded.SetSpatialIndex(i);
3163 EncodedImageCallback* callback;
3164 {
3165 MutexLock lock(&mutex_);
3166 callback = callback_;
3167 }
3168 RTC_DCHECK(callback);
3169 if (callback->OnEncodedImage(encoded, &specifics, nullptr).error !=
3170 EncodedImageCallback::Result::OK) {
3171 return -1;
3172 }
3173 }
3174
3175 observation_complete_.Set();
3176 return 0;
3177 }
3178 void ModifyVideoConfigs(
3179 VideoSendStream::Config* send_config,
3180 std::vector<VideoReceiveStream::Config>* receive_configs,
3181 VideoEncoderConfig* encoder_config) override {
3182 send_config->encoder_settings.encoder_factory = &encoder_factory_;
3183 EXPECT_EQ(kNumStreams, encoder_config->number_of_streams);
3184 }
3185
3186 size_t GetNumVideoStreams() const override { return kNumStreams; }
3187
3188 void PerformTest() override {
3189 EXPECT_TRUE(Wait())
3190 << "Timed out while waiting for the encoder to send one frame.";
3191 VideoSendStream::Stats stats = send_stream_->GetStats();
3192
3193 for (size_t i = 0; i < kNumStreams; ++i) {
3194 ASSERT_TRUE(stats.substreams.find(kVideoSendSsrcs[i]) !=
3195 stats.substreams.end())
3196 << "No stats for SSRC: " << kVideoSendSsrcs[i]
3197 << ", stats should exist as soon as frames have been encoded.";
3198 VideoSendStream::StreamStats ssrc_stats =
3199 stats.substreams[kVideoSendSsrcs[i]];
3200 EXPECT_EQ(kEncodedResolution[i].width, ssrc_stats.width);
3201 EXPECT_EQ(kEncodedResolution[i].height, ssrc_stats.height);
3202 }
3203 }
3204
3205 void OnVideoStreamsCreated(
3206 VideoSendStream* send_stream,
3207 const std::vector<VideoReceiveStream*>& receive_streams) override {
3208 send_stream_ = send_stream;
3209 }
3210
3211 VideoSendStream* send_stream_;
3212 test::VideoEncoderProxyFactory encoder_factory_;
3213 } test;
3214
3215 RunBaseTest(&test);
3216 }
3217
3218 #if defined(RTC_ENABLE_VP9)
3219 class Vp9HeaderObserver : public test::SendTest {
3220 public:
3221 Vp9HeaderObserver()
3222 : SendTest(VideoSendStreamTest::kLongTimeoutMs),
3223 encoder_factory_([]() { return VP9Encoder::Create(); }),
3224 vp9_settings_(VideoEncoder::GetDefaultVp9Settings()),
3225 packets_sent_(0),
3226 frames_sent_(0),
3227 expected_width_(0),
3228 expected_height_(0) {}
3229
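// Hook for subclasses to adjust vp9_settings_ and the encoder config;
// ModifyVideoConfigs() below calls it and then installs vp9_settings_ as the
// VP9 encoder-specific settings.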
3230 virtual void ModifyVideoConfigsHook(
3231 VideoSendStream::Config* send_config,
3232 std::vector<VideoReceiveStream::Config>* receive_configs,
3233 VideoEncoderConfig* encoder_config) {}
3234
3235 virtual void InspectHeader(const RTPVideoHeaderVP9& vp9) = 0;
3236
3237 private:
3238 const int kVp9PayloadType = test::CallTest::kVideoSendPayloadType;
3239
3240 void ModifyVideoConfigs(
3241 VideoSendStream::Config* send_config,
3242 std::vector<VideoReceiveStream::Config>* receive_configs,
3243 VideoEncoderConfig* encoder_config) override {
3244 send_config->encoder_settings.encoder_factory = &encoder_factory_;
3245 send_config->rtp.payload_name = "VP9";
3246 send_config->rtp.payload_type = kVp9PayloadType;
3247 ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
3248 encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
3249 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
3250 EXPECT_EQ(1u, encoder_config->number_of_streams);
3251 EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
3252 encoder_config->simulcast_layers[0].num_temporal_layers =
3253 vp9_settings_.numberOfTemporalLayers;
3254 encoder_config_ = encoder_config->Copy();
3255 }
3256
3257 void ModifyVideoCaptureStartResolution(int* width,
3258 int* height,
3259 int* frame_rate) override {
3260 expected_width_ = *width;
3261 expected_height_ = *height;
3262 }
3263
3264 void PerformTest() override {
3265 bool wait = Wait();
3266 {
3267 // In case of a timeout, OnSendRtp might still access frames_sent_.
3268 MutexLock lock(&mutex_);
3269 EXPECT_TRUE(wait) << "Test timed out waiting for VP9 packet, num frames "
3270 << frames_sent_;
3271 }
3272 }
3273
3274 Action OnSendRtp(const uint8_t* packet, size_t length) override {
3275 RtpPacket rtp_packet;
3276 EXPECT_TRUE(rtp_packet.Parse(packet, length));
3277
3278 EXPECT_EQ(kVp9PayloadType, rtp_packet.PayloadType());
3279 rtc::ArrayView<const uint8_t> rtp_payload = rtp_packet.payload();
3280
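// Only inspect packets that carry a payload and advance the RTP sequence
// number (skipping e.g. padding-only packets and retransmissions).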
3281 bool new_packet = packets_sent_ == 0 ||
3282 IsNewerSequenceNumber(rtp_packet.SequenceNumber(),
3283 last_packet_sequence_number_);
3284 if (!rtp_payload.empty() && new_packet) {
3285 RTPVideoHeader video_header;
3286 EXPECT_NE(
3287 VideoRtpDepacketizerVp9::ParseRtpPayload(rtp_payload, &video_header),
3288 0);
3289 EXPECT_EQ(VideoCodecType::kVideoCodecVP9, video_header.codec);
3290 // Verify common fields for all configurations.
3291 const auto& vp9_header =
3292 absl::get<RTPVideoHeaderVP9>(video_header.video_type_header);
3293 VerifyCommonHeader(vp9_header);
3294 CompareConsecutiveFrames(rtp_packet, video_header);
3295 // Verify configuration specific settings.
3296 InspectHeader(vp9_header);
3297
3298 ++packets_sent_;
3299 if (rtp_packet.Marker()) {
3300 MutexLock lock(&mutex_);
3301 ++frames_sent_;
3302 }
3303 last_packet_marker_ = rtp_packet.Marker();
3304 last_packet_sequence_number_ = rtp_packet.SequenceNumber();
3305 last_packet_timestamp_ = rtp_packet.Timestamp();
3306 last_vp9_ = vp9_header;
3307 }
3308 return SEND_PACKET;
3309 }
3310
3311 protected:
3312 bool ContinuousPictureId(const RTPVideoHeaderVP9& vp9) const {
3313 if (last_vp9_.picture_id > vp9.picture_id) {
3314 return vp9.picture_id == 0; // Wrap.
3315 } else {
3316 return vp9.picture_id == last_vp9_.picture_id + 1;
3317 }
3318 }
3319
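// Within a frame, a packet either continues the current spatial layer or
// starts the next one; a layer switch must coincide with the previous
// packet's end_of_frame and this packet's beginning_of_frame flags.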
3320 void VerifySpatialIdxWithinFrame(const RTPVideoHeaderVP9& vp9) const {
3321 bool new_layer = vp9.spatial_idx != last_vp9_.spatial_idx;
3322 EXPECT_EQ(new_layer, vp9.beginning_of_frame);
3323 EXPECT_EQ(new_layer, last_vp9_.end_of_frame);
3324 EXPECT_EQ(new_layer ? last_vp9_.spatial_idx + 1 : last_vp9_.spatial_idx,
3325 vp9.spatial_idx);
3326 }
3327
3328 void VerifyFixedTemporalLayerStructure(const RTPVideoHeaderVP9& vp9,
3329 uint8_t num_layers) const {
3330 switch (num_layers) {
3331 case 0:
3332 VerifyTemporalLayerStructure0(vp9);
3333 break;
3334 case 1:
3335 VerifyTemporalLayerStructure1(vp9);
3336 break;
3337 case 2:
3338 VerifyTemporalLayerStructure2(vp9);
3339 break;
3340 case 3:
3341 VerifyTemporalLayerStructure3(vp9);
3342 break;
3343 default:
3344 RTC_NOTREACHED();
3345 }
3346 }
3347
3348 void VerifyTemporalLayerStructure0(const RTPVideoHeaderVP9& vp9) const {
3349 EXPECT_EQ(kNoTl0PicIdx, vp9.tl0_pic_idx);
3350 EXPECT_EQ(kNoTemporalIdx, vp9.temporal_idx); // no tid
3351 EXPECT_FALSE(vp9.temporal_up_switch);
3352 }
3353
3354 void VerifyTemporalLayerStructure1(const RTPVideoHeaderVP9& vp9) const {
3355 EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
3356 EXPECT_EQ(0, vp9.temporal_idx); // 0,0,0,...
3357 EXPECT_FALSE(vp9.temporal_up_switch);
3358 }
3359
3360 void VerifyTemporalLayerStructure2(const RTPVideoHeaderVP9& vp9) const {
3361 EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
3362 EXPECT_GE(vp9.temporal_idx, 0); // 0,1,0,1,... (tid reset on I-frames).
3363 EXPECT_LE(vp9.temporal_idx, 1);
3364 EXPECT_EQ(vp9.temporal_idx > 0, vp9.temporal_up_switch);
3365 if (IsNewPictureId(vp9)) {
3366 uint8_t expected_tid =
3367 (!vp9.inter_pic_predicted || last_vp9_.temporal_idx == 1) ? 0 : 1;
3368 EXPECT_EQ(expected_tid, vp9.temporal_idx);
3369 }
3370 }
3371
3372 void VerifyTemporalLayerStructure3(const RTPVideoHeaderVP9& vp9) const {
3373 EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
3374 EXPECT_GE(vp9.temporal_idx, 0); // 0,2,1,2,... (tid reset on I-frames).
3375 EXPECT_LE(vp9.temporal_idx, 2);
3376 if (IsNewPictureId(vp9) && vp9.inter_pic_predicted) {
3377 EXPECT_NE(vp9.temporal_idx, last_vp9_.temporal_idx);
3378 switch (vp9.temporal_idx) {
3379 case 0:
3380 EXPECT_EQ(2, last_vp9_.temporal_idx);
3381 EXPECT_FALSE(vp9.temporal_up_switch);
3382 break;
3383 case 1:
3384 EXPECT_EQ(2, last_vp9_.temporal_idx);
3385 EXPECT_TRUE(vp9.temporal_up_switch);
3386 break;
3387 case 2:
3388 EXPECT_LT(last_vp9_.temporal_idx, 2);
3389 EXPECT_TRUE(vp9.temporal_up_switch);
3390 break;
3391 }
3392 }
3393 }
3394
3395 void VerifyTl0Idx(const RTPVideoHeaderVP9& vp9) const {
3396 if (vp9.tl0_pic_idx == kNoTl0PicIdx)
3397 return;
3398
3399 uint8_t expected_tl0_idx = last_vp9_.tl0_pic_idx;
3400 if (vp9.temporal_idx == 0)
3401 ++expected_tl0_idx;
3402 EXPECT_EQ(expected_tl0_idx, vp9.tl0_pic_idx);
3403 }
3404
3405 bool IsNewPictureId(const RTPVideoHeaderVP9& vp9) const {
3406 return frames_sent_ > 0 && (vp9.picture_id != last_vp9_.picture_id);
3407 }
3408
3409 // Flexible mode (F=1): Non-flexible mode (F=0):
3410 //
3411 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3412 // |I|P|L|F|B|E|V|-| |I|P|L|F|B|E|V|-|
3413 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3414 // I: |M| PICTURE ID | I: |M| PICTURE ID |
3415 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3416 // M: | EXTENDED PID | M: | EXTENDED PID |
3417 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3418 // L: | T |U| S |D| L: | T |U| S |D|
3419 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3420 // P,F: | P_DIFF |X|N| | TL0PICIDX |
3421 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3422 // X: |EXTENDED P_DIFF| V: | SS .. |
3423 // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
3424 // V: | SS .. |
3425 // +-+-+-+-+-+-+-+-+
3426 void VerifyCommonHeader(const RTPVideoHeaderVP9& vp9) const {
3427 EXPECT_EQ(kMaxTwoBytePictureId, vp9.max_picture_id); // M:1
3428 EXPECT_NE(kNoPictureId, vp9.picture_id); // I:1
3429 EXPECT_EQ(vp9_settings_.flexibleMode, vp9.flexible_mode); // F
3430
3431 if (vp9_settings_.numberOfSpatialLayers > 1) {
3432 EXPECT_LT(vp9.spatial_idx, vp9_settings_.numberOfSpatialLayers);
3433 } else if (vp9_settings_.numberOfTemporalLayers > 1) {
3434 EXPECT_EQ(vp9.spatial_idx, 0);
3435 } else {
3436 EXPECT_EQ(vp9.spatial_idx, kNoSpatialIdx);
3437 }
3438
3439 if (vp9_settings_.numberOfTemporalLayers > 1) {
3440 EXPECT_LT(vp9.temporal_idx, vp9_settings_.numberOfTemporalLayers);
3441 } else if (vp9_settings_.numberOfSpatialLayers > 1) {
3442 EXPECT_EQ(vp9.temporal_idx, 0);
3443 } else {
3444 EXPECT_EQ(vp9.temporal_idx, kNoTemporalIdx);
3445 }
3446
3447 if (vp9.ss_data_available) // V
3448 VerifySsData(vp9);
3449
3450 if (frames_sent_ == 0)
3451 EXPECT_FALSE(vp9.inter_pic_predicted); // P
3452
3453 if (!vp9.inter_pic_predicted) {
3454 EXPECT_TRUE(vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx);
3455 EXPECT_FALSE(vp9.temporal_up_switch);
3456 }
3457 }
3458
3459 // Scalability structure (SS).
3460 //
3461 // +-+-+-+-+-+-+-+-+
3462 // V: | N_S |Y|G|-|-|-|
3463 // +-+-+-+-+-+-+-+-+
3464 // Y: | WIDTH | N_S + 1 times
3465 // +-+-+-+-+-+-+-+-+
3466 // | HEIGHT |
3467 // +-+-+-+-+-+-+-+-+
3468 // G: | N_G |
3469 // +-+-+-+-+-+-+-+-+
3470 // N_G: | T |U| R |-|-| N_G times
3471 // +-+-+-+-+-+-+-+-+
3472 // | P_DIFF | R times
3473 // +-+-+-+-+-+-+-+-+
3474 void VerifySsData(const RTPVideoHeaderVP9& vp9) const {
3475 EXPECT_TRUE(vp9.ss_data_available); // V
3476 EXPECT_EQ(vp9_settings_.numberOfSpatialLayers, // N_S + 1
3477 vp9.num_spatial_layers);
3478 EXPECT_TRUE(vp9.spatial_layer_resolution_present); // Y:1
3479 int expected_width = expected_width_;
3480 int expected_height = expected_height_;
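// Resolutions are listed lowest layer first; the top layer matches the
// capture resolution and each lower layer is expected to be downscaled by a
// factor of two in both dimensions.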
3481 for (int i = static_cast<int>(vp9.num_spatial_layers) - 1; i >= 0; --i) {
3482 EXPECT_EQ(expected_width, vp9.width[i]); // WIDTH
3483 EXPECT_EQ(expected_height, vp9.height[i]); // HEIGHT
3484 expected_width /= 2;
3485 expected_height /= 2;
3486 }
3487 }
3488
3489 void CompareConsecutiveFrames(const RtpPacket& rtp_packet,
3490 const RTPVideoHeader& video) const {
3491 const auto& vp9_header =
3492 absl::get<RTPVideoHeaderVP9>(video.video_type_header);
3493
3494 bool new_frame =
3495 packets_sent_ == 0 ||
3496 IsNewerTimestamp(rtp_packet.Timestamp(), last_packet_timestamp_);
3497 EXPECT_EQ(new_frame, video.is_first_packet_in_frame);
3498 if (!new_frame) {
3499 EXPECT_FALSE(last_packet_marker_);
3500 EXPECT_EQ(last_packet_timestamp_, rtp_packet.Timestamp());
3501 EXPECT_EQ(last_vp9_.picture_id, vp9_header.picture_id);
3502 EXPECT_EQ(last_vp9_.temporal_idx, vp9_header.temporal_idx);
3503 EXPECT_EQ(last_vp9_.tl0_pic_idx, vp9_header.tl0_pic_idx);
3504 VerifySpatialIdxWithinFrame(vp9_header);
3505 return;
3506 }
3507 // New frame.
3508 EXPECT_TRUE(vp9_header.beginning_of_frame);
3509
3510 // Compare with last packet in previous frame.
3511 if (frames_sent_ == 0)
3512 return;
3513 EXPECT_TRUE(last_vp9_.end_of_frame);
3514 EXPECT_TRUE(last_packet_marker_);
3515 EXPECT_TRUE(ContinuousPictureId(vp9_header));
3516 VerifyTl0Idx(vp9_header);
3517 }
3518
3519 test::FunctionVideoEncoderFactory encoder_factory_;
3520 VideoCodecVP9 vp9_settings_;
3521 webrtc::VideoEncoderConfig encoder_config_;
3522 bool last_packet_marker_ = false;
3523 uint16_t last_packet_sequence_number_ = 0;
3524 uint32_t last_packet_timestamp_ = 0;
3525 RTPVideoHeaderVP9 last_vp9_;
3526 size_t packets_sent_;
3527 Mutex mutex_;
3528 size_t frames_sent_;
3529 int expected_width_;
3530 int expected_height_;
3531 };
3532
3533 TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl1SLayers) {
3534 const uint8_t kNumTemporalLayers = 1;
3535 const uint8_t kNumSpatialLayers = 1;
3536 TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
3537 }
3538
3539 TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl1SLayers) {
3540 const uint8_t kNumTemporalLayers = 2;
3541 const uint8_t kNumSpatialLayers = 1;
3542 TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
3543 }
3544
3545 TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl1SLayers) {
3546 const uint8_t kNumTemporalLayers = 3;
3547 const uint8_t kNumSpatialLayers = 1;
3548 TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
3549 }
3550
3551 TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl2SLayers) {
3552 const uint8_t kNumTemporalLayers = 1;
3553 const uint8_t kNumSpatialLayers = 2;
3554 TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
3555 }
3556
3557 TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl2SLayers) {
3558 const uint8_t kNumTemporalLayers = 2;
3559 const uint8_t kNumSpatialLayers = 2;
3560 TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
3561 }
3562
3563 TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl2SLayers) {
3564 const uint8_t kNumTemporalLayers = 3;
3565 const uint8_t kNumSpatialLayers = 2;
3566 TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
3567 }
3568
3569 void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers,
3570 uint8_t num_spatial_layers) {
3571 static const size_t kNumFramesToSend = 100;
3572 // Set to less than kNumFramesToSend and coprime to the lengths of the
3573 // temporal layer structures, to verify that the temporal id is reset on key frames.
3574 static const int kKeyFrameInterval = 31;
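// (31 is prime, so it is coprime with the temporal patterns verified below,
// which have periods of 1, 2 and 4 frames.)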
3575
3576 static const int kWidth = kMinVp9SpatialLayerWidth;
3577 static const int kHeight = kMinVp9SpatialLayerHeight;
3578 static const float kGoodBitsPerPixel = 0.1f;
3579 class NonFlexibleMode : public Vp9HeaderObserver {
3580 public:
3581 NonFlexibleMode(uint8_t num_temporal_layers, uint8_t num_spatial_layers)
3582 : num_temporal_layers_(num_temporal_layers),
3583 num_spatial_layers_(num_spatial_layers),
3584 l_field_(num_temporal_layers > 1 || num_spatial_layers > 1) {}
3585
3586 void ModifyVideoConfigsHook(
3587 VideoSendStream::Config* send_config,
3588 std::vector<VideoReceiveStream::Config>* receive_configs,
3589 VideoEncoderConfig* encoder_config) override {
3590 encoder_config->codec_type = kVideoCodecVP9;
3591 int bitrate_bps = 0;
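// Budget roughly kGoodBitsPerPixel for the base layer at 30 fps; each higher
// spatial layer has 4x the pixels but half the bits-per-pixel, so its
// contribution doubles per layer.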
3592 for (int sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) {
3593 const int width = kWidth << sl_idx;
3594 const int height = kHeight << sl_idx;
3595 const float bpp = kGoodBitsPerPixel / (1 << sl_idx);
3596 bitrate_bps += static_cast<int>(width * height * bpp * 30);
3597 }
3598 encoder_config->max_bitrate_bps = bitrate_bps * 2;
3599
3600 vp9_settings_.flexibleMode = false;
3601 vp9_settings_.frameDroppingOn = false;
3602 vp9_settings_.automaticResizeOn = false;
3603 vp9_settings_.keyFrameInterval = kKeyFrameInterval;
3604 vp9_settings_.numberOfTemporalLayers = num_temporal_layers_;
3605 vp9_settings_.numberOfSpatialLayers = num_spatial_layers_;
3606 }
3607
3608 void ModifyVideoCaptureStartResolution(int* width,
3609 int* height,
3610 int* frame_rate) override {
3611 expected_width_ = kWidth << (num_spatial_layers_ - 1);
3612 expected_height_ = kHeight << (num_spatial_layers_ - 1);
3613 *width = expected_width_;
3614 *height = expected_height_;
3615 }
3616
3617 void InspectHeader(const RTPVideoHeaderVP9& vp9) override {
3618 bool ss_data_expected =
3619 !vp9.inter_pic_predicted && vp9.beginning_of_frame &&
3620 (vp9.spatial_idx == 0 || vp9.spatial_idx == kNoSpatialIdx);
3621 EXPECT_EQ(ss_data_expected, vp9.ss_data_available);
3622 if (num_spatial_layers_ > 1) {
3623 EXPECT_EQ(vp9.spatial_idx > 0, vp9.inter_layer_predicted);
3624 } else {
3625 EXPECT_FALSE(vp9.inter_layer_predicted);
3626 }
3627
3628 EXPECT_EQ(!vp9.inter_pic_predicted,
3629 frames_sent_ % kKeyFrameInterval == 0);
3630
3631 if (IsNewPictureId(vp9)) {
3632 if (num_temporal_layers_ == 1 && num_spatial_layers_ == 1) {
3633 EXPECT_EQ(kNoSpatialIdx, vp9.spatial_idx);
3634 } else {
3635 EXPECT_EQ(0, vp9.spatial_idx);
3636 }
3637 if (num_spatial_layers_ > 1)
3638 EXPECT_EQ(num_spatial_layers_ - 1, last_vp9_.spatial_idx);
3639 }
3640
3641 VerifyFixedTemporalLayerStructure(vp9,
3642 l_field_ ? num_temporal_layers_ : 0);
3643
3644 if (frames_sent_ > kNumFramesToSend)
3645 observation_complete_.Set();
3646 }
3647 const uint8_t num_temporal_layers_;
3648 const uint8_t num_spatial_layers_;
3649 const bool l_field_;
3650
3651 private:
3652 void ModifySenderBitrateConfig(
3653 BitrateConstraints* bitrate_config) override {
3654 const int kMinBitrateBps = 300000;
3655 bitrate_config->min_bitrate_bps = kMinBitrateBps;
3656 }
3657 } test(num_temporal_layers, num_spatial_layers);
3658
3659 RunBaseTest(&test);
3660 }
3661
3662 TEST_F(VideoSendStreamTest, Vp9NonFlexModeSmallResolution) {
3663 static const size_t kNumFramesToSend = 50;
3664 static const int kWidth = 4;
3665 static const int kHeight = 4;
3666 class NonFlexibleModeResolution : public Vp9HeaderObserver {
3667 void ModifyVideoConfigsHook(
3668 VideoSendStream::Config* send_config,
3669 std::vector<VideoReceiveStream::Config>* receive_configs,
3670 VideoEncoderConfig* encoder_config) override {
3671 encoder_config->codec_type = kVideoCodecVP9;
3672 vp9_settings_.flexibleMode = false;
3673 vp9_settings_.numberOfTemporalLayers = 1;
3674 vp9_settings_.numberOfSpatialLayers = 1;
3675
3676 EXPECT_EQ(1u, encoder_config->number_of_streams);
3677 }
3678
3679 void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override {
3680 if (frames_sent_ > kNumFramesToSend)
3681 observation_complete_.Set();
3682 }
3683
3684 void ModifyVideoCaptureStartResolution(int* width,
3685 int* height,
3686 int* frame_rate) override {
3687 expected_width_ = kWidth;
3688 expected_height_ = kHeight;
3689 *width = kWidth;
3690 *height = kHeight;
3691 }
3692 } test;
3693
3694 RunBaseTest(&test);
3695 }
3696
3697 #if defined(WEBRTC_ANDROID)
3698 // Crashes on Android; bugs.webrtc.org/7401
3699 #define MAYBE_Vp9FlexModeRefCount DISABLED_Vp9FlexModeRefCount
3700 #else
3701 // TODO(webrtc:9270): Support of flexible mode is temporarily disabled. Enable
3702 // the test after webrtc:9270 is implemented.
3703 #define MAYBE_Vp9FlexModeRefCount DISABLED_Vp9FlexModeRefCount
3704 // #define MAYBE_Vp9FlexModeRefCount Vp9FlexModeRefCount
3705 #endif
3706 TEST_F(VideoSendStreamTest, MAYBE_Vp9FlexModeRefCount) {
3707 class FlexibleMode : public Vp9HeaderObserver {
3708 void ModifyVideoConfigsHook(
3709 VideoSendStream::Config* send_config,
3710 std::vector<VideoReceiveStream::Config>* receive_configs,
3711 VideoEncoderConfig* encoder_config) override {
3712 encoder_config->codec_type = kVideoCodecVP9;
3713 encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
3714 vp9_settings_.flexibleMode = true;
3715 vp9_settings_.numberOfTemporalLayers = 1;
3716 vp9_settings_.numberOfSpatialLayers = 2;
3717 }
3718
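// In flexible mode the TL0PICIDX field is absent and references are signaled
// per-frame via P_DIFF (see the payload descriptor diagram above), so an
// inter-predicted frame must report at least one reference picture.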
3719 void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override {
3720 EXPECT_TRUE(vp9_header.flexible_mode);
3721 EXPECT_EQ(kNoTl0PicIdx, vp9_header.tl0_pic_idx);
3722 if (vp9_header.inter_pic_predicted) {
3723 EXPECT_GT(vp9_header.num_ref_pics, 0u);
3724 observation_complete_.Set();
3725 }
3726 }
3727 } test;
3728
3729 RunBaseTest(&test);
3730 }
3731 #endif // defined(RTC_ENABLE_VP9)
3732
3733 void VideoSendStreamTest::TestRequestSourceRotateVideo(
3734 bool support_orientation_ext) {
3735 CreateSenderCall();
3736
3737 test::NullTransport transport;
3738 CreateSendConfig(1, 0, 0, &transport);
3739 GetVideoSendConfig()->rtp.extensions.clear();
3740 if (support_orientation_ext) {
3741 GetVideoSendConfig()->rtp.extensions.push_back(
3742 RtpExtension(RtpExtension::kVideoRotationUri, 1));
3743 }
3744
3745 CreateVideoStreams();
3746 test::FrameForwarder forwarder;
3747 GetVideoSendStream()->SetSource(&forwarder,
3748 DegradationPreference::MAINTAIN_FRAMERATE);
3749
3750 EXPECT_TRUE(forwarder.sink_wants().rotation_applied !=
3751 support_orientation_ext);
3752
3753 DestroyStreams();
3754 }
3755
3756 TEST_F(VideoSendStreamTest,
3757 RequestSourceRotateIfVideoOrientationExtensionNotSupported) {
3758 TestRequestSourceRotateVideo(false);
3759 }
3760
3761 TEST_F(VideoSendStreamTest,
3762 DoNotRequestsRotationIfVideoOrientationExtensionSupported) {
3763 TestRequestSourceRotateVideo(true);
3764 }
3765
3766 TEST_F(VideoSendStreamTest, EncoderConfigMaxFramerateReportedToSource) {
3767 static const int kMaxFps = 22;
3768 class FpsObserver : public test::SendTest,
3769 public test::FrameGeneratorCapturer::SinkWantsObserver {
3770 public:
3771 FpsObserver() : SendTest(kDefaultTimeoutMs) {}
3772
3773 void OnFrameGeneratorCapturerCreated(
3774 test::FrameGeneratorCapturer* frame_generator_capturer) override {
3775 frame_generator_capturer->SetSinkWantsObserver(this);
3776 }
3777
3778 void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
3779 const rtc::VideoSinkWants& wants) override {
3780 if (wants.max_framerate_fps == kMaxFps)
3781 observation_complete_.Set();
3782 }
3783
3784 void ModifyVideoConfigs(
3785 VideoSendStream::Config* send_config,
3786 std::vector<VideoReceiveStream::Config>* receive_configs,
3787 VideoEncoderConfig* encoder_config) override {
3788 encoder_config->simulcast_layers[0].max_framerate = kMaxFps;
3789 }
3790
3791 void PerformTest() override {
3792 EXPECT_TRUE(Wait()) << "Timed out while waiting for fps to be reported.";
3793 }
3794 } test;
3795
3796 RunBaseTest(&test);
3797 }
3798
3799 // This test verifies that overhead is removed from the bandwidth estimate by
3800 // testing that the maximum possible target payload rate is smaller than the
3801 // maximum bandwidth estimate by the overhead rate.
3802 TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) {
3803 test::ScopedFieldTrials override_field_trials(
3804 "WebRTC-SendSideBwe-WithOverhead/Enabled/");
3805 class RemoveOverheadFromBandwidthTest : public test::EndToEndTest,
3806 public test::FakeEncoder {
3807 public:
3808 explicit RemoveOverheadFromBandwidthTest(TaskQueueBase* task_queue)
3809 : EndToEndTest(test::CallTest::kDefaultTimeoutMs),
3810 FakeEncoder(Clock::GetRealTimeClock()),
3811 task_queue_(task_queue),
3812 encoder_factory_(this),
3813 call_(nullptr),
3814 max_bitrate_bps_(0),
3815 first_packet_sent_(false) {}
3816
3817 void SetRates(const RateControlParameters& parameters) override {
3818 MutexLock lock(&mutex_);
3819 // Wait for the first sent packet so that the VideoSendStream knows the
3820 // RTP overhead.
3821 if (first_packet_sent_) {
3822 max_bitrate_bps_ = parameters.bitrate.get_sum_bps();
3823 bitrate_changed_event_.Set();
3824 }
3825 return FakeEncoder::SetRates(parameters);
3826 }
3827
3828 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
3829 call_ = sender_call;
3830 }
3831
3832 void ModifyVideoConfigs(
3833 VideoSendStream::Config* send_config,
3834 std::vector<VideoReceiveStream::Config>* receive_configs,
3835 VideoEncoderConfig* encoder_config) override {
3836 send_config->rtp.max_packet_size = 1200;
3837 send_config->encoder_settings.encoder_factory = &encoder_factory_;
3838 EXPECT_FALSE(send_config->rtp.extensions.empty());
3839 }
3840
3841 Action OnSendRtp(const uint8_t* packet, size_t length) override {
3842 MutexLock lock(&mutex_);
3843 first_packet_sent_ = true;
3844 return SEND_PACKET;
3845 }
3846
3847 void PerformTest() override {
3848 BitrateConstraints bitrate_config;
3849 constexpr int kStartBitrateBps = 60000;
3850 constexpr int kMaxBitrateBps = 60000;
3851 constexpr int kMinBitrateBps = 10000;
3852 bitrate_config.start_bitrate_bps = kStartBitrateBps;
3853 bitrate_config.max_bitrate_bps = kMaxBitrateBps;
3854 bitrate_config.min_bitrate_bps = kMinBitrateBps;
3855 SendTask(RTC_FROM_HERE, task_queue_, [this, &bitrate_config]() {
3856 call_->GetTransportControllerSend()->SetSdpBitrateParameters(
3857 bitrate_config);
3858 call_->GetTransportControllerSend()->OnTransportOverheadChanged(40);
3859 });
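// The 40 bytes set above model the per-packet transport overhead (e.g.
// IP/UDP headers) that the estimator should subtract.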
3860
3861 // At a bitrate of 60 kbps with a packet size of 1200 B and an overhead of
3862 // 40 B per packet, video produces 2240 bps of overhead, so the encoder
3863 // bandwidth should be set to 57760 bps.
3864 EXPECT_TRUE(
3865 bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
3866 {
3867 MutexLock lock(&mutex_);
3868 EXPECT_LE(max_bitrate_bps_, 57760u);
3869 }
3870 }
3871
3872 private:
3873 TaskQueueBase* const task_queue_;
3874 test::VideoEncoderProxyFactory encoder_factory_;
3875 Call* call_;
3876 Mutex mutex_;
3877 uint32_t max_bitrate_bps_ RTC_GUARDED_BY(&mutex_);
3878 bool first_packet_sent_ RTC_GUARDED_BY(&mutex_);
3879 rtc::Event bitrate_changed_event_;
3880 } test(task_queue());
3881 RunBaseTest(&test);
3882 }
3883
3884 class PacingFactorObserver : public test::SendTest {
3885 public:
3886 PacingFactorObserver(bool configure_send_side,
3887 absl::optional<float> expected_pacing_factor)
3888 : test::SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
3889 configure_send_side_(configure_send_side),
3890 expected_pacing_factor_(expected_pacing_factor) {}
3891
3892 void ModifyVideoConfigs(
3893 VideoSendStream::Config* send_config,
3894 std::vector<VideoReceiveStream::Config>* receive_configs,
3895 VideoEncoderConfig* encoder_config) override {
3896 // Check if send-side bwe extension is already present, and remove it if
3897 // it is not desired.
3898 bool has_send_side = false;
3899 for (auto it = send_config->rtp.extensions.begin();
3900 it != send_config->rtp.extensions.end(); ++it) {
3901 if (it->uri == RtpExtension::kTransportSequenceNumberUri) {
3902 if (configure_send_side_) {
3903 has_send_side = true;
3904 } else {
3905 send_config->rtp.extensions.erase(it);
3906 }
3907 break;
3908 }
3909 }
3910
3911 if (configure_send_side_ && !has_send_side) {
3912 rtc::UniqueNumberGenerator<int> unique_id_generator;
3913 unique_id_generator.AddKnownId(0); // First valid RTP extension ID is 1.
3914 for (const RtpExtension& extension : send_config->rtp.extensions) {
3915 unique_id_generator.AddKnownId(extension.id);
3916 }
3917 // Want send side, not present by default, so add it.
3918 send_config->rtp.extensions.emplace_back(
3919 RtpExtension::kTransportSequenceNumberUri, unique_id_generator());
3920 }
3921
3922 // ALR only enabled for screenshare.
3923 encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
3924 }
3925
3926 void OnVideoStreamsCreated(
3927 VideoSendStream* send_stream,
3928 const std::vector<VideoReceiveStream*>& receive_streams) override {
3929 auto internal_send_peer = test::VideoSendStreamPeer(send_stream);
3930 // Video streams created, check that pacing factor is correctly configured.
3931 EXPECT_EQ(expected_pacing_factor_,
3932 internal_send_peer.GetPacingFactorOverride());
3933 observation_complete_.Set();
3934 }
3935
3936 void PerformTest() override {
3937 EXPECT_TRUE(Wait()) << "Timed out while waiting for stream creation.";
3938 }
3939
3940 private:
3941 const bool configure_send_side_;
3942 const absl::optional<float> expected_pacing_factor_;
3943 };
3944
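// The experiment group string below is parsed by AlrExperimentSettings; the
// first field is the pacing factor (1.0, matching
// kAlrProbingExperimentPaceMultiplier), and the remaining fields are assumed
// to configure the ALR detector (paced queue time, bandwidth usage and budget
// thresholds, and a group id).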
3945 std::string GetAlrProbingExperimentString() {
3946 return std::string(
3947 AlrExperimentSettings::kScreenshareProbingBweExperimentName) +
3948 "/1.0,2875,80,40,-60,3/";
3949 }
3950 const float kAlrProbingExperimentPaceMultiplier = 1.0f;
3951
3952 TEST_F(VideoSendStreamTest, AlrConfiguredWhenSendSideOn) {
3953 test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString());
3954 // Send-side bwe on; expect the pacing factor from the ALR probing experiment
3955 // configured above (kAlrProbingExperimentPaceMultiplier).
3955 PacingFactorObserver test_with_send_side(true,
3956 kAlrProbingExperimentPaceMultiplier);
3957 RunBaseTest(&test_with_send_side);
3958 }
3959
3960 TEST_F(VideoSendStreamTest, AlrNotConfiguredWhenSendSideOff) {
3961 test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString());
3962 // Send-side bwe off; the pacing factor should not be overridden.
3963 PacingFactorObserver test_without_send_side(false, absl::nullopt);
3964 RunBaseTest(&test_without_send_side);
3965 }
3966
3967 // The test class takes a pointer to a callable as argument, used to reset the
3968 // send stream and call OnVideoStreamsCreated. This is necessary since you
3969 // cannot change the content type of a VideoSendStream; you need to recreate
3970 // it. Stopping and recreating the stream can only be done on the main thread
3971 // and in the context of VideoSendStreamTest (not BaseTest). The test switches
3972 // from realtime to screenshare and back.
3973 template <typename T>
3974 class ContentSwitchTest : public test::SendTest {
3975 public:
3976 enum class StreamState {
3977 kBeforeSwitch = 0,
3978 kInScreenshare = 1,
3979 kAfterSwitchBack = 2,
3980 };
3981 static const uint32_t kMinPacketsToSend = 50;
3982
3983 explicit ContentSwitchTest(T* stream_reset_fun)
3984 : SendTest(test::CallTest::kDefaultTimeoutMs),
3985 call_(nullptr),
3986 state_(StreamState::kBeforeSwitch),
3987 send_stream_(nullptr),
3988 send_stream_config_(nullptr),
3989 packets_sent_(0),
3990 stream_resetter_(stream_reset_fun) {
3991 RTC_DCHECK(stream_resetter_);
3992 }
3993
3994 void OnVideoStreamsCreated(
3995 VideoSendStream* send_stream,
3996 const std::vector<VideoReceiveStream*>& receive_streams) override {
3997 MutexLock lock(&mutex_);
3998 send_stream_ = send_stream;
3999 }
4000
4001 void ModifyVideoConfigs(
4002 VideoSendStream::Config* send_config,
4003 std::vector<VideoReceiveStream::Config>* receive_configs,
4004 VideoEncoderConfig* encoder_config) override {
4005 RTC_DCHECK_EQ(1, encoder_config->number_of_streams);
4006 encoder_config->min_transmit_bitrate_bps = 0;
4007 encoder_config->content_type =
4008 VideoEncoderConfig::ContentType::kRealtimeVideo;
4009 send_stream_config_ = send_config->Copy();
4010 encoder_config_ = encoder_config->Copy();
4011 }
4012
4013 void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
4014 call_ = sender_call;
4015 }
4016
4017 Action OnSendRtp(const uint8_t* packet, size_t length) override {
4018 MutexLock lock(&mutex_);
4019
4020 auto internal_send_peer = test::VideoSendStreamPeer(send_stream_);
4021 float pacing_factor =
4022 internal_send_peer.GetPacingFactorOverride().value_or(0.0f);
4023 float expected_pacing_factor = PacedSender::kDefaultPaceMultiplier;
4024 if (send_stream_->GetStats().content_type ==
4025 webrtc::VideoContentType::SCREENSHARE) {
4026 expected_pacing_factor = 1.0f; // Currently used pacing factor in ALR.
4027 }
4028
4029 EXPECT_NEAR(expected_pacing_factor, pacing_factor, 1e-6);
4030
4031 // Wait until at least kMinPacketsToSend packets have been sent, so that
4032 // some frames have been encoded.
4033 if (++packets_sent_ < kMinPacketsToSend)
4034 return SEND_PACKET;
4035
4036 if (state_ != StreamState::kAfterSwitchBack) {
4037 // We've sent kMinPacketsToSend packets, so switch the content type and
4038 // move to the next state.
4039 // Note that we need to recreate the stream when changing the content type.
4040 packets_sent_ = 0;
4041 if (encoder_config_.content_type ==
4042 VideoEncoderConfig::ContentType::kRealtimeVideo) {
4043 encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
4044 } else {
4045 encoder_config_.content_type =
4046 VideoEncoderConfig::ContentType::kRealtimeVideo;
4047 }
4048 switch (state_) {
4049 case StreamState::kBeforeSwitch:
4050 state_ = StreamState::kInScreenshare;
4051 break;
4052 case StreamState::kInScreenshare:
4053 state_ = StreamState::kAfterSwitchBack;
4054 break;
4055 case StreamState::kAfterSwitchBack:
4056 RTC_NOTREACHED();
4057 break;
4058 }
4059 content_switch_event_.Set();
4060 return SEND_PACKET;
4061 }
4062
4063 observation_complete_.Set();
4064 return SEND_PACKET;
4065 }
4066
4067 void PerformTest() override {
4068 while (GetStreamState() != StreamState::kAfterSwitchBack) {
4069 ASSERT_TRUE(
4070 content_switch_event_.Wait(test::CallTest::kDefaultTimeoutMs));
4071 (*stream_resetter_)(send_stream_config_, encoder_config_, this);
4072 }
4073
4074 ASSERT_TRUE(Wait())
4075 << "Timed out waiting for a frame sent after switch back";
4076 }
4077
4078 private:
4079 StreamState GetStreamState() {
4080 MutexLock lock(&mutex_);
4081 return state_;
4082 }
4083
4084 Mutex mutex_;
4085 rtc::Event content_switch_event_;
4086 Call* call_;
4087 StreamState state_ RTC_GUARDED_BY(mutex_);
4088 VideoSendStream* send_stream_ RTC_GUARDED_BY(mutex_);
4089 VideoSendStream::Config send_stream_config_;
4090 VideoEncoderConfig encoder_config_;
4091 uint32_t packets_sent_ RTC_GUARDED_BY(mutex_);
4092 T* stream_resetter_;
4093 };
4094
4095 TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) {
4096 auto reset_fun = [this](const VideoSendStream::Config& send_stream_config,
4097 const VideoEncoderConfig& encoder_config,
4098 test::BaseTest* test) {
4099 SendTask(RTC_FROM_HERE, task_queue(),
4100 [this, &send_stream_config, &encoder_config, &test]() {
4101 Stop();
4102 DestroyVideoSendStreams();
4103 SetVideoSendConfig(send_stream_config);
4104 SetVideoEncoderConfig(encoder_config);
4105 CreateVideoSendStreams();
4106 SetVideoDegradation(DegradationPreference::MAINTAIN_RESOLUTION);
4107 test->OnVideoStreamsCreated(GetVideoSendStream(),
4108 video_receive_streams_);
4109 Start();
4110 });
4111 };
4112 ContentSwitchTest<decltype(reset_fun)> test(&reset_fun);
4113 RunBaseTest(&test);
4114 }
4115
4116 } // namespace webrtc
4117