/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "video/video_stream_encoder.h"

#include <algorithm>
#include <limits>
#include <memory>
#include <tuple>
#include <utility>

#include "absl/memory/memory.h"
#include "api/field_trials_view.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/mock_fec_controller_override.h"
#include "api/test/mock_video_encoder.h"
#include "api/test/mock_video_encoder_factory.h"
#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/i420_buffer.h"
#include "api/video/nv12_buffer.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers.h"
#include "api/video_codecs/vp8_temporal_layers_factory.h"
#include "call/adaptation/test/fake_adaptation_constraint.h"
#include "call/adaptation/test/fake_resource.h"
#include "common_video/h264/h264_common.h"
#include "common_video/include/video_frame_buffer.h"
#include "media/base/video_adapter.h"
#include "media/engine/webrtc_video_engine.h"
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "modules/video_coding/codecs/vp9/svc_config.h"
#include "modules/video_coding/utility/quality_scaler.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/utility/vp8_constants.h"
#include "rtc_base/event.h"
#include "rtc_base/experiments/encoder_info_settings.h"
#include "rtc_base/gunit.h"
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/metrics.h"
#include "test/encoder_settings.h"
#include "test/fake_encoder.h"
#include "test/frame_forwarder.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mappable_native_buffer.h"
#include "test/scoped_key_value_config.h"
#include "test/time_controller/simulated_time_controller.h"
#include "test/video_encoder_nullable_proxy_factory.h"
#include "test/video_encoder_proxy_factory.h"
#include "video/config/encoder_stream_factory.h"
#include "video/frame_cadence_adapter.h"
#include "video/send_statistics_proxy.h"

namespace webrtc {

using ::testing::_;
using ::testing::AllOf;
using ::testing::Eq;
using ::testing::Field;
using ::testing::Ge;
using ::testing::Gt;
using ::testing::Invoke;
using ::testing::Le;
using ::testing::Lt;
using ::testing::Matcher;
using ::testing::Mock;
using ::testing::NiceMock;
using ::testing::Optional;
using ::testing::Return;
using ::testing::SizeIs;
using ::testing::StrictMock;

namespace {
const int kMinPixelsPerFrame = 320 * 180;
const int kQpLow = 1;
const int kQpHigh = 2;
const int kMinFramerateFps = 2;
const int kMinBalancedFramerateFps = 7;
constexpr TimeDelta kFrameTimeout = TimeDelta::Millis(100);
const size_t kMaxPayloadLength = 1440;
const DataRate kTargetBitrate = DataRate::KilobitsPerSec(1000);
const DataRate kLowTargetBitrate = DataRate::KilobitsPerSec(100);
const DataRate kStartBitrate = DataRate::KilobitsPerSec(600);
const DataRate kSimulcastTargetBitrate = DataRate::KilobitsPerSec(3150);
const int kMaxInitialFramedrop = 4;
const int kDefaultFramerate = 30;
const int64_t kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerate;
const int64_t kProcessIntervalMs = 1000;
const VideoEncoder::ResolutionBitrateLimits
    kEncoderBitrateLimits540p(960 * 540, 100 * 1000, 100 * 1000, 2000 * 1000);
const VideoEncoder::ResolutionBitrateLimits
    kEncoderBitrateLimits720p(1280 * 720, 200 * 1000, 200 * 1000, 4000 * 1000);

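// Raw H.264 SPS NALU data (Annex B start code followed by an SPS NAL unit),
// used as bitstream input by the H.264-specific tests below.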
uint8_t kOptimalSps[] = {0,    0,    0,    1,    H264::NaluType::kSps,
                         0x00, 0x00, 0x03, 0x03, 0xF4,
                         0x05, 0x03, 0xC7, 0xE0, 0x1B,
                         0x41, 0x10, 0x8D, 0x00};

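// A coded VP8 frame; as the constant name indicates, the QP parsed from this
// payload is 25.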
const uint8_t kCodedFrameVp8Qp25[] = {
    0x10, 0x02, 0x00, 0x9d, 0x01, 0x2a, 0x10, 0x00, 0x10, 0x00,
    0x02, 0x47, 0x08, 0x85, 0x85, 0x88, 0x85, 0x84, 0x88, 0x0c,
    0x82, 0x00, 0x0c, 0x0d, 0x60, 0x00, 0xfe, 0xfc, 0x5c, 0xd0};

VideoFrame CreateSimpleNV12Frame() {
  return VideoFrame::Builder()
      .set_video_frame_buffer(rtc::make_ref_counted<NV12Buffer>(
          /*width=*/16, /*height=*/16))
      .build();
}

void PassAFrame(
    TaskQueueBase* encoder_queue,
    FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback,
    int64_t ntp_time_ms) {
  encoder_queue->PostTask([video_stream_encoder_callback, ntp_time_ms] {
    video_stream_encoder_callback->OnFrame(Timestamp::Millis(ntp_time_ms), 1,
                                           CreateSimpleNV12Frame());
  });
}

class TestBuffer : public webrtc::I420Buffer {
 public:
  TestBuffer(rtc::Event* event, int width, int height)
      : I420Buffer(width, height), event_(event) {}

 private:
  friend class rtc::RefCountedObject<TestBuffer>;
  ~TestBuffer() override {
    if (event_)
      event_->Set();
  }
  rtc::Event* const event_;
};

// A fake native buffer that can't be converted to I420. Upon scaling, it
// produces another FakeNativeBuffer.
class FakeNativeBuffer : public webrtc::VideoFrameBuffer {
 public:
  FakeNativeBuffer(rtc::Event* event, int width, int height)
      : event_(event), width_(width), height_(height) {}
  webrtc::VideoFrameBuffer::Type type() const override { return Type::kNative; }
  int width() const override { return width_; }
  int height() const override { return height_; }
  rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override {
    return nullptr;
  }
  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(
      int offset_x,
      int offset_y,
      int crop_width,
      int crop_height,
      int scaled_width,
      int scaled_height) override {
    return rtc::make_ref_counted<FakeNativeBuffer>(nullptr, scaled_width,
                                                   scaled_height);
  }

 private:
  friend class rtc::RefCountedObject<FakeNativeBuffer>;
  ~FakeNativeBuffer() override {
    if (event_)
      event_->Set();
  }
  rtc::Event* const event_;
  const int width_;
  const int height_;
};

// A fake native buffer that is backed by an NV12 buffer.
class FakeNV12NativeBuffer : public webrtc::VideoFrameBuffer {
 public:
  FakeNV12NativeBuffer(rtc::Event* event, int width, int height)
      : nv12_buffer_(NV12Buffer::Create(width, height)), event_(event) {}

  webrtc::VideoFrameBuffer::Type type() const override { return Type::kNative; }
  int width() const override { return nv12_buffer_->width(); }
  int height() const override { return nv12_buffer_->height(); }
  rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override {
    return nv12_buffer_->ToI420();
  }
  rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
      rtc::ArrayView<VideoFrameBuffer::Type> types) override {
    if (absl::c_find(types, Type::kNV12) != types.end()) {
      return nv12_buffer_;
    }
    return nullptr;
  }
  const NV12BufferInterface* GetNV12() const { return nv12_buffer_.get(); }

 private:
  friend class rtc::RefCountedObject<FakeNV12NativeBuffer>;
  ~FakeNV12NativeBuffer() override {
    if (event_)
      event_->Set();
  }
  rtc::scoped_refptr<NV12Buffer> nv12_buffer_;
  rtc::Event* const event_;
};

class CpuOveruseDetectorProxy : public OveruseFrameDetector {
 public:
  CpuOveruseDetectorProxy(CpuOveruseMetricsObserver* metrics_observer,
                          const FieldTrialsView& field_trials)
      : OveruseFrameDetector(metrics_observer, field_trials),
        last_target_framerate_fps_(-1),
        framerate_updated_event_(true /* manual_reset */,
                                 false /* initially_signaled */) {}
  virtual ~CpuOveruseDetectorProxy() {}

  void OnTargetFramerateUpdated(int framerate_fps) override {
    MutexLock lock(&lock_);
    last_target_framerate_fps_ = framerate_fps;
    OveruseFrameDetector::OnTargetFramerateUpdated(framerate_fps);
    framerate_updated_event_.Set();
  }

  int GetLastTargetFramerate() {
    MutexLock lock(&lock_);
    return last_target_framerate_fps_;
  }

  CpuOveruseOptions GetOptions() { return options_; }

  rtc::Event* framerate_updated_event() { return &framerate_updated_event_; }

 private:
  Mutex lock_;
  int last_target_framerate_fps_ RTC_GUARDED_BY(lock_);
  rtc::Event framerate_updated_event_;
};

class FakeVideoSourceRestrictionsListener
    : public VideoSourceRestrictionsListener {
 public:
  FakeVideoSourceRestrictionsListener()
      : was_restrictions_updated_(false), restrictions_updated_event_() {}
  ~FakeVideoSourceRestrictionsListener() override {
    RTC_DCHECK(was_restrictions_updated_);
  }

  rtc::Event* restrictions_updated_event() {
    return &restrictions_updated_event_;
  }

  // VideoSourceRestrictionsListener implementation.
  void OnVideoSourceRestrictionsUpdated(
      VideoSourceRestrictions restrictions,
      const VideoAdaptationCounters& adaptation_counters,
      rtc::scoped_refptr<Resource> reason,
      const VideoSourceRestrictions& unfiltered_restrictions) override {
    was_restrictions_updated_ = true;
    restrictions_updated_event_.Set();
  }

 private:
  bool was_restrictions_updated_;
  rtc::Event restrictions_updated_event_;
};

auto WantsFps(Matcher<int> fps_matcher) {
  return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps,
               fps_matcher);
}

auto WantsMaxPixels(Matcher<int> max_pixel_matcher) {
  return Field("max_pixel_count", &rtc::VideoSinkWants::max_pixel_count,
               AllOf(max_pixel_matcher, Gt(0)));
}

auto ResolutionMax() {
  return AllOf(
      WantsMaxPixels(Eq(std::numeric_limits<int>::max())),
      Field("target_pixel_count", &rtc::VideoSinkWants::target_pixel_count,
            Eq(absl::nullopt)));
}

auto FpsMax() {
  return WantsFps(Eq(kDefaultFramerate));
}

auto FpsUnlimited() {
  return WantsFps(Eq(std::numeric_limits<int>::max()));
}

auto FpsMatchesResolutionMax(Matcher<int> fps_matcher) {
  return AllOf(WantsFps(fps_matcher), ResolutionMax());
}

auto FpsMaxResolutionMatches(Matcher<int> pixel_matcher) {
  return AllOf(FpsMax(), WantsMaxPixels(pixel_matcher));
}

auto FpsMaxResolutionMax() {
  return AllOf(FpsMax(), ResolutionMax());
}

auto UnlimitedSinkWants() {
  return AllOf(FpsUnlimited(), ResolutionMax());
}

auto FpsInRangeForPixelsInBalanced(int last_frame_pixels) {
  Matcher<int> fps_range_matcher;

  if (last_frame_pixels <= 320 * 240) {
    fps_range_matcher = AllOf(Ge(7), Le(10));
  } else if (last_frame_pixels <= 480 * 360) {
    fps_range_matcher = AllOf(Ge(10), Le(15));
  } else if (last_frame_pixels <= 640 * 480) {
    fps_range_matcher = Ge(15);
  } else {
    fps_range_matcher = Eq(kDefaultFramerate);
  }
  return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps,
               fps_range_matcher);
}

auto FpsEqResolutionEqTo(const rtc::VideoSinkWants& other_wants) {
  return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
               WantsMaxPixels(Eq(other_wants.max_pixel_count)));
}

auto FpsMaxResolutionLt(const rtc::VideoSinkWants& other_wants) {
  return AllOf(FpsMax(), WantsMaxPixels(Lt(other_wants.max_pixel_count)));
}

auto FpsMaxResolutionGt(const rtc::VideoSinkWants& other_wants) {
  return AllOf(FpsMax(), WantsMaxPixels(Gt(other_wants.max_pixel_count)));
}

auto FpsLtResolutionEq(const rtc::VideoSinkWants& other_wants) {
  return AllOf(WantsFps(Lt(other_wants.max_framerate_fps)),
               WantsMaxPixels(Eq(other_wants.max_pixel_count)));
}

auto FpsGtResolutionEq(const rtc::VideoSinkWants& other_wants) {
  return AllOf(WantsFps(Gt(other_wants.max_framerate_fps)),
               WantsMaxPixels(Eq(other_wants.max_pixel_count)));
}

auto FpsEqResolutionLt(const rtc::VideoSinkWants& other_wants) {
  return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
               WantsMaxPixels(Lt(other_wants.max_pixel_count)));
}

auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) {
  return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
               WantsMaxPixels(Gt(other_wants.max_pixel_count)));
}

class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
 public:
  VideoStreamEncoderUnderTest(
      TimeController* time_controller,
      std::unique_ptr<FrameCadenceAdapterInterface> cadence_adapter,
      std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter>
          encoder_queue,
      SendStatisticsProxy* stats_proxy,
      const VideoStreamEncoderSettings& settings,
      VideoStreamEncoder::BitrateAllocationCallbackType
          allocation_callback_type,
      const FieldTrialsView& field_trials,
      int num_cores)
      : VideoStreamEncoder(
            time_controller->GetClock(),
            num_cores,
            stats_proxy,
            settings,
            std::unique_ptr<OveruseFrameDetector>(
                overuse_detector_proxy_ =
                    new CpuOveruseDetectorProxy(stats_proxy, field_trials)),
            std::move(cadence_adapter),
            std::move(encoder_queue),
            allocation_callback_type,
            field_trials),
        time_controller_(time_controller),
        fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")),
        fake_quality_resource_(FakeResource::Create("FakeResource[QP]")),
        fake_adaptation_constraint_("FakeAdaptationConstraint") {
    InjectAdaptationResource(fake_quality_resource_,
                             VideoAdaptationReason::kQuality);
    InjectAdaptationResource(fake_cpu_resource_, VideoAdaptationReason::kCpu);
    InjectAdaptationConstraint(&fake_adaptation_constraint_);
  }

  void SetSourceAndWaitForRestrictionsUpdated(
      rtc::VideoSourceInterface<VideoFrame>* source,
      const DegradationPreference& degradation_preference) {
    FakeVideoSourceRestrictionsListener listener;
    AddRestrictionsListenerForTesting(&listener);
    SetSource(source, degradation_preference);
    listener.restrictions_updated_event()->Wait(TimeDelta::Seconds(5));
    RemoveRestrictionsListenerForTesting(&listener);
  }

  void SetSourceAndWaitForFramerateUpdated(
      rtc::VideoSourceInterface<VideoFrame>* source,
      const DegradationPreference& degradation_preference) {
    overuse_detector_proxy_->framerate_updated_event()->Reset();
    SetSource(source, degradation_preference);
    overuse_detector_proxy_->framerate_updated_event()->Wait(
        TimeDelta::Seconds(5));
  }

  void OnBitrateUpdatedAndWaitForManagedResources(
      DataRate target_bitrate,
      DataRate stable_target_bitrate,
      DataRate link_allocation,
      uint8_t fraction_lost,
      int64_t round_trip_time_ms,
      double cwnd_reduce_ratio) {
    OnBitrateUpdated(target_bitrate, stable_target_bitrate, link_allocation,
                     fraction_lost, round_trip_time_ms, cwnd_reduce_ratio);
    // Bitrate is updated on the encoder queue.
    WaitUntilTaskQueueIsIdle();
  }

  // This is used as a synchronisation mechanism, to make sure that the
  // encoder queue is not blocked before we start sending it frames.
  void WaitUntilTaskQueueIsIdle() {
    time_controller_->AdvanceTime(TimeDelta::Zero());
  }

  // Triggers resource usage measurements on the fake CPU resource.
  void TriggerCpuOveruse() {
    rtc::Event event;
    encoder_queue()->PostTask([this, &event] {
      fake_cpu_resource_->SetUsageState(ResourceUsageState::kOveruse);
      event.Set();
    });
    ASSERT_TRUE(event.Wait(TimeDelta::Seconds(5)));
    time_controller_->AdvanceTime(TimeDelta::Zero());
  }

  void TriggerCpuUnderuse() {
    rtc::Event event;
    encoder_queue()->PostTask([this, &event] {
      fake_cpu_resource_->SetUsageState(ResourceUsageState::kUnderuse);
      event.Set();
    });
    ASSERT_TRUE(event.Wait(TimeDelta::Seconds(5)));
    time_controller_->AdvanceTime(TimeDelta::Zero());
  }

  // Triggers resource usage measurements on the fake quality resource.
  void TriggerQualityLow() {
    rtc::Event event;
    encoder_queue()->PostTask([this, &event] {
      fake_quality_resource_->SetUsageState(ResourceUsageState::kOveruse);
      event.Set();
    });
    ASSERT_TRUE(event.Wait(TimeDelta::Seconds(5)));
    time_controller_->AdvanceTime(TimeDelta::Zero());
  }
  void TriggerQualityHigh() {
    rtc::Event event;
    encoder_queue()->PostTask([this, &event] {
      fake_quality_resource_->SetUsageState(ResourceUsageState::kUnderuse);
      event.Set();
    });
    ASSERT_TRUE(event.Wait(TimeDelta::Seconds(5)));
    time_controller_->AdvanceTime(TimeDelta::Zero());
  }

  TimeController* const time_controller_;
  CpuOveruseDetectorProxy* overuse_detector_proxy_;
  rtc::scoped_refptr<FakeResource> fake_cpu_resource_;
  rtc::scoped_refptr<FakeResource> fake_quality_resource_;
  FakeAdaptationConstraint fake_adaptation_constraint_;
};

// Simulates simulcast behavior and makes highest stream resolutions divisible
// by 4.
class CroppingVideoStreamFactory
    : public VideoEncoderConfig::VideoStreamFactoryInterface {
 public:
  CroppingVideoStreamFactory() {}

 private:
  std::vector<VideoStream> CreateEncoderStreams(
      int frame_width,
      int frame_height,
      const VideoEncoderConfig& encoder_config) override {
    std::vector<VideoStream> streams = test::CreateVideoStreams(
        frame_width - frame_width % 4, frame_height - frame_height % 4,
        encoder_config);
    return streams;
  }
};

class AdaptingFrameForwarder : public test::FrameForwarder {
 public:
  explicit AdaptingFrameForwarder(TimeController* time_controller)
      : time_controller_(time_controller), adaptation_enabled_(false) {}
  ~AdaptingFrameForwarder() override {}

  void set_adaptation_enabled(bool enabled) {
    MutexLock lock(&mutex_);
    adaptation_enabled_ = enabled;
  }

  bool adaption_enabled() const {
    MutexLock lock(&mutex_);
    return adaptation_enabled_;
  }

  // The "last wants" is a snapshot of the previous rtc::VideoSinkWants where
  // the resolution or frame rate was different than it is currently. If
  // something else is modified, such as encoder resolutions, but the resolution
  // and frame rate stays the same, last wants is not updated.
  rtc::VideoSinkWants last_wants() const {
    MutexLock lock(&mutex_);
    return last_wants_;
  }

  absl::optional<int> last_sent_width() const { return last_width_; }
  absl::optional<int> last_sent_height() const { return last_height_; }

  void IncomingCapturedFrame(const VideoFrame& video_frame) override {
    RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent());
    time_controller_->AdvanceTime(TimeDelta::Zero());

    int cropped_width = 0;
    int cropped_height = 0;
    int out_width = 0;
    int out_height = 0;
    if (adaption_enabled()) {
      RTC_DLOG(LS_INFO) << "IncomingCapturedFrame: AdaptFrameResolution()"
                        << "w=" << video_frame.width()
                        << "h=" << video_frame.height();
      if (adapter_.AdaptFrameResolution(
              video_frame.width(), video_frame.height(),
              video_frame.timestamp_us() * 1000, &cropped_width,
              &cropped_height, &out_width, &out_height)) {
        VideoFrame adapted_frame =
            VideoFrame::Builder()
                .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
                    nullptr, out_width, out_height))
                .set_ntp_time_ms(video_frame.ntp_time_ms())
                .set_timestamp_ms(99)
                .set_rotation(kVideoRotation_0)
                .build();
        if (video_frame.has_update_rect()) {
          adapted_frame.set_update_rect(
              video_frame.update_rect().ScaleWithFrame(
                  video_frame.width(), video_frame.height(), 0, 0,
                  video_frame.width(), video_frame.height(), out_width,
                  out_height));
        }
        test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
        last_width_.emplace(adapted_frame.width());
        last_height_.emplace(adapted_frame.height());
      } else {
        last_width_ = absl::nullopt;
        last_height_ = absl::nullopt;
      }
    } else {
      RTC_DLOG(LS_INFO) << "IncomingCapturedFrame: adaptation not enabled";
      test::FrameForwarder::IncomingCapturedFrame(video_frame);
      last_width_.emplace(video_frame.width());
      last_height_.emplace(video_frame.height());
    }
  }

  void OnOutputFormatRequest(int width, int height) {
    absl::optional<std::pair<int, int>> target_aspect_ratio =
        std::make_pair(width, height);
    absl::optional<int> max_pixel_count = width * height;
    absl::optional<int> max_fps;
    adapter_.OnOutputFormatRequest(target_aspect_ratio, max_pixel_count,
                                   max_fps);
  }

  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
                       const rtc::VideoSinkWants& wants) override {
    MutexLock lock(&mutex_);
    rtc::VideoSinkWants prev_wants = sink_wants_locked();
    bool did_adapt =
        prev_wants.max_pixel_count != wants.max_pixel_count ||
        prev_wants.target_pixel_count != wants.target_pixel_count ||
        prev_wants.max_framerate_fps != wants.max_framerate_fps;
    if (did_adapt) {
      last_wants_ = prev_wants;
    }
    adapter_.OnSinkWants(wants);
    test::FrameForwarder::AddOrUpdateSinkLocked(sink, wants);
  }

  void RequestRefreshFrame() override { ++refresh_frames_requested_; }

  TimeController* const time_controller_;
  cricket::VideoAdapter adapter_;
  bool adaptation_enabled_ RTC_GUARDED_BY(mutex_);
  rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(mutex_);
  absl::optional<int> last_width_;
  absl::optional<int> last_height_;
  int refresh_frames_requested_{0};
};

// TODO(nisse): Mock only VideoStreamEncoderObserver.
class MockableSendStatisticsProxy : public SendStatisticsProxy {
 public:
  MockableSendStatisticsProxy(Clock* clock,
                              const VideoSendStream::Config& config,
                              VideoEncoderConfig::ContentType content_type,
                              const FieldTrialsView& field_trials)
      : SendStatisticsProxy(clock, config, content_type, field_trials) {}

  VideoSendStream::Stats GetStats() override {
    MutexLock lock(&lock_);
    if (mock_stats_)
      return *mock_stats_;
    return SendStatisticsProxy::GetStats();
  }

  int GetInputFrameRate() const override {
    MutexLock lock(&lock_);
    if (mock_stats_)
      return mock_stats_->input_frame_rate;
    return SendStatisticsProxy::GetInputFrameRate();
  }
  void SetMockStats(const VideoSendStream::Stats& stats) {
    MutexLock lock(&lock_);
    mock_stats_.emplace(stats);
  }

  void ResetMockStats() {
    MutexLock lock(&lock_);
    mock_stats_.reset();
  }

  void SetDroppedFrameCallback(std::function<void(DropReason)> callback) {
    on_frame_dropped_ = std::move(callback);
  }

 private:
  void OnFrameDropped(DropReason reason) override {
    SendStatisticsProxy::OnFrameDropped(reason);
    if (on_frame_dropped_)
      on_frame_dropped_(reason);
  }

  mutable Mutex lock_;
  absl::optional<VideoSendStream::Stats> mock_stats_ RTC_GUARDED_BY(lock_);
  std::function<void(DropReason)> on_frame_dropped_;
};

class SimpleVideoStreamEncoderFactory {
 public:
  class AdaptedVideoStreamEncoder : public VideoStreamEncoder {
   public:
    using VideoStreamEncoder::VideoStreamEncoder;
    ~AdaptedVideoStreamEncoder() { Stop(); }
  };

  class MockFakeEncoder : public test::FakeEncoder {
   public:
    using FakeEncoder::FakeEncoder;
    MOCK_METHOD(CodecSpecificInfo,
                EncodeHook,
                (EncodedImage & encoded_image,
                 rtc::scoped_refptr<EncodedImageBuffer> buffer),
                (override));
  };

  SimpleVideoStreamEncoderFactory() {
    encoder_settings_.encoder_factory = &encoder_factory_;
    encoder_settings_.bitrate_allocator_factory =
        bitrate_allocator_factory_.get();
  }

  std::unique_ptr<AdaptedVideoStreamEncoder> CreateWithEncoderQueue(
      std::unique_ptr<FrameCadenceAdapterInterface> zero_hertz_adapter,
      std::unique_ptr<TaskQueueBase, TaskQueueDeleter> encoder_queue,
      const FieldTrialsView* field_trials = nullptr) {
    auto result = std::make_unique<AdaptedVideoStreamEncoder>(
        time_controller_.GetClock(),
        /*number_of_cores=*/1,
        /*stats_proxy=*/stats_proxy_.get(), encoder_settings_,
        std::make_unique<CpuOveruseDetectorProxy>(
            /*stats_proxy=*/nullptr,
            field_trials ? *field_trials : field_trials_),
        std::move(zero_hertz_adapter), std::move(encoder_queue),
        VideoStreamEncoder::BitrateAllocationCallbackType::
            kVideoBitrateAllocation,
        field_trials ? *field_trials : field_trials_);
    result->SetSink(&sink_, /*rotation_applied=*/false);
    return result;
  }

  std::unique_ptr<AdaptedVideoStreamEncoder> Create(
      std::unique_ptr<FrameCadenceAdapterInterface> zero_hertz_adapter,
      TaskQueueBase** encoder_queue_ptr = nullptr) {
    auto encoder_queue =
        time_controller_.GetTaskQueueFactory()->CreateTaskQueue(
            "EncoderQueue", TaskQueueFactory::Priority::NORMAL);
    if (encoder_queue_ptr)
      *encoder_queue_ptr = encoder_queue.get();
    return CreateWithEncoderQueue(std::move(zero_hertz_adapter),
                                  std::move(encoder_queue));
  }

  void DepleteTaskQueues() { time_controller_.AdvanceTime(TimeDelta::Zero()); }
  MockFakeEncoder& GetMockFakeEncoder() { return mock_fake_encoder_; }

  GlobalSimulatedTimeController* GetTimeController() {
    return &time_controller_;
  }

 private:
  class NullEncoderSink : public VideoStreamEncoderInterface::EncoderSink {
   public:
    ~NullEncoderSink() override = default;
    void OnEncoderConfigurationChanged(
        std::vector<VideoStream> streams,
        bool is_svc,
        VideoEncoderConfig::ContentType content_type,
        int min_transmit_bitrate_bps) override {}
    void OnBitrateAllocationUpdated(
        const VideoBitrateAllocation& allocation) override {}
    void OnVideoLayersAllocationUpdated(
        VideoLayersAllocation allocation) override {}
    Result OnEncodedImage(
        const EncodedImage& encoded_image,
        const CodecSpecificInfo* codec_specific_info) override {
      return Result(EncodedImageCallback::Result::OK);
    }
  };

  test::ScopedKeyValueConfig field_trials_;
  GlobalSimulatedTimeController time_controller_{Timestamp::Zero()};
  std::unique_ptr<TaskQueueFactory> task_queue_factory_{
      time_controller_.CreateTaskQueueFactory()};
  std::unique_ptr<MockableSendStatisticsProxy> stats_proxy_ =
      std::make_unique<MockableSendStatisticsProxy>(
          time_controller_.GetClock(),
          VideoSendStream::Config(nullptr),
          webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
          field_trials_);
  std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_ =
      CreateBuiltinVideoBitrateAllocatorFactory();
  VideoStreamEncoderSettings encoder_settings_{
      VideoEncoder::Capabilities(/*loss_notification=*/false)};
  MockFakeEncoder mock_fake_encoder_{time_controller_.GetClock()};
  test::VideoEncoderProxyFactory encoder_factory_{&mock_fake_encoder_};
  NullEncoderSink sink_;
};

class MockFrameCadenceAdapter : public FrameCadenceAdapterInterface {
 public:
  MOCK_METHOD(void, Initialize, (Callback * callback), (override));
  MOCK_METHOD(void,
              SetZeroHertzModeEnabled,
              (absl::optional<ZeroHertzModeParams>),
              (override));
  MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override));
  MOCK_METHOD(absl::optional<uint32_t>, GetInputFrameRateFps, (), (override));
  MOCK_METHOD(void, UpdateFrameRate, (), (override));
  MOCK_METHOD(void,
              UpdateLayerQualityConvergence,
              (size_t spatial_index, bool converged),
              (override));
  MOCK_METHOD(void,
              UpdateLayerStatus,
              (size_t spatial_index, bool enabled),
              (override));
  MOCK_METHOD(void, ProcessKeyFrameRequest, (), (override));
};

class MockEncoderSelector
    : public VideoEncoderFactory::EncoderSelectorInterface {
 public:
  MOCK_METHOD(void,
              OnCurrentEncoder,
              (const SdpVideoFormat& format),
              (override));
  MOCK_METHOD(absl::optional<SdpVideoFormat>,
              OnAvailableBitrate,
              (const DataRate& rate),
              (override));
  MOCK_METHOD(absl::optional<SdpVideoFormat>,
              OnResolutionChange,
              (const RenderResolution& resolution),
              (override));
  MOCK_METHOD(absl::optional<SdpVideoFormat>, OnEncoderBroken, (), (override));
};

class MockVideoSourceInterface : public rtc::VideoSourceInterface<VideoFrame> {
 public:
  MOCK_METHOD(void,
              AddOrUpdateSink,
              (rtc::VideoSinkInterface<VideoFrame>*,
               const rtc::VideoSinkWants&),
              (override));
  MOCK_METHOD(void,
              RemoveSink,
              (rtc::VideoSinkInterface<VideoFrame>*),
              (override));
  MOCK_METHOD(void, RequestRefreshFrame, (), (override));
};

}  // namespace

class VideoStreamEncoderTest : public ::testing::Test {
 public:
  static constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(1);

  VideoStreamEncoderTest()
      : video_send_config_(VideoSendStream::Config(nullptr)),
        codec_width_(320),
        codec_height_(240),
        max_framerate_(kDefaultFramerate),
        fake_encoder_(&time_controller_),
        encoder_factory_(&fake_encoder_),
        stats_proxy_(new MockableSendStatisticsProxy(
            time_controller_.GetClock(),
            video_send_config_,
            webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
            field_trials_)),
        sink_(&time_controller_, &fake_encoder_) {}

  void SetUp() override {
    metrics::Reset();
    video_send_config_ = VideoSendStream::Config(nullptr);
    video_send_config_.encoder_settings.encoder_factory = &encoder_factory_;
    video_send_config_.encoder_settings.bitrate_allocator_factory =
        &bitrate_allocator_factory_;
    video_send_config_.rtp.payload_name = "FAKE";
    video_send_config_.rtp.payload_type = 125;

    VideoEncoderConfig video_encoder_config;
    test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
    EXPECT_EQ(1u, video_encoder_config.simulcast_layers.size());
    video_encoder_config.simulcast_layers[0].num_temporal_layers = 1;
    video_encoder_config.simulcast_layers[0].max_framerate = max_framerate_;
    video_encoder_config_ = video_encoder_config.Copy();

    ConfigureEncoder(std::move(video_encoder_config));
  }

  void ConfigureEncoder(
      VideoEncoderConfig video_encoder_config,
      VideoStreamEncoder::BitrateAllocationCallbackType
          allocation_callback_type =
              VideoStreamEncoder::BitrateAllocationCallbackType::
                  kVideoBitrateAllocationWhenScreenSharing,
      int num_cores = 1) {
    if (video_stream_encoder_)
      video_stream_encoder_->Stop();

    auto encoder_queue = GetTaskQueueFactory()->CreateTaskQueue(
        "EncoderQueue", TaskQueueFactory::Priority::NORMAL);
    TaskQueueBase* encoder_queue_ptr = encoder_queue.get();
    std::unique_ptr<FrameCadenceAdapterInterface> cadence_adapter =
        FrameCadenceAdapterInterface::Create(time_controller_.GetClock(),
                                             encoder_queue_ptr, field_trials_);
    video_stream_encoder_ = std::make_unique<VideoStreamEncoderUnderTest>(
        &time_controller_, std::move(cadence_adapter), std::move(encoder_queue),
        stats_proxy_.get(), video_send_config_.encoder_settings,
        allocation_callback_type, field_trials_, num_cores);
    video_stream_encoder_->SetSink(&sink_, /*rotation_applied=*/false);
    video_stream_encoder_->SetSource(
        &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
    video_stream_encoder_->SetStartBitrate(kTargetBitrate.bps());
    video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
                                            kMaxPayloadLength, nullptr);
    video_stream_encoder_->WaitUntilTaskQueueIsIdle();
  }

  void ResetEncoder(const std::string& payload_name,
                    size_t num_streams,
                    size_t num_temporal_layers,
                    unsigned char num_spatial_layers,
                    bool screenshare,
                    VideoStreamEncoder::BitrateAllocationCallbackType
                        allocation_callback_type =
                            VideoStreamEncoder::BitrateAllocationCallbackType::
                                kVideoBitrateAllocationWhenScreenSharing,
                    int num_cores = 1) {
    video_send_config_.rtp.payload_name = payload_name;

    VideoEncoderConfig video_encoder_config;
    test::FillEncoderConfiguration(PayloadStringToCodecType(payload_name),
                                   num_streams, &video_encoder_config);
    for (auto& layer : video_encoder_config.simulcast_layers) {
      layer.num_temporal_layers = num_temporal_layers;
      layer.max_framerate = kDefaultFramerate;
    }
    video_encoder_config.max_bitrate_bps =
        num_streams == 1 ? kTargetBitrate.bps() : kSimulcastTargetBitrate.bps();
    video_encoder_config.content_type =
        screenshare ? VideoEncoderConfig::ContentType::kScreen
                    : VideoEncoderConfig::ContentType::kRealtimeVideo;
    if (payload_name == "VP9") {
      VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
      vp9_settings.numberOfSpatialLayers = num_spatial_layers;
      vp9_settings.automaticResizeOn = num_spatial_layers <= 1;
      video_encoder_config.encoder_specific_settings =
          rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
              vp9_settings);
    }
    ConfigureEncoder(std::move(video_encoder_config), allocation_callback_type,
                     num_cores);
  }

  VideoFrame CreateFrame(int64_t ntp_time_ms,
                         rtc::Event* destruction_event) const {
    return VideoFrame::Builder()
        .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
            destruction_event, codec_width_, codec_height_))
        .set_ntp_time_ms(ntp_time_ms)
        .set_timestamp_ms(99)
        .set_rotation(kVideoRotation_0)
        .build();
  }

  VideoFrame CreateFrameWithUpdatedPixel(int64_t ntp_time_ms,
                                         rtc::Event* destruction_event,
                                         int offset_x) const {
    return VideoFrame::Builder()
        .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
            destruction_event, codec_width_, codec_height_))
        .set_ntp_time_ms(ntp_time_ms)
        .set_timestamp_ms(99)
        .set_rotation(kVideoRotation_0)
        .set_update_rect(VideoFrame::UpdateRect{offset_x, 0, 1, 1})
        .build();
  }

  VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const {
    auto buffer = rtc::make_ref_counted<TestBuffer>(nullptr, width, height);
    I420Buffer::SetBlack(buffer.get());
    return VideoFrame::Builder()
        .set_video_frame_buffer(std::move(buffer))
        .set_ntp_time_ms(ntp_time_ms)
        .set_timestamp_ms(ntp_time_ms)
        .set_rotation(kVideoRotation_0)
        .build();
  }

  VideoFrame CreateNV12Frame(int64_t ntp_time_ms, int width, int height) const {
    return VideoFrame::Builder()
        .set_video_frame_buffer(NV12Buffer::Create(width, height))
        .set_ntp_time_ms(ntp_time_ms)
        .set_timestamp_ms(ntp_time_ms)
        .set_rotation(kVideoRotation_0)
        .build();
  }

  VideoFrame CreateFakeNativeFrame(int64_t ntp_time_ms,
                                   rtc::Event* destruction_event,
                                   int width,
                                   int height) const {
    return VideoFrame::Builder()
        .set_video_frame_buffer(rtc::make_ref_counted<FakeNativeBuffer>(
            destruction_event, width, height))
        .set_ntp_time_ms(ntp_time_ms)
        .set_timestamp_ms(99)
        .set_rotation(kVideoRotation_0)
        .build();
  }

  VideoFrame CreateFakeNV12NativeFrame(int64_t ntp_time_ms,
                                       rtc::Event* destruction_event,
                                       int width,
                                       int height) const {
    return VideoFrame::Builder()
        .set_video_frame_buffer(rtc::make_ref_counted<FakeNV12NativeBuffer>(
            destruction_event, width, height))
        .set_ntp_time_ms(ntp_time_ms)
        .set_timestamp_ms(99)
        .set_rotation(kVideoRotation_0)
        .build();
  }

  VideoFrame CreateFakeNativeFrame(int64_t ntp_time_ms,
                                   rtc::Event* destruction_event) const {
    return CreateFakeNativeFrame(ntp_time_ms, destruction_event, codec_width_,
                                 codec_height_);
  }

  void VerifyAllocatedBitrate(const VideoBitrateAllocation& expected_bitrate) {
    video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
        kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);

    video_source_.IncomingCapturedFrame(
        CreateFrame(1, codec_width_, codec_height_));
    WaitForEncodedFrame(1);
    EXPECT_EQ(expected_bitrate, sink_.GetLastVideoBitrateAllocation());
  }

  void WaitForEncodedFrame(int64_t expected_ntp_time) {
    sink_.WaitForEncodedFrame(expected_ntp_time);
    AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
  }

  bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, TimeDelta timeout) {
    bool ok = sink_.TimedWaitForEncodedFrame(expected_ntp_time, timeout);
    AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
    return ok;
  }

  void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) {
    sink_.WaitForEncodedFrame(expected_width, expected_height);
    AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
  }

  void ExpectDroppedFrame() {
    sink_.ExpectDroppedFrame();
    AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
  }

  bool WaitForFrame(TimeDelta timeout) {
    bool ok = sink_.WaitForFrame(timeout);
    AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
    return ok;
  }

1041   class TestEncoder : public test::FakeEncoder {
1042    public:
TestEncoder(TimeController * time_controller)1043     explicit TestEncoder(TimeController* time_controller)
1044         : FakeEncoder(time_controller->GetClock()),
1045           time_controller_(time_controller) {
1046       RTC_DCHECK(time_controller_);
1047     }
1048 
GetEncoderInfo() const1049     VideoEncoder::EncoderInfo GetEncoderInfo() const override {
1050       MutexLock lock(&local_mutex_);
1051       EncoderInfo info = FakeEncoder::GetEncoderInfo();
1052       if (initialized_ == EncoderState::kInitialized) {
1053         if (quality_scaling_) {
1054           info.scaling_settings = VideoEncoder::ScalingSettings(
1055               kQpLow, kQpHigh, kMinPixelsPerFrame);
1056         }
1057         info.is_hardware_accelerated = is_hardware_accelerated_;
1058         for (int i = 0; i < kMaxSpatialLayers; ++i) {
1059           if (temporal_layers_supported_[i]) {
1060             info.fps_allocation[i].clear();
1061             int num_layers = temporal_layers_supported_[i].value() ? 2 : 1;
1062             for (int tid = 0; tid < num_layers; ++tid)
1063               info.fps_allocation[i].push_back(255 / (num_layers - tid));
1064           }
1065         }
1066       }
1067 
1068       info.resolution_bitrate_limits = resolution_bitrate_limits_;
1069       info.requested_resolution_alignment = requested_resolution_alignment_;
1070       info.apply_alignment_to_all_simulcast_layers =
1071           apply_alignment_to_all_simulcast_layers_;
1072       info.preferred_pixel_formats = preferred_pixel_formats_;
1073       if (is_qp_trusted_.has_value()) {
1074         info.is_qp_trusted = is_qp_trusted_;
1075       }
1076       return info;
1077     }
1078 
RegisterEncodeCompleteCallback(EncodedImageCallback * callback)1079     int32_t RegisterEncodeCompleteCallback(
1080         EncodedImageCallback* callback) override {
1081       MutexLock lock(&local_mutex_);
1082       encoded_image_callback_ = callback;
1083       return FakeEncoder::RegisterEncodeCompleteCallback(callback);
1084     }
1085 
ContinueEncode()1086     void ContinueEncode() { continue_encode_event_.Set(); }
1087 
CheckLastTimeStampsMatch(int64_t ntp_time_ms,uint32_t timestamp) const1088     void CheckLastTimeStampsMatch(int64_t ntp_time_ms,
1089                                   uint32_t timestamp) const {
1090       MutexLock lock(&local_mutex_);
1091       EXPECT_EQ(timestamp_, timestamp);
1092       EXPECT_EQ(ntp_time_ms_, ntp_time_ms);
1093     }
1094 
SetQualityScaling(bool b)1095     void SetQualityScaling(bool b) {
1096       MutexLock lock(&local_mutex_);
1097       quality_scaling_ = b;
1098     }
1099 
SetRequestedResolutionAlignment(int requested_resolution_alignment)1100     void SetRequestedResolutionAlignment(int requested_resolution_alignment) {
1101       MutexLock lock(&local_mutex_);
1102       requested_resolution_alignment_ = requested_resolution_alignment;
1103     }
1104 
SetApplyAlignmentToAllSimulcastLayers(bool b)1105     void SetApplyAlignmentToAllSimulcastLayers(bool b) {
1106       MutexLock lock(&local_mutex_);
1107       apply_alignment_to_all_simulcast_layers_ = b;
1108     }
1109 
SetIsHardwareAccelerated(bool is_hardware_accelerated)1110     void SetIsHardwareAccelerated(bool is_hardware_accelerated) {
1111       MutexLock lock(&local_mutex_);
1112       is_hardware_accelerated_ = is_hardware_accelerated;
1113     }
1114 
SetTemporalLayersSupported(size_t spatial_idx,bool supported)1115     void SetTemporalLayersSupported(size_t spatial_idx, bool supported) {
1116       RTC_DCHECK_LT(spatial_idx, kMaxSpatialLayers);
1117       MutexLock lock(&local_mutex_);
1118       temporal_layers_supported_[spatial_idx] = supported;
1119     }
1120 
SetResolutionBitrateLimits(std::vector<ResolutionBitrateLimits> thresholds)1121     void SetResolutionBitrateLimits(
1122         std::vector<ResolutionBitrateLimits> thresholds) {
1123       MutexLock lock(&local_mutex_);
1124       resolution_bitrate_limits_ = thresholds;
1125     }
1126 
ForceInitEncodeFailure(bool force_failure)1127     void ForceInitEncodeFailure(bool force_failure) {
1128       MutexLock lock(&local_mutex_);
1129       force_init_encode_failed_ = force_failure;
1130     }
1131 
SimulateOvershoot(double rate_factor)1132     void SimulateOvershoot(double rate_factor) {
1133       MutexLock lock(&local_mutex_);
1134       rate_factor_ = rate_factor;
1135     }
1136 
GetLastFramerate() const1137     uint32_t GetLastFramerate() const {
1138       MutexLock lock(&local_mutex_);
1139       return last_framerate_;
1140     }
1141 
GetLastUpdateRect() const1142     VideoFrame::UpdateRect GetLastUpdateRect() const {
1143       MutexLock lock(&local_mutex_);
1144       return last_update_rect_;
1145     }
1146 
LastFrameTypes() const1147     const std::vector<VideoFrameType>& LastFrameTypes() const {
1148       MutexLock lock(&local_mutex_);
1149       return last_frame_types_;
1150     }
1151 
InjectFrame(const VideoFrame & input_image,bool keyframe)1152     void InjectFrame(const VideoFrame& input_image, bool keyframe) {
1153       const std::vector<VideoFrameType> frame_type = {
1154           keyframe ? VideoFrameType::kVideoFrameKey
1155                    : VideoFrameType::kVideoFrameDelta};
1156       {
1157         MutexLock lock(&local_mutex_);
1158         last_frame_types_ = frame_type;
1159       }
1160       FakeEncoder::Encode(input_image, &frame_type);
1161     }
1162 
InjectEncodedImage(const EncodedImage & image,const CodecSpecificInfo * codec_specific_info)1163     void InjectEncodedImage(const EncodedImage& image,
1164                             const CodecSpecificInfo* codec_specific_info) {
1165       MutexLock lock(&local_mutex_);
1166       encoded_image_callback_->OnEncodedImage(image, codec_specific_info);
1167     }
1168 
SetEncodedImageData(rtc::scoped_refptr<EncodedImageBufferInterface> encoded_image_data)1169     void SetEncodedImageData(
1170         rtc::scoped_refptr<EncodedImageBufferInterface> encoded_image_data) {
1171       MutexLock lock(&local_mutex_);
1172       encoded_image_data_ = encoded_image_data;
1173     }
1174 
ExpectNullFrame()1175     void ExpectNullFrame() {
1176       MutexLock lock(&local_mutex_);
1177       expect_null_frame_ = true;
1178     }
1179 
1180     absl::optional<VideoEncoder::RateControlParameters>
GetAndResetLastRateControlSettings()1181     GetAndResetLastRateControlSettings() {
1182       auto settings = last_rate_control_settings_;
1183       last_rate_control_settings_.reset();
1184       return settings;
1185     }
1186 
GetLastInputWidth() const1187     int GetLastInputWidth() const {
1188       MutexLock lock(&local_mutex_);
1189       return last_input_width_;
1190     }
1191 
GetLastInputHeight() const1192     int GetLastInputHeight() const {
1193       MutexLock lock(&local_mutex_);
1194       return last_input_height_;
1195     }
1196 
GetLastInputPixelFormat()1197     absl::optional<VideoFrameBuffer::Type> GetLastInputPixelFormat() {
1198       MutexLock lock(&local_mutex_);
1199       return last_input_pixel_format_;
1200     }
1201 
GetNumSetRates() const1202     int GetNumSetRates() const {
1203       MutexLock lock(&local_mutex_);
1204       return num_set_rates_;
1205     }
1206 
SetPreferredPixelFormats(absl::InlinedVector<VideoFrameBuffer::Type,kMaxPreferredPixelFormats> pixel_formats)1207     void SetPreferredPixelFormats(
1208         absl::InlinedVector<VideoFrameBuffer::Type, kMaxPreferredPixelFormats>
1209             pixel_formats) {
1210       MutexLock lock(&local_mutex_);
1211       preferred_pixel_formats_ = std::move(pixel_formats);
1212     }
1213 
1214     void SetIsQpTrusted(absl::optional<bool> trusted) {
1215       MutexLock lock(&local_mutex_);
1216       is_qp_trusted_ = trusted;
1217     }
1218 
1219     VideoCodecComplexity LastEncoderComplexity() {
1220       MutexLock lock(&local_mutex_);
1221       return last_encoder_complexity_;
1222     }
1223 
1224    private:
1225     int32_t Encode(const VideoFrame& input_image,
1226                    const std::vector<VideoFrameType>* frame_types) override {
1227       {
1228         MutexLock lock(&local_mutex_);
1229         if (expect_null_frame_) {
1230           EXPECT_EQ(input_image.timestamp(), 0u);
1231           EXPECT_EQ(input_image.width(), 1);
1232           last_frame_types_ = *frame_types;
1233           expect_null_frame_ = false;
1234         } else {
1235           EXPECT_GT(input_image.timestamp(), timestamp_);
1236           EXPECT_GT(input_image.ntp_time_ms(), ntp_time_ms_);
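               // Video RTP timestamps run on a 90 kHz clock, so the RTP timestamp
               // is expected to equal the NTP time in milliseconds times 90.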
1237           EXPECT_EQ(input_image.timestamp(), input_image.ntp_time_ms() * 90);
1238         }
1239 
1240         timestamp_ = input_image.timestamp();
1241         ntp_time_ms_ = input_image.ntp_time_ms();
1242         last_input_width_ = input_image.width();
1243         last_input_height_ = input_image.height();
1244         last_update_rect_ = input_image.update_rect();
1245         last_frame_types_ = *frame_types;
1246         last_input_pixel_format_ = input_image.video_frame_buffer()->type();
1247       }
1248       int32_t result = FakeEncoder::Encode(input_image, frame_types);
1249       return result;
1250     }
1251 
1252     CodecSpecificInfo EncodeHook(
1253         EncodedImage& encoded_image,
1254         rtc::scoped_refptr<EncodedImageBuffer> buffer) override {
1255       CodecSpecificInfo codec_specific;
1256       {
1257         MutexLock lock(&mutex_);
1258         codec_specific.codecType = config_.codecType;
1259       }
1260       MutexLock lock(&local_mutex_);
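           // If a test injected a replacement payload via SetEncodedImageData(),
           // attach it to the outgoing encoded image.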
1261       if (encoded_image_data_) {
1262         encoded_image.SetEncodedData(encoded_image_data_);
1263       }
1264       return codec_specific;
1265     }
1266 
1267     int32_t InitEncode(const VideoCodec* config,
1268                        const Settings& settings) override {
1269       int res = FakeEncoder::InitEncode(config, settings);
1270 
1271       MutexLock lock(&local_mutex_);
1272       EXPECT_EQ(initialized_, EncoderState::kUninitialized);
1273 
1274       if (config->codecType == kVideoCodecVP8) {
1275         // Simulate setting up temporal layers, in order to validate the life
1276         // cycle of these objects.
1277         Vp8TemporalLayersFactory factory;
1278         frame_buffer_controller_ =
1279             factory.Create(*config, settings, &fec_controller_override_);
1280       }
1281 
1282       last_encoder_complexity_ = config->GetVideoEncoderComplexity();
1283 
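           // When requested via ForceInitEncodeFailure(), simulate a failing
           // InitEncode() by returning an error code.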
1284       if (force_init_encode_failed_) {
1285         initialized_ = EncoderState::kInitializationFailed;
1286         return -1;
1287       }
1288 
1289       initialized_ = EncoderState::kInitialized;
1290       return res;
1291     }
1292 
1293     int32_t Release() override {
1294       MutexLock lock(&local_mutex_);
1295       EXPECT_NE(initialized_, EncoderState::kUninitialized);
1296       initialized_ = EncoderState::kUninitialized;
1297       return FakeEncoder::Release();
1298     }
1299 
1300     void SetRates(const RateControlParameters& parameters) {
1301       MutexLock lock(&local_mutex_);
1302       num_set_rates_++;
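           // Scale every spatial/temporal layer bitrate by `rate_factor_` to
           // emulate an encoder that over- or undershoots its allocated rate
           // (see SimulateOvershoot()).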
1303       VideoBitrateAllocation adjusted_rate_allocation;
1304       for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
1305         for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
1306           if (parameters.bitrate.HasBitrate(si, ti)) {
1307             adjusted_rate_allocation.SetBitrate(
1308                 si, ti,
1309                 static_cast<uint32_t>(parameters.bitrate.GetBitrate(si, ti) *
1310                                       rate_factor_));
1311           }
1312         }
1313       }
1314       last_framerate_ = static_cast<uint32_t>(parameters.framerate_fps + 0.5);
1315       last_rate_control_settings_ = parameters;
1316       RateControlParameters adjusted_parameters = parameters;
1317       adjusted_parameters.bitrate = adjusted_rate_allocation;
1318       FakeEncoder::SetRates(adjusted_parameters);
1319     }
1320 
1321     TimeController* const time_controller_;
1322     mutable Mutex local_mutex_;
1323     enum class EncoderState {
1324       kUninitialized,
1325       kInitializationFailed,
1326       kInitialized
1327     } initialized_ RTC_GUARDED_BY(local_mutex_) = EncoderState::kUninitialized;
1328     rtc::Event continue_encode_event_;
1329     uint32_t timestamp_ RTC_GUARDED_BY(local_mutex_) = 0;
1330     int64_t ntp_time_ms_ RTC_GUARDED_BY(local_mutex_) = 0;
1331     int last_input_width_ RTC_GUARDED_BY(local_mutex_) = 0;
1332     int last_input_height_ RTC_GUARDED_BY(local_mutex_) = 0;
1333     bool quality_scaling_ RTC_GUARDED_BY(local_mutex_) = true;
1334     int requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1;
1335     bool apply_alignment_to_all_simulcast_layers_ RTC_GUARDED_BY(local_mutex_) =
1336         false;
1337     bool is_hardware_accelerated_ RTC_GUARDED_BY(local_mutex_) = false;
1338     rtc::scoped_refptr<EncodedImageBufferInterface> encoded_image_data_
1339         RTC_GUARDED_BY(local_mutex_);
1340     std::unique_ptr<Vp8FrameBufferController> frame_buffer_controller_
1341         RTC_GUARDED_BY(local_mutex_);
1342     absl::optional<bool>
1343         temporal_layers_supported_[kMaxSpatialLayers] RTC_GUARDED_BY(
1344             local_mutex_);
1345     bool force_init_encode_failed_ RTC_GUARDED_BY(local_mutex_) = false;
1346     double rate_factor_ RTC_GUARDED_BY(local_mutex_) = 1.0;
1347     uint32_t last_framerate_ RTC_GUARDED_BY(local_mutex_) = 0;
1348     absl::optional<VideoEncoder::RateControlParameters>
1349         last_rate_control_settings_;
1350     VideoFrame::UpdateRect last_update_rect_ RTC_GUARDED_BY(local_mutex_) = {
1351         0, 0, 0, 0};
1352     std::vector<VideoFrameType> last_frame_types_;
1353     bool expect_null_frame_ = false;
1354     EncodedImageCallback* encoded_image_callback_ RTC_GUARDED_BY(local_mutex_) =
1355         nullptr;
1356     NiceMock<MockFecControllerOverride> fec_controller_override_;
1357     std::vector<ResolutionBitrateLimits> resolution_bitrate_limits_
1358         RTC_GUARDED_BY(local_mutex_);
1359     int num_set_rates_ RTC_GUARDED_BY(local_mutex_) = 0;
1360     absl::optional<VideoFrameBuffer::Type> last_input_pixel_format_
1361         RTC_GUARDED_BY(local_mutex_);
1362     absl::InlinedVector<VideoFrameBuffer::Type, kMaxPreferredPixelFormats>
1363         preferred_pixel_formats_ RTC_GUARDED_BY(local_mutex_);
1364     absl::optional<bool> is_qp_trusted_ RTC_GUARDED_BY(local_mutex_);
1365     VideoCodecComplexity last_encoder_complexity_ RTC_GUARDED_BY(local_mutex_){
1366         VideoCodecComplexity::kComplexityNormal};
1367   };
1368 
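       // Sink receiving the encoded output of the VideoStreamEncoder under test.
       // Provides helpers for waiting on encoded frames and for inspecting the
       // most recently received image, allocations and configuration callbacks.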
1369   class TestSink : public VideoStreamEncoder::EncoderSink {
1370    public:
1371     TestSink(TimeController* time_controller, TestEncoder* test_encoder)
1372         : time_controller_(time_controller), test_encoder_(test_encoder) {
1373       RTC_DCHECK(time_controller_);
1374     }
1375 
1376     void WaitForEncodedFrame(int64_t expected_ntp_time) {
1377       EXPECT_TRUE(TimedWaitForEncodedFrame(expected_ntp_time, kDefaultTimeout));
1378     }
1379 
1380     bool TimedWaitForEncodedFrame(int64_t expected_ntp_time,
1381                                   TimeDelta timeout) {
1382       uint32_t timestamp = 0;
1383       if (!WaitForFrame(timeout))
1384         return false;
1385       {
1386         MutexLock lock(&mutex_);
1387         timestamp = last_timestamp_;
1388       }
1389       test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp);
1390       return true;
1391     }
1392 
1393     void WaitForEncodedFrame(uint32_t expected_width,
1394                              uint32_t expected_height) {
1395       EXPECT_TRUE(WaitForFrame(kDefaultTimeout));
1396       CheckLastFrameSizeMatches(expected_width, expected_height);
1397     }
1398 
1399     void CheckLastFrameSizeMatches(uint32_t expected_width,
1400                                    uint32_t expected_height) {
1401       uint32_t width = 0;
1402       uint32_t height = 0;
1403       {
1404         MutexLock lock(&mutex_);
1405         width = last_width_;
1406         height = last_height_;
1407       }
1408       EXPECT_EQ(expected_height, height);
1409       EXPECT_EQ(expected_width, width);
1410     }
1411 
1412     void CheckLastFrameRotationMatches(VideoRotation expected_rotation) {
1413       VideoRotation rotation;
1414       {
1415         MutexLock lock(&mutex_);
1416         rotation = last_rotation_;
1417       }
1418       EXPECT_EQ(expected_rotation, rotation);
1419     }
1420 
1421     void ExpectDroppedFrame() {
1422       EXPECT_FALSE(WaitForFrame(TimeDelta::Millis(100)));
1423     }
1424 
1425     bool WaitForFrame(TimeDelta timeout) {
1426       RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent());
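           // Advancing simulated time by zero lets already-pending tasks on the
           // controlled task queues run before and after waiting on the event.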
1427       time_controller_->AdvanceTime(TimeDelta::Zero());
1428       bool ret = encoded_frame_event_.Wait(timeout);
1429       time_controller_->AdvanceTime(TimeDelta::Zero());
1430       return ret;
1431     }
1432 
1433     void SetExpectNoFrames() {
1434       MutexLock lock(&mutex_);
1435       expect_frames_ = false;
1436     }
1437 
1438     int number_of_reconfigurations() const {
1439       MutexLock lock(&mutex_);
1440       return number_of_reconfigurations_;
1441     }
1442 
1443     int last_min_transmit_bitrate() const {
1444       MutexLock lock(&mutex_);
1445       return min_transmit_bitrate_bps_;
1446     }
1447 
1448     void SetNumExpectedLayers(size_t num_layers) {
1449       MutexLock lock(&mutex_);
1450       num_expected_layers_ = num_layers;
1451     }
1452 
1453     int64_t GetLastCaptureTimeMs() const {
1454       MutexLock lock(&mutex_);
1455       return last_capture_time_ms_;
1456     }
1457 
1458     const EncodedImage& GetLastEncodedImage() {
1459       MutexLock lock(&mutex_);
1460       return last_encoded_image_;
1461     }
1462 
1463     std::vector<uint8_t> GetLastEncodedImageData() {
1464       MutexLock lock(&mutex_);
1465       return std::move(last_encoded_image_data_);
1466     }
1467 
1468     VideoBitrateAllocation GetLastVideoBitrateAllocation() {
1469       MutexLock lock(&mutex_);
1470       return last_bitrate_allocation_;
1471     }
1472 
1473     int number_of_bitrate_allocations() const {
1474       MutexLock lock(&mutex_);
1475       return number_of_bitrate_allocations_;
1476     }
1477 
1478     VideoLayersAllocation GetLastVideoLayersAllocation() {
1479       MutexLock lock(&mutex_);
1480       return last_layers_allocation_;
1481     }
1482 
1483     int number_of_layers_allocations() const {
1484       MutexLock lock(&mutex_);
1485       return number_of_layers_allocations_;
1486     }
1487 
1488    private:
1489     Result OnEncodedImage(
1490         const EncodedImage& encoded_image,
1491         const CodecSpecificInfo* codec_specific_info) override {
1492       MutexLock lock(&mutex_);
1493       EXPECT_TRUE(expect_frames_);
1494       last_encoded_image_ = EncodedImage(encoded_image);
1495       last_encoded_image_data_ = std::vector<uint8_t>(
1496           encoded_image.data(), encoded_image.data() + encoded_image.size());
1497       uint32_t timestamp = encoded_image.Timestamp();
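           // Encoded images sharing an RTP timestamp are simulcast/spatial layers
           // of the same input frame; signal the event only once all expected
           // layers have been received.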
1498       if (last_timestamp_ != timestamp) {
1499         num_received_layers_ = 1;
1500         last_width_ = encoded_image._encodedWidth;
1501         last_height_ = encoded_image._encodedHeight;
1502       } else {
1503         ++num_received_layers_;
1504         last_width_ = std::max(encoded_image._encodedWidth, last_width_);
1505         last_height_ = std::max(encoded_image._encodedHeight, last_height_);
1506       }
1507       last_timestamp_ = timestamp;
1508       last_capture_time_ms_ = encoded_image.capture_time_ms_;
1509       last_rotation_ = encoded_image.rotation_;
1510       if (num_received_layers_ == num_expected_layers_) {
1511         encoded_frame_event_.Set();
1512       }
1513       return Result(Result::OK, last_timestamp_);
1514     }
1515 
1516     void OnEncoderConfigurationChanged(
1517         std::vector<VideoStream> streams,
1518         bool is_svc,
1519         VideoEncoderConfig::ContentType content_type,
1520         int min_transmit_bitrate_bps) override {
1521       MutexLock lock(&mutex_);
1522       ++number_of_reconfigurations_;
1523       min_transmit_bitrate_bps_ = min_transmit_bitrate_bps;
1524     }
1525 
1526     void OnBitrateAllocationUpdated(
1527         const VideoBitrateAllocation& allocation) override {
1528       MutexLock lock(&mutex_);
1529       ++number_of_bitrate_allocations_;
1530       last_bitrate_allocation_ = allocation;
1531     }
1532 
1533     void OnVideoLayersAllocationUpdated(
1534         VideoLayersAllocation allocation) override {
1535       MutexLock lock(&mutex_);
1536       ++number_of_layers_allocations_;
1537       last_layers_allocation_ = allocation;
1538       rtc::StringBuilder log;
1539       for (const auto& layer : allocation.active_spatial_layers) {
1540         log << layer.width << "x" << layer.height << "@" << layer.frame_rate_fps
1541             << "[";
1542         for (const auto target_bitrate :
1543              layer.target_bitrate_per_temporal_layer) {
1544           log << target_bitrate.kbps() << ",";
1545         }
1546         log << "]";
1547       }
1548       RTC_DLOG(LS_INFO) << "OnVideoLayersAllocationUpdated " << log.str();
1549     }
1550 
1551     TimeController* const time_controller_;
1552     mutable Mutex mutex_;
1553     TestEncoder* test_encoder_;
1554     rtc::Event encoded_frame_event_;
1555     EncodedImage last_encoded_image_;
1556     std::vector<uint8_t> last_encoded_image_data_;
1557     uint32_t last_timestamp_ = 0;
1558     int64_t last_capture_time_ms_ = 0;
1559     uint32_t last_height_ = 0;
1560     uint32_t last_width_ = 0;
1561     VideoRotation last_rotation_ = kVideoRotation_0;
1562     size_t num_expected_layers_ = 1;
1563     size_t num_received_layers_ = 0;
1564     bool expect_frames_ = true;
1565     int number_of_reconfigurations_ = 0;
1566     int min_transmit_bitrate_bps_ = 0;
1567     VideoBitrateAllocation last_bitrate_allocation_ RTC_GUARDED_BY(&mutex_);
1568     int number_of_bitrate_allocations_ RTC_GUARDED_BY(&mutex_) = 0;
1569     VideoLayersAllocation last_layers_allocation_ RTC_GUARDED_BY(&mutex_);
1570     int number_of_layers_allocations_ RTC_GUARDED_BY(&mutex_) = 0;
1571   };
1572 
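       // Wraps the built-in bitrate allocator factory and records the VideoCodec
       // configuration passed to the latest CreateVideoBitrateAllocator() call so
       // that tests can inspect it via codec_config().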
1573   class VideoBitrateAllocatorProxyFactory
1574       : public VideoBitrateAllocatorFactory {
1575    public:
1576     VideoBitrateAllocatorProxyFactory()
1577         : bitrate_allocator_factory_(
1578               CreateBuiltinVideoBitrateAllocatorFactory()) {}
1579 
1580     std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
1581         const VideoCodec& codec) override {
1582       MutexLock lock(&mutex_);
1583       codec_config_ = codec;
1584       return bitrate_allocator_factory_->CreateVideoBitrateAllocator(codec);
1585     }
1586 
1587     VideoCodec codec_config() const {
1588       MutexLock lock(&mutex_);
1589       return codec_config_;
1590     }
1591 
1592    private:
1593     std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
1594 
1595     mutable Mutex mutex_;
1596     VideoCodec codec_config_ RTC_GUARDED_BY(mutex_);
1597   };
1598 
1599   Clock* clock() { return time_controller_.GetClock(); }
1600   void AdvanceTime(TimeDelta duration) {
1601     time_controller_.AdvanceTime(duration);
1602   }
1603 
1604   int64_t CurrentTimeMs() { return clock()->CurrentTime().ms(); }
1605 
1606  protected:
1607   virtual TaskQueueFactory* GetTaskQueueFactory() {
1608     return time_controller_.GetTaskQueueFactory();
1609   }
1610 
1611   test::ScopedKeyValueConfig field_trials_;
1612   GlobalSimulatedTimeController time_controller_{Timestamp::Micros(1234)};
1613   VideoSendStream::Config video_send_config_;
1614   VideoEncoderConfig video_encoder_config_;
1615   int codec_width_;
1616   int codec_height_;
1617   int max_framerate_;
1618   TestEncoder fake_encoder_;
1619   test::VideoEncoderProxyFactory encoder_factory_;
1620   VideoBitrateAllocatorProxyFactory bitrate_allocator_factory_;
1621   std::unique_ptr<MockableSendStatisticsProxy> stats_proxy_;
1622   TestSink sink_;
1623   AdaptingFrameForwarder video_source_{&time_controller_};
1624   std::unique_ptr<VideoStreamEncoderUnderTest> video_stream_encoder_;
1625 };
1626 
1627 TEST_F(VideoStreamEncoderTest, EncodeOneFrame) {
1628   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1629       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1630   rtc::Event frame_destroyed_event;
1631   video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
1632   WaitForEncodedFrame(1);
1633   EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout));
1634   video_stream_encoder_->Stop();
1635 }
1636 
1637 TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
1638   // Dropped since no target bitrate has been set.
1639   rtc::Event frame_destroyed_event;
1640   // The encoder will cache up to one frame for a short duration. Adding two
1641   // frames means that the first frame will be dropped and the second frame will
1642   // be sent when the encoder is enabled.
1643   video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
1644   AdvanceTime(TimeDelta::Millis(10));
1645   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1646   AdvanceTime(TimeDelta::Zero());
1647   EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout));
1648 
1649   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1650       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1651 
1652   // The pending frame should be received.
1653   WaitForEncodedFrame(2);
1654   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
1655 
1656   WaitForEncodedFrame(3);
1657   video_stream_encoder_->Stop();
1658 }
1659 
1660 TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) {
1661   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1662       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1663   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1664   WaitForEncodedFrame(1);
1665 
1666   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1667       DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0);
1668 
1669   // The encoder will cache up to one frame for a short duration. Adding two
1670   // frames means that the first frame will be dropped and the second frame will
1671   // be sent when the encoder is resumed.
1672   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1673   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
1674 
1675   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1676       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1677   WaitForEncodedFrame(3);
1678   video_source_.IncomingCapturedFrame(CreateFrame(4, nullptr));
1679   WaitForEncodedFrame(4);
1680   video_stream_encoder_->Stop();
1681 }
1682 
1683 TEST_F(VideoStreamEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
1684   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1685       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1686   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1687   WaitForEncodedFrame(1);
1688 
1689   // This frame will be dropped since it has the same ntp timestamp.
1690   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1691 
1692   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1693   WaitForEncodedFrame(2);
1694   video_stream_encoder_->Stop();
1695 }
1696 
1697 TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) {
1698   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1699       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1700 
1701   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1702   WaitForEncodedFrame(1);
1703 
1704   video_stream_encoder_->Stop();
1705   sink_.SetExpectNoFrames();
1706   rtc::Event frame_destroyed_event;
1707   video_source_.IncomingCapturedFrame(CreateFrame(2, &frame_destroyed_event));
1708   EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout));
1709 }
1710 
1711 TEST_F(VideoStreamEncoderTest, DropsPendingFramesOnSlowEncode) {
1712   test::FrameForwarder source;
1713   video_stream_encoder_->SetSource(&source,
1714                                    DegradationPreference::MAINTAIN_FRAMERATE);
1715   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1716       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1717 
1718   int dropped_count = 0;
1719   stats_proxy_->SetDroppedFrameCallback(
1720       [&dropped_count](VideoStreamEncoderObserver::DropReason) {
1721         ++dropped_count;
1722       });
1723 
1724   source.IncomingCapturedFrame(CreateFrame(1, nullptr));
1725   source.IncomingCapturedFrame(CreateFrame(2, nullptr));
1726   WaitForEncodedFrame(2);
1727   video_stream_encoder_->Stop();
1728   EXPECT_EQ(1, dropped_count);
1729 }
1730 
1731 TEST_F(VideoStreamEncoderTest, NativeFrameWithoutI420SupportGetsDelivered) {
1732   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1733       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1734 
1735   rtc::Event frame_destroyed_event;
1736   video_source_.IncomingCapturedFrame(
1737       CreateFakeNativeFrame(1, &frame_destroyed_event));
1738   WaitForEncodedFrame(1);
1739   EXPECT_EQ(VideoFrameBuffer::Type::kNative,
1740             fake_encoder_.GetLastInputPixelFormat());
1741   EXPECT_EQ(fake_encoder_.config().width, fake_encoder_.GetLastInputWidth());
1742   EXPECT_EQ(fake_encoder_.config().height, fake_encoder_.GetLastInputHeight());
1743   video_stream_encoder_->Stop();
1744 }
1745 
1746 TEST_F(VideoStreamEncoderTest,
1747        NativeFrameWithoutI420SupportGetsCroppedIfNecessary) {
1748   // Use the cropping factory.
1749   video_encoder_config_.video_stream_factory =
1750       rtc::make_ref_counted<CroppingVideoStreamFactory>();
1751   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config_),
1752                                           kMaxPayloadLength);
1753   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
1754 
1755   // Capture a frame at codec_width_/codec_height_.
1756   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1757       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1758   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1759   WaitForEncodedFrame(1);
1760   // The encoder will have been configured once.
1761   EXPECT_EQ(1, sink_.number_of_reconfigurations());
1762   EXPECT_EQ(codec_width_, fake_encoder_.config().width);
1763   EXPECT_EQ(codec_height_, fake_encoder_.config().height);
1764 
1765   // Now send in a fake frame that needs to be cropped as the width/height
1766   // aren't divisible by 4 (see CreateEncoderStreams above).
1767   rtc::Event frame_destroyed_event;
1768   video_source_.IncomingCapturedFrame(CreateFakeNativeFrame(
1769       2, &frame_destroyed_event, codec_width_ + 1, codec_height_ + 1));
1770   WaitForEncodedFrame(2);
1771   EXPECT_EQ(VideoFrameBuffer::Type::kNative,
1772             fake_encoder_.GetLastInputPixelFormat());
1773   EXPECT_EQ(fake_encoder_.config().width, fake_encoder_.GetLastInputWidth());
1774   EXPECT_EQ(fake_encoder_.config().height, fake_encoder_.GetLastInputHeight());
1775   video_stream_encoder_->Stop();
1776 }
1777 
1778 TEST_F(VideoStreamEncoderTest, NonI420FramesShouldNotBeConvertedToI420) {
1779   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1780       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1781 
1782   video_source_.IncomingCapturedFrame(
1783       CreateNV12Frame(1, codec_width_, codec_height_));
1784   WaitForEncodedFrame(1);
1785   EXPECT_EQ(VideoFrameBuffer::Type::kNV12,
1786             fake_encoder_.GetLastInputPixelFormat());
1787   video_stream_encoder_->Stop();
1788 }
1789 
1790 TEST_F(VideoStreamEncoderTest, NativeFrameGetsDelivered_NoFrameTypePreference) {
1791   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1792       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1793 
1794   fake_encoder_.SetPreferredPixelFormats({});
1795 
1796   rtc::Event frame_destroyed_event;
1797   video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame(
1798       1, &frame_destroyed_event, codec_width_, codec_height_));
1799   WaitForEncodedFrame(1);
1800   EXPECT_EQ(VideoFrameBuffer::Type::kNative,
1801             fake_encoder_.GetLastInputPixelFormat());
1802   video_stream_encoder_->Stop();
1803 }
1804 
1805 TEST_F(VideoStreamEncoderTest,
1806        NativeFrameGetsDelivered_PixelFormatPreferenceMatches) {
1807   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1808       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1809 
1810   fake_encoder_.SetPreferredPixelFormats({VideoFrameBuffer::Type::kNV12});
1811 
1812   rtc::Event frame_destroyed_event;
1813   video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame(
1814       1, &frame_destroyed_event, codec_width_, codec_height_));
1815   WaitForEncodedFrame(1);
1816   EXPECT_EQ(VideoFrameBuffer::Type::kNative,
1817             fake_encoder_.GetLastInputPixelFormat());
1818   video_stream_encoder_->Stop();
1819 }
1820 
1821 TEST_F(VideoStreamEncoderTest, NativeFrameGetsDelivered_MappingIsNotFeasible) {
1822   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1823       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1824 
1825   // Fake NV12 native frame does not allow mapping to I444.
1826   fake_encoder_.SetPreferredPixelFormats({VideoFrameBuffer::Type::kI444});
1827 
1828   rtc::Event frame_destroyed_event;
1829   video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame(
1830       1, &frame_destroyed_event, codec_width_, codec_height_));
1831   WaitForEncodedFrame(1);
1832   EXPECT_EQ(VideoFrameBuffer::Type::kNative,
1833             fake_encoder_.GetLastInputPixelFormat());
1834   video_stream_encoder_->Stop();
1835 }
1836 
1837 TEST_F(VideoStreamEncoderTest, NativeFrameGetsDelivered_BackedByNV12) {
1838   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1839       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1840 
1841   rtc::Event frame_destroyed_event;
1842   video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame(
1843       1, &frame_destroyed_event, codec_width_, codec_height_));
1844   WaitForEncodedFrame(1);
1845   EXPECT_EQ(VideoFrameBuffer::Type::kNative,
1846             fake_encoder_.GetLastInputPixelFormat());
1847   video_stream_encoder_->Stop();
1848 }
1849 
1850 TEST_F(VideoStreamEncoderTest, DropsFramesWhenCongestionWindowPushbackSet) {
1851   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1852       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1853   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1854   WaitForEncodedFrame(1);
1855 
1856   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1857       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0.5);
1858   // The congestion window pushback is set to 0.5, which will drop 1/2 of
1859   // frames. Adding two frames means that the first frame will be dropped and
1860   // the second frame will be sent to the encoder.
1861   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1862   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
1863   WaitForEncodedFrame(3);
1864   video_source_.IncomingCapturedFrame(CreateFrame(4, nullptr));
1865   video_source_.IncomingCapturedFrame(CreateFrame(5, nullptr));
1866   WaitForEncodedFrame(5);
1867   EXPECT_EQ(2u, stats_proxy_->GetStats().frames_dropped_by_congestion_window);
1868   video_stream_encoder_->Stop();
1869 }
1870 
1871 TEST_F(VideoStreamEncoderTest,
1872        ConfigureEncoderTriggersOnEncoderConfigurationChanged) {
1873   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1874       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1875   EXPECT_EQ(0, sink_.number_of_reconfigurations());
1876 
1877   // Capture a frame and wait for it to synchronize with the encoder thread.
1878   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1879   WaitForEncodedFrame(1);
1880   // The encoder will have been configured once when the first frame is
1881   // received.
1882   EXPECT_EQ(1, sink_.number_of_reconfigurations());
1883 
1884   VideoEncoderConfig video_encoder_config;
1885   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
1886   video_encoder_config.min_transmit_bitrate_bps = 9999;
1887   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
1888                                           kMaxPayloadLength);
1889 
1890   // Capture a frame and wait for it to synchronize with the encoder thread.
1891   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1892   WaitForEncodedFrame(2);
1893   EXPECT_EQ(2, sink_.number_of_reconfigurations());
1894   EXPECT_EQ(9999, sink_.last_min_transmit_bitrate());
1895 
1896   video_stream_encoder_->Stop();
1897 }
1898 
1899 TEST_F(VideoStreamEncoderTest, FrameResolutionChangeReconfigureEncoder) {
1900   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1901       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1902 
1903   // Capture a frame and wait for it to synchronize with the encoder thread.
1904   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1905   WaitForEncodedFrame(1);
1906   // The encoder will have been configured once.
1907   EXPECT_EQ(1, sink_.number_of_reconfigurations());
1908   EXPECT_EQ(codec_width_, fake_encoder_.config().width);
1909   EXPECT_EQ(codec_height_, fake_encoder_.config().height);
1910 
1911   codec_width_ *= 2;
1912   codec_height_ *= 2;
1913   // Capture a frame with a higher resolution and wait for it to synchronize
1914   // with the encoder thread.
1915   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1916   WaitForEncodedFrame(2);
1917   EXPECT_EQ(codec_width_, fake_encoder_.config().width);
1918   EXPECT_EQ(codec_height_, fake_encoder_.config().height);
1919   EXPECT_EQ(2, sink_.number_of_reconfigurations());
1920 
1921   video_stream_encoder_->Stop();
1922 }
1923 
1924 TEST_F(VideoStreamEncoderTest,
1925        EncoderInstanceDestroyedBeforeAnotherInstanceCreated) {
1926   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1927       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1928 
1929   // Capture a frame and wait for it to synchronize with the encoder thread.
1930   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1931   WaitForEncodedFrame(1);
1932 
1933   VideoEncoderConfig video_encoder_config;
1934   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
1935   // Changing the max payload data length recreates encoder.
1936   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
1937                                           kMaxPayloadLength / 2);
1938 
1939   // Capture a frame and wait for it to synchronize with the encoder thread.
1940   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1941   WaitForEncodedFrame(2);
1942   EXPECT_EQ(1, encoder_factory_.GetMaxNumberOfSimultaneousEncoderInstances());
1943 
1944   video_stream_encoder_->Stop();
1945 }
1946 
1947 TEST_F(VideoStreamEncoderTest, BitrateLimitsChangeReconfigureRateAllocator) {
1948   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1949       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1950 
1951   VideoEncoderConfig video_encoder_config;
1952   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
1953   video_encoder_config.max_bitrate_bps = kTargetBitrate.bps();
1954   video_stream_encoder_->SetStartBitrate(kStartBitrate.bps());
1955   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
1956                                           kMaxPayloadLength);
1957 
1958   // Capture a frame and wait for it to synchronize with the encoder thread.
1959   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
1960   WaitForEncodedFrame(1);
1961   // The encoder will have been configured once when the first frame is
1962   // received.
1963   EXPECT_EQ(1, sink_.number_of_reconfigurations());
1964   EXPECT_EQ(kTargetBitrate.bps(),
1965             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
1966   EXPECT_EQ(kStartBitrate.bps(),
1967             bitrate_allocator_factory_.codec_config().startBitrate * 1000);
1968 
1969   test::FillEncoderConfiguration(kVideoCodecVP8, 1,
1970                                  &video_encoder_config);  //???
1971   video_encoder_config.max_bitrate_bps = kTargetBitrate.bps() * 2;
1972   video_stream_encoder_->SetStartBitrate(kStartBitrate.bps() * 2);
1973   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
1974                                           kMaxPayloadLength);
1975 
1976   // Capture a frame and wait for it to synchronize with the encoder thread.
1977   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
1978   WaitForEncodedFrame(2);
1979   EXPECT_EQ(2, sink_.number_of_reconfigurations());
1980   // Bitrate limits have changed - rate allocator should be reconfigured,
1981   // encoder should not be reconfigured.
1982   EXPECT_EQ(kTargetBitrate.bps() * 2,
1983             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
1984   EXPECT_EQ(kStartBitrate.bps() * 2,
1985             bitrate_allocator_factory_.codec_config().startBitrate * 1000);
1986   EXPECT_EQ(1, fake_encoder_.GetNumInitializations());
1987 
1988   video_stream_encoder_->Stop();
1989 }
1990 
1991 TEST_F(VideoStreamEncoderTest,
1992        IntersectionOfEncoderAndAppBitrateLimitsUsedWhenBothProvided) {
1993   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
1994       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
1995 
1996   const uint32_t kMinEncBitrateKbps = 100;
1997   const uint32_t kMaxEncBitrateKbps = 1000;
1998   const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits(
1999       /*frame_size_pixels=*/codec_width_ * codec_height_,
2000       /*min_start_bitrate_bps=*/0,
2001       /*min_bitrate_bps=*/kMinEncBitrateKbps * 1000,
2002       /*max_bitrate_bps=*/kMaxEncBitrateKbps * 1000);
2003   fake_encoder_.SetResolutionBitrateLimits({encoder_bitrate_limits});
2004 
2005   VideoEncoderConfig video_encoder_config;
2006   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
2007   video_encoder_config.max_bitrate_bps = (kMaxEncBitrateKbps + 1) * 1000;
2008   video_encoder_config.simulcast_layers[0].min_bitrate_bps =
2009       (kMinEncBitrateKbps + 1) * 1000;
2010   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
2011                                           kMaxPayloadLength);
2012 
2013   // When both encoder and app provide bitrate limits, the intersection of
2014   // provided sets should be used.
2015   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
2016   WaitForEncodedFrame(1);
2017   EXPECT_EQ(kMaxEncBitrateKbps,
2018             bitrate_allocator_factory_.codec_config().maxBitrate);
2019   EXPECT_EQ(kMinEncBitrateKbps + 1,
2020             bitrate_allocator_factory_.codec_config().minBitrate);
2021 
2022   video_encoder_config.max_bitrate_bps = (kMaxEncBitrateKbps - 1) * 1000;
2023   video_encoder_config.simulcast_layers[0].min_bitrate_bps =
2024       (kMinEncBitrateKbps - 1) * 1000;
2025   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
2026                                           kMaxPayloadLength);
2027   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
2028   WaitForEncodedFrame(2);
2029   EXPECT_EQ(kMaxEncBitrateKbps - 1,
2030             bitrate_allocator_factory_.codec_config().maxBitrate);
2031   EXPECT_EQ(kMinEncBitrateKbps,
2032             bitrate_allocator_factory_.codec_config().minBitrate);
2033 
2034   video_stream_encoder_->Stop();
2035 }
2036 
2037 TEST_F(VideoStreamEncoderTest,
2038        EncoderAndAppLimitsDontIntersectEncoderLimitsIgnored) {
2039   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2040       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2041 
2042   const uint32_t kMinAppBitrateKbps = 100;
2043   const uint32_t kMaxAppBitrateKbps = 200;
2044   const uint32_t kMinEncBitrateKbps = kMaxAppBitrateKbps + 1;
2045   const uint32_t kMaxEncBitrateKbps = kMaxAppBitrateKbps * 2;
2046   const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits(
2047       /*frame_size_pixels=*/codec_width_ * codec_height_,
2048       /*min_start_bitrate_bps=*/0,
2049       /*min_bitrate_bps=*/kMinEncBitrateKbps * 1000,
2050       /*max_bitrate_bps=*/kMaxEncBitrateKbps * 1000);
2051   fake_encoder_.SetResolutionBitrateLimits({encoder_bitrate_limits});
2052 
2053   VideoEncoderConfig video_encoder_config;
2054   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
2055   video_encoder_config.max_bitrate_bps = kMaxAppBitrateKbps * 1000;
2056   video_encoder_config.simulcast_layers[0].min_bitrate_bps =
2057       kMinAppBitrateKbps * 1000;
2058   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
2059                                           kMaxPayloadLength);
2060 
2061   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
2062   WaitForEncodedFrame(1);
2063   EXPECT_EQ(kMaxAppBitrateKbps,
2064             bitrate_allocator_factory_.codec_config().maxBitrate);
2065   EXPECT_EQ(kMinAppBitrateKbps,
2066             bitrate_allocator_factory_.codec_config().minBitrate);
2067 
2068   video_stream_encoder_->Stop();
2069 }
2070 
2071 TEST_F(VideoStreamEncoderTest,
2072        EncoderRecommendedMaxAndMinBitratesUsedForGivenResolution) {
2073   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2074       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2075 
2076   const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits_270p(
2077       480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
2078   const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits_360p(
2079       640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
2080   fake_encoder_.SetResolutionBitrateLimits(
2081       {encoder_bitrate_limits_270p, encoder_bitrate_limits_360p});
2082 
2083   VideoEncoderConfig video_encoder_config;
2084   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
2085   video_encoder_config.max_bitrate_bps = 0;
2086   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
2087                                           kMaxPayloadLength);
2088 
2089   // 270p. The bitrate limits recommended by encoder for 270p should be used.
2090   video_source_.IncomingCapturedFrame(CreateFrame(1, 480, 270));
2091   WaitForEncodedFrame(1);
2092   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_270p.min_bitrate_bps),
2093             bitrate_allocator_factory_.codec_config().minBitrate * 1000);
2094   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_270p.max_bitrate_bps),
2095             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
2096 
2097   // 360p. The bitrate limits recommended by encoder for 360p should be used.
2098   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
2099   WaitForEncodedFrame(2);
2100   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_360p.min_bitrate_bps),
2101             bitrate_allocator_factory_.codec_config().minBitrate * 1000);
2102   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_360p.max_bitrate_bps),
2103             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
2104 
2105   // Resolution between 270p and 360p. The bitrate limits recommended by
2106   // encoder for 360p should be used.
2107   video_source_.IncomingCapturedFrame(
2108       CreateFrame(3, (640 + 480) / 2, (360 + 270) / 2));
2109   WaitForEncodedFrame(3);
2110   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_360p.min_bitrate_bps),
2111             bitrate_allocator_factory_.codec_config().minBitrate * 1000);
2112   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_360p.max_bitrate_bps),
2113             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
2114 
2115   // Resolution higher than 360p. The caps recommended by encoder should be
2116   // ignored.
2117   video_source_.IncomingCapturedFrame(CreateFrame(4, 960, 540));
2118   WaitForEncodedFrame(4);
2119   EXPECT_NE(static_cast<uint32_t>(encoder_bitrate_limits_270p.min_bitrate_bps),
2120             bitrate_allocator_factory_.codec_config().minBitrate * 1000);
2121   EXPECT_NE(static_cast<uint32_t>(encoder_bitrate_limits_270p.max_bitrate_bps),
2122             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
2123   EXPECT_NE(static_cast<uint32_t>(encoder_bitrate_limits_360p.min_bitrate_bps),
2124             bitrate_allocator_factory_.codec_config().minBitrate * 1000);
2125   EXPECT_NE(static_cast<uint32_t>(encoder_bitrate_limits_360p.max_bitrate_bps),
2126             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
2127 
2128   // Resolution lower than 270p. The max bitrate limit recommended by encoder
2129   // for 270p should be used.
2130   video_source_.IncomingCapturedFrame(CreateFrame(5, 320, 180));
2131   WaitForEncodedFrame(5);
2132   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_270p.min_bitrate_bps),
2133             bitrate_allocator_factory_.codec_config().minBitrate * 1000);
2134   EXPECT_EQ(static_cast<uint32_t>(encoder_bitrate_limits_270p.max_bitrate_bps),
2135             bitrate_allocator_factory_.codec_config().maxBitrate * 1000);
2136 
2137   video_stream_encoder_->Stop();
2138 }
2139 
2140 TEST_F(VideoStreamEncoderTest, EncoderRecommendedMaxBitrateCapsTargetBitrate) {
2141   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2142       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2143 
2144   VideoEncoderConfig video_encoder_config;
2145   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
2146   video_encoder_config.max_bitrate_bps = 0;
2147   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
2148                                           kMaxPayloadLength);
2149 
2150   // Encode 720p frame to get the default encoder target bitrate.
2151   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
2152   WaitForEncodedFrame(1);
2153   const uint32_t kDefaultTargetBitrateFor720pKbps =
2154       bitrate_allocator_factory_.codec_config()
2155           .simulcastStream[0]
2156           .targetBitrate;
2157 
2158   // Set the max recommended encoder bitrate to something lower than the default
2159   // target bitrate.
2160   const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits(
2161       1280 * 720, 10 * 1000, 10 * 1000,
2162       kDefaultTargetBitrateFor720pKbps / 2 * 1000);
2163   fake_encoder_.SetResolutionBitrateLimits({encoder_bitrate_limits});
2164 
2165   // Change resolution to trigger encoder reinitialization.
2166   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
2167   WaitForEncodedFrame(2);
2168   video_source_.IncomingCapturedFrame(CreateFrame(3, 1280, 720));
2169   WaitForEncodedFrame(3);
2170 
2171   // Ensure the target bitrate is capped by the max bitrate.
2172   EXPECT_EQ(bitrate_allocator_factory_.codec_config().maxBitrate * 1000,
2173             static_cast<uint32_t>(encoder_bitrate_limits.max_bitrate_bps));
2174   EXPECT_EQ(bitrate_allocator_factory_.codec_config()
2175                     .simulcastStream[0]
2176                     .targetBitrate *
2177                 1000,
2178             static_cast<uint32_t>(encoder_bitrate_limits.max_bitrate_bps));
2179 
2180   video_stream_encoder_->Stop();
2181 }
2182 
2183 TEST_F(VideoStreamEncoderTest,
2184        EncoderMaxAndMinBitratesUsedForTwoStreamsHighestActive) {
2185   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits270p(
2186       480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
2187   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits360p(
2188       640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
2189   fake_encoder_.SetResolutionBitrateLimits(
2190       {kEncoderLimits270p, kEncoderLimits360p});
2191 
2192   // Two streams, highest stream active.
2193   VideoEncoderConfig config;
2194   webrtc::VideoEncoder::EncoderInfo encoder_info;
2195   const int kNumStreams = 2;
2196   test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config);
2197   config.max_bitrate_bps = 0;
2198   config.simulcast_layers[0].active = false;
2199   config.simulcast_layers[1].active = true;
2200   config.video_stream_factory =
2201       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
2202           "VP8", /*max qp*/ 56, /*screencast*/ false,
2203           /*screenshare enabled*/ false, encoder_info);
2204   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
2205 
2206   // The encoder bitrate limits for 270p should be used.
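       // (With two simulcast streams, the highest stream runs at the full input
       // resolution, 480x270.)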
2207   video_source_.IncomingCapturedFrame(CreateFrame(1, 480, 270));
2208   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2209   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, kNumStreams);
2210   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
2211             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2212   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
2213             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2214 
2215   // The encoder bitrate limits for 360p should be used.
2216   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
2217   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2218   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
2219             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2220   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.max_bitrate_bps),
2221             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2222 
2223   // Resolution b/w 270p and 360p. The encoder limits for 360p should be used.
2224   video_source_.IncomingCapturedFrame(
2225       CreateFrame(3, (640 + 480) / 2, (360 + 270) / 2));
2226   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2227   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
2228             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2229   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.max_bitrate_bps),
2230             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2231 
2232   // Resolution higher than 360p. Encoder limits should be ignored.
2233   video_source_.IncomingCapturedFrame(CreateFrame(4, 960, 540));
2234   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2235   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
2236             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2237   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
2238             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2239   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
2240             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2241   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits360p.max_bitrate_bps),
2242             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2243 
2244   // Resolution lower than 270p. The encoder limits for 270p should be used.
2245   video_source_.IncomingCapturedFrame(CreateFrame(5, 320, 180));
2246   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2247   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
2248             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2249   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
2250             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2251 
2252   video_stream_encoder_->Stop();
2253 }
2254 
2255 TEST_F(VideoStreamEncoderTest,
2256        DefaultEncoderMaxAndMinBitratesUsedForTwoStreamsHighestActive) {
2257   // Two streams, highest stream active.
2258   VideoEncoderConfig config;
2259   webrtc::VideoEncoder::EncoderInfo encoder_info;
2260   const int kNumStreams = 2;
2261   test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config);
2262   config.max_bitrate_bps = 0;
2263   config.simulcast_layers[0].active = false;
2264   config.simulcast_layers[1].active = true;
2265   config.video_stream_factory =
2266       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
2267           "VP8", /*max qp*/ 56, /*screencast*/ false,
2268           /*screenshare enabled*/ false, encoder_info);
2269   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
2270 
2271   // Default bitrate limits for 270p should be used.
2272   const absl::optional<VideoEncoder::ResolutionBitrateLimits>
2273       kDefaultLimits270p =
2274           EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
2275               kVideoCodecVP8, 480 * 270);
2276   video_source_.IncomingCapturedFrame(CreateFrame(1, 480, 270));
2277   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2278   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, kNumStreams);
2279   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits270p->min_bitrate_bps),
2280             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2281   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits270p->max_bitrate_bps),
2282             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2283 
2284   // Default bitrate limits for 360p should be used.
2285   const absl::optional<VideoEncoder::ResolutionBitrateLimits>
2286       kDefaultLimits360p =
2287           EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
2288               kVideoCodecVP8, 640 * 360);
2289   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
2290   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2291   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits360p->min_bitrate_bps),
2292             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2293   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits360p->max_bitrate_bps),
2294             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2295 
2296   // Resolution b/w 270p and 360p. The default limits for 360p should be used.
2297   video_source_.IncomingCapturedFrame(
2298       CreateFrame(3, (640 + 480) / 2, (360 + 270) / 2));
2299   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2300   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits360p->min_bitrate_bps),
2301             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2302   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits360p->max_bitrate_bps),
2303             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2304 
2305   // Default bitrate limits for 540p should be used.
2306   const absl::optional<VideoEncoder::ResolutionBitrateLimits>
2307       kDefaultLimits540p =
2308           EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
2309               kVideoCodecVP8, 960 * 540);
2310   video_source_.IncomingCapturedFrame(CreateFrame(4, 960, 540));
2311   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2312   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits540p->min_bitrate_bps),
2313             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2314   EXPECT_EQ(static_cast<uint32_t>(kDefaultLimits540p->max_bitrate_bps),
2315             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2316 
2317   video_stream_encoder_->Stop();
2318 }
2319 
2320 TEST_F(VideoStreamEncoderTest,
2321        EncoderMaxAndMinBitratesUsedForThreeStreamsMiddleActive) {
2322   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits270p(
2323       480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
2324   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits360p(
2325       640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
2326   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits720p(
2327       1280 * 720, 54 * 1000, 31 * 1000, 3456 * 1000);
2328   fake_encoder_.SetResolutionBitrateLimits(
2329       {kEncoderLimits270p, kEncoderLimits360p, kEncoderLimits720p});
2330 
2331   // Three streams, middle stream active.
2332   VideoEncoderConfig config;
2333   webrtc::VideoEncoder::EncoderInfo encoder_info;
2334   const int kNumStreams = 3;
2335   test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config);
2336   config.simulcast_layers[0].active = false;
2337   config.simulcast_layers[1].active = true;
2338   config.simulcast_layers[2].active = false;
2339   config.video_stream_factory =
2340       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
2341           "VP8", /*max qp*/ 56, /*screencast*/ false,
2342           /*screenshare enabled*/ false, encoder_info);
2343   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
2344 
2345   // The encoder bitrate limits for 360p should be used.
2346   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
2347   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2348   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, kNumStreams);
2349   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
2350             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2351   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.max_bitrate_bps),
2352             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2353 
2354   // The encoder bitrate limits for 270p should be used.
2355   video_source_.IncomingCapturedFrame(CreateFrame(2, 960, 540));
2356   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2357   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
2358             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2359   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
2360             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2361 
2362   video_stream_encoder_->Stop();
2363 }
2364 
2365 TEST_F(VideoStreamEncoderTest,
2366        EncoderMaxAndMinBitratesNotUsedForThreeStreamsLowestActive) {
2367   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits270p(
2368       480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
2369   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits360p(
2370       640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
2371   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits720p(
2372       1280 * 720, 54 * 1000, 31 * 1000, 3456 * 1000);
2373   fake_encoder_.SetResolutionBitrateLimits(
2374       {kEncoderLimits270p, kEncoderLimits360p, kEncoderLimits720p});
2375 
2376   // Three streams, lowest stream active.
2377   VideoEncoderConfig config;
2378   webrtc::VideoEncoder::EncoderInfo encoder_info;
2379   const int kNumStreams = 3;
2380   test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config);
2381   config.simulcast_layers[0].active = true;
2382   config.simulcast_layers[1].active = false;
2383   config.simulcast_layers[2].active = false;
2384   config.video_stream_factory =
2385       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
2386           "VP8", /*max qp*/ 56, /*screencast*/ false,
2387           /*screenshare enabled*/ false, encoder_info);
2388   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
2389 
2390   // The resolution of the lowest stream is below 270p. The encoder limits are
2391   // not applied to the lowest stream, so the limits for 270p should not be used.
2392   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
2393   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2394   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, kNumStreams);
2395   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
2396             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2397   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
2398             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2399 
2400   video_stream_encoder_->Stop();
2401 }
2402 
2403 TEST_F(VideoStreamEncoderTest,
2404        EncoderMaxBitrateCappedByConfigForTwoStreamsHighestActive) {
2405   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits270p(
2406       480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
2407   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits360p(
2408       640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
2409   fake_encoder_.SetResolutionBitrateLimits(
2410       {kEncoderLimits270p, kEncoderLimits360p});
2411   const int kMaxBitrateBps = kEncoderLimits360p.max_bitrate_bps - 100 * 1000;
2412 
2413   // Two streams, highest stream active.
2414   VideoEncoderConfig config;
2415   webrtc::VideoEncoder::EncoderInfo encoder_info;
2416   const int kNumStreams = 2;
2417   test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config);
2418   config.simulcast_layers[0].active = false;
2419   config.simulcast_layers[1].active = true;
2420   config.simulcast_layers[1].max_bitrate_bps = kMaxBitrateBps;
2421   config.video_stream_factory =
2422       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
2423           "VP8", /*max qp*/ 56, /*screencast*/ false,
2424           /*screenshare enabled*/ false, encoder_info);
2425   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
2426 
2427   // The encoder bitrate limits for 270p should be used.
2428   video_source_.IncomingCapturedFrame(CreateFrame(1, 480, 270));
2429   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2430   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, kNumStreams);
2431   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
2432             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2433   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
2434             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
2435 
2436   // The max configured bitrate is less than the encoder limit for 360p.
2437   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
2438   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
2439   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
2440             fake_encoder_.config().simulcastStream[1].minBitrate * 1000);
2441   EXPECT_EQ(static_cast<uint32_t>(kMaxBitrateBps),
2442             fake_encoder_.config().simulcastStream[1].maxBitrate * 1000);
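  // With the values above, kMaxBitrateBps = 2345000 - 100000 = 2245000 bps, so
  // the configured cap (2245 kbps) wins over the 360p encoder limit, while the
  // min bitrate still comes from kEncoderLimits360p.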
2443 
2444   video_stream_encoder_->Stop();
2445 }
2446 
2447 TEST_F(VideoStreamEncoderTest, SwitchSourceDeregisterEncoderAsSink) {
2448   EXPECT_TRUE(video_source_.has_sinks());
2449   test::FrameForwarder new_video_source;
2450   video_stream_encoder_->SetSource(
2451       &new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
2452   EXPECT_FALSE(video_source_.has_sinks());
2453   EXPECT_TRUE(new_video_source.has_sinks());
2454 
2455   video_stream_encoder_->Stop();
2456 }
2457 
2458 TEST_F(VideoStreamEncoderTest, SinkWantsRotationApplied) {
2459   EXPECT_FALSE(video_source_.sink_wants().rotation_applied);
2460   video_stream_encoder_->SetSink(&sink_, true /*rotation_applied*/);
2461   EXPECT_TRUE(video_source_.sink_wants().rotation_applied);
2462   video_stream_encoder_->Stop();
2463 }
2464 
2465 class ResolutionAlignmentTest
2466     : public VideoStreamEncoderTest,
2467       public ::testing::WithParamInterface<
2468           ::testing::tuple<int, std::vector<double>>> {
2469  public:
2470   ResolutionAlignmentTest()
2471       : requested_alignment_(::testing::get<0>(GetParam())),
2472         scale_factors_(::testing::get<1>(GetParam())) {}
2473 
2474  protected:
2475   const int requested_alignment_;
2476   const std::vector<double> scale_factors_;
2477 };
2478 
2479 INSTANTIATE_TEST_SUITE_P(
2480     AlignmentAndScaleFactors,
2481     ResolutionAlignmentTest,
2482     ::testing::Combine(
2483         ::testing::Values(1, 2, 3, 4, 5, 6, 16, 22),  // requested_alignment_
2484         ::testing::Values(std::vector<double>{-1.0},  // scale_factors_
2485                           std::vector<double>{-1.0, -1.0},
2486                           std::vector<double>{-1.0, -1.0, -1.0},
2487                           std::vector<double>{4.0, 2.0, 1.0},
2488                           std::vector<double>{9999.0, -1.0, 1.0},
2489                           std::vector<double>{3.99, 2.01, 1.0},
2490                           std::vector<double>{4.9, 1.7, 1.25},
2491                           std::vector<double>{10.0, 4.0, 3.0},
2492                           std::vector<double>{1.75, 3.5},
2493                           std::vector<double>{1.5, 2.5},
2494                           std::vector<double>{1.3, 1.0})));
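// A scale factor of -1.0 is used here to mean "not set", in which case the
// stream factory presumably falls back to its default simulcast scaling;
// positive values request that a layer be downscaled by that factor.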
2495 
2496 TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) {
2497   // Set requested resolution alignment.
2498   video_source_.set_adaptation_enabled(true);
2499   fake_encoder_.SetRequestedResolutionAlignment(requested_alignment_);
2500   fake_encoder_.SetApplyAlignmentToAllSimulcastLayers(true);
2501 
2502   // Fill config with the scaling factor by which to reduce encoding size.
2503   const int num_streams = scale_factors_.size();
2504   VideoEncoderConfig config;
2505   webrtc::VideoEncoder::EncoderInfo encoder_info;
2506   test::FillEncoderConfiguration(kVideoCodecVP8, num_streams, &config);
2507   for (int i = 0; i < num_streams; ++i) {
2508     config.simulcast_layers[i].scale_resolution_down_by = scale_factors_[i];
2509   }
2510   config.video_stream_factory =
2511       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
2512           "VP8", /*max qp*/ 56, /*screencast*/ false,
2513           /*screenshare enabled*/ false, encoder_info);
2514   video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength);
2515 
2516   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2517       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
2518       0, 0, 0);
2519   // Wait for all layers before triggering event.
2520   sink_.SetNumExpectedLayers(num_streams);
2521 
2522   // On the 1st frame, we should have initialized the encoder and
2523   // asked for its resolution requirements.
2524   int64_t timestamp_ms = kFrameIntervalMs;
2525   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
2526   WaitForEncodedFrame(timestamp_ms);
2527   EXPECT_EQ(1, fake_encoder_.GetNumInitializations());
2528 
2529   // On the 2nd frame, we should be receiving a correctly aligned resolution.
2530   // (It's up to the encoder to potentially drop the previous frame,
2531   // to avoid coding back-to-back keyframes.)
2532   timestamp_ms += kFrameIntervalMs;
2533   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
2534   WaitForEncodedFrame(timestamp_ms);
2535   EXPECT_GE(fake_encoder_.GetNumInitializations(), 1);
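  // (EXPECT_GE rather than EXPECT_EQ: whether a second initialization is
  // needed may depend on whether the first frame already satisfied the
  // requested alignment.)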
2536 
2537   VideoCodec codec = fake_encoder_.config();
2538   EXPECT_EQ(codec.numberOfSimulcastStreams, num_streams);
2539   // Frame size should be a multiple of the requested alignment.
2540   for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
2541     EXPECT_EQ(codec.simulcastStream[i].width % requested_alignment_, 0);
2542     EXPECT_EQ(codec.simulcastStream[i].height % requested_alignment_, 0);
2543     // Aspect ratio should match.
2544     EXPECT_EQ(codec.width * codec.simulcastStream[i].height,
2545               codec.height * codec.simulcastStream[i].width);
2546   }
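  // Two notes on the checks above (illustrative only): the aspect-ratio check
  // uses cross-multiplication, w_i * H == h_i * W, which is equivalent to
  // w_i / h_i == W / H but avoids division; and a conforming scaler could,
  // for instance, round a dimension down to the alignment via
  // `aligned = value - (value % alignment)`, although the exact rounding
  // strategy is left to the implementation.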
2547 
2548   video_stream_encoder_->Stop();
2549 }
2550 
2551 TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
2552   const int kFramerateFps = 30;
2553   const int kWidth = 1280;
2554   const int kHeight = 720;
2555 
2556   // We rely on the automatic resolution adaptation, but we handle framerate
2557   // adaptation manually by mocking the stats proxy.
2558   video_source_.set_adaptation_enabled(true);
2559 
2560   // Enable BALANCED preference, no initial limitation.
2561   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2562       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2563   video_stream_encoder_->SetSource(&video_source_,
2564                                    webrtc::DegradationPreference::BALANCED);
2565   EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
2566   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
2567   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
2568   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
2569 
2570   // Adapt down as far as possible.
2571   rtc::VideoSinkWants last_wants;
2572   int64_t t = 1;
2573   int loop_count = 0;
2574   do {
2575     ++loop_count;
2576     last_wants = video_source_.sink_wants();
2577 
2578     // Simulate the framerate we've been asked to adapt to.
2579     const int fps = std::min(kFramerateFps, last_wants.max_framerate_fps);
2580     const int frame_interval_ms = rtc::kNumMillisecsPerSec / fps;
2581     VideoSendStream::Stats mock_stats = stats_proxy_->GetStats();
2582     mock_stats.input_frame_rate = fps;
2583     stats_proxy_->SetMockStats(mock_stats);
2584 
2585     video_source_.IncomingCapturedFrame(CreateFrame(t, kWidth, kHeight));
2586     sink_.WaitForEncodedFrame(t);
2587     t += frame_interval_ms;
2588 
2589     video_stream_encoder_->TriggerCpuOveruse();
2590     EXPECT_THAT(
2591         video_source_.sink_wants(),
2592         FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() *
2593                                       *video_source_.last_sent_height()));
2594   } while (video_source_.sink_wants().max_pixel_count <
2595                last_wants.max_pixel_count ||
2596            video_source_.sink_wants().max_framerate_fps <
2597                last_wants.max_framerate_fps);
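  // The loop exits on the first iteration that no longer tightens the sink
  // wants, so the number of effective adaptation steps is loop_count - 1.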
2598 
2599   // Verify that we've adapted all the way down.
2600   stats_proxy_->ResetMockStats();
2601   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
2602   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
2603   EXPECT_EQ(loop_count - 1,
2604             stats_proxy_->GetStats().number_of_cpu_adapt_changes);
2605   EXPECT_EQ(kMinPixelsPerFrame, *video_source_.last_sent_width() *
2606                                     *video_source_.last_sent_height());
2607   EXPECT_EQ(kMinBalancedFramerateFps,
2608             video_source_.sink_wants().max_framerate_fps);
2609 
2610   // Adapt back up the same number of times we adapted down.
2611   for (int i = 0; i < loop_count - 1; ++i) {
2612     last_wants = video_source_.sink_wants();
2613 
2614     // Simulate the framerate we've been asked to adapt to.
2615     const int fps = std::min(kFramerateFps, last_wants.max_framerate_fps);
2616     const int frame_interval_ms = rtc::kNumMillisecsPerSec / fps;
2617     VideoSendStream::Stats mock_stats = stats_proxy_->GetStats();
2618     mock_stats.input_frame_rate = fps;
2619     stats_proxy_->SetMockStats(mock_stats);
2620 
2621     video_source_.IncomingCapturedFrame(CreateFrame(t, kWidth, kHeight));
2622     sink_.WaitForEncodedFrame(t);
2623     t += frame_interval_ms;
2624 
2625     video_stream_encoder_->TriggerCpuUnderuse();
2626     EXPECT_THAT(
2627         video_source_.sink_wants(),
2628         FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() *
2629                                       *video_source_.last_sent_height()));
2630     EXPECT_TRUE(video_source_.sink_wants().max_pixel_count >
2631                     last_wants.max_pixel_count ||
2632                 video_source_.sink_wants().max_framerate_fps >
2633                     last_wants.max_framerate_fps);
2634   }
2635 
2636   EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax());
2637   stats_proxy_->ResetMockStats();
2638   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
2639   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
2640   EXPECT_EQ((loop_count - 1) * 2,
2641             stats_proxy_->GetStats().number_of_cpu_adapt_changes);
2642 
2643   video_stream_encoder_->Stop();
2644 }
2645 
2646 TEST_F(VideoStreamEncoderTest,
2647        SinkWantsNotChangedByResourceLimitedBeforeDegradationPreferenceChange) {
2648   video_stream_encoder_->OnBitrateUpdated(kTargetBitrate, kTargetBitrate,
2649                                           kTargetBitrate, 0, 0, 0);
2650   EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
2651 
2652   const int kFrameWidth = 1280;
2653   const int kFrameHeight = 720;
2654 
2655   int64_t ntp_time = kFrameIntervalMs;
2656 
2657   // Force an input frame rate to be available, or the adaptation call won't
2658   // know what framerate to adapt from.
2659   const int kInputFps = 30;
2660   VideoSendStream::Stats stats = stats_proxy_->GetStats();
2661   stats.input_frame_rate = kInputFps;
2662   stats_proxy_->SetMockStats(stats);
2663 
2664   video_source_.set_adaptation_enabled(true);
2665   video_stream_encoder_->SetSource(
2666       &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
2667   EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
2668   video_source_.IncomingCapturedFrame(
2669       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2670   sink_.WaitForEncodedFrame(ntp_time);
2671   ntp_time += kFrameIntervalMs;
2672 
2673   // Trigger CPU overuse.
2674   video_stream_encoder_->TriggerCpuOveruse();
2675   video_source_.IncomingCapturedFrame(
2676       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2677   sink_.WaitForEncodedFrame(ntp_time);
2678   ntp_time += kFrameIntervalMs;
2679 
2680   EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
2681   EXPECT_EQ(std::numeric_limits<int>::max(),
2682             video_source_.sink_wants().max_pixel_count);
2683   // Some framerate constraint should be set.
2684   int restricted_fps = video_source_.sink_wants().max_framerate_fps;
2685   EXPECT_LT(restricted_fps, kInputFps);
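  // Under MAINTAIN_RESOLUTION the overuse signal is expected to restrict only
  // the framerate, which is why max_pixel_count stays unrestricted above.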
2686   video_source_.IncomingCapturedFrame(
2687       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2688   sink_.WaitForEncodedFrame(ntp_time);
2689   ntp_time += 100;
2690 
2691   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
2692       &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
2693   // Give the encoder queue time to process the change in degradation preference
2694   // by waiting for an encoded frame.
2695   video_source_.IncomingCapturedFrame(
2696       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2697   sink_.WaitForEncodedFrame(ntp_time);
2698   ntp_time += kFrameIntervalMs;
2699 
2700   video_stream_encoder_->TriggerQualityLow();
2701   video_source_.IncomingCapturedFrame(
2702       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2703   sink_.WaitForEncodedFrame(ntp_time);
2704   ntp_time += kFrameIntervalMs;
2705 
2706   // Some resolution constraint should be set.
2707   EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
2708   EXPECT_LT(video_source_.sink_wants().max_pixel_count,
2709             kFrameWidth * kFrameHeight);
2710   EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps);
2711 
2712   int pixel_count = video_source_.sink_wants().max_pixel_count;
2713   // Triggering CPU underuse should not change the sink wants, since the CPU
2714   // has not been overused for resolution after the degradation preference change.
2715   video_stream_encoder_->TriggerCpuUnderuse();
2716   video_source_.IncomingCapturedFrame(
2717       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2718   sink_.WaitForEncodedFrame(ntp_time);
2719   ntp_time += kFrameIntervalMs;
2720   EXPECT_EQ(video_source_.sink_wants().max_pixel_count, pixel_count);
2721   EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps);
2722 
2723   // Change the degradation preference back. CPU underuse should not adapt since
2724   // QP is most limited.
2725   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
2726       &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
2727   video_source_.IncomingCapturedFrame(
2728       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2729   sink_.WaitForEncodedFrame(ntp_time);
2730   ntp_time += 100;
2731   // The resolution adaptation is gone after changing the degradation preference.
2732   EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
2733   EXPECT_EQ(std::numeric_limits<int>::max(),
2734             video_source_.sink_wants().max_pixel_count);
2735   // The fps adaptation from above is now back.
2736   EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps);
2737 
2738   // Trigger CPU underuse.
2739   video_stream_encoder_->TriggerCpuUnderuse();
2740   video_source_.IncomingCapturedFrame(
2741       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2742   sink_.WaitForEncodedFrame(ntp_time);
2743   ntp_time += kFrameIntervalMs;
2744   EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps);
2745 
2746   // Trigger QP underuse, fps should return to normal.
2747   video_stream_encoder_->TriggerQualityHigh();
2748   video_source_.IncomingCapturedFrame(
2749       CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
2750   sink_.WaitForEncodedFrame(ntp_time);
2751   ntp_time += kFrameIntervalMs;
2752   EXPECT_THAT(video_source_.sink_wants(), FpsMax());
2753 
2754   video_stream_encoder_->Stop();
2755 }
2756 
2757 TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
2758   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2759       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2760   EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
2761 
2762   const int kFrameWidth = 1280;
2763   const int kFrameHeight = 720;
2764 
2765   int64_t frame_timestamp = 1;
2766 
2767   video_source_.IncomingCapturedFrame(
2768       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
2769   WaitForEncodedFrame(frame_timestamp);
2770   frame_timestamp += kFrameIntervalMs;
2771 
2772   // Trigger CPU overuse.
2773   video_stream_encoder_->TriggerCpuOveruse();
2774   video_source_.IncomingCapturedFrame(
2775       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
2776   WaitForEncodedFrame(frame_timestamp);
2777   frame_timestamp += kFrameIntervalMs;
2778 
2779   // The default degradation preference is maintain-framerate, so it will lower
2780   // the max wanted resolution.
2781   EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
2782   EXPECT_LT(video_source_.sink_wants().max_pixel_count,
2783             kFrameWidth * kFrameHeight);
2784   EXPECT_EQ(kDefaultFramerate, video_source_.sink_wants().max_framerate_fps);
2785 
2786   // Set new source, switch to maintain-resolution.
2787   test::FrameForwarder new_video_source;
2788   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
2789       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
2790   // Give the encoder queue time to process the change in degradation preference
2791   // by waiting for an encoded frame.
2792   new_video_source.IncomingCapturedFrame(
2793       CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth));
2794   sink_.WaitForEncodedFrame(frame_timestamp);
2795   frame_timestamp += kFrameIntervalMs;
2796   // Initially no degradation registered.
2797   EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
2798 
2799   // Force an input frame rate to be available, or the adaptation call won't
2800   // know what framerate to adapt from.
2801   const int kInputFps = 30;
2802   VideoSendStream::Stats stats = stats_proxy_->GetStats();
2803   stats.input_frame_rate = kInputFps;
2804   stats_proxy_->SetMockStats(stats);
2805 
2806   video_stream_encoder_->TriggerCpuOveruse();
2807   new_video_source.IncomingCapturedFrame(
2808       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
2809   WaitForEncodedFrame(frame_timestamp);
2810   frame_timestamp += kFrameIntervalMs;
2811 
2812   // Some framerate constraint should be set.
2813   EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
2814   EXPECT_EQ(std::numeric_limits<int>::max(),
2815             new_video_source.sink_wants().max_pixel_count);
2816   EXPECT_LT(new_video_source.sink_wants().max_framerate_fps, kInputFps);
2817 
2818   // Turn off degradation completely.
2819   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
2820       &new_video_source, webrtc::DegradationPreference::DISABLED);
2821   // Give the encoder queue time to process the change in degradation preference
2822   // by waiting for an encoded frame.
2823   new_video_source.IncomingCapturedFrame(
2824       CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth));
2825   sink_.WaitForEncodedFrame(frame_timestamp);
2826   frame_timestamp += kFrameIntervalMs;
2827   EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
2828 
2829   video_stream_encoder_->TriggerCpuOveruse();
2830   new_video_source.IncomingCapturedFrame(
2831       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
2832   WaitForEncodedFrame(frame_timestamp);
2833   frame_timestamp += kFrameIntervalMs;
2834 
2835   // Still no degradation.
2836   EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
2837 
2838   // Calling SetSource with resolution scaling enabled applies the old SinkWants.
2839   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
2840       &new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
2841   // Give the encoder queue time to process the change in degradation preference
2842   // by waiting for an encoded frame.
2843   new_video_source.IncomingCapturedFrame(
2844       CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth));
2845   sink_.WaitForEncodedFrame(frame_timestamp);
2846   frame_timestamp += kFrameIntervalMs;
2847   EXPECT_LT(new_video_source.sink_wants().max_pixel_count,
2848             kFrameWidth * kFrameHeight);
2849   EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
2850   EXPECT_EQ(kDefaultFramerate, new_video_source.sink_wants().max_framerate_fps);
2851 
2852   // Calling SetSource with framerate scaling enabled applies the old SinkWants.
2853   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
2854       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
2855   // Give the encoder queue time to process the change in degradation preference
2856   // by waiting for an encoded frame.
2857   new_video_source.IncomingCapturedFrame(
2858       CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth));
2859   sink_.WaitForEncodedFrame(frame_timestamp);
2860   frame_timestamp += kFrameIntervalMs;
2861   EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
2862   EXPECT_EQ(std::numeric_limits<int>::max(),
2863             new_video_source.sink_wants().max_pixel_count);
2864   EXPECT_LT(new_video_source.sink_wants().max_framerate_fps, kInputFps);
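  // In other words, restrictions are remembered per degradation preference and
  // restored when a previously used preference is applied again.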
2865 
2866   video_stream_encoder_->Stop();
2867 }
2868 
2869 TEST_F(VideoStreamEncoderTest, StatsTracksQualityAdaptationStats) {
2870   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2871       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2872 
2873   const int kWidth = 1280;
2874   const int kHeight = 720;
2875   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
2876   WaitForEncodedFrame(1);
2877   VideoSendStream::Stats stats = stats_proxy_->GetStats();
2878   EXPECT_FALSE(stats.bw_limited_resolution);
2879   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
2880 
2881   // Trigger adapt down.
2882   video_stream_encoder_->TriggerQualityLow();
2883   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
2884   WaitForEncodedFrame(2);
2885 
2886   stats = stats_proxy_->GetStats();
2887   EXPECT_TRUE(stats.bw_limited_resolution);
2888   EXPECT_EQ(1, stats.number_of_quality_adapt_changes);
2889 
2890   // Trigger adapt up.
2891   video_stream_encoder_->TriggerQualityHigh();
2892   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
2893   WaitForEncodedFrame(3);
2894 
2895   stats = stats_proxy_->GetStats();
2896   EXPECT_FALSE(stats.bw_limited_resolution);
2897   EXPECT_EQ(2, stats.number_of_quality_adapt_changes);
2898   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
2899 
2900   video_stream_encoder_->Stop();
2901 }
2902 
2903 TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) {
2904   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2905       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2906 
2907   const int kWidth = 1280;
2908   const int kHeight = 720;
2909   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
2910   WaitForEncodedFrame(1);
2911   VideoSendStream::Stats stats = stats_proxy_->GetStats();
2912   EXPECT_FALSE(stats.cpu_limited_resolution);
2913   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
2914 
2915   // Trigger CPU overuse.
2916   video_stream_encoder_->TriggerCpuOveruse();
2917   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
2918   WaitForEncodedFrame(2);
2919 
2920   stats = stats_proxy_->GetStats();
2921   EXPECT_TRUE(stats.cpu_limited_resolution);
2922   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
2923 
2924   // Trigger CPU normal use.
2925   video_stream_encoder_->TriggerCpuUnderuse();
2926   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
2927   WaitForEncodedFrame(3);
2928 
2929   stats = stats_proxy_->GetStats();
2930   EXPECT_FALSE(stats.cpu_limited_resolution);
2931   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
2932   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
2933 
2934   video_stream_encoder_->Stop();
2935 }
2936 
2937 TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
2938   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
2939       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
2940 
2941   const int kWidth = 1280;
2942   const int kHeight = 720;
2943   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
2944   WaitForEncodedFrame(1);
2945   VideoSendStream::Stats stats = stats_proxy_->GetStats();
2946   EXPECT_FALSE(stats.bw_limited_resolution);
2947   EXPECT_FALSE(stats.cpu_limited_resolution);
2948   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
2949 
2950   // Trigger CPU overuse.
2951   video_stream_encoder_->TriggerCpuOveruse();
2952   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
2953   WaitForEncodedFrame(2);
2954   stats = stats_proxy_->GetStats();
2955   EXPECT_FALSE(stats.bw_limited_resolution);
2956   EXPECT_TRUE(stats.cpu_limited_resolution);
2957   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
2958 
2959   // Set new source with adaptation still enabled.
2960   test::FrameForwarder new_video_source;
2961   video_stream_encoder_->SetSource(
2962       &new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
2963 
2964   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
2965   WaitForEncodedFrame(3);
2966   stats = stats_proxy_->GetStats();
2967   EXPECT_FALSE(stats.bw_limited_resolution);
2968   EXPECT_TRUE(stats.cpu_limited_resolution);
2969   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
2970 
2971   // Set adaptation disabled.
2972   video_stream_encoder_->SetSource(&new_video_source,
2973                                    webrtc::DegradationPreference::DISABLED);
2974 
2975   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
2976   WaitForEncodedFrame(4);
2977   stats = stats_proxy_->GetStats();
2978   EXPECT_FALSE(stats.bw_limited_resolution);
2979   EXPECT_FALSE(stats.cpu_limited_resolution);
2980   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
2981 
2982   // Set adaptation back to enabled.
2983   video_stream_encoder_->SetSource(
2984       &new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
2985 
2986   new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
2987   WaitForEncodedFrame(5);
2988   stats = stats_proxy_->GetStats();
2989   EXPECT_FALSE(stats.bw_limited_resolution);
2990   EXPECT_TRUE(stats.cpu_limited_resolution);
2991   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
2992 
2993   // Trigger CPU normal use.
2994   video_stream_encoder_->TriggerCpuUnderuse();
2995   new_video_source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
2996   WaitForEncodedFrame(6);
2997   stats = stats_proxy_->GetStats();
2998   EXPECT_FALSE(stats.bw_limited_resolution);
2999   EXPECT_FALSE(stats.cpu_limited_resolution);
3000   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3001   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
3002 
3003   video_stream_encoder_->Stop();
3004 }
3005 
3006 TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsQualityAdaptation) {
3007   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3008       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3009 
3010   const int kWidth = 1280;
3011   const int kHeight = 720;
3012   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3013   WaitForEncodedFrame(1);
3014   VideoSendStream::Stats stats = stats_proxy_->GetStats();
3015   EXPECT_FALSE(stats.bw_limited_resolution);
3016   EXPECT_FALSE(stats.bw_limited_framerate);
3017   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
3018 
3019   // Set new source with adaptation still enabled.
3020   test::FrameForwarder new_video_source;
3021   video_stream_encoder_->SetSource(&new_video_source,
3022                                    webrtc::DegradationPreference::BALANCED);
3023 
3024   new_video_source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
3025   WaitForEncodedFrame(2);
3026   stats = stats_proxy_->GetStats();
3027   EXPECT_FALSE(stats.bw_limited_resolution);
3028   EXPECT_FALSE(stats.bw_limited_framerate);
3029   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
3030 
3031   // Trigger adapt down.
3032   video_stream_encoder_->TriggerQualityLow();
3033   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
3034   WaitForEncodedFrame(3);
3035   stats = stats_proxy_->GetStats();
3036   EXPECT_TRUE(stats.bw_limited_resolution);
3037   EXPECT_FALSE(stats.bw_limited_framerate);
3038   EXPECT_EQ(1, stats.number_of_quality_adapt_changes);
3039 
3040   // Set new source with adaptation still enabled.
3041   video_stream_encoder_->SetSource(&new_video_source,
3042                                    webrtc::DegradationPreference::BALANCED);
3043 
3044   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
3045   WaitForEncodedFrame(4);
3046   stats = stats_proxy_->GetStats();
3047   EXPECT_TRUE(stats.bw_limited_resolution);
3048   EXPECT_FALSE(stats.bw_limited_framerate);
3049   EXPECT_EQ(1, stats.number_of_quality_adapt_changes);
3050 
3051   // Disable resolution scaling.
3052   video_stream_encoder_->SetSource(
3053       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
3054 
3055   new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
3056   WaitForEncodedFrame(5);
3057   stats = stats_proxy_->GetStats();
3058   EXPECT_FALSE(stats.bw_limited_resolution);
3059   EXPECT_FALSE(stats.bw_limited_framerate);
3060   EXPECT_EQ(1, stats.number_of_quality_adapt_changes);
3061   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
3062 
3063   video_stream_encoder_->Stop();
3064 }
3065 
3066 TEST_F(VideoStreamEncoderTest,
3067        QualityAdaptationStatsAreResetWhenScalerIsDisabled) {
3068   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3069       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3070 
3071   const int kWidth = 1280;
3072   const int kHeight = 720;
3073   int64_t timestamp_ms = kFrameIntervalMs;
3074   video_source_.set_adaptation_enabled(true);
3075   video_source_.IncomingCapturedFrame(
3076       CreateFrame(timestamp_ms, kWidth, kHeight));
3077   WaitForEncodedFrame(timestamp_ms);
3078   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3079   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3080   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3081 
3082   // Trigger adapt down.
3083   video_stream_encoder_->TriggerQualityLow();
3084   timestamp_ms += kFrameIntervalMs;
3085   video_source_.IncomingCapturedFrame(
3086       CreateFrame(timestamp_ms, kWidth, kHeight));
3087   WaitForEncodedFrame(timestamp_ms);
3088   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3089   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3090   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3091 
3092   // Trigger overuse.
3093   video_stream_encoder_->TriggerCpuOveruse();
3094   timestamp_ms += kFrameIntervalMs;
3095   video_source_.IncomingCapturedFrame(
3096       CreateFrame(timestamp_ms, kWidth, kHeight));
3097   WaitForEncodedFrame(timestamp_ms);
3098   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3099   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3100   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3101 
3102   // Leave source unchanged, but disable quality scaler.
3103   fake_encoder_.SetQualityScaling(false);
3104 
3105   VideoEncoderConfig video_encoder_config;
3106   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
3107   // Make format different, to force recreation of encoder.
3108   video_encoder_config.video_format.parameters["foo"] = "foo";
3109   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
3110                                           kMaxPayloadLength);
3111   timestamp_ms += kFrameIntervalMs;
3112   video_source_.IncomingCapturedFrame(
3113       CreateFrame(timestamp_ms, kWidth, kHeight));
3114   WaitForEncodedFrame(timestamp_ms);
3115   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3116   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3117   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
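  // Disabling the quality scaler clears the bw-limited (quality) state, while
  // the earlier CPU adaptation and its counter remain in effect.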
3118 
3119   video_stream_encoder_->Stop();
3120 }
3121 
3122 TEST_F(VideoStreamEncoderTest,
3123        StatsTracksCpuAdaptationStatsWhenSwitchingSource_Balanced) {
3124   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3125       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3126 
3127   const int kWidth = 1280;
3128   const int kHeight = 720;
3129   int sequence = 1;
3130 
3131   // Enable BALANCED preference, no initial limitation.
3132   test::FrameForwarder source;
3133   video_stream_encoder_->SetSource(&source,
3134                                    webrtc::DegradationPreference::BALANCED);
3135   source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3136   WaitForEncodedFrame(sequence++);
3137   VideoSendStream::Stats stats = stats_proxy_->GetStats();
3138   EXPECT_FALSE(stats.cpu_limited_resolution);
3139   EXPECT_FALSE(stats.cpu_limited_framerate);
3140   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
3141 
3142   // Trigger CPU overuse, should now adapt down.
3143   video_stream_encoder_->TriggerCpuOveruse();
3144   source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3145   WaitForEncodedFrame(sequence++);
3146   stats = stats_proxy_->GetStats();
3147   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
3148 
3149   // Setting a new degradation preference should clear the restrictions since we
3150   // changed away from BALANCED.
3151   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
3152       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3153   source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3154   WaitForEncodedFrame(sequence++);
3155   stats = stats_proxy_->GetStats();
3156   EXPECT_FALSE(stats.cpu_limited_resolution);
3157   EXPECT_FALSE(stats.cpu_limited_framerate);
3158   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
3159 
3160   // Force an input frame rate to be available, or the adaptation call won't
3161   // know what framerate to adapt from.
3162   VideoSendStream::Stats mock_stats = stats_proxy_->GetStats();
3163   mock_stats.input_frame_rate = 30;
3164   stats_proxy_->SetMockStats(mock_stats);
3165   video_stream_encoder_->TriggerCpuOveruse();
3166   stats_proxy_->ResetMockStats();
3167   source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3168   WaitForEncodedFrame(sequence++);
3169 
3170   // We have now adapted once.
3171   stats = stats_proxy_->GetStats();
3172   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3173 
3174   // Back to BALANCED, should clear the restrictions again.
3175   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
3176       &source, webrtc::DegradationPreference::BALANCED);
3177   source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3178   WaitForEncodedFrame(sequence++);
3179   stats = stats_proxy_->GetStats();
3180   EXPECT_FALSE(stats.cpu_limited_resolution);
3181   EXPECT_FALSE(stats.cpu_limited_framerate);
3182   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3183 
3184   video_stream_encoder_->Stop();
3185 }
3186 
3187 TEST_F(VideoStreamEncoderTest,
3188        StatsTracksCpuAdaptationStatsWhenSwitchingSource) {
3189   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3190       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3191 
3192   const int kWidth = 1280;
3193   const int kHeight = 720;
3194   int sequence = 1;
3195 
3196   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3197   WaitForEncodedFrame(sequence++);
3198   VideoSendStream::Stats stats = stats_proxy_->GetStats();
3199   EXPECT_FALSE(stats.cpu_limited_resolution);
3200   EXPECT_FALSE(stats.cpu_limited_framerate);
3201   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
3202 
3203   // Trigger CPU overuse, should now adapt down.
3204   video_stream_encoder_->TriggerCpuOveruse();
3205   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3206   WaitForEncodedFrame(sequence++);
3207   stats = stats_proxy_->GetStats();
3208   EXPECT_TRUE(stats.cpu_limited_resolution);
3209   EXPECT_FALSE(stats.cpu_limited_framerate);
3210   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
3211 
3212   // Set new source with adaptation still enabled.
3213   test::FrameForwarder new_video_source;
3214   video_stream_encoder_->SetSource(
3215       &new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3216 
3217   new_video_source.IncomingCapturedFrame(
3218       CreateFrame(sequence, kWidth, kHeight));
3219   WaitForEncodedFrame(sequence++);
3220   stats = stats_proxy_->GetStats();
3221   EXPECT_TRUE(stats.cpu_limited_resolution);
3222   EXPECT_FALSE(stats.cpu_limited_framerate);
3223   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
3224 
3225   // Set cpu adaptation by frame dropping.
3226   video_stream_encoder_->SetSource(
3227       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
3228   new_video_source.IncomingCapturedFrame(
3229       CreateFrame(sequence, kWidth, kHeight));
3230   WaitForEncodedFrame(sequence++);
3231   stats = stats_proxy_->GetStats();
3232   // Not adapted at first.
3233   EXPECT_FALSE(stats.cpu_limited_resolution);
3234   EXPECT_FALSE(stats.cpu_limited_framerate);
3235   EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
3236 
3237   // Force an input frame rate to be available, or the adaptation call won't
3238   // know what framerate to adapt from.
3239   VideoSendStream::Stats mock_stats = stats_proxy_->GetStats();
3240   mock_stats.input_frame_rate = 30;
3241   stats_proxy_->SetMockStats(mock_stats);
3242   video_stream_encoder_->TriggerCpuOveruse();
3243   stats_proxy_->ResetMockStats();
3244 
3245   new_video_source.IncomingCapturedFrame(
3246       CreateFrame(sequence, kWidth, kHeight));
3247   WaitForEncodedFrame(sequence++);
3248 
3249   // Framerate now adapted.
3250   stats = stats_proxy_->GetStats();
3251   EXPECT_FALSE(stats.cpu_limited_resolution);
3252   EXPECT_TRUE(stats.cpu_limited_framerate);
3253   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3254 
3255   // Disable CPU adaptation.
3256   video_stream_encoder_->SetSource(&new_video_source,
3257                                    webrtc::DegradationPreference::DISABLED);
3258   new_video_source.IncomingCapturedFrame(
3259       CreateFrame(sequence, kWidth, kHeight));
3260   WaitForEncodedFrame(sequence++);
3261 
3262   stats = stats_proxy_->GetStats();
3263   EXPECT_FALSE(stats.cpu_limited_resolution);
3264   EXPECT_FALSE(stats.cpu_limited_framerate);
3265   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3266 
3267   // Try to trigger overuse. Should not succeed.
3268   stats_proxy_->SetMockStats(mock_stats);
3269   video_stream_encoder_->TriggerCpuOveruse();
3270   stats_proxy_->ResetMockStats();
3271 
3272   stats = stats_proxy_->GetStats();
3273   EXPECT_FALSE(stats.cpu_limited_resolution);
3274   EXPECT_FALSE(stats.cpu_limited_framerate);
3275   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3276 
3277   // Switch back the source with resolution adaptation enabled.
3278   video_stream_encoder_->SetSource(
3279       &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3280   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3281   WaitForEncodedFrame(sequence++);
3282   stats = stats_proxy_->GetStats();
3283   EXPECT_TRUE(stats.cpu_limited_resolution);
3284   EXPECT_FALSE(stats.cpu_limited_framerate);
3285   EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
3286 
3287   // Trigger CPU normal usage.
3288   video_stream_encoder_->TriggerCpuUnderuse();
3289   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
3290   WaitForEncodedFrame(sequence++);
3291   stats = stats_proxy_->GetStats();
3292   EXPECT_FALSE(stats.cpu_limited_resolution);
3293   EXPECT_FALSE(stats.cpu_limited_framerate);
3294   EXPECT_EQ(3, stats.number_of_cpu_adapt_changes);
3295 
3296   // Back to the source with adaptation off, set it back to maintain-resolution.
3297   video_stream_encoder_->SetSource(
3298       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
3299   new_video_source.IncomingCapturedFrame(
3300       CreateFrame(sequence, kWidth, kHeight));
3301   WaitForEncodedFrame(sequence++);
3302   stats = stats_proxy_->GetStats();
3303   // Disabled, since we previously switched the source to disabled.
3304   EXPECT_FALSE(stats.cpu_limited_resolution);
3305   EXPECT_TRUE(stats.cpu_limited_framerate);
3306   EXPECT_EQ(3, stats.number_of_cpu_adapt_changes);
3307 
3308   // Trigger CPU normal usage.
3309   video_stream_encoder_->TriggerCpuUnderuse();
3310   new_video_source.IncomingCapturedFrame(
3311       CreateFrame(sequence, kWidth, kHeight));
3312   WaitForEncodedFrame(sequence++);
3313   stats = stats_proxy_->GetStats();
3314   EXPECT_FALSE(stats.cpu_limited_resolution);
3315   EXPECT_FALSE(stats.cpu_limited_framerate);
3316   EXPECT_EQ(4, stats.number_of_cpu_adapt_changes);
3317   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
3318 
3319   video_stream_encoder_->Stop();
3320 }
3321 
3322 TEST_F(VideoStreamEncoderTest,
3323        ScalingUpAndDownDoesNothingWithMaintainResolution) {
3324   const int kWidth = 1280;
3325   const int kHeight = 720;
3326   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3327       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3328 
3329   // Expect no scaling to begin with.
3330   EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
3331 
3332   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3333   WaitForEncodedFrame(1);
3334 
3335   // Trigger scale down.
3336   video_stream_encoder_->TriggerQualityLow();
3337 
3338   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
3339   WaitForEncodedFrame(2);
3340 
3341   // Expect a scale down.
3342   EXPECT_TRUE(video_source_.sink_wants().max_pixel_count);
3343   EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
3344 
3345   // Set resolution scaling disabled.
3346   test::FrameForwarder new_video_source;
3347   video_stream_encoder_->SetSource(
3348       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
3349 
3350   // Trigger scale down.
3351   video_stream_encoder_->TriggerQualityLow();
3352   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
3353   WaitForEncodedFrame(3);
3354 
3355   // Expect no scaling.
3356   EXPECT_EQ(std::numeric_limits<int>::max(),
3357             new_video_source.sink_wants().max_pixel_count);
3358 
3359   // Trigger scale up.
3360   video_stream_encoder_->TriggerQualityHigh();
3361   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
3362   WaitForEncodedFrame(4);
3363 
3364   // Expect nothing to change, still no scaling.
3365   EXPECT_EQ(std::numeric_limits<int>::max(),
3366             new_video_source.sink_wants().max_pixel_count);
3367 
3368   video_stream_encoder_->Stop();
3369 }
3370 
3371 TEST_F(VideoStreamEncoderTest,
3372        SkipsSameAdaptDownRequest_MaintainFramerateMode) {
3373   const int kWidth = 1280;
3374   const int kHeight = 720;
3375   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3376       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3377 
3378   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
3379   test::FrameForwarder source;
3380   video_stream_encoder_->SetSource(
3381       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3382 
3383   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3384   WaitForEncodedFrame(1);
3385   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3386   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3387   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3388 
3389   // Trigger adapt down, expect scaled down resolution.
3390   video_stream_encoder_->TriggerCpuOveruse();
3391   EXPECT_THAT(source.sink_wants(),
3392               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3393   const int kLastMaxPixelCount = source.sink_wants().max_pixel_count;
3394   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3395   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3396 
3397   // Trigger adapt down for same input resolution, expect no change.
3398   video_stream_encoder_->TriggerCpuOveruse();
3399   EXPECT_EQ(kLastMaxPixelCount, source.sink_wants().max_pixel_count);
3400   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3401   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3402 
3403   video_stream_encoder_->Stop();
3404 }
3405 
3406 TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) {
3407   const int kWidth = 1280;
3408   const int kHeight = 720;
3409   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3410       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3411 
3412   // Enable BALANCED preference, no initial limitation.
3413   test::FrameForwarder source;
3414   video_stream_encoder_->SetSource(&source,
3415                                    webrtc::DegradationPreference::BALANCED);
3416   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3417   sink_.WaitForEncodedFrame(1);
3418   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3419 
3420   // Trigger adapt down, expect scaled down resolution.
3421   video_stream_encoder_->TriggerQualityLow();
3422   EXPECT_THAT(source.sink_wants(),
3423               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3424   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3425   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3426   const int kLastMaxPixelCount = source.sink_wants().max_pixel_count;
3427 
3428   // Trigger adapt down for same input resolution, expect no change.
3429   source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
3430   sink_.WaitForEncodedFrame(2);
3431   video_stream_encoder_->TriggerQualityLow();
3432   EXPECT_EQ(kLastMaxPixelCount, source.sink_wants().max_pixel_count);
3433   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3434   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3435 
3436   // Trigger adapt down for larger input resolution, expect no change.
3437   source.IncomingCapturedFrame(CreateFrame(3, kWidth + 1, kHeight + 1));
3438   sink_.WaitForEncodedFrame(3);
3439   video_stream_encoder_->TriggerQualityLow();
3440   EXPECT_EQ(kLastMaxPixelCount, source.sink_wants().max_pixel_count);
3441   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3442   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3443 
3444   video_stream_encoder_->Stop();
3445 }
3446 
3447 TEST_F(VideoStreamEncoderTest,
3448        FpsCountReturnsToZeroForFewerAdaptationsUpThanDown) {
3449   const int kWidth = 640;
3450   const int kHeight = 360;
3451   const int64_t kFrameIntervalMs = 150;
3452   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3453       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3454 
3455   // Enable BALANCED preference, no initial limitation.
3456   AdaptingFrameForwarder source(&time_controller_);
3457   source.set_adaptation_enabled(true);
3458   video_stream_encoder_->SetSource(&source,
3459                                    webrtc::DegradationPreference::BALANCED);
3460 
3461   int64_t timestamp_ms = kFrameIntervalMs;
3462   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3463   sink_.WaitForEncodedFrame(kWidth, kHeight);
3464   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3465   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3466   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3467   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3468 
3469   // Trigger adapt down, expect reduced fps (640x360@15fps).
3470   video_stream_encoder_->TriggerQualityLow();
3471   timestamp_ms += kFrameIntervalMs;
3472   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3473   sink_.WaitForEncodedFrame(timestamp_ms);
3474   EXPECT_THAT(source.sink_wants(),
3475               FpsMatchesResolutionMax(Lt(kDefaultFramerate)));
3476   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3477   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3478   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3479 
3480   // Source requests 270p, expect reduced resolution (480x270@15fps).
3481   source.OnOutputFormatRequest(480, 270);
3482   timestamp_ms += kFrameIntervalMs;
3483   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3484   WaitForEncodedFrame(480, 270);
3485   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3486 
3487   // Trigger adapt down, expect reduced fps (480x270@10fps).
3488   video_stream_encoder_->TriggerQualityLow();
3489   timestamp_ms += kFrameIntervalMs;
3490   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3491   sink_.WaitForEncodedFrame(timestamp_ms);
3492   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
3493   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3494   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3495   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3496 
3497   // Source requests QVGA, expect reduced resolution (320x180@10fps).
3498   source.OnOutputFormatRequest(320, 180);
3499   timestamp_ms += kFrameIntervalMs;
3500   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3501   WaitForEncodedFrame(320, 180);
3502   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3503 
3504   // Trigger adapt down, expect reduced fps (320x180@7fps).
3505   video_stream_encoder_->TriggerQualityLow();
3506   timestamp_ms += kFrameIntervalMs;
3507   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3508   sink_.WaitForEncodedFrame(timestamp_ms);
3509   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
3510   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3511   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3512   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3513 
3514   // Source requests VGA, expect increased resolution (640x360@7fps).
3515   source.OnOutputFormatRequest(640, 360);
3516   timestamp_ms += kFrameIntervalMs;
3517   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3518   WaitForEncodedFrame(timestamp_ms);
3519   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3520 
3521   // Trigger adapt up, expect increased fps (640x360@(max-2)fps).
3522   video_stream_encoder_->TriggerQualityHigh();
3523   timestamp_ms += kFrameIntervalMs;
3524   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3525   WaitForEncodedFrame(timestamp_ms);
3526   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
3527   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3528   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3529   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3530 
3531   // Trigger adapt up, expect increased fps (640x360@(max-1)fps).
3532   video_stream_encoder_->TriggerQualityHigh();
3533   timestamp_ms += kFrameIntervalMs;
3534   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3535   WaitForEncodedFrame(timestamp_ms);
3536   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
3537   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3538   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3539   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3540 
3541   // Trigger adapt up, expect increased fps (640x360@maxfps).
3542   video_stream_encoder_->TriggerQualityHigh();
3543   timestamp_ms += kFrameIntervalMs;
3544   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3545   WaitForEncodedFrame(timestamp_ms);
3546   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
3547   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3548   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3549   EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3550 
3551   video_stream_encoder_->Stop();
3552 }
3553 
3554 TEST_F(VideoStreamEncoderTest,
3555        FpsCountReturnsToZeroForFewerAdaptationsUpThanDownWithTwoResources) {
3556   const int kWidth = 1280;
3557   const int kHeight = 720;
3558   const int64_t kFrameIntervalMs = 150;
3559   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3560       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3561 
3562   // Enable BALANCED preference, no initial limitation.
3563   AdaptingFrameForwarder source(&time_controller_);
3564   source.set_adaptation_enabled(true);
3565   video_stream_encoder_->SetSource(&source,
3566                                    webrtc::DegradationPreference::BALANCED);
3567 
3568   int64_t timestamp_ms = kFrameIntervalMs;
3569   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3570   sink_.WaitForEncodedFrame(kWidth, kHeight);
3571   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3572   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3573   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3574   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3575 
3576   // Trigger adapt down, expect scaled down resolution (960x540@maxfps).
3577   video_stream_encoder_->TriggerQualityLow();
3578   timestamp_ms += kFrameIntervalMs;
3579   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3580   sink_.WaitForEncodedFrame(timestamp_ms);
3581   EXPECT_THAT(source.sink_wants(),
3582               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3583   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3584   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3585   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3586 
3587   // Trigger adapt down, expect scaled down resolution (640x360@maxfps).
3588   video_stream_encoder_->TriggerQualityLow();
3589   timestamp_ms += kFrameIntervalMs;
3590   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3591   sink_.WaitForEncodedFrame(timestamp_ms);
3592   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
3593   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3594   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3595   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3596 
3597   // Trigger adapt down, expect reduced fps (640x360@15fps).
3598   video_stream_encoder_->TriggerQualityLow();
3599   timestamp_ms += kFrameIntervalMs;
3600   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3601   WaitForEncodedFrame(timestamp_ms);
3602   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
3603   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3604   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3605   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3606 
3607   // Source requests QVGA, expect reduced resolution (320x180@15fps).
3608   source.OnOutputFormatRequest(320, 180);
3609   timestamp_ms += kFrameIntervalMs;
3610   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3611   WaitForEncodedFrame(320, 180);
3612   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3613   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3614 
3615   // Trigger adapt down, expect reduced fps (320x180@7fps).
3616   video_stream_encoder_->TriggerCpuOveruse();
3617   timestamp_ms += kFrameIntervalMs;
3618   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3619   WaitForEncodedFrame(timestamp_ms);
3620   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
3621   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3622   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3623   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3624   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
3625   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3626   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3627 
3628   // Source requests HD, expect increased resolution (640x360@7fps).
3629   source.OnOutputFormatRequest(1280, 720);
3630   timestamp_ms += kFrameIntervalMs;
3631   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3632   WaitForEncodedFrame(timestamp_ms);
3633   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3634   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3635 
3636   // Trigger adapt up, expect increased fps (640x360@(max-1)fps).
3637   video_stream_encoder_->TriggerCpuUnderuse();
3638   timestamp_ms += kFrameIntervalMs;
3639   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3640   WaitForEncodedFrame(timestamp_ms);
3641   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
3642   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3643   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
3644   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3645   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
3646   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3647   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3648 
3649   // Trigger adapt up, expect increased fps (640x360@maxfps).
3650   video_stream_encoder_->TriggerQualityHigh();
3651   video_stream_encoder_->TriggerCpuUnderuse();
3652   timestamp_ms += kFrameIntervalMs;
3653   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3654   WaitForEncodedFrame(timestamp_ms);
3655   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
3656   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3657   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3658   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3659   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3660   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3661   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3662 
3663   // Trigger adapt up, expect increased resolution (960x540@maxfps).
3664   video_stream_encoder_->TriggerQualityHigh();
3665   video_stream_encoder_->TriggerCpuUnderuse();
3666   timestamp_ms += kFrameIntervalMs;
3667   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3668   WaitForEncodedFrame(timestamp_ms);
3669   EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
3670   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3671   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3672   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3673   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3674   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3675   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3676 
3677   // Trigger adapt up, expect increased resolution (1280x720@maxfps).
3678   video_stream_encoder_->TriggerQualityHigh();
3679   video_stream_encoder_->TriggerCpuUnderuse();
3680   timestamp_ms += kFrameIntervalMs;
3681   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3682   WaitForEncodedFrame(timestamp_ms);
3683   EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
3684   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3685   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
3686   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3687   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3688   EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3689   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3690 
3691   video_stream_encoder_->Stop();
3692 }
3693 
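// The following NoChangeForInitialNormalUsage_* tests cover each
// DegradationPreference and verify that an adapt-up signal issued before any
// adaptation has taken place leaves the sink wants unrestricted and the adapt
// counters at zero.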
3694 TEST_F(VideoStreamEncoderTest,
3695        NoChangeForInitialNormalUsage_MaintainFramerateMode) {
3696   const int kWidth = 1280;
3697   const int kHeight = 720;
3698   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3699       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3700 
3701   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
3702   test::FrameForwarder source;
3703   video_stream_encoder_->SetSource(
3704       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3705 
3706   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3707   WaitForEncodedFrame(kWidth, kHeight);
3708   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3709   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3710   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3711 
3712   // Trigger adapt up, expect no change.
3713   video_stream_encoder_->TriggerCpuUnderuse();
3714   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3715   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3716   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3717 
3718   video_stream_encoder_->Stop();
3719 }
3720 
3721 TEST_F(VideoStreamEncoderTest,
3722        NoChangeForInitialNormalUsage_MaintainResolutionMode) {
3723   const int kWidth = 1280;
3724   const int kHeight = 720;
3725   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3726       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3727 
3728   // Enable MAINTAIN_RESOLUTION preference, no initial limitation.
3729   test::FrameForwarder source;
3730   video_stream_encoder_->SetSource(
3731       &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
3732 
3733   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3734   WaitForEncodedFrame(kWidth, kHeight);
3735   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3736   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3737   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3738 
3739   // Trigger adapt up, expect no change.
3740   video_stream_encoder_->TriggerCpuUnderuse();
3741   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3742   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3743   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3744 
3745   video_stream_encoder_->Stop();
3746 }
3747 
3748 TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) {
3749   const int kWidth = 1280;
3750   const int kHeight = 720;
3751   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3752       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3753 
3754   // Enable BALANCED preference, no initial limitation.
3755   test::FrameForwarder source;
3756   video_stream_encoder_->SetSource(&source,
3757                                    webrtc::DegradationPreference::BALANCED);
3758 
3759   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3760   sink_.WaitForEncodedFrame(kWidth, kHeight);
3761   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3762   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3763   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3764   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3765 
3766   // Trigger adapt up, expect no change.
3767   video_stream_encoder_->TriggerQualityHigh();
3768   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3769   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3770   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3771   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3772 
3773   video_stream_encoder_->Stop();
3774 }
3775 
3776 TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) {
3777   const int kWidth = 1280;
3778   const int kHeight = 720;
3779   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3780       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3781 
3782   // Enable DISABLED preference, no initial limitation.
3783   test::FrameForwarder source;
3784   video_stream_encoder_->SetSource(&source,
3785                                    webrtc::DegradationPreference::DISABLED);
3786 
3787   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3788   sink_.WaitForEncodedFrame(kWidth, kHeight);
3789   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3790   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3791   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3792   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3793 
3794   // Trigger adapt up, expect no change.
3795   video_stream_encoder_->TriggerQualityHigh();
3796   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3797   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3798   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
3799   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3800 
3801   video_stream_encoder_->Stop();
3802 }
3803 
3804 TEST_F(VideoStreamEncoderTest,
3805        AdaptsResolutionForLowQuality_MaintainFramerateMode) {
3806   const int kWidth = 1280;
3807   const int kHeight = 720;
3808   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3809       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3810 
3811   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
3812   AdaptingFrameForwarder source(&time_controller_);
3813   source.set_adaptation_enabled(true);
3814   video_stream_encoder_->SetSource(
3815       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3816 
3817   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3818   WaitForEncodedFrame(1);
3819   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3820   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3821   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3822 
3823   // Trigger adapt down, expect scaled down resolution.
3824   video_stream_encoder_->TriggerQualityLow();
3825   source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
3826   WaitForEncodedFrame(2);
3827   EXPECT_THAT(source.sink_wants(),
3828               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3829   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3830   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3831 
3832   // Trigger adapt up, expect no restriction.
3833   video_stream_encoder_->TriggerQualityHigh();
3834   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3835   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3836   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3837   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3838 
3839   video_stream_encoder_->Stop();
3840 }
3841 
3842 TEST_F(VideoStreamEncoderTest,
3843        AdaptsFramerateForLowQuality_MaintainResolutionMode) {
3844   const int kWidth = 1280;
3845   const int kHeight = 720;
3846   const int kInputFps = 30;
3847   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3848       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3849 
3850   VideoSendStream::Stats stats = stats_proxy_->GetStats();
3851   stats.input_frame_rate = kInputFps;
3852   stats_proxy_->SetMockStats(stats);
3853 
3854   // Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE).
3855   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
3856   sink_.WaitForEncodedFrame(1);
3857   EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax());
3858 
3859   // Trigger adapt down, expect scaled down resolution.
3860   video_stream_encoder_->TriggerQualityLow();
3861   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
3862   sink_.WaitForEncodedFrame(2);
3863   EXPECT_THAT(video_source_.sink_wants(),
3864               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3865 
3866   // Enable MAINTAIN_RESOLUTION preference.
3867   test::FrameForwarder new_video_source;
3868   video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
3869       &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
3870   // Give the encoder queue time to process the change in degradation preference
3871   // by waiting for an encoded frame.
3872   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
3873   sink_.WaitForEncodedFrame(3);
3874   EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
3875 
3876   // Trigger adapt down, expect reduced framerate.
3877   video_stream_encoder_->TriggerQualityLow();
3878   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
3879   sink_.WaitForEncodedFrame(4);
3880   EXPECT_THAT(new_video_source.sink_wants(),
3881               FpsMatchesResolutionMax(Lt(kInputFps)));
3882 
3883   // Trigger adapt up, expect no restriction.
3884   video_stream_encoder_->TriggerQualityHigh();
3885   EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
3886 
3887   video_stream_encoder_->Stop();
3888 }
3889 
3890 TEST_F(VideoStreamEncoderTest, DoesNotScaleBelowSetResolutionLimit) {
3891   const int kWidth = 1280;
3892   const int kHeight = 720;
3893   const size_t kNumFrames = 10;
3894 
3895   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3896       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3897 
3898   // Enable adapter, expected input resolutions when downscaling:
3899   // 1280x720 -> 960x540 -> 640x360 -> 480x270 -> 320x180 (kMinPixelsPerFrame)
3900   video_source_.set_adaptation_enabled(true);
3901 
3902   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
3903   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
3904 
3905   int downscales = 0;
3906   for (size_t i = 1; i <= kNumFrames; i++) {
3907     video_source_.IncomingCapturedFrame(
3908         CreateFrame(i * kFrameIntervalMs, kWidth, kHeight));
3909     WaitForEncodedFrame(i * kFrameIntervalMs);
3910 
3911     // Trigger scale down.
3912     rtc::VideoSinkWants last_wants = video_source_.sink_wants();
3913     video_stream_encoder_->TriggerQualityLow();
3914     EXPECT_GE(video_source_.sink_wants().max_pixel_count, kMinPixelsPerFrame);
3915 
3916     if (video_source_.sink_wants().max_pixel_count < last_wants.max_pixel_count)
3917       ++downscales;
3918 
3919     EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
3920     EXPECT_EQ(downscales,
3921               stats_proxy_->GetStats().number_of_quality_adapt_changes);
3922     EXPECT_GT(downscales, 0);
3923   }
3924   video_stream_encoder_->Stop();
3925 }
3926 
3927 TEST_F(VideoStreamEncoderTest,
3928        AdaptsResolutionUpAndDownTwiceOnOveruse_MaintainFramerateMode) {
3929   const int kWidth = 1280;
3930   const int kHeight = 720;
3931   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3932       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3933 
3934   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
3935   AdaptingFrameForwarder source(&time_controller_);
3936   source.set_adaptation_enabled(true);
3937   video_stream_encoder_->SetSource(
3938       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
3939 
3940   int64_t timestamp_ms = kFrameIntervalMs;
3941   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3942   WaitForEncodedFrame(kWidth, kHeight);
3943   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3944   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3945   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3946 
3947   // Trigger adapt down, expect scaled down resolution.
3948   video_stream_encoder_->TriggerCpuOveruse();
3949   timestamp_ms += kFrameIntervalMs;
3950   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3951   WaitForEncodedFrame(timestamp_ms);
3952   EXPECT_THAT(source.sink_wants(),
3953               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3954   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3955   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3956 
3957   // Trigger adapt up, expect no restriction.
3958   video_stream_encoder_->TriggerCpuUnderuse();
3959   timestamp_ms += kFrameIntervalMs;
3960   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3961   WaitForEncodedFrame(kWidth, kHeight);
3962   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3963   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3964   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3965 
3966   // Trigger adapt down, expect scaled down resolution.
3967   video_stream_encoder_->TriggerCpuOveruse();
3968   timestamp_ms += kFrameIntervalMs;
3969   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3970   WaitForEncodedFrame(timestamp_ms);
3971   EXPECT_THAT(source.sink_wants(),
3972               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
3973   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
3974   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3975 
3976   // Trigger adapt up, expect no restriction.
3977   video_stream_encoder_->TriggerCpuUnderuse();
3978   timestamp_ms += kFrameIntervalMs;
3979   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
3980   sink_.WaitForEncodedFrame(kWidth, kHeight);
3981   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
3982   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
3983   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
3984 
3985   video_stream_encoder_->Stop();
3986 }
3987 
3988 TEST_F(VideoStreamEncoderTest,
3989        AdaptsResolutionUpAndDownTwiceForLowQuality_BalancedMode_NoFpsLimit) {
3990   const int kWidth = 1280;
3991   const int kHeight = 720;
3992   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
3993       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
3994 
3995   // Enable BALANCED preference, no initial limitation.
3996   AdaptingFrameForwarder source(&time_controller_);
3997   source.set_adaptation_enabled(true);
3998   video_stream_encoder_->SetSource(&source,
3999                                    webrtc::DegradationPreference::BALANCED);
4000 
4001   int64_t timestamp_ms = kFrameIntervalMs;
4002   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4003   sink_.WaitForEncodedFrame(kWidth, kHeight);
4004   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
4005   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4006   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4007 
4008   // Trigger adapt down, expect scaled down resolution.
4009   video_stream_encoder_->TriggerQualityLow();
4010   timestamp_ms += kFrameIntervalMs;
4011   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4012   sink_.WaitForEncodedFrame(timestamp_ms);
4013   EXPECT_THAT(source.sink_wants(),
4014               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
4015   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4016   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4017 
4018   // Trigger adapt up, expect no restriction.
4019   video_stream_encoder_->TriggerQualityHigh();
4020   timestamp_ms += kFrameIntervalMs;
4021   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4022   sink_.WaitForEncodedFrame(kWidth, kHeight);
4023   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
4024   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4025   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4026 
4027   // Trigger adapt down, expect scaled down resolution.
4028   video_stream_encoder_->TriggerQualityLow();
4029   timestamp_ms += kFrameIntervalMs;
4030   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4031   sink_.WaitForEncodedFrame(timestamp_ms);
4032   EXPECT_THAT(source.sink_wants(),
4033               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
4034   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4035   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4036 
4037   // Trigger adapt up, expect no restriction.
4038   video_stream_encoder_->TriggerQualityHigh();
4039   timestamp_ms += kFrameIntervalMs;
4040   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4041   sink_.WaitForEncodedFrame(kWidth, kHeight);
4042   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
4043   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4044   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4045 
4046   video_stream_encoder_->Stop();
4047 }
4048 
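// The next two tests rely on per-resolution encoder bitrate limits
// (kEncoderBitrateLimits540p/720p): adapting up to a higher resolution is
// only expected once the available bitrate reaches that resolution's
// min_start_bitrate_bps, and initial frames are expected to be dropped when
// the estimate cannot sustain the incoming resolution.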
4049 TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
4050   fake_encoder_.SetResolutionBitrateLimits(
4051       {kEncoderBitrateLimits540p, kEncoderBitrateLimits720p});
4052 
4053   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4054       DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
4055       DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
4056       DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0,
4057       0, 0);
4058 
4059   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
4060   AdaptingFrameForwarder source(&time_controller_);
4061   source.set_adaptation_enabled(true);
4062   video_stream_encoder_->SetSource(
4063       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
4064 
4065   // Insert 720p frame.
4066   int64_t timestamp_ms = kFrameIntervalMs;
4067   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
4068   WaitForEncodedFrame(1280, 720);
4069 
4070   // Reduce bitrate and trigger adapt down.
4071   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4072       DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
4073       DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
4074       DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0,
4075       0, 0);
4076   video_stream_encoder_->TriggerQualityLow();
4077 
4078   // Insert 720p frame. It should be downscaled and encoded.
4079   timestamp_ms += kFrameIntervalMs;
4080   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
4081   WaitForEncodedFrame(960, 540);
4082 
4083   // Trigger adapt up. Higher resolution should not be requested due to lack
4084   // of bitrate.
4085   video_stream_encoder_->TriggerQualityHigh();
4086   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMatches(Lt(1280 * 720)));
4087 
4088   // Increase bitrate.
4089   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4090       DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
4091       DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps),
4092       DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0,
4093       0, 0);
4094 
4095   // Trigger adapt up. Higher resolution should be requested.
4096   video_stream_encoder_->TriggerQualityHigh();
4097   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
4098 
4099   video_stream_encoder_->Stop();
4100 }
4101 
4102 TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) {
4103   fake_encoder_.SetResolutionBitrateLimits(
4104       {kEncoderBitrateLimits540p, kEncoderBitrateLimits720p});
4105 
4106   // Set bitrate equal to min bitrate of 540p.
4107   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4108       DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
4109       DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps),
4110       DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0,
4111       0, 0);
4112 
4113   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
4114   AdaptingFrameForwarder source(&time_controller_);
4115   source.set_adaptation_enabled(true);
4116   video_stream_encoder_->SetSource(
4117       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
4118 
4119   // Insert 720p frame. It should be dropped and lower resolution should be
4120   // requested.
4121   int64_t timestamp_ms = kFrameIntervalMs;
4122   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
4123   ExpectDroppedFrame();
4124   EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < 1280 * 720, 5000);
4125 
4126   // Insert 720p frame. It should be downscaled and encoded.
4127   timestamp_ms += kFrameIntervalMs;
4128   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
4129   WaitForEncodedFrame(960, 540);
4130 
4131   video_stream_encoder_->Stop();
4132 }
4133 
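// Fixture for the balanced-degradation tests below. Each test overrides the
// WebRTC-Video-BalancedDegradationSettings field trial; as exercised here,
// its per-step entries read roughly as follows (inferred from the test
// expectations, not from the field trial parser itself):
//   pixels:57600|129600|230400  pixel thresholds (320x180, 480x270, 640x360),
//   fps:7|10|24 (vp8_fps:...)   max framerate requested at each step,
//   fps_diff:1|1|1              min (input fps - requested fps) needed to
//                               step down in framerate alone,
//   kbps / kbps_res             min bitrate needed to adapt up in framerate /
//                               resolution.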
4134 class BalancedDegradationTest : public VideoStreamEncoderTest {
4135  protected:
4136   void SetupTest() {
4137     // Reset encoder for field trials to take effect.
4138     ConfigureEncoder(video_encoder_config_.Copy());
4139     OnBitrateUpdated(kTargetBitrate);
4140 
4141     // Enable BALANCED preference.
4142     source_.set_adaptation_enabled(true);
4143     video_stream_encoder_->SetSource(&source_, DegradationPreference::BALANCED);
4144   }
4145 
4146   void OnBitrateUpdated(DataRate bitrate) {
4147     video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4148         bitrate, bitrate, bitrate, 0, 0, 0);
4149   }
4150 
4151   void InsertFrame() {
4152     timestamp_ms_ += kFrameIntervalMs;
4153     source_.IncomingCapturedFrame(CreateFrame(timestamp_ms_, kWidth, kHeight));
4154   }
4155 
4156   void InsertFrameAndWaitForEncoded() {
4157     InsertFrame();
4158     sink_.WaitForEncodedFrame(timestamp_ms_);
4159   }
4160 
4161   const int kWidth = 640;  // pixels:640x360=230400
4162   const int kHeight = 360;
4163   const int64_t kFrameIntervalMs = 150;  // Use low fps to not drop any frame.
4164   int64_t timestamp_ms_ = 0;
4165   AdaptingFrameForwarder source_{&time_controller_};
4166 };
4167 
4168 TEST_F(BalancedDegradationTest, AdaptDownTwiceIfMinFpsDiffLtThreshold) {
4169   test::ScopedKeyValueConfig field_trials(
4170       field_trials_,
4171       "WebRTC-Video-BalancedDegradationSettings/"
4172       "pixels:57600|129600|230400,fps:7|10|24,fps_diff:1|1|1/");
4173   SetupTest();
4174 
4175   // Force input frame rate.
4176   const int kInputFps = 24;
4177   VideoSendStream::Stats stats = stats_proxy_->GetStats();
4178   stats.input_frame_rate = kInputFps;
4179   stats_proxy_->SetMockStats(stats);
4180 
4181   InsertFrameAndWaitForEncoded();
4182   EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
4183 
4184   // Trigger adapt down, expect scaled down framerate and resolution,
4185   // since Fps diff (input-requested:0) < threshold.
4186   video_stream_encoder_->TriggerQualityLow();
4187   EXPECT_THAT(source_.sink_wants(),
4188               AllOf(WantsFps(Eq(24)), WantsMaxPixels(Le(230400))));
4189 
4190   video_stream_encoder_->Stop();
4191 }
4192 
4193 TEST_F(BalancedDegradationTest, AdaptDownOnceIfFpsDiffGeThreshold) {
4194   test::ScopedKeyValueConfig field_trials(
4195       field_trials_,
4196       "WebRTC-Video-BalancedDegradationSettings/"
4197       "pixels:57600|129600|230400,fps:7|10|24,fps_diff:1|1|1/");
4198   SetupTest();
4199 
4200   // Force input frame rate.
4201   const int kInputFps = 25;
4202   VideoSendStream::Stats stats = stats_proxy_->GetStats();
4203   stats.input_frame_rate = kInputFps;
4204   stats_proxy_->SetMockStats(stats);
4205 
4206   InsertFrameAndWaitForEncoded();
4207   EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
4208 
4209   // Trigger adapt down, expect scaled down framerate only (640x360@24fps).
4210   // Fps diff (input-requested:1) == threshold.
4211   video_stream_encoder_->TriggerQualityLow();
4212   EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(24)));
4213 
4214   video_stream_encoder_->Stop();
4215 }
4216 
4217 TEST_F(BalancedDegradationTest, AdaptDownUsesCodecSpecificFps) {
4218   test::ScopedKeyValueConfig field_trials(
4219       field_trials_,
4220       "WebRTC-Video-BalancedDegradationSettings/"
4221       "pixels:57600|129600|230400,fps:7|10|24,vp8_fps:8|11|22/");
4222   SetupTest();
4223 
4224   EXPECT_EQ(kVideoCodecVP8, video_encoder_config_.codec_type);
4225 
4226   InsertFrameAndWaitForEncoded();
4227   EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
4228 
4229   // Trigger adapt down, expect scaled down framerate (640x360@22fps).
4230   video_stream_encoder_->TriggerQualityLow();
4231   EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(22)));
4232 
4233   video_stream_encoder_->Stop();
4234 }
4235 
4236 TEST_F(BalancedDegradationTest, NoAdaptUpIfBwEstimateIsLessThanMinBitrate) {
4237   test::ScopedKeyValueConfig field_trials(
4238       field_trials_,
4239       "WebRTC-Video-BalancedDegradationSettings/"
4240       "pixels:57600|129600|230400,fps:7|10|14,kbps:0|0|425/");
4241   SetupTest();
4242 
4243   const DataRate kMinBitrate = DataRate::KilobitsPerSec(425);
4244   const DataRate kTooLowMinBitrate = DataRate::KilobitsPerSec(424);
4245   OnBitrateUpdated(kTooLowMinBitrate);
4246 
4247   InsertFrameAndWaitForEncoded();
4248   EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
4249   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4250 
4251   // Trigger adapt down, expect scaled down framerate (640x360@14fps).
4252   video_stream_encoder_->TriggerQualityLow();
4253   InsertFrameAndWaitForEncoded();
4254   EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
4255   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4256 
4257   // Trigger adapt down, expect scaled down resolution (480x270@14fps).
4258   video_stream_encoder_->TriggerQualityLow();
4259   InsertFrameAndWaitForEncoded();
4260   EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
4261   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4262 
4263   // Trigger adapt down, expect scaled down framerate (480x270@10fps).
4264   video_stream_encoder_->TriggerQualityLow();
4265   InsertFrameAndWaitForEncoded();
4266   EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
4267   EXPECT_EQ(source_.sink_wants().max_framerate_fps, 10);
4268   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4269 
4270   // Trigger adapt up, expect no upscale in fps (target bitrate < min bitrate).
4271   video_stream_encoder_->TriggerQualityHigh();
4272   InsertFrameAndWaitForEncoded();
4273   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4274 
4275   // Trigger adapt up, expect upscaled fps (target bitrate == min bitrate).
4276   OnBitrateUpdated(kMinBitrate);
4277   video_stream_encoder_->TriggerQualityHigh();
4278   InsertFrameAndWaitForEncoded();
4279   EXPECT_EQ(source_.sink_wants().max_framerate_fps, 14);
4280   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4281 
4282   video_stream_encoder_->Stop();
4283 }
4284 
4285 TEST_F(BalancedDegradationTest,
4286        InitialFrameDropAdaptsFpsAndResolutionInOneStep) {
4287   test::ScopedKeyValueConfig field_trials(
4288       field_trials_,
4289       "WebRTC-Video-BalancedDegradationSettings/"
4290       "pixels:57600|129600|230400,fps:7|24|24/");
4291   SetupTest();
4292   OnBitrateUpdated(kLowTargetBitrate);
4293 
4294   EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants());
4295 
4296   // Insert frame, expect scaled down:
4297   // framerate (640x360@24fps) -> resolution (480x270@24fps).
4298   InsertFrame();
4299   EXPECT_FALSE(WaitForFrame(TimeDelta::Seconds(1)));
4300   EXPECT_LT(source_.sink_wants().max_pixel_count, kWidth * kHeight);
4301   EXPECT_EQ(source_.sink_wants().max_framerate_fps, 24);
4302 
4303   // Insert frame, expect scaled down:
4304   // resolution (320x180@24fps).
4305   InsertFrame();
4306   EXPECT_FALSE(WaitForFrame(TimeDelta::Seconds(1)));
4307   EXPECT_LT(source_.sink_wants().max_pixel_count,
4308             source_.last_wants().max_pixel_count);
4309   EXPECT_EQ(source_.sink_wants().max_framerate_fps, 24);
4310 
4311   // Frame should not be dropped (min pixels per frame reached).
4312   InsertFrameAndWaitForEncoded();
4313 
4314   video_stream_encoder_->Stop();
4315 }
4316 
4317 TEST_F(BalancedDegradationTest,
4318        NoAdaptUpInResolutionIfBwEstimateIsLessThanMinBitrate) {
4319   test::ScopedKeyValueConfig field_trials(
4320       field_trials_,
4321       "WebRTC-Video-BalancedDegradationSettings/"
4322       "pixels:57600|129600|230400,fps:7|10|14,kbps_res:0|0|435/");
4323   SetupTest();
4324 
4325   const DataRate kResolutionMinBitrate = DataRate::KilobitsPerSec(435);
4326   const DataRate kTooLowMinResolutionBitrate = DataRate::KilobitsPerSec(434);
4327   OnBitrateUpdated(kTooLowMinResolutionBitrate);
4328 
4329   InsertFrameAndWaitForEncoded();
4330   EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
4331   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4332 
4333   // Trigger adapt down, expect scaled down framerate (640x360@14fps).
4334   video_stream_encoder_->TriggerQualityLow();
4335   InsertFrameAndWaitForEncoded();
4336   EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
4337   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4338 
4339   // Trigger adapt down, expect scaled down resolution (480x270@14fps).
4340   video_stream_encoder_->TriggerQualityLow();
4341   InsertFrameAndWaitForEncoded();
4342   EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
4343   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4344 
4345   // Trigger adapt down, expect scaled down framerate (480x270@10fps).
4346   video_stream_encoder_->TriggerQualityLow();
4347   InsertFrameAndWaitForEncoded();
4348   EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
4349   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4350 
4351   // Trigger adapt up, expect upscaled fps (no bitrate limit) (480x270@14fps).
4352   video_stream_encoder_->TriggerQualityHigh();
4353   InsertFrameAndWaitForEncoded();
4354   EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants()));
4355   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4356 
4357   // Trigger adapt up, expect no upscale in res (target bitrate < min bitrate).
4358   video_stream_encoder_->TriggerQualityHigh();
4359   InsertFrameAndWaitForEncoded();
4360   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4361 
4362   // Trigger adapt up, expect upscaled res (target bitrate == min bitrate).
4363   OnBitrateUpdated(kResolutionMinBitrate);
4364   video_stream_encoder_->TriggerQualityHigh();
4365   InsertFrameAndWaitForEncoded();
4366   EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants()));
4367   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4368 
4369   video_stream_encoder_->Stop();
4370 }
4371 
4372 TEST_F(BalancedDegradationTest,
4373        NoAdaptUpInFpsAndResolutionIfBwEstimateIsLessThanMinBitrate) {
4374   test::ScopedKeyValueConfig field_trials(
4375       field_trials_,
4376       "WebRTC-Video-BalancedDegradationSettings/"
4377       "pixels:57600|129600|230400,fps:7|10|14,kbps:0|0|425,kbps_res:0|0|435/");
4378   SetupTest();
4379 
4380   const DataRate kMinBitrate = DataRate::KilobitsPerSec(425);
4381   const DataRate kTooLowMinBitrate = DataRate::KilobitsPerSec(424);
4382   const DataRate kResolutionMinBitrate = DataRate::KilobitsPerSec(435);
4383   const DataRate kTooLowMinResolutionBitrate = DataRate::KilobitsPerSec(434);
4384   OnBitrateUpdated(kTooLowMinBitrate);
4385 
4386   InsertFrameAndWaitForEncoded();
4387   EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
4388   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4389 
4390   // Trigger adapt down, expect scaled down framerate (640x360@14fps).
4391   video_stream_encoder_->TriggerQualityLow();
4392   InsertFrameAndWaitForEncoded();
4393   EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
4394   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4395 
4396   // Trigger adapt down, expect scaled down resolution (480x270@14fps).
4397   video_stream_encoder_->TriggerQualityLow();
4398   InsertFrameAndWaitForEncoded();
4399   EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
4400   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4401 
4402   // Trigger adapt down, expect scaled down framerate (480x270@10fps).
4403   video_stream_encoder_->TriggerQualityLow();
4404   InsertFrameAndWaitForEncoded();
4405   EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
4406   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4407 
4408   // Trigger adapt up, expect no upscale (target bitrate < min bitrate).
4409   video_stream_encoder_->TriggerQualityHigh();
4410   InsertFrameAndWaitForEncoded();
4411   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4412 
4413   // Trigger adapt up, expect upscaled fps (target bitrate == min bitrate).
4414   OnBitrateUpdated(kMinBitrate);
4415   video_stream_encoder_->TriggerQualityHigh();
4416   InsertFrameAndWaitForEncoded();
4417   EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants()));
4418   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4419 
4420   // Trigger adapt up, expect no upscale in res (target bitrate < min bitrate).
4421   OnBitrateUpdated(kTooLowMinResolutionBitrate);
4422   video_stream_encoder_->TriggerQualityHigh();
4423   InsertFrameAndWaitForEncoded();
4424   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4425 
4426   // Trigger adapt up, expect upscaled res (target bitrate == min bitrate).
4427   OnBitrateUpdated(kResolutionMinBitrate);
4428   video_stream_encoder_->TriggerQualityHigh();
4429   InsertFrameAndWaitForEncoded();
4430   EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants()));
4431   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4432 
4433   video_stream_encoder_->Stop();
4434 }
4435 
4436 TEST_F(VideoStreamEncoderTest,
4437        AdaptsResolutionOnOveruseAndLowQuality_MaintainFramerateMode) {
4438   const int kWidth = 1280;
4439   const int kHeight = 720;
4440   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4441       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4442 
4443   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
4444   AdaptingFrameForwarder source(&time_controller_);
4445   source.set_adaptation_enabled(true);
4446   video_stream_encoder_->SetSource(
4447       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
4448 
4449   int64_t timestamp_ms = kFrameIntervalMs;
4450   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4451   WaitForEncodedFrame(kWidth, kHeight);
4452   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
4453   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
4454   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4455   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4456   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4457 
4458   // Trigger cpu adapt down, expect scaled down resolution (960x540).
4459   video_stream_encoder_->TriggerCpuOveruse();
4460   timestamp_ms += kFrameIntervalMs;
4461   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4462   WaitForEncodedFrame(timestamp_ms);
4463   EXPECT_THAT(source.sink_wants(),
4464               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
4465   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4466   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4467   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4468   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4469 
4470   // Trigger cpu adapt down, expect scaled down resolution (640x360).
4471   video_stream_encoder_->TriggerCpuOveruse();
4472   timestamp_ms += kFrameIntervalMs;
4473   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4474   WaitForEncodedFrame(timestamp_ms);
4475   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
4476   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4477   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4478   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4479   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4480 
4481   // Trigger cpu adapt down, expect scaled down resolution (480x270).
4482   video_stream_encoder_->TriggerCpuOveruse();
4483   timestamp_ms += kFrameIntervalMs;
4484   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4485   WaitForEncodedFrame(timestamp_ms);
4486   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
4487   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4488   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4489   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4490   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4491 
4492   // Trigger quality adapt down, expect scaled down resolution (320x180).
4493   video_stream_encoder_->TriggerQualityLow();
4494   timestamp_ms += kFrameIntervalMs;
4495   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4496   WaitForEncodedFrame(timestamp_ms);
4497   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
4498   rtc::VideoSinkWants last_wants = source.sink_wants();
4499   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4500   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4501   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4502   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4503 
4504   // Trigger quality adapt down, expect no change (min resolution reached).
4505   video_stream_encoder_->TriggerQualityLow();
4506   timestamp_ms += kFrameIntervalMs;
4507   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4508   WaitForEncodedFrame(timestamp_ms);
4509   EXPECT_THAT(source.sink_wants(), FpsMax());
4510   EXPECT_EQ(source.sink_wants().max_pixel_count, last_wants.max_pixel_count);
4511   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4512   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4513   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4514   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4515 
4516   // Trigger quality adapt up, expect upscaled resolution (480x270).
4517   video_stream_encoder_->TriggerQualityHigh();
4518   timestamp_ms += kFrameIntervalMs;
4519   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4520   WaitForEncodedFrame(timestamp_ms);
4521   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
4522   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4523   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4524   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4525   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4526 
4527   // Trigger quality and cpu adapt up since both are most limited, expect
4528   // upscaled resolution (640x360).
4529   video_stream_encoder_->TriggerCpuUnderuse();
4530   video_stream_encoder_->TriggerQualityHigh();
4531   timestamp_ms += kFrameIntervalMs;
4532   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4533   WaitForEncodedFrame(timestamp_ms);
4534   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
4535   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4536   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4537   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4538   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4539 
4540   // Trigger quality and cpu adapt up since both are most limited, expect
4541   // upscaled resolution (960x540).
4542   video_stream_encoder_->TriggerCpuUnderuse();
4543   video_stream_encoder_->TriggerQualityHigh();
4544   timestamp_ms += kFrameIntervalMs;
4545   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4546   WaitForEncodedFrame(timestamp_ms);
4547   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
4548   last_wants = source.sink_wants();
4549   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
4550   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4551   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4552   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4553 
4554   // Trigger cpu adapt up, expect no change since not most limited (960x540).
4555   // However the stats will change since the CPU resource is no longer limited.
4556   video_stream_encoder_->TriggerCpuUnderuse();
4557   timestamp_ms += kFrameIntervalMs;
4558   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4559   WaitForEncodedFrame(timestamp_ms);
4560   EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants));
4561   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
4562   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
4563   EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4564   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4565 
4566   // Trigger quality adapt up, expect no restriction (1280x720).
4567   video_stream_encoder_->TriggerQualityHigh();
4568   timestamp_ms += kFrameIntervalMs;
4569   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
4570   WaitForEncodedFrame(kWidth, kHeight);
4571   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
4572   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
4573   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
4574   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
4575   EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
4576   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
4577 
4578   video_stream_encoder_->Stop();
4579 }
4580 
4581 TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsReported) {
4582   const int kWidth = 640;
4583   const int kHeight = 360;
4584 
4585   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4586       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4587 
4588   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
4589     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
4590     WaitForEncodedFrame(i);
4591   }
4592 
4593   video_stream_encoder_->TriggerCpuOveruse();
4594   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
4595     video_source_.IncomingCapturedFrame(CreateFrame(
4596         SendStatisticsProxy::kMinRequiredMetricsSamples + i, kWidth, kHeight));
4597     WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples + i);
4598   }
4599 
4600   video_stream_encoder_->Stop();
4601   video_stream_encoder_.reset();
4602   stats_proxy_.reset();
4603 
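  // Half of all encoded frames were produced after TriggerCpuOveruse(), so the
  // CPU-limited-resolution histogram should report 50 percent.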
4604   EXPECT_METRIC_EQ(
4605       1, metrics::NumSamples("WebRTC.Video.CpuLimitedResolutionInPercent"));
4606   EXPECT_METRIC_EQ(
4607       1, metrics::NumEvents("WebRTC.Video.CpuLimitedResolutionInPercent", 50));
4608 }
4609 
4610 TEST_F(VideoStreamEncoderTest,
4611        CpuLimitedHistogramIsNotReportedForDisabledDegradation) {
4612   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4613       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4614   const int kWidth = 640;
4615   const int kHeight = 360;
4616 
4617   video_stream_encoder_->SetSource(&video_source_,
4618                                    webrtc::DegradationPreference::DISABLED);
4619 
4620   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
4621     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
4622     WaitForEncodedFrame(i);
4623   }
4624 
4625   video_stream_encoder_->Stop();
4626   video_stream_encoder_.reset();
4627   stats_proxy_.reset();
4628 
4629   EXPECT_EQ(0,
4630             metrics::NumSamples("WebRTC.Video.CpuLimitedResolutionInPercent"));
4631 }
4632 
4633 TEST_F(VideoStreamEncoderTest, ReportsVideoBitrateAllocation) {
4634   ResetEncoder("FAKE", 1, 1, 1, /*screenshare*/ false,
4635                VideoStreamEncoder::BitrateAllocationCallbackType::
4636                    kVideoBitrateAllocation);
4637 
4638   const int kDefaultFps = 30;
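  // Compute a reference allocation with SimulcastRateAllocator from the
  // encoder's own config; the allocation reported through the sink below is
  // expected to match it exactly.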
4639   const VideoBitrateAllocation expected_bitrate =
4640       SimulcastRateAllocator(fake_encoder_.config())
4641           .Allocate(VideoBitrateAllocationParameters(kLowTargetBitrate.bps(),
4642                                                      kDefaultFps));
4643 
4644   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4645       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
4646 
4647   video_source_.IncomingCapturedFrame(
4648       CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
4649   WaitForEncodedFrame(CurrentTimeMs());
4650   EXPECT_EQ(sink_.GetLastVideoBitrateAllocation(), expected_bitrate);
4651   EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1);
4652 
4653   // Check that encoder has been updated too, not just allocation observer.
4654   EXPECT_TRUE(fake_encoder_.GetAndResetLastRateControlSettings().has_value());
4655   AdvanceTime(TimeDelta::Seconds(1) / kDefaultFps);
4656 
4657   // VideoBitrateAllocation not updated on second frame.
4658   video_source_.IncomingCapturedFrame(
4659       CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
4660   WaitForEncodedFrame(CurrentTimeMs());
4661   EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1);
4662   AdvanceTime(TimeDelta::Millis(1) / kDefaultFps);
4663 
4664   // VideoBitrateAllocation updated after a process interval.
4665   const int64_t start_time_ms = CurrentTimeMs();
4666   while (CurrentTimeMs() - start_time_ms < 5 * kProcessIntervalMs) {
4667     video_source_.IncomingCapturedFrame(
4668         CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
4669     WaitForEncodedFrame(CurrentTimeMs());
4670     AdvanceTime(TimeDelta::Millis(1) / kDefaultFps);
4671   }
4672   EXPECT_GT(sink_.number_of_bitrate_allocations(), 3);
4673 
4674   video_stream_encoder_->Stop();
4675 }
4676 
4677 TEST_F(VideoStreamEncoderTest, ReportsVideoLayersAllocationForVP8Simulcast) {
4678   ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false,
4679                VideoStreamEncoder::BitrateAllocationCallbackType::
4680                    kVideoLayersAllocation);
4681 
4682   const int kDefaultFps = 30;
4683 
4684   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4685       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
4686 
4687   video_source_.IncomingCapturedFrame(
4688       CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
4689   WaitForEncodedFrame(CurrentTimeMs());
4690   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4691   VideoLayersAllocation last_layer_allocation =
4692       sink_.GetLastVideoLayersAllocation();
4693   // kLowTargetBitrate is only enough for one spatial layer.
4694   ASSERT_EQ(last_layer_allocation.active_spatial_layers.size(), 1u);
4695 
4696   VideoBitrateAllocation bitrate_allocation =
4697       fake_encoder_.GetAndResetLastRateControlSettings()->target_bitrate;
4698   // Check that encoder has been updated too, not just allocation observer.
4699   EXPECT_EQ(bitrate_allocation.get_sum_bps(), kLowTargetBitrate.bps());
4700   AdvanceTime(TimeDelta::Seconds(1) / kDefaultFps);
4701 
4702   // VideoLayersAllocation might be updated if frame rate changes.
4703   int number_of_layers_allocation = 1;
4704   const int64_t start_time_ms = CurrentTimeMs();
4705   while (CurrentTimeMs() - start_time_ms < 10 * kProcessIntervalMs) {
4706     video_source_.IncomingCapturedFrame(
4707         CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
4708     WaitForEncodedFrame(CurrentTimeMs());
4709     if (number_of_layers_allocation != sink_.number_of_layers_allocations()) {
4710       number_of_layers_allocation = sink_.number_of_layers_allocations();
4711       VideoLayersAllocation new_allocation =
4712           sink_.GetLastVideoLayersAllocation();
4713       ASSERT_EQ(new_allocation.active_spatial_layers.size(), 1u);
4714       EXPECT_NE(new_allocation.active_spatial_layers[0].frame_rate_fps,
4715                 last_layer_allocation.active_spatial_layers[0].frame_rate_fps);
4716       EXPECT_EQ(new_allocation.active_spatial_layers[0]
4717                     .target_bitrate_per_temporal_layer,
4718                 last_layer_allocation.active_spatial_layers[0]
4719                     .target_bitrate_per_temporal_layer);
4720       last_layer_allocation = new_allocation;
4721     }
4722   }
4723   EXPECT_LE(sink_.number_of_layers_allocations(), 3);
4724   video_stream_encoder_->Stop();
4725 }
4726 
4727 TEST_F(VideoStreamEncoderTest,
4728        ReportsVideoLayersAllocationForVP8WithMiddleLayerDisabled) {
4729   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
4730   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
4731   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 2, true);
4732   VideoEncoderConfig video_encoder_config;
4733   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP8,
4734                                  /* num_streams*/ 3, &video_encoder_config);
4735   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
4736   video_encoder_config.content_type =
4737       VideoEncoderConfig::ContentType::kRealtimeVideo;
4738   video_encoder_config.encoder_specific_settings =
4739       rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
4740           VideoEncoder::GetDefaultVp8Settings());
4741   for (auto& layer : video_encoder_config.simulcast_layers) {
4742     layer.num_temporal_layers = 2;
4743   }
4744   // Simulcast layers are used for enabling/disabling streams.
4745   video_encoder_config.simulcast_layers[0].active = true;
4746   video_encoder_config.simulcast_layers[1].active = false;
4747   video_encoder_config.simulcast_layers[2].active = true;
4748   ConfigureEncoder(std::move(video_encoder_config),
4749                    VideoStreamEncoder::BitrateAllocationCallbackType::
4750                        kVideoLayersAllocation);
4751 
4752   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4753       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4754 
4755   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
4756   WaitForEncodedFrame(CurrentTimeMs());
4757   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4758   VideoLayersAllocation last_layer_allocation =
4759       sink_.GetLastVideoLayersAllocation();
4760 
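  // Only the low and high simulcast streams are active, so the reported layers
  // are compacted to two entries: a downscaled stream followed by the full
  // 1280-wide stream.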
4761   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(2));
4762   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
4763                   .target_bitrate_per_temporal_layer,
4764               SizeIs(2));
4765   EXPECT_LT(last_layer_allocation.active_spatial_layers[0].width, 1280);
4766   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].width, 1280);
4767   video_stream_encoder_->Stop();
4768 }
4769 
4770 TEST_F(VideoStreamEncoderTest,
4771        ReportsVideoLayersAllocationForVP8WithMiddleAndHighestLayerDisabled) {
4772   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
4773   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
4774   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 2, true);
4775   VideoEncoderConfig video_encoder_config;
4776   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP8,
4777                                  /* num_streams*/ 3, &video_encoder_config);
4778   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
4779   video_encoder_config.content_type =
4780       VideoEncoderConfig::ContentType::kRealtimeVideo;
4781   video_encoder_config.encoder_specific_settings =
4782       rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
4783           VideoEncoder::GetDefaultVp8Settings());
4784   for (auto& layer : video_encoder_config.simulcast_layers) {
4785     layer.num_temporal_layers = 2;
4786   }
4787   // Simulcast layers are used for enabling/disabling streams.
4788   video_encoder_config.simulcast_layers[0].active = true;
4789   video_encoder_config.simulcast_layers[1].active = false;
4790   video_encoder_config.simulcast_layers[2].active = false;
4791   ConfigureEncoder(std::move(video_encoder_config),
4792                    VideoStreamEncoder::BitrateAllocationCallbackType::
4793                        kVideoLayersAllocation);
4794 
4795   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4796       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4797 
4798   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
4799   WaitForEncodedFrame(CurrentTimeMs());
4800   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4801   VideoLayersAllocation last_layer_allocation =
4802       sink_.GetLastVideoLayersAllocation();
4803 
4804   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(1));
4805   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
4806                   .target_bitrate_per_temporal_layer,
4807               SizeIs(2));
4808   EXPECT_LT(last_layer_allocation.active_spatial_layers[0].width, 1280);
4809 
4810   video_stream_encoder_->Stop();
4811 }
4812 
4813 TEST_F(VideoStreamEncoderTest,
4814        ReportsVideoLayersAllocationForV9SvcWithTemporalLayerSupport) {
4815   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
4816   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
4817   VideoEncoderConfig video_encoder_config;
4818   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9,
4819                                  /* num_streams*/ 1, &video_encoder_config);
4820   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
4821   video_encoder_config.content_type =
4822       VideoEncoderConfig::ContentType::kRealtimeVideo;
4823   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
4824   vp9_settings.numberOfSpatialLayers = 2;
4825   vp9_settings.numberOfTemporalLayers = 2;
4826   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
4827   vp9_settings.automaticResizeOn = false;
4828   video_encoder_config.encoder_specific_settings =
4829       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
4830           vp9_settings);
4831   ConfigureEncoder(std::move(video_encoder_config),
4832                    VideoStreamEncoder::BitrateAllocationCallbackType::
4833                        kVideoLayersAllocation);
4834 
4835   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4836       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4837 
4838   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
4839   WaitForEncodedFrame(CurrentTimeMs());
4840   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4841   VideoLayersAllocation last_layer_allocation =
4842       sink_.GetLastVideoLayersAllocation();
4843 
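  // With two spatial layers from a 1280x720 input, the lower layer is expected
  // at half resolution (640x360); both layers run at the configured 30 fps.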
4844   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(2));
4845   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
4846                   .target_bitrate_per_temporal_layer,
4847               SizeIs(2));
4848   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].width, 640);
4849   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].height, 360);
4850   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].frame_rate_fps, 30);
4851   EXPECT_THAT(last_layer_allocation.active_spatial_layers[1]
4852                   .target_bitrate_per_temporal_layer,
4853               SizeIs(2));
4854   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].width, 1280);
4855   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].height, 720);
4856   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].frame_rate_fps, 30);
4857 
4858   // Since full SVC is used, expect the top layer to utilize the full target
4859   // rate.
4860   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1]
4861                 .target_bitrate_per_temporal_layer[1],
4862             kTargetBitrate);
4863   video_stream_encoder_->Stop();
4864 }
4865 
4866 TEST_F(VideoStreamEncoderTest,
4867        ReportsVideoLayersAllocationForV9SvcWithoutTemporalLayerSupport) {
4868   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, false);
4869   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, false);
4870   VideoEncoderConfig video_encoder_config;
4871   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9,
4872                                  /* num_streams*/ 1, &video_encoder_config);
4873   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
4874   video_encoder_config.content_type =
4875       VideoEncoderConfig::ContentType::kRealtimeVideo;
4876   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
4877   vp9_settings.numberOfSpatialLayers = 2;
4878   vp9_settings.numberOfTemporalLayers = 2;
4879   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
4880   vp9_settings.automaticResizeOn = false;
4881   video_encoder_config.encoder_specific_settings =
4882       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
4883           vp9_settings);
4884   ConfigureEncoder(std::move(video_encoder_config),
4885                    VideoStreamEncoder::BitrateAllocationCallbackType::
4886                        kVideoLayersAllocation);
4887 
4888   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4889       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4890 
4891   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
4892   WaitForEncodedFrame(CurrentTimeMs());
4893   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4894   VideoLayersAllocation last_layer_allocation =
4895       sink_.GetLastVideoLayersAllocation();
4896 
4897   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(2));
4898   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
4899                   .target_bitrate_per_temporal_layer,
4900               SizeIs(1));
4901   EXPECT_THAT(last_layer_allocation.active_spatial_layers[1]
4902                   .target_bitrate_per_temporal_layer,
4903               SizeIs(1));
4904   // Since full SVC is used, expect the top layer to utilize the full target
4905   // rate.
4906   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1]
4907                 .target_bitrate_per_temporal_layer[0],
4908             kTargetBitrate);
4909   video_stream_encoder_->Stop();
4910 }
4911 
4912 TEST_F(VideoStreamEncoderTest,
4913        ReportsVideoLayersAllocationForVP9KSvcWithTemporalLayerSupport) {
4914   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
4915   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
4916   VideoEncoderConfig video_encoder_config;
4917   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9,
4918                                  /* num_streams*/ 1, &video_encoder_config);
4919   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
4920   video_encoder_config.content_type =
4921       VideoEncoderConfig::ContentType::kRealtimeVideo;
4922   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
4923   vp9_settings.numberOfSpatialLayers = 2;
4924   vp9_settings.numberOfTemporalLayers = 2;
4925   vp9_settings.interLayerPred = InterLayerPredMode::kOnKeyPic;
4926   vp9_settings.automaticResizeOn = false;
4927   video_encoder_config.encoder_specific_settings =
4928       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
4929           vp9_settings);
4930   ConfigureEncoder(std::move(video_encoder_config),
4931                    VideoStreamEncoder::BitrateAllocationCallbackType::
4932                        kVideoLayersAllocation);
4933 
4934   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4935       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4936 
4937   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
4938   WaitForEncodedFrame(CurrentTimeMs());
4939   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4940   VideoLayersAllocation last_layer_allocation =
4941       sink_.GetLastVideoLayersAllocation();
4942 
4943   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(2));
4944   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
4945                   .target_bitrate_per_temporal_layer,
4946               SizeIs(2));
4947   EXPECT_THAT(last_layer_allocation.active_spatial_layers[1]
4948                   .target_bitrate_per_temporal_layer,
4949               SizeIs(2));
4950   // Since KSVC is used, spatial layers are independent except on key frames.
4951   EXPECT_LT(last_layer_allocation.active_spatial_layers[1]
4952                 .target_bitrate_per_temporal_layer[1],
4953             kTargetBitrate);
4954   video_stream_encoder_->Stop();
4955 }
4956 
4957 TEST_F(VideoStreamEncoderTest,
4958        ReportsVideoLayersAllocationForV9SvcWithLowestLayerDisabled) {
4959   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
4960   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
4961   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 2, true);
4962   VideoEncoderConfig video_encoder_config;
4963   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9,
4964                                  /* num_streams*/ 1, &video_encoder_config);
4965   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
4966   video_encoder_config.content_type =
4967       VideoEncoderConfig::ContentType::kRealtimeVideo;
4968   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
4969   vp9_settings.numberOfSpatialLayers = 3;
4970   vp9_settings.numberOfTemporalLayers = 2;
4971   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
4972   vp9_settings.automaticResizeOn = false;
4973   video_encoder_config.encoder_specific_settings =
4974       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
4975           vp9_settings);
4976   // Simulcast layers are used for enabling/disabling streams.
4977   video_encoder_config.simulcast_layers.resize(3);
4978   video_encoder_config.simulcast_layers[0].active = false;
4979   video_encoder_config.simulcast_layers[1].active = true;
4980   video_encoder_config.simulcast_layers[2].active = true;
4981   ConfigureEncoder(std::move(video_encoder_config),
4982                    VideoStreamEncoder::BitrateAllocationCallbackType::
4983                        kVideoLayersAllocation);
4984 
4985   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
4986       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
4987 
4988   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
4989   WaitForEncodedFrame(CurrentTimeMs());
4990   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
4991   VideoLayersAllocation last_layer_allocation =
4992       sink_.GetLastVideoLayersAllocation();
4993 
4994   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(2));
4995   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
4996                   .target_bitrate_per_temporal_layer,
4997               SizeIs(2));
4998   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].width, 640);
4999   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].spatial_id, 0);
5000 
5001   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].width, 1280);
5002   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].spatial_id, 1);
5003   EXPECT_THAT(last_layer_allocation.active_spatial_layers[1]
5004                   .target_bitrate_per_temporal_layer,
5005               SizeIs(2));
5006   // Since full SVC is used, expect the top layer to utilize the full target
5007   // rate.
5008   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1]
5009                 .target_bitrate_per_temporal_layer[1],
5010             kTargetBitrate);
5011   video_stream_encoder_->Stop();
5012 }
5013 
5014 TEST_F(VideoStreamEncoderTest,
5015        ReportsVideoLayersAllocationForV9SvcWithHighestLayerDisabled) {
5016   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
5017   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
5018   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 2, true);
5019   VideoEncoderConfig video_encoder_config;
5020   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9,
5021                                  /* num_streams*/ 1, &video_encoder_config);
5022   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
5023   video_encoder_config.content_type =
5024       VideoEncoderConfig::ContentType::kRealtimeVideo;
5025   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5026   vp9_settings.numberOfSpatialLayers = 3;
5027   vp9_settings.numberOfTemporalLayers = 2;
5028   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
5029   vp9_settings.automaticResizeOn = false;
5030   video_encoder_config.encoder_specific_settings =
5031       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5032           vp9_settings);
5033   // Simulcast layers are used for enabling/disabling streams.
5034   video_encoder_config.simulcast_layers.resize(3);
5035   video_encoder_config.simulcast_layers[2].active = false;
5036   ConfigureEncoder(std::move(video_encoder_config),
5037                    VideoStreamEncoder::BitrateAllocationCallbackType::
5038                        kVideoLayersAllocation);
5039 
5040   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5041       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5042 
5043   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
5044   WaitForEncodedFrame(CurrentTimeMs());
5045   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
5046   VideoLayersAllocation last_layer_allocation =
5047       sink_.GetLastVideoLayersAllocation();
5048 
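  // With the 720p top layer disabled, the remaining SVC layers are the
  // quarter- and half-resolution ones (320 and 640 wide).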
5049   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(2));
5050   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
5051                   .target_bitrate_per_temporal_layer,
5052               SizeIs(2));
5053   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].width, 320);
5054   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].spatial_id, 0);
5055 
5056   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].width, 640);
5057   EXPECT_EQ(last_layer_allocation.active_spatial_layers[1].spatial_id, 1);
5058   EXPECT_THAT(last_layer_allocation.active_spatial_layers[1]
5059                   .target_bitrate_per_temporal_layer,
5060               SizeIs(2));
5061   video_stream_encoder_->Stop();
5062 }
5063 
5064 TEST_F(VideoStreamEncoderTest,
5065        ReportsVideoLayersAllocationForV9SvcWithAllButHighestLayerDisabled) {
5066   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true);
5067   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true);
5068   fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 2, true);
5069   VideoEncoderConfig video_encoder_config;
5070   test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9,
5071                                  /* num_streams*/ 1, &video_encoder_config);
5072   video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps();
5073   video_encoder_config.content_type =
5074       VideoEncoderConfig::ContentType::kRealtimeVideo;
5075   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5076   vp9_settings.numberOfSpatialLayers = 3;
5077   vp9_settings.numberOfTemporalLayers = 2;
5078   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
5079   vp9_settings.automaticResizeOn = false;
5080   video_encoder_config.encoder_specific_settings =
5081       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5082           vp9_settings);
5083   // Simulcast layers are used for enabling/disabling streams.
5084   video_encoder_config.simulcast_layers.resize(3);
5085   video_encoder_config.simulcast_layers[0].active = false;
5086   video_encoder_config.simulcast_layers[1].active = false;
5087   video_encoder_config.simulcast_layers[2].active = true;
5088   ConfigureEncoder(std::move(video_encoder_config),
5089                    VideoStreamEncoder::BitrateAllocationCallbackType::
5090                        kVideoLayersAllocation);
5091 
5092   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5093       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5094 
5095   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
5096   WaitForEncodedFrame(CurrentTimeMs());
5097   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
5098   VideoLayersAllocation last_layer_allocation =
5099       sink_.GetLastVideoLayersAllocation();
5100 
5101   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(1));
5102   EXPECT_THAT(last_layer_allocation.active_spatial_layers[0]
5103                   .target_bitrate_per_temporal_layer,
5104               SizeIs(2));
5105   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].width, 1280);
5106   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].spatial_id, 0);
5107   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0]
5108                 .target_bitrate_per_temporal_layer[1],
5109             kTargetBitrate);
5110   video_stream_encoder_->Stop();
5111 }
5112 
5113 TEST_F(VideoStreamEncoderTest, ReportsVideoLayersAllocationForH264) {
5114   ResetEncoder("H264", 1, 1, 1, false,
5115                VideoStreamEncoder::BitrateAllocationCallbackType::
5116                    kVideoLayersAllocation);
5117   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5118       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5119 
5120   video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720));
5121   WaitForEncodedFrame(CurrentTimeMs());
5122   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
5123   VideoLayersAllocation last_layer_allocation =
5124       sink_.GetLastVideoLayersAllocation();
5125 
5126   ASSERT_THAT(last_layer_allocation.active_spatial_layers, SizeIs(1));
5127   ASSERT_THAT(last_layer_allocation.active_spatial_layers[0]
5128                   .target_bitrate_per_temporal_layer,
5129               SizeIs(1));
5130   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0]
5131                 .target_bitrate_per_temporal_layer[0],
5132             kTargetBitrate);
5133   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].width, 1280);
5134   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].height, 720);
5135   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0].frame_rate_fps, 30);
5136   video_stream_encoder_->Stop();
5137 }
5138 
5139 TEST_F(VideoStreamEncoderTest,
5140        ReportsUpdatedVideoLayersAllocationWhenBweChanges) {
5141   ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false,
5142                VideoStreamEncoder::BitrateAllocationCallbackType::
5143                    kVideoLayersAllocation);
5144 
5145   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5146       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
5147 
5148   video_source_.IncomingCapturedFrame(
5149       CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
5150   WaitForEncodedFrame(CurrentTimeMs());
5151   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
5152   VideoLayersAllocation last_layer_allocation =
5153       sink_.GetLastVideoLayersAllocation();
5154   // kLowTargetBitrate is only enough for one spatial layer.
5155   ASSERT_EQ(last_layer_allocation.active_spatial_layers.size(), 1u);
5156   EXPECT_EQ(last_layer_allocation.active_spatial_layers[0]
5157                 .target_bitrate_per_temporal_layer[0],
5158             kLowTargetBitrate);
5159 
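  // Raise the estimate to the full simulcast target; this should activate the
  // second stream and trigger an updated allocation with two active spatial
  // layers.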
5160   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5161       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
5162       0, 0, 0);
5163   video_source_.IncomingCapturedFrame(
5164       CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
5165   WaitForEncodedFrame(CurrentTimeMs());
5166 
5167   EXPECT_EQ(sink_.number_of_layers_allocations(), 2);
5168   last_layer_allocation = sink_.GetLastVideoLayersAllocation();
5169   ASSERT_EQ(last_layer_allocation.active_spatial_layers.size(), 2u);
5170   EXPECT_GT(last_layer_allocation.active_spatial_layers[1]
5171                 .target_bitrate_per_temporal_layer[0],
5172             DataRate::Zero());
5173 
5174   video_stream_encoder_->Stop();
5175 }
5176 
5177 TEST_F(VideoStreamEncoderTest,
5178        ReportsUpdatedVideoLayersAllocationWhenResolutionChanges) {
5179   ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false,
5180                VideoStreamEncoder::BitrateAllocationCallbackType::
5181                    kVideoLayersAllocation);
5182 
5183   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5184       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
5185       0, 0, 0);
5186 
5187   video_source_.IncomingCapturedFrame(
5188       CreateFrame(CurrentTimeMs(), codec_width_, codec_height_));
5189   WaitForEncodedFrame(CurrentTimeMs());
5190   EXPECT_EQ(sink_.number_of_layers_allocations(), 1);
5191   ASSERT_THAT(sink_.GetLastVideoLayersAllocation().active_spatial_layers,
5192               SizeIs(2));
5193   EXPECT_EQ(sink_.GetLastVideoLayersAllocation().active_spatial_layers[1].width,
5194             codec_width_);
5195   EXPECT_EQ(
5196       sink_.GetLastVideoLayersAllocation().active_spatial_layers[1].height,
5197       codec_height_);
5198 
5199   video_source_.IncomingCapturedFrame(
5200       CreateFrame(CurrentTimeMs(), codec_width_ / 2, codec_height_ / 2));
5201   WaitForEncodedFrame(CurrentTimeMs());
5202   EXPECT_EQ(sink_.number_of_layers_allocations(), 2);
5203   ASSERT_THAT(sink_.GetLastVideoLayersAllocation().active_spatial_layers,
5204               SizeIs(2));
5205   EXPECT_EQ(sink_.GetLastVideoLayersAllocation().active_spatial_layers[1].width,
5206             codec_width_ / 2);
5207   EXPECT_EQ(
5208       sink_.GetLastVideoLayersAllocation().active_spatial_layers[1].height,
5209       codec_height_ / 2);
5210 
5211   video_stream_encoder_->Stop();
5212 }
5213 
5214 TEST_F(VideoStreamEncoderTest, TemporalLayersNotDisabledIfSupported) {
5215   // 2 TLs configured, temporal layers supported by encoder.
5216   const int kNumTemporalLayers = 2;
5217   ResetEncoder("VP8", 1, kNumTemporalLayers, 1, /*screenshare*/ false,
5218                VideoStreamEncoder::BitrateAllocationCallbackType::
5219                    kVideoBitrateAllocation);
5220   fake_encoder_.SetTemporalLayersSupported(0, true);
5221 
5222   // Bitrate allocated across temporal layers.
5223   const int kTl0Bps = kTargetBitrate.bps() *
5224                       webrtc::SimulcastRateAllocator::GetTemporalRateAllocation(
5225                           kNumTemporalLayers, /*temporal_id*/ 0,
5226                           /*base_heavy_tl3_alloc*/ false);
5227   const int kTl1Bps = kTargetBitrate.bps() *
5228                       webrtc::SimulcastRateAllocator::GetTemporalRateAllocation(
5229                           kNumTemporalLayers, /*temporal_id*/ 1,
5230                           /*base_heavy_tl3_alloc*/ false);
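  // GetTemporalRateAllocation() returns cumulative fractions of the total rate
  // (TL0, then TL0+TL1), so the expected per-layer TL1 rate below is
  // kTl1Bps - kTl0Bps.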
5231   VideoBitrateAllocation expected_bitrate;
5232   expected_bitrate.SetBitrate(/*si*/ 0, /*ti*/ 0, kTl0Bps);
5233   expected_bitrate.SetBitrate(/*si*/ 0, /*ti*/ 1, kTl1Bps - kTl0Bps);
5234 
5235   VerifyAllocatedBitrate(expected_bitrate);
5236   video_stream_encoder_->Stop();
5237 }
5238 
5239 TEST_F(VideoStreamEncoderTest, TemporalLayersDisabledIfNotSupported) {
5240   // 2 TLs configured, temporal layers not supported by encoder.
5241   ResetEncoder("VP8", 1, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false,
5242                VideoStreamEncoder::BitrateAllocationCallbackType::
5243                    kVideoBitrateAllocation);
5244   fake_encoder_.SetTemporalLayersSupported(0, false);
5245 
5246   // Temporal layers not supported by the encoder.
5247   // Total bitrate should be at ti:0.
5248   VideoBitrateAllocation expected_bitrate;
5249   expected_bitrate.SetBitrate(/*si*/ 0, /*ti*/ 0, kTargetBitrate.bps());
5250 
5251   VerifyAllocatedBitrate(expected_bitrate);
5252   video_stream_encoder_->Stop();
5253 }
5254 
5255 TEST_F(VideoStreamEncoderTest, VerifyBitrateAllocationForTwoStreams) {
5256   webrtc::test::ScopedKeyValueConfig field_trials(
5257       field_trials_,
5258       "WebRTC-Video-QualityScalerSettings/"
5259       "initial_bitrate_interval_ms:1000,initial_bitrate_factor:0.2/");
5260   // Reset encoder for field trials to take effect.
5261   ConfigureEncoder(video_encoder_config_.Copy());
5262 
5263   // 2 TLs configured, temporal layers only supported for first stream.
5264   ResetEncoder("VP8", 2, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false,
5265                VideoStreamEncoder::BitrateAllocationCallbackType::
5266                    kVideoBitrateAllocation);
5267   fake_encoder_.SetTemporalLayersSupported(0, true);
5268   fake_encoder_.SetTemporalLayersSupported(1, false);
5269 
5270   const int kS0Bps = 150000;
5271   const int kS0Tl0Bps =
5272       kS0Bps *
5273       webrtc::SimulcastRateAllocator::GetTemporalRateAllocation(
5274           /*num_layers*/ 2, /*temporal_id*/ 0, /*base_heavy_tl3_alloc*/ false);
5275   const int kS0Tl1Bps =
5276       kS0Bps *
5277       webrtc::SimulcastRateAllocator::GetTemporalRateAllocation(
5278           /*num_layers*/ 2, /*temporal_id*/ 1, /*base_heavy_tl3_alloc*/ false);
5279   const int kS1Bps = kTargetBitrate.bps() - kS0Tl1Bps;
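  // Stream 0 splits kS0Bps across its two temporal layers (cumulative shares),
  // while stream 1 gets the remainder of the target rate in a single layer.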
5280   // Temporal layers not supported by si:1.
5281   VideoBitrateAllocation expected_bitrate;
5282   expected_bitrate.SetBitrate(/*si*/ 0, /*ti*/ 0, kS0Tl0Bps);
5283   expected_bitrate.SetBitrate(/*si*/ 0, /*ti*/ 1, kS0Tl1Bps - kS0Tl0Bps);
5284   expected_bitrate.SetBitrate(/*si*/ 1, /*ti*/ 0, kS1Bps);
5285 
5286   VerifyAllocatedBitrate(expected_bitrate);
5287   video_stream_encoder_->Stop();
5288 }
5289 
5290 TEST_F(VideoStreamEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) {
5291   const int kFrameWidth = 1280;
5292   const int kFrameHeight = 720;
5293   const int kFramerate = 24;
5294 
5295   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5296       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5297   test::FrameForwarder source;
5298   video_stream_encoder_->SetSource(
5299       &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
5300 
5301   // Insert a single frame, triggering initial configuration.
5302   source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
5303   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5304 
5305   EXPECT_EQ(
5306       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5307       kDefaultFramerate);
5308 
5309   // Trigger an encoder reconfiguration (without resetting the entire instance).
5310   VideoEncoderConfig video_encoder_config;
5311   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
5312   video_encoder_config.simulcast_layers[0].max_framerate = kFramerate;
5313   video_encoder_config.max_bitrate_bps = kTargetBitrate.bps();
5314   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
5315                                           kMaxPayloadLength);
5316   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5317 
5318   // Detector should be updated with fps limit from codec config.
5319   EXPECT_EQ(
5320       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5321       kFramerate);
5322 
5323   // Trigger overuse, max framerate should be reduced.
5324   VideoSendStream::Stats stats = stats_proxy_->GetStats();
5325   stats.input_frame_rate = kFramerate;
5326   stats_proxy_->SetMockStats(stats);
5327   video_stream_encoder_->TriggerCpuOveruse();
5328   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5329   int adapted_framerate =
5330       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate();
5331   EXPECT_LT(adapted_framerate, kFramerate);
5332 
5333   // Trigger underuse, max framerate should go back to codec configured fps.
5334   // Set extra low fps, to make sure it's actually reset, not just incremented.
5335   stats = stats_proxy_->GetStats();
5336   stats.input_frame_rate = adapted_framerate / 2;
5337   stats_proxy_->SetMockStats(stats);
5338   video_stream_encoder_->TriggerCpuUnderuse();
5339   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5340   EXPECT_EQ(
5341       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5342       kFramerate);
5343 
5344   video_stream_encoder_->Stop();
5345 }
5346 
5347 TEST_F(VideoStreamEncoderTest,
5348        OveruseDetectorUpdatedRespectsFramerateAfterUnderuse) {
5349   const int kFrameWidth = 1280;
5350   const int kFrameHeight = 720;
5351   const int kLowFramerate = 15;
5352   const int kHighFramerate = 25;
5353 
5354   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5355       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5356   test::FrameForwarder source;
5357   video_stream_encoder_->SetSource(
5358       &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
5359 
5360   // Trigger initial configuration.
5361   VideoEncoderConfig video_encoder_config;
5362   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
5363   video_encoder_config.simulcast_layers[0].max_framerate = kLowFramerate;
5364   video_encoder_config.max_bitrate_bps = kTargetBitrate.bps();
5365   source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
5366   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5367                                           kMaxPayloadLength);
5368   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5369 
5370   EXPECT_EQ(
5371       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5372       kLowFramerate);
5373 
5374   // Trigger overuse, max framerate should be reduced.
5375   VideoSendStream::Stats stats = stats_proxy_->GetStats();
5376   stats.input_frame_rate = kLowFramerate;
5377   stats_proxy_->SetMockStats(stats);
5378   video_stream_encoder_->TriggerCpuOveruse();
5379   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5380   int adapted_framerate =
5381       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate();
5382   EXPECT_LT(adapted_framerate, kLowFramerate);
5383 
5384   // Reconfigure the encoder with a new (higher) max framerate; the max fps
5385   // should still respect the adaptation.
5386   video_encoder_config.simulcast_layers[0].max_framerate = kHighFramerate;
5387   source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
5388   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
5389                                           kMaxPayloadLength);
5390   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5391 
5392   EXPECT_EQ(
5393       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5394       adapted_framerate);
5395 
5396   // Trigger underuse, max framerate should go back to codec configured fps.
5397   stats = stats_proxy_->GetStats();
5398   stats.input_frame_rate = adapted_framerate;
5399   stats_proxy_->SetMockStats(stats);
5400   video_stream_encoder_->TriggerCpuUnderuse();
5401   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5402   EXPECT_EQ(
5403       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5404       kHighFramerate);
5405 
5406   video_stream_encoder_->Stop();
5407 }
5408 
5409 TEST_F(VideoStreamEncoderTest,
5410        OveruseDetectorUpdatedOnDegradationPreferenceChange) {
5411   const int kFrameWidth = 1280;
5412   const int kFrameHeight = 720;
5413   const int kFramerate = 24;
5414 
5415   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5416       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5417   test::FrameForwarder source;
5418   video_stream_encoder_->SetSource(
5419       &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
5420 
5421   // Trigger initial configuration.
5422   VideoEncoderConfig video_encoder_config;
5423   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
5424   video_encoder_config.simulcast_layers[0].max_framerate = kFramerate;
5425   video_encoder_config.max_bitrate_bps = kTargetBitrate.bps();
5426   source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
5427   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
5428                                           kMaxPayloadLength);
5429   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5430 
5431   EXPECT_EQ(
5432       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5433       kFramerate);
5434 
5435   // Trigger overuse, max framerate should be reduced.
5436   VideoSendStream::Stats stats = stats_proxy_->GetStats();
5437   stats.input_frame_rate = kFramerate;
5438   stats_proxy_->SetMockStats(stats);
5439   video_stream_encoder_->TriggerCpuOveruse();
5440   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5441   int adapted_framerate =
5442       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate();
5443   EXPECT_LT(adapted_framerate, kFramerate);
5444 
5445   // Change the degradation preference to one without framerate scaling. The
5446   // target framerate should be reset to the codec-defined limit.
5447   video_stream_encoder_->SetSourceAndWaitForFramerateUpdated(
5448       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
5449   EXPECT_EQ(
5450       video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
5451       kFramerate);
5452 
5453   video_stream_encoder_->Stop();
5454 }
5455 
5456 TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
5457   const int kTooLowBitrateForFrameSizeBps = 10000;
5458   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5459       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5460       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5461       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
5462   const int kWidth = 640;
5463   const int kHeight = 360;
5464 
5465   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5466 
5467   // Expect to drop this frame, the wait should time out.
5468   ExpectDroppedFrame();
5469 
5470   // Expect the sink_wants to specify a scaled frame.
5471   EXPECT_TRUE_WAIT(
5472       video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
5473 
5474   int last_pixel_count = video_source_.sink_wants().max_pixel_count;
5475 
5476   // Next frame is scaled.
5477   video_source_.IncomingCapturedFrame(
5478       CreateFrame(2, kWidth * 3 / 4, kHeight * 3 / 4));
5479 
5480   // Expect to drop this frame, the wait should time out.
5481   ExpectDroppedFrame();
5482 
5483   EXPECT_TRUE_WAIT(
5484       video_source_.sink_wants().max_pixel_count < last_pixel_count, 5000);
5485 
5486   video_stream_encoder_->Stop();
5487 }
5488 
5489 TEST_F(VideoStreamEncoderTest,
5490        NumberOfDroppedFramesLimitedWhenBitrateIsTooLow) {
5491   const int kTooLowBitrateForFrameSizeBps = 10000;
5492   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5493       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5494       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5495       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
5496   const int kWidth = 640;
5497   const int kHeight = 360;
5498 
5499   // We expect the first kMaxInitialFramedrop frames to get dropped.
5500   int i;
5501   for (i = 1; i <= kMaxInitialFramedrop; ++i) {
5502     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
5503     ExpectDroppedFrame();
5504   }
5505   // The next frame should not be dropped, even though its size is too large.
5506   video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
5507   WaitForEncodedFrame(i);
5508 
5509   // Expect the sink_wants to specify a scaled frame.
5510   EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
5511 
5512   video_stream_encoder_->Stop();
5513 }
5514 
5515 TEST_F(VideoStreamEncoderTest,
5516        InitialFrameDropOffWithMaintainResolutionPreference) {
5517   const int kWidth = 640;
5518   const int kHeight = 360;
5519   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5520       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
5521 
5522   // Set degradation preference.
5523   video_stream_encoder_->SetSource(
5524       &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
5525 
5526   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5527   // Frame should not be dropped, even if it's too large.
5528   WaitForEncodedFrame(1);
5529 
5530   video_stream_encoder_->Stop();
5531 }
5532 
5533 TEST_F(VideoStreamEncoderTest, InitialFrameDropOffWhenEncoderDisabledScaling) {
5534   const int kWidth = 640;
5535   const int kHeight = 360;
5536   fake_encoder_.SetQualityScaling(false);
5537 
5538   VideoEncoderConfig video_encoder_config;
5539   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
5540   // Make the format different to force recreation of the encoder.
5541   video_encoder_config.video_format.parameters["foo"] = "foo";
5542   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
5543                                           kMaxPayloadLength);
5544   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5545       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
5546 
5547   // Force quality scaler reconfiguration by resetting the source.
5548   video_stream_encoder_->SetSource(&video_source_,
5549                                    webrtc::DegradationPreference::BALANCED);
5550 
5551   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5552   // Frame should not be dropped, even if it's too large.
5553   WaitForEncodedFrame(1);
5554 
5555   video_stream_encoder_->Stop();
5556   fake_encoder_.SetQualityScaling(true);
5557 }
5558 
5559 TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) {
5560   webrtc::test::ScopedKeyValueConfig field_trials(
5561       field_trials_,
5562       "WebRTC-Video-QualityScalerSettings/"
5563       "initial_bitrate_interval_ms:1000,initial_bitrate_factor:0.2/");
5564   // Reset encoder for field trials to take effect.
5565   ConfigureEncoder(video_encoder_config_.Copy());
5566   const int kNotTooLowBitrateForFrameSizeBps = kTargetBitrate.bps() * 0.2;
5567   const int kTooLowBitrateForFrameSizeBps = kTargetBitrate.bps() * 0.19;
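  // These factors straddle the initial_bitrate_factor of 0.2 in the field
  // trial above: 0.2 * target should be just enough to avoid the initial frame
  // drop, while 0.19 * target should fall below the threshold.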
5568   const int kWidth = 640;
5569   const int kHeight = 360;
5570 
5571   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5572       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5573   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5574   // Frame should not be dropped.
5575   WaitForEncodedFrame(1);
5576 
5577   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5578       DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps),
5579       DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps),
5580       DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), 0, 0, 0);
5581   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
5582   // Frame should not be dropped.
5583   WaitForEncodedFrame(2);
5584 
5585   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5586       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5587       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5588       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
5589   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
5590   // Expect to drop this frame, the wait should time out.
5591   ExpectDroppedFrame();
5592 
5593   // Expect the sink_wants to specify a scaled frame.
5594   EXPECT_TRUE_WAIT(
5595       video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
5596   video_stream_encoder_->Stop();
5597 }
5598 
5599 TEST_F(VideoStreamEncoderTest,
5600        InitialFrameDropNotReactivatedWhenBweDropsWhenScalingDisabled) {
5601   webrtc::test::ScopedKeyValueConfig field_trials(
5602       field_trials_,
5603       "WebRTC-Video-QualityScalerSettings/"
5604       "initial_bitrate_interval_ms:1000,initial_bitrate_factor:0.2/");
5605   fake_encoder_.SetQualityScaling(false);
5606   ConfigureEncoder(video_encoder_config_.Copy());
5607   const int kNotTooLowBitrateForFrameSizeBps = kTargetBitrate.bps() * 0.2;
5608   const int kTooLowBitrateForFrameSizeBps = kTargetBitrate.bps() * 0.19;
5609   const int kWidth = 640;
5610   const int kHeight = 360;
5611 
5612   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5613       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
5614   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5615   // Frame should not be dropped.
5616   WaitForEncodedFrame(1);
5617 
5618   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5619       DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps),
5620       DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps),
5621       DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), 0, 0, 0);
5622   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
5623   // Frame should not be dropped.
5624   WaitForEncodedFrame(2);
5625 
5626   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5627       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5628       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps),
5629       DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0);
5630   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
5631   // Not dropped since quality scaling is disabled.
5632   WaitForEncodedFrame(3);
5633 
5634   // Expect the sink_wants to not specify a scaled frame.
5635   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5636   EXPECT_THAT(video_source_.sink_wants(), ResolutionMax());
5637 
5638   video_stream_encoder_->Stop();
5639 }
5640 
5641 TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenLayersChange) {
5642   const DataRate kLowTargetBitrate = DataRate::KilobitsPerSec(400);
5643   // Set simulcast.
5644   ResetEncoder("VP8", 3, 1, 1, false);
5645   fake_encoder_.SetQualityScaling(true);
5646   const int kWidth = 1280;
5647   const int kHeight = 720;
5648   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5649       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
5650   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5651   // Frame should not be dropped.
5652   WaitForEncodedFrame(1);
5653 
5654   // Trigger QVGA "singlecast"
5655   // Update the config.
5656   VideoEncoderConfig video_encoder_config;
5657   webrtc::VideoEncoder::EncoderInfo encoder_info;
5658   test::FillEncoderConfiguration(PayloadStringToCodecType("VP8"), 3,
5659                                  &video_encoder_config);
5660   video_encoder_config.video_stream_factory =
5661       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
5662           "VP8", /*max qp*/ 56, /*screencast*/ false,
5663           /*screenshare enabled*/ false, encoder_info);
5664   for (auto& layer : video_encoder_config.simulcast_layers) {
5665     layer.num_temporal_layers = 1;
5666     layer.max_framerate = kDefaultFramerate;
5667   }
5668   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
5669   video_encoder_config.content_type =
5670       VideoEncoderConfig::ContentType::kRealtimeVideo;
5671 
5672   video_encoder_config.simulcast_layers[0].active = true;
5673   video_encoder_config.simulcast_layers[1].active = false;
5674   video_encoder_config.simulcast_layers[2].active = false;
5675 
5676   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5677                                           kMaxPayloadLength);
5678   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5679 
5680   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
5681   // Frame should not be dropped.
5682   WaitForEncodedFrame(2);
5683 
5684   // Trigger HD "singlecast"
5685   video_encoder_config.simulcast_layers[0].active = false;
5686   video_encoder_config.simulcast_layers[1].active = false;
5687   video_encoder_config.simulcast_layers[2].active = true;
5688 
5689   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5690                                           kMaxPayloadLength);
5691   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5692 
5693   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
5694   // Frame should be dropped because of initial frame drop.
5695   ExpectDroppedFrame();
5696 
5697   // Expect the sink_wants to specify a scaled frame.
5698   EXPECT_TRUE_WAIT(
5699       video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
5700   video_stream_encoder_->Stop();
5701 }
5702 
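// Same scenario as above, but with VP9 SVC spatial layers instead of VP8
// simulcast streams.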
5703 TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) {
5704   const DataRate kLowTargetBitrate = DataRate::KilobitsPerSec(400);
5705   // Set up one stream with three spatial layers (SVC).
5706   ResetEncoder("VP9", 1, 1, 3, false);
5707   fake_encoder_.SetQualityScaling(true);
5708   const int kWidth = 1280;
5709   const int kHeight = 720;
5710   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
5711       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
5712   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
5713   // Frame should not be dropped.
5714   WaitForEncodedFrame(1);
5715 
5716   // Trigger QVGA "singlecast"
5717   // Update the config.
5718   VideoEncoderConfig video_encoder_config;
5719   test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
5720                                  &video_encoder_config);
5721   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5722   vp9_settings.numberOfSpatialLayers = 3;
5723   // Since only one layer is active, automatic resize should be enabled.
5724   vp9_settings.automaticResizeOn = true;
5725   video_encoder_config.encoder_specific_settings =
5726       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5727           vp9_settings);
5728   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
5729   video_encoder_config.content_type =
5730       VideoEncoderConfig::ContentType::kRealtimeVideo;
5731   // Currently simulcast layers `active` flags are used to indicate
5732   // which SVC layers are active.
5733   video_encoder_config.simulcast_layers.resize(3);
5734 
5735   video_encoder_config.simulcast_layers[0].active = true;
5736   video_encoder_config.simulcast_layers[1].active = false;
5737   video_encoder_config.simulcast_layers[2].active = false;
5738 
5739   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5740                                           kMaxPayloadLength);
5741   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5742 
5743   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
5744   // Frame should not be dropped.
5745   WaitForEncodedFrame(2);
5746 
5747   // Trigger HD "singlecast"
5748   video_encoder_config.simulcast_layers[0].active = false;
5749   video_encoder_config.simulcast_layers[1].active = false;
5750   video_encoder_config.simulcast_layers[2].active = true;
5751 
5752   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5753                                           kMaxPayloadLength);
5754   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5755 
5756   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
5757   // Frame should be dropped because of initial frame drop.
5758   ExpectDroppedFrame();
5759 
5760   // Expect the sink_wants to specify a scaled frame.
5761   EXPECT_TRUE_WAIT(
5762       video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
5763   video_stream_encoder_->Stop();
5764 }
5765 
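// With only the middle spatial layer active, the encoder-provided bitrate
// limits for that layer's resolution (360p for 720p input, 270p for 540p
// input) are expected to be applied to the single encoded layer.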
5766 TEST_F(VideoStreamEncoderTest,
5767        EncoderMaxAndMinBitratesUsedIfMiddleStreamActive) {
5768   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits270p(
5769       480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
5770   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits360p(
5771       640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
5772   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits720p(
5773       1280 * 720, 54 * 1000, 31 * 1000, 2500 * 1000);
5774   fake_encoder_.SetResolutionBitrateLimits(
5775       {kEncoderLimits270p, kEncoderLimits360p, kEncoderLimits720p});
5776 
5777   VideoEncoderConfig video_encoder_config;
5778   test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
5779                                  &video_encoder_config);
5780   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5781   vp9_settings.numberOfSpatialLayers = 3;
5782   // Since only one layer is active, automatic resize should be enabled.
5783   vp9_settings.automaticResizeOn = true;
5784   video_encoder_config.encoder_specific_settings =
5785       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5786           vp9_settings);
5787   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
5788   video_encoder_config.content_type =
5789       VideoEncoderConfig::ContentType::kRealtimeVideo;
5790   // Simulcast layers are used to indicate which spatial layers are active.
5791   video_encoder_config.simulcast_layers.resize(3);
5792   video_encoder_config.simulcast_layers[0].active = false;
5793   video_encoder_config.simulcast_layers[1].active = true;
5794   video_encoder_config.simulcast_layers[2].active = false;
5795 
5796   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5797                                           kMaxPayloadLength);
5798 
5799   // The encoder bitrate limits for 360p should be used.
5800   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
5801   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5802   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5803   EXPECT_EQ(fake_encoder_.config().codecType, VideoCodecType::kVideoCodecVP9);
5804   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 2);
5805   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5806   EXPECT_EQ(640, fake_encoder_.config().spatialLayers[0].width);
5807   EXPECT_EQ(360, fake_encoder_.config().spatialLayers[0].height);
5808   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
5809             fake_encoder_.config().spatialLayers[0].minBitrate * 1000);
5810   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.max_bitrate_bps),
5811             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
5812 
5813   // The encoder bitrate limits for 270p should be used.
5814   video_source_.IncomingCapturedFrame(CreateFrame(2, 960, 540));
5815   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5816   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5817   EXPECT_EQ(fake_encoder_.config().codecType, VideoCodecType::kVideoCodecVP9);
5818   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 2);
5819   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5820   EXPECT_EQ(480, fake_encoder_.config().spatialLayers[0].width);
5821   EXPECT_EQ(270, fake_encoder_.config().spatialLayers[0].height);
5822   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
5823             fake_encoder_.config().spatialLayers[0].minBitrate * 1000);
5824   EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
5825             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
5826 
5827   video_stream_encoder_->Stop();
5828 }
5829 
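// Same as above but without encoder-provided limits: the default singlecast
// bitrate limits for the active layer's resolution are expected instead.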
5830 TEST_F(VideoStreamEncoderTest,
5831        DefaultMaxAndMinBitratesUsedIfMiddleStreamActive) {
5832   VideoEncoderConfig video_encoder_config;
5833   test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
5834                                  &video_encoder_config);
5835   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5836   vp9_settings.numberOfSpatialLayers = 3;
5837   // Since only one layer is active, automatic resize should be enabled.
5838   vp9_settings.automaticResizeOn = true;
5839   video_encoder_config.encoder_specific_settings =
5840       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5841           vp9_settings);
5842   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
5843   video_encoder_config.content_type =
5844       VideoEncoderConfig::ContentType::kRealtimeVideo;
5845   // Simulcast layers are used to indicate which spatial layers are active.
5846   video_encoder_config.simulcast_layers.resize(3);
5847   video_encoder_config.simulcast_layers[0].active = false;
5848   video_encoder_config.simulcast_layers[1].active = true;
5849   video_encoder_config.simulcast_layers[2].active = false;
5850 
5851   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5852                                           kMaxPayloadLength);
5853 
5854   // The default bitrate limits for 360p should be used.
5855   const absl::optional<VideoEncoder::ResolutionBitrateLimits> kLimits360p =
5856       EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
5857           kVideoCodecVP9, 640 * 360);
5858   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
5859   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5860   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5861   EXPECT_EQ(fake_encoder_.config().codecType, VideoCodecType::kVideoCodecVP9);
5862   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 2);
5863   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5864   EXPECT_EQ(640, fake_encoder_.config().spatialLayers[0].width);
5865   EXPECT_EQ(360, fake_encoder_.config().spatialLayers[0].height);
5866   EXPECT_EQ(static_cast<uint32_t>(kLimits360p->min_bitrate_bps),
5867             fake_encoder_.config().spatialLayers[0].minBitrate * 1000);
5868   EXPECT_EQ(static_cast<uint32_t>(kLimits360p->max_bitrate_bps),
5869             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
5870 
5871   // The default bitrate limits for 270p should be used.
5872   const absl::optional<VideoEncoder::ResolutionBitrateLimits> kLimits270p =
5873       EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
5874           kVideoCodecVP9, 480 * 270);
5875   video_source_.IncomingCapturedFrame(CreateFrame(2, 960, 540));
5876   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5877   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5878   EXPECT_EQ(fake_encoder_.config().codecType, VideoCodecType::kVideoCodecVP9);
5879   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 2);
5880   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5881   EXPECT_EQ(480, fake_encoder_.config().spatialLayers[0].width);
5882   EXPECT_EQ(270, fake_encoder_.config().spatialLayers[0].height);
5883   EXPECT_EQ(static_cast<uint32_t>(kLimits270p->min_bitrate_bps),
5884             fake_encoder_.config().spatialLayers[0].minBitrate * 1000);
5885   EXPECT_EQ(static_cast<uint32_t>(kLimits270p->max_bitrate_bps),
5886             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
5887 
5888   video_stream_encoder_->Stop();
5889 }
5890 
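// With the WebRTC-DefaultBitrateLimitsKillSwitch field trial enabled, the
// default singlecast bitrate limits must not be applied.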
5891 TEST_F(VideoStreamEncoderTest, DefaultMaxAndMinBitratesNotUsedIfDisabled) {
5892   webrtc::test::ScopedKeyValueConfig field_trials(
5893       field_trials_, "WebRTC-DefaultBitrateLimitsKillSwitch/Enabled/");
5894   VideoEncoderConfig video_encoder_config;
5895   test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
5896                                  &video_encoder_config);
5897   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5898   vp9_settings.numberOfSpatialLayers = 3;
5899   // Since only one layer is active, automatic resize should be enabled.
5900   vp9_settings.automaticResizeOn = true;
5901   video_encoder_config.encoder_specific_settings =
5902       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5903           vp9_settings);
5904   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
5905   video_encoder_config.content_type =
5906       VideoEncoderConfig::ContentType::kRealtimeVideo;
5907   // Simulcast layers are used to indicate which spatial layers are active.
5908   video_encoder_config.simulcast_layers.resize(3);
5909   video_encoder_config.simulcast_layers[0].active = false;
5910   video_encoder_config.simulcast_layers[1].active = true;
5911   video_encoder_config.simulcast_layers[2].active = false;
5912 
5913   // Reset encoder for field trials to take effect.
5914   ConfigureEncoder(video_encoder_config.Copy());
5915 
5916   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5917                                           kMaxPayloadLength);
5918 
5919   // The default bitrate limits for 360p should not be used.
5920   const absl::optional<VideoEncoder::ResolutionBitrateLimits> kLimits360p =
5921       EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
5922           kVideoCodecVP9, 640 * 360);
5923   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
5924   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5925   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5926   EXPECT_EQ(fake_encoder_.config().codecType, kVideoCodecVP9);
5927   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 2);
5928   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5929   EXPECT_EQ(640, fake_encoder_.config().spatialLayers[0].width);
5930   EXPECT_EQ(360, fake_encoder_.config().spatialLayers[0].height);
5931   EXPECT_NE(static_cast<uint32_t>(kLimits360p->max_bitrate_bps),
5932             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
5933 
5934   video_stream_encoder_->Stop();
5935 }
5936 
5937 TEST_F(VideoStreamEncoderTest, SinglecastBitrateLimitsNotUsedForOneStream) {
5938   ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1,
5939                /*num_spatial_layers=*/1, /*screenshare=*/false);
5940 
5941   // The default singlecast bitrate limits for 720p should not be used.
5942   const absl::optional<VideoEncoder::ResolutionBitrateLimits> kLimits720p =
5943       EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
5944           kVideoCodecVP9, 1280 * 720);
5945   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
5946   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5947   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5948   EXPECT_EQ(fake_encoder_.config().codecType, VideoCodecType::kVideoCodecVP9);
5949   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 1);
5950   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5951   EXPECT_EQ(1280, fake_encoder_.config().spatialLayers[0].width);
5952   EXPECT_EQ(720, fake_encoder_.config().spatialLayers[0].height);
5953   EXPECT_NE(static_cast<uint32_t>(kLimits720p->max_bitrate_bps),
5954             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
5955 
5956   video_stream_encoder_->Stop();
5957 }
5958 
5959 TEST_F(VideoStreamEncoderTest,
5960        EncoderMaxAndMinBitratesNotUsedIfLowestStreamActive) {
5961   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits180p(
5962       320 * 180, 34 * 1000, 12 * 1000, 1234 * 1000);
5963   const VideoEncoder::ResolutionBitrateLimits kEncoderLimits720p(
5964       1280 * 720, 54 * 1000, 31 * 1000, 2500 * 1000);
5965   fake_encoder_.SetResolutionBitrateLimits(
5966       {kEncoderLimits180p, kEncoderLimits720p});
5967 
5968   VideoEncoderConfig video_encoder_config;
5969   test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
5970                                  &video_encoder_config);
5971   VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
5972   vp9_settings.numberOfSpatialLayers = 3;
5973   // Since only one layer is active, automatic resize should be enabled.
5974   vp9_settings.automaticResizeOn = true;
5975   video_encoder_config.encoder_specific_settings =
5976       rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
5977           vp9_settings);
5978   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
5979   video_encoder_config.content_type =
5980       VideoEncoderConfig::ContentType::kRealtimeVideo;
5981   // Simulcast layers are used to indicate which spatial layers are active.
5982   video_encoder_config.simulcast_layers.resize(3);
5983   video_encoder_config.simulcast_layers[0].active = true;
5984   video_encoder_config.simulcast_layers[1].active = false;
5985   video_encoder_config.simulcast_layers[2].active = false;
5986 
5987   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
5988                                           kMaxPayloadLength);
5989 
5990   // Limits not applied on lowest stream, limits for 180p should not be used.
5991   video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
5992   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
5993   EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, 1);
5994   EXPECT_EQ(fake_encoder_.config().codecType, VideoCodecType::kVideoCodecVP9);
5995   EXPECT_EQ(fake_encoder_.config().VP9().numberOfSpatialLayers, 3);
5996   EXPECT_TRUE(fake_encoder_.config().spatialLayers[0].active);
5997   EXPECT_EQ(320, fake_encoder_.config().spatialLayers[0].width);
5998   EXPECT_EQ(180, fake_encoder_.config().spatialLayers[0].height);
5999   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits180p.min_bitrate_bps),
6000             fake_encoder_.config().spatialLayers[0].minBitrate * 1000);
6001   EXPECT_NE(static_cast<uint32_t>(kEncoderLimits180p.max_bitrate_bps),
6002             fake_encoder_.config().spatialLayers[0].maxBitrate * 1000);
6003 
6004   video_stream_encoder_->Stop();
6005 }
6006 
6007 TEST_F(VideoStreamEncoderTest,
6008        InitialFrameDropActivatesWhenResolutionIncreases) {
6009   const int kWidth = 640;
6010   const int kHeight = 360;
6011 
6012   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6013       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6014   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth / 2, kHeight / 2));
6015   // Frame should not be dropped.
6016   WaitForEncodedFrame(1);
6017 
6018   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6019       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
6020   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth / 2, kHeight / 2));
6021   // Frame should not be dropped, bitrate not too low for frame.
6022   WaitForEncodedFrame(2);
6023 
6024   // Incoming resolution increases.
6025   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
6026   // Expect to drop this frame, bitrate too low for frame.
6027   ExpectDroppedFrame();
6028 
6029   // Expect the sink_wants to specify a scaled frame.
6030   EXPECT_TRUE_WAIT(
6031       video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
6032   video_stream_encoder_->Stop();
6033 }
6034 
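// Once the initial frame-drop phase is over, neither a drop in the target
// bitrate nor quality adaptations down and up should re-activate the initial
// frame dropper.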
6035 TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) {
6036   const int kWidth = 640;
6037   const int kHeight = 360;
6038   // So that quality scaling doesn't happen by itself.
6039   fake_encoder_.SetQp(kQpHigh);
6040 
6041   AdaptingFrameForwarder source(&time_controller_);
6042   source.set_adaptation_enabled(true);
6043   video_stream_encoder_->SetSource(
6044       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
6045 
6046   int timestamp = 1;
6047 
6048   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6049       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6050   source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight));
6051   WaitForEncodedFrame(timestamp);
6052   timestamp += 9000;
6053   // Long pause to make sure the initial (first BWE drop) frame-drop logic is
6054   // disabled.
6054   AdvanceTime(TimeDelta::Millis(1000));
6055 
6056   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6057       kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0);
6058   source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight));
6059   // Frame is not dropped, as initial frame drop is disabled by now.
6060   WaitForEncodedFrame(timestamp);
6061   timestamp += 9000;
6062   AdvanceTime(TimeDelta::Millis(100));
6063 
6064   // Quality adaptation down.
6065   video_stream_encoder_->TriggerQualityLow();
6066 
6067   // Adaptation has an effect.
6068   EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight,
6069                    5000);
6070 
6071   // Frame isn't dropped as initial frame dropper is disabled.
6072   source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight));
6073   WaitForEncodedFrame(timestamp);
6074   timestamp += 9000;
6075   AdvanceTime(TimeDelta::Millis(100));
6076 
6077   // Quality adaptation up.
6078   video_stream_encoder_->TriggerQualityHigh();
6079 
6080   // Adaptation has an effect.
6081   EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count > kWidth * kHeight,
6082                    5000);
6083 
6084   source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight));
6085   // Frame should not be dropped, as the initial frame dropper is off.
6086   WaitForEncodedFrame(timestamp);
6087 
6088   video_stream_encoder_->Stop();
6089 }
6090 
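// The encoder reports a minimum start bitrate for 360p. When the capture
// resolution rises to 360p while the link allocation is below that minimum,
// the initial frame dropper drops the frame and requests a lower resolution.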
6091 TEST_F(VideoStreamEncoderTest,
6092        FrameDroppedWhenResolutionIncreasesAndLinkAllocationIsLow) {
6093   const int kMinStartBps360p = 222000;
6094   fake_encoder_.SetResolutionBitrateLimits(
6095       {VideoEncoder::ResolutionBitrateLimits(320 * 180, 0, 30000, 400000),
6096        VideoEncoder::ResolutionBitrateLimits(640 * 360, kMinStartBps360p, 30000,
6097                                              800000)});
6098 
6099   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6100       DataRate::BitsPerSec(kMinStartBps360p - 1),  // target_bitrate
6101       DataRate::BitsPerSec(kMinStartBps360p - 1),  // stable_target_bitrate
6102       DataRate::BitsPerSec(kMinStartBps360p - 1),  // link_allocation
6103       0, 0, 0);
6104   // Frame should not be dropped, bitrate not too low for frame.
6105   video_source_.IncomingCapturedFrame(CreateFrame(1, 320, 180));
6106   WaitForEncodedFrame(1);
6107 
6108   // Incoming resolution increases, initial frame drop activates.
6109   // Frame should be dropped, link allocation too low for frame.
6110   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
6111   ExpectDroppedFrame();
6112 
6113   // Expect sink_wants to specify a scaled frame.
6114   EXPECT_TRUE_WAIT(video_source_.sink_wants().max_pixel_count < 640 * 360,
6115                    5000);
6116   video_stream_encoder_->Stop();
6117 }
6118 
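// Counterpart to the test above: the link allocation meets the 360p minimum
// start bitrate, so the higher-resolution frame is encoded.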
6119 TEST_F(VideoStreamEncoderTest,
6120        FrameNotDroppedWhenResolutionIncreasesAndLinkAllocationIsHigh) {
6121   const int kMinStartBps360p = 222000;
6122   fake_encoder_.SetResolutionBitrateLimits(
6123       {VideoEncoder::ResolutionBitrateLimits(320 * 180, 0, 30000, 400000),
6124        VideoEncoder::ResolutionBitrateLimits(640 * 360, kMinStartBps360p, 30000,
6125                                              800000)});
6126 
6127   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6128       DataRate::BitsPerSec(kMinStartBps360p - 1),  // target_bitrate
6129       DataRate::BitsPerSec(kMinStartBps360p - 1),  // stable_target_bitrate
6130       DataRate::BitsPerSec(kMinStartBps360p),      // link_allocation
6131       0, 0, 0);
6132   // Frame should not be dropped, bitrate not too low for frame.
6133   video_source_.IncomingCapturedFrame(CreateFrame(1, 320, 180));
6134   WaitForEncodedFrame(1);
6135 
6136   // Incoming resolution increases, initial frame drop activates.
6137   // Frame should not be dropped, link allocation not too low for frame.
6138   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360));
6139   WaitForEncodedFrame(2);
6140 
6141   video_stream_encoder_->Stop();
6142 }
6143 
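// Configures the quality ramp-up experiment (min_pixels:921600,
// min_duration_ms:2000): after the bandwidth estimate has stayed at the
// encoder max for the configured duration, the quality-limited resolution
// restriction is expected to be lifted.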
6144 TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) {
6145   webrtc::test::ScopedKeyValueConfig field_trials(
6146       field_trials_,
6147       "WebRTC-Video-QualityRampupSettings/"
6148       "min_pixels:921600,min_duration_ms:2000/");
6149 
6150   const int kWidth = 1280;
6151   const int kHeight = 720;
6152   const int kFps = 10;
6153   max_framerate_ = kFps;
6154 
6155   // Reset encoder for field trials to take effect.
6156   VideoEncoderConfig config = video_encoder_config_.Copy();
6157   config.max_bitrate_bps = kTargetBitrate.bps();
6158   DataRate max_bitrate = DataRate::BitsPerSec(config.max_bitrate_bps);
6159   ConfigureEncoder(std::move(config));
6160   fake_encoder_.SetQp(kQpLow);
6161 
6162   // Enable MAINTAIN_FRAMERATE preference.
6163   AdaptingFrameForwarder source(&time_controller_);
6164   source.set_adaptation_enabled(true);
6165   video_stream_encoder_->SetSource(&source,
6166                                    DegradationPreference::MAINTAIN_FRAMERATE);
6167 
6168   // Start at low bitrate.
6169   const DataRate kLowBitrate = DataRate::KilobitsPerSec(200);
6170   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6171       kLowBitrate, kLowBitrate, kLowBitrate, 0, 0, 0);
6172 
6173   // Expect first frame to be dropped and resolution to be limited.
6174   const int64_t kFrameIntervalMs = 1000 / kFps;
6175   int64_t timestamp_ms = kFrameIntervalMs;
6176   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6177   ExpectDroppedFrame();
6178   EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight,
6179                    5000);
6180 
6181   // Increase bitrate to encoder max.
6182   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6183       max_bitrate, max_bitrate, max_bitrate, 0, 0, 0);
6184 
6185   // Insert frames and advance `min_duration_ms`.
6186   const int64_t start_bw_high_ms = CurrentTimeMs();
6187   for (size_t i = 1; i <= 10; i++) {
6188     timestamp_ms += kFrameIntervalMs;
6189     source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6190     WaitForEncodedFrame(timestamp_ms);
6191   }
6192 
6193   // Advance to `min_duration_ms` - 1, frame should not trigger high BW.
6194   int64_t elapsed_bw_high_ms = CurrentTimeMs() - start_bw_high_ms;
6195   AdvanceTime(TimeDelta::Millis(2000 - elapsed_bw_high_ms - 1));
6196   timestamp_ms += kFrameIntervalMs;
6197   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6198   WaitForEncodedFrame(timestamp_ms);
6199   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6200   EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight);
6201 
6202   // Frame should trigger high BW and release quality limitation.
6203   timestamp_ms += kFrameIntervalMs;
6204   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6205   WaitForEncodedFrame(timestamp_ms);
6206   // The ramp-up code involves the adaptation queue, give it time to execute.
6207   // TODO(hbos): Can we await an appropriate event instead?
6208   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
6209   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6210 
6211   // Frame should not be adapted.
6212   timestamp_ms += kFrameIntervalMs;
6213   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6214   WaitForEncodedFrame(kWidth, kHeight);
6215   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6216 
6217   video_stream_encoder_->Stop();
6218 }
6219 
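// After the quality scaler has reduced the resolution due to high QP,
// disabling quality scaling on the encoder and reconfiguring should remove
// the quality-based restrictions.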
6220 TEST_F(VideoStreamEncoderTest,
6221        QualityScalerAdaptationsRemovedWhenQualityScalingDisabled) {
6222   webrtc::test::ScopedKeyValueConfig field_trials(
6223       field_trials_, "WebRTC-Video-QualityScaling/Disabled/");
6224   AdaptingFrameForwarder source(&time_controller_);
6225   source.set_adaptation_enabled(true);
6226   video_stream_encoder_->SetSource(&source,
6227                                    DegradationPreference::MAINTAIN_FRAMERATE);
6228   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6229       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6230   fake_encoder_.SetQp(kQpHigh + 1);
6231   const int kWidth = 1280;
6232   const int kHeight = 720;
6233   const int64_t kFrameIntervalMs = 100;
6234   int64_t timestamp_ms = kFrameIntervalMs;
6235   for (size_t i = 1; i <= 100; i++) {
6236     timestamp_ms += kFrameIntervalMs;
6237     source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6238     WaitForEncodedFrame(timestamp_ms);
6239   }
6240   // Wait for QualityScaler, which will wait for 2000*2.5 ms until checking QP
6241   // for the first time.
6242   // TODO(eshr): We should avoid these waits by using threads with simulated
6243   // time.
6244   EXPECT_TRUE_WAIT(stats_proxy_->GetStats().bw_limited_resolution,
6245                    2000 * 2.5 * 2);
6246   timestamp_ms += kFrameIntervalMs;
6247   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6248   WaitForEncodedFrame(timestamp_ms);
6249   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
6250   EXPECT_THAT(source.sink_wants(), WantsMaxPixels(Lt(kWidth * kHeight)));
6251   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6252 
6253   // Disable Quality scaling by turning off scaler on the encoder and
6254   // reconfiguring.
6255   fake_encoder_.SetQualityScaling(false);
6256   video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(),
6257                                           kMaxPayloadLength);
6258   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
6259   AdvanceTime(TimeDelta::Zero());
6260   // Since we turned off the quality scaler, the adaptations made by it are
6261   // removed.
6262   EXPECT_THAT(source.sink_wants(), ResolutionMax());
6263   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6264 
6265   video_stream_encoder_->Stop();
6266 }
6267 
6268 TEST_F(VideoStreamEncoderTest,
6269        ResolutionNotAdaptedForTooSmallFrame_MaintainFramerateMode) {
6270   const int kTooSmallWidth = 10;
6271   const int kTooSmallHeight = 10;
6272   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6273       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6274 
6275   // Enable MAINTAIN_FRAMERATE preference, no initial limitation.
6276   test::FrameForwarder source;
6277   video_stream_encoder_->SetSource(
6278       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
6279   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
6280   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6281 
6282   // Trigger adapt down, too small frame, expect no change.
6283   source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
6284   WaitForEncodedFrame(1);
6285   video_stream_encoder_->TriggerCpuOveruse();
6286   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6287   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6288   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6289 
6290   video_stream_encoder_->Stop();
6291 }
6292 
6293 TEST_F(VideoStreamEncoderTest,
6294        ResolutionNotAdaptedForTooSmallFrame_BalancedMode) {
6295   const int kTooSmallWidth = 10;
6296   const int kTooSmallHeight = 10;
6297   const int kFpsLimit = 7;
6298   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6299       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6300 
6301   // Enable BALANCED preference, no initial limitation.
6302   test::FrameForwarder source;
6303   video_stream_encoder_->SetSource(&source,
6304                                    webrtc::DegradationPreference::BALANCED);
6305   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
6306   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6307   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6308 
6309   // Trigger adapt down, expect limited framerate.
6310   source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
6311   WaitForEncodedFrame(1);
6312   video_stream_encoder_->TriggerQualityLow();
6313   EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
6314   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6315   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6316   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6317 
6318   // Trigger adapt down, too small frame, expect no change.
6319   source.IncomingCapturedFrame(CreateFrame(2, kTooSmallWidth, kTooSmallHeight));
6320   WaitForEncodedFrame(2);
6321   video_stream_encoder_->TriggerQualityLow();
6322   EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
6323   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6324   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6325   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6326 
6327   video_stream_encoder_->Stop();
6328 }
6329 
6330 TEST_F(VideoStreamEncoderTest, FailingInitEncodeDoesntCauseCrash) {
6331   fake_encoder_.ForceInitEncodeFailure(true);
6332   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6333       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6334   ResetEncoder("VP8", 2, 1, 1, false);
6335   const int kFrameWidth = 1280;
6336   const int kFrameHeight = 720;
6337   video_source_.IncomingCapturedFrame(
6338       CreateFrame(1, kFrameWidth, kFrameHeight));
6339   ExpectDroppedFrame();
6340   video_stream_encoder_->Stop();
6341 }
6342 
6343 // TODO(sprang): Extend this with fps throttling and any "balanced" extensions.
6344 TEST_F(VideoStreamEncoderTest,
6345        AdaptsResolutionOnOveruse_MaintainFramerateMode) {
6346   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6347       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6348 
6349   const int kFrameWidth = 1280;
6350   const int kFrameHeight = 720;
6351   // Enable default VideoAdapter downscaling. First step is 3/4, not 3/5 as
6352   // requested by
6353   // VideoStreamEncoder::VideoSourceProxy::RequestResolutionLowerThan().
6354   video_source_.set_adaptation_enabled(true);
6355 
6356   video_source_.IncomingCapturedFrame(
6357       CreateFrame(1 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
6358   WaitForEncodedFrame(kFrameWidth, kFrameHeight);
6359 
6360   // Trigger CPU overuse, downscale by 3/4.
6361   video_stream_encoder_->TriggerCpuOveruse();
6362   video_source_.IncomingCapturedFrame(
6363       CreateFrame(2 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
6364   WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
6365 
6366   // Trigger CPU normal use, return to original resolution.
6367   video_stream_encoder_->TriggerCpuUnderuse();
6368   video_source_.IncomingCapturedFrame(
6369       CreateFrame(3 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
6370   WaitForEncodedFrame(kFrameWidth, kFrameHeight);
6371 
6372   video_stream_encoder_->Stop();
6373 }
6374 
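// In MAINTAIN_RESOLUTION mode each CPU overuse signal reduces the requested
// framerate to 2/3 of the current value; the dropped-frame counts below are
// verified with a one-frame error margin.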
6375 TEST_F(VideoStreamEncoderTest,
6376        AdaptsFramerateOnOveruse_MaintainResolutionMode) {
6377   const int kFrameWidth = 1280;
6378   const int kFrameHeight = 720;
6379 
6380   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6381       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6382   video_stream_encoder_->SetSource(
6383       &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
6384   video_source_.set_adaptation_enabled(true);
6385 
6386   int64_t timestamp_ms = CurrentTimeMs();
6387 
6388   video_source_.IncomingCapturedFrame(
6389       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6390   WaitForEncodedFrame(timestamp_ms);
6391 
6392   // Try to trigger overuse. No fps estimate available => no effect.
6393   video_stream_encoder_->TriggerCpuOveruse();
6394 
6395   // Insert frames for one second to get a stable estimate.
6396   for (int i = 0; i < max_framerate_; ++i) {
6397     timestamp_ms += kFrameIntervalMs;
6398     video_source_.IncomingCapturedFrame(
6399         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6400     WaitForEncodedFrame(timestamp_ms);
6401   }
6402 
6403   // Trigger CPU overuse, reduce framerate by 2/3.
6404   video_stream_encoder_->TriggerCpuOveruse();
6405   int num_frames_dropped = 0;
6406   for (int i = 0; i < max_framerate_; ++i) {
6407     timestamp_ms += kFrameIntervalMs;
6408     video_source_.IncomingCapturedFrame(
6409         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6410     if (!WaitForFrame(kFrameTimeout)) {
6411       ++num_frames_dropped;
6412     } else {
6413       sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
6414     }
6415   }
6416 
6417   // Add some slack to account for frames dropped by the frame dropper.
6418   const int kErrorMargin = 1;
6419   EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
6420               kErrorMargin);
6421 
6422   // Trigger CPU overuse, reduce framerate by 2/3 again.
6423   video_stream_encoder_->TriggerCpuOveruse();
6424   num_frames_dropped = 0;
6425   for (int i = 0; i <= max_framerate_; ++i) {
6426     timestamp_ms += kFrameIntervalMs;
6427     video_source_.IncomingCapturedFrame(
6428         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6429     if (!WaitForFrame(kFrameTimeout)) {
6430       ++num_frames_dropped;
6431     } else {
6432       sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
6433     }
6434   }
6435   EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 4 / 9),
6436               kErrorMargin);
6437 
6438   // Go back up one step.
6439   video_stream_encoder_->TriggerCpuUnderuse();
6440   num_frames_dropped = 0;
6441   for (int i = 0; i < max_framerate_; ++i) {
6442     timestamp_ms += kFrameIntervalMs;
6443     video_source_.IncomingCapturedFrame(
6444         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6445     if (!WaitForFrame(kFrameTimeout)) {
6446       ++num_frames_dropped;
6447     } else {
6448       sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
6449     }
6450   }
6451   EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
6452               kErrorMargin);
6453 
6454   // Go back up to original mode.
6455   video_stream_encoder_->TriggerCpuUnderuse();
6456   num_frames_dropped = 0;
6457   for (int i = 0; i < max_framerate_; ++i) {
6458     timestamp_ms += kFrameIntervalMs;
6459     video_source_.IncomingCapturedFrame(
6460         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6461     if (!WaitForFrame(kFrameTimeout)) {
6462       ++num_frames_dropped;
6463     } else {
6464       sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
6465     }
6466   }
6467   EXPECT_NEAR(num_frames_dropped, 0, kErrorMargin);
6468 
6469   video_stream_encoder_->Stop();
6470 }
6471 
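// Repeatedly signals CPU overuse until the requested max framerate stops
// decreasing and verifies that it never goes below kMinFramerateFps.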
6472 TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) {
6473   const int kFramerateFps = 5;
6474   const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kFramerateFps;
6475   const int kFrameWidth = 1280;
6476   const int kFrameHeight = 720;
6477 
6478   // Reconfigure encoder with two temporal layers and screensharing, which will
6479   // disable frame dropping and make testing easier.
6480   ResetEncoder("VP8", 1, 2, 1, true);
6481 
6482   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6483       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6484   video_stream_encoder_->SetSource(
6485       &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
6486   video_source_.set_adaptation_enabled(true);
6487 
6488   int64_t timestamp_ms = CurrentTimeMs();
6489 
6490   // Trigger overuse as much as we can.
6491   rtc::VideoSinkWants last_wants;
6492   do {
6493     last_wants = video_source_.sink_wants();
6494 
6495     // Insert frames to get a new fps estimate...
6496     for (int j = 0; j < kFramerateFps; ++j) {
6497       video_source_.IncomingCapturedFrame(
6498           CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6499       if (video_source_.last_sent_width()) {
6500         sink_.WaitForEncodedFrame(timestamp_ms);
6501       }
6502       timestamp_ms += kFrameIntervalMs;
6503       AdvanceTime(TimeDelta::Millis(kFrameIntervalMs));
6504     }
6505     // ...and then try to adapt again.
6506     video_stream_encoder_->TriggerCpuOveruse();
6507   } while (video_source_.sink_wants().max_framerate_fps <
6508            last_wants.max_framerate_fps);
6509 
6510   EXPECT_THAT(video_source_.sink_wants(),
6511               FpsMatchesResolutionMax(Eq(kMinFramerateFps)));
6512 
6513   video_stream_encoder_->Stop();
6514 }
6515 
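// In BALANCED mode quality adaptation alternates between reducing resolution
// and reducing framerate. This test walks the adaptation ladder down from
// 1280x720@30fps to 320x180@7fps and back up, checking sink wants and stats
// at each step.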
6516 TEST_F(VideoStreamEncoderTest,
6517        AdaptsResolutionAndFramerateForLowQuality_BalancedMode) {
6518   const int kWidth = 1280;
6519   const int kHeight = 720;
6520   const int64_t kFrameIntervalMs = 150;
6521   int64_t timestamp_ms = kFrameIntervalMs;
6522   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6523       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6524 
6525   // Enable BALANCED preference, no initial limitation.
6526   AdaptingFrameForwarder source(&time_controller_);
6527   source.set_adaptation_enabled(true);
6528   video_stream_encoder_->SetSource(&source,
6529                                    webrtc::DegradationPreference::BALANCED);
6530   timestamp_ms += kFrameIntervalMs;
6531   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6532   WaitForEncodedFrame(kWidth, kHeight);
6533   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6534   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6535   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6536   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6537 
6538   // Trigger adapt down, expect scaled down resolution (960x540@30fps).
6539   video_stream_encoder_->TriggerQualityLow();
6540   timestamp_ms += kFrameIntervalMs;
6541   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6542   WaitForEncodedFrame(timestamp_ms);
6543   EXPECT_THAT(source.sink_wants(),
6544               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
6545   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6546   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6547   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6548 
6549   // Trigger adapt down, expect scaled down resolution (640x360@30fps).
6550   video_stream_encoder_->TriggerQualityLow();
6551   timestamp_ms += kFrameIntervalMs;
6552   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6553   WaitForEncodedFrame(timestamp_ms);
6554   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
6555   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6556   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6557   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6558 
6559   // Trigger adapt down, expect reduced fps (640x360@15fps).
6560   video_stream_encoder_->TriggerQualityLow();
6561   timestamp_ms += kFrameIntervalMs;
6562   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6563   WaitForEncodedFrame(timestamp_ms);
6564   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
6565   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6566   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6567   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6568 
6569   // Trigger adapt down, expect scaled down resolution (480x270@15fps).
6570   video_stream_encoder_->TriggerQualityLow();
6571   timestamp_ms += kFrameIntervalMs;
6572   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6573   WaitForEncodedFrame(timestamp_ms);
6574   EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
6575   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6576   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6577   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6578 
6579   // Restrict bitrate, trigger adapt down, expect reduced fps (480x270@10fps).
6580   video_stream_encoder_->TriggerQualityLow();
6581   timestamp_ms += kFrameIntervalMs;
6582   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6583   WaitForEncodedFrame(timestamp_ms);
6584   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
6585   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6586   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6587   EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6588 
6589   // Trigger adapt down, expect scaled down resolution (320x180@10fps).
6590   video_stream_encoder_->TriggerQualityLow();
6591   timestamp_ms += kFrameIntervalMs;
6592   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6593   WaitForEncodedFrame(timestamp_ms);
6594   EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
6595   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6596   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6597   EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6598 
6599   // Trigger adapt down, expect reduced fps (320x180@7fps).
6600   video_stream_encoder_->TriggerQualityLow();
6601   timestamp_ms += kFrameIntervalMs;
6602   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6603   WaitForEncodedFrame(timestamp_ms);
6604   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
6605   rtc::VideoSinkWants last_wants = source.sink_wants();
6606   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6607   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6608   EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6609 
6610   // Trigger adapt down, min resolution reached, expect no change.
6611   video_stream_encoder_->TriggerQualityLow();
6612   timestamp_ms += kFrameIntervalMs;
6613   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6614   WaitForEncodedFrame(timestamp_ms);
6615   EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants));
6616   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6617   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6618   EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6619 
6620   // Trigger adapt up, expect increased fps (320x180@10fps).
6621   video_stream_encoder_->TriggerQualityHigh();
6622   timestamp_ms += kFrameIntervalMs;
6623   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6624   WaitForEncodedFrame(timestamp_ms);
6625   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
6626   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6627   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6628   EXPECT_EQ(8, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6629 
6630   // Trigger adapt up, expect upscaled resolution (480x270@10fps).
6631   video_stream_encoder_->TriggerQualityHigh();
6632   timestamp_ms += kFrameIntervalMs;
6633   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6634   WaitForEncodedFrame(timestamp_ms);
6635   EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
6636   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6637   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6638   EXPECT_EQ(9, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6639 
6640   // Increase bitrate, trigger adapt up, expect increased fps (480x270@15fps).
6641   video_stream_encoder_->TriggerQualityHigh();
6642   timestamp_ms += kFrameIntervalMs;
6643   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6644   WaitForEncodedFrame(timestamp_ms);
6645   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
6646   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6647   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6648   EXPECT_EQ(10, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6649 
6650   // Trigger adapt up, expect upscaled resolution (640x360@15fps).
6651   video_stream_encoder_->TriggerQualityHigh();
6652   timestamp_ms += kFrameIntervalMs;
6653   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6654   WaitForEncodedFrame(timestamp_ms);
6655   EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
6656   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6657   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6658   EXPECT_EQ(11, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6659 
6660   // Trigger adapt up, expect increased fps (640x360@30fps).
6661   video_stream_encoder_->TriggerQualityHigh();
6662   timestamp_ms += kFrameIntervalMs;
6663   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6664   WaitForEncodedFrame(timestamp_ms);
6665   EXPECT_THAT(source.sink_wants(), FpsMax());
6666   EXPECT_EQ(source.sink_wants().max_pixel_count,
6667             source.last_wants().max_pixel_count);
6668   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6669   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6670   EXPECT_EQ(12, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6671 
6672   // Trigger adapt up, expect upscaled resolution (960x540@30fps).
6673   video_stream_encoder_->TriggerQualityHigh();
6674   timestamp_ms += kFrameIntervalMs;
6675   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6676   WaitForEncodedFrame(timestamp_ms);
6677   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
6678   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6679   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6680   EXPECT_EQ(13, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6681 
6682   // Trigger adapt up, expect no restriction (1280x720@30fps).
6683   video_stream_encoder_->TriggerQualityHigh();
6684   timestamp_ms += kFrameIntervalMs;
6685   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6686   WaitForEncodedFrame(kWidth, kHeight);
6687   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
6688   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6689   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6690   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6691   EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6692 
6693   // Trigger adapt up, expect no change.
6694   video_stream_encoder_->TriggerQualityHigh();
6695   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6696   EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6697 
6698   video_stream_encoder_->Stop();
6699 }
6700 
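// Mixes CPU and quality adaptations; adapting up is only applied for the
// currently most limited reason(s), and the per-reason adaptation counters
// are expected to be updated accordingly.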
6701 TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
6702   const int kWidth = 1280;
6703   const int kHeight = 720;
6704   const int64_t kFrameIntervalMs = 150;
6705   int64_t timestamp_ms = kFrameIntervalMs;
6706   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6707       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6708 
6709   // Enable BALANCED preference, no initial limitation.
6710   AdaptingFrameForwarder source(&time_controller_);
6711   source.set_adaptation_enabled(true);
6712   video_stream_encoder_->SetSource(&source,
6713                                    webrtc::DegradationPreference::BALANCED);
6714   timestamp_ms += kFrameIntervalMs;
6715   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6716   WaitForEncodedFrame(kWidth, kHeight);
6717   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6718   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6719   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6720   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6721   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6722   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6723   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6724 
6725   // Trigger cpu adapt down, expect scaled down resolution (960x540@30fps).
6726   video_stream_encoder_->TriggerCpuOveruse();
6727   timestamp_ms += kFrameIntervalMs;
6728   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6729   WaitForEncodedFrame(timestamp_ms);
6730   EXPECT_THAT(source.sink_wants(),
6731               FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
6732   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6733   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6734   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
6735   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6736   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6737   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6738 
6739   // Trigger cpu adapt down, expect scaled down resolution (640x360@30fps).
6740   video_stream_encoder_->TriggerCpuOveruse();
6741   timestamp_ms += kFrameIntervalMs;
6742   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6743   WaitForEncodedFrame(timestamp_ms);
6744   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
6745   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6746   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6747   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
6748   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6749   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6750   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6751 
6752   // Trigger quality adapt down, expect reduced fps (640x360@15fps).
6753   video_stream_encoder_->TriggerQualityLow();
6754   timestamp_ms += kFrameIntervalMs;
6755   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6756   WaitForEncodedFrame(timestamp_ms);
6757   EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
6758   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6759   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6760   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
6761   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6762   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6763   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6764 
6765   // Trigger cpu adapt up, expect no change since QP is most limited.
6766   {
6767     // Store current sink wants since we expect no change and if there is no
6768     // change then last_wants() is not updated.
6769     auto previous_sink_wants = source.sink_wants();
6770     video_stream_encoder_->TriggerCpuUnderuse();
6771     timestamp_ms += kFrameIntervalMs;
6772     source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6773     WaitForEncodedFrame(timestamp_ms);
6774     EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants));
6775     EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6776     EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6777   }
6778 
6779   // Trigger quality adapt up, expect increased fps (640x360@30fps).
6780   video_stream_encoder_->TriggerQualityHigh();
6781   timestamp_ms += kFrameIntervalMs;
6782   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6783   WaitForEncodedFrame(timestamp_ms);
6784   EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
6785   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6786   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6787   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
6788   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6789   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6790   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6791 
6792   // Trigger quality adapt up and Cpu adapt up since both are most limited,
6793   // expect increased resolution (960x540@30fps).
6794   video_stream_encoder_->TriggerQualityHigh();
6795   video_stream_encoder_->TriggerCpuUnderuse();
6796   timestamp_ms += kFrameIntervalMs;
6797   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6798   WaitForEncodedFrame(timestamp_ms);
6799   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
6800   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6801   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6802   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
6803   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6804   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6805   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6806 
6807   // Trigger quality adapt up and Cpu adapt up since both are most limited,
6808   // expect no restriction (1280x720@30fps).
6809   video_stream_encoder_->TriggerQualityHigh();
6810   video_stream_encoder_->TriggerCpuUnderuse();
6811   timestamp_ms += kFrameIntervalMs;
6812   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6813   WaitForEncodedFrame(kWidth, kHeight);
6814   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
6815   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6816   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6817   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6818   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6819   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6820   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6821   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6822 
6823   // Trigger adapt up, expect no change.
6824   video_stream_encoder_->TriggerQualityHigh();
6825   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6826   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6827   EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6828 
6829   video_stream_encoder_->Stop();
6830 }
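
// Aside (illustrative only, not part of the test): the resolution ladder named
// in the comments above shrinks strictly in pixel count, which is presumably
// what the FpsMaxResolutionLt/Gt matchers compare between successive wants.
static_assert(1280 * 720 > 960 * 540 && 960 * 540 > 640 * 360,
              "Resolution ladder referenced above must be strictly decreasing.");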
6831 
6832 TEST_F(VideoStreamEncoderTest,
6833        AdaptWithTwoReasonsAndDifferentOrder_Resolution) {
6834   const int kWidth = 640;
6835   const int kHeight = 360;
6836   const int kFpsLimit = 15;
6837   const int64_t kFrameIntervalMs = 150;
6838   int64_t timestamp_ms = kFrameIntervalMs;
6839   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6840       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6841 
6842   // Enable BALANCED preference, no initial limitation.
6843   AdaptingFrameForwarder source(&time_controller_);
6844   source.set_adaptation_enabled(true);
6845   video_stream_encoder_->SetSource(&source,
6846                                    webrtc::DegradationPreference::BALANCED);
6847   timestamp_ms += kFrameIntervalMs;
6848   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6849   WaitForEncodedFrame(kWidth, kHeight);
6850   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6851   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6852   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6853   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6854   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6855   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6856   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6857 
6858   // Trigger cpu adapt down, expect scaled down framerate (640x360@15fps).
6859   video_stream_encoder_->TriggerCpuOveruse();
6860   timestamp_ms += kFrameIntervalMs;
6861   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6862   WaitForEncodedFrame(timestamp_ms);
6863   EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
6864   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6865   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6866   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6867   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
6868   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6869   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6870 
6871   // Trigger quality adapt down, expect scaled down resolution (480x270@15fps).
6872   video_stream_encoder_->TriggerQualityLow();
6873   timestamp_ms += kFrameIntervalMs;
6874   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6875   WaitForEncodedFrame(timestamp_ms);
6876   EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
6877   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
6878   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6879   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6880   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
6881   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6882   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6883 
6884   // Trigger cpu adapt up, expect no change because quality is most limited.
6885   {
6886     auto previous_sink_wants = source.sink_wants();
6887     // Store current sink wants since we expect no change and if there is no
6888     // change then last_wants() is not updated.
6889     video_stream_encoder_->TriggerCpuUnderuse();
6890     timestamp_ms += kFrameIntervalMs;
6891     source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6892     WaitForEncodedFrame(timestamp_ms);
6893     EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants));
6894     EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6895     EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6896   }
6897 
6898   // Trigger quality adapt up, expect upscaled resolution (640x360@15fps).
6899   video_stream_encoder_->TriggerQualityHigh();
6900   timestamp_ms += kFrameIntervalMs;
6901   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6902   WaitForEncodedFrame(timestamp_ms);
6903   EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
6904   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6905   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
6906   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6907   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
6908   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6909   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6910 
6911   // Trigger quality and cpu adapt up, expect increased fps (640x360@30fps).
6912   video_stream_encoder_->TriggerQualityHigh();
6913   video_stream_encoder_->TriggerCpuUnderuse();
6914   timestamp_ms += kFrameIntervalMs;
6915   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
6916   WaitForEncodedFrame(timestamp_ms);
6917   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6918   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
6919   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
6920   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
6921   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
6922   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6923   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6924 
6925   // Trigger adapt up, expect no change.
6926   video_stream_encoder_->TriggerQualityHigh();
6927   EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
6928   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
6929   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
6930 
6931   video_stream_encoder_->Stop();
6932 }
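
// Note (a reading of the two tests above, not a statement of the adaptation
// algorithm): with BALANCED preference the most limited reason has to be
// relaxed first, so relaxing the less limited reason (cpu) is a no-op while
// quality is the tighter constraint, and full recovery only completes once
// both reasons are relaxed together.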
6933 
6934 TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) {
6935   const int kFrameWidth = 1920;
6936   const int kFrameHeight = 1080;
6937   // 2/3 of 1920.
6938   const int kAdaptedFrameWidth = 1280;
6939   // 2/3 of 1080.
6940   const int kAdaptedFrameHeight = 720;
6941   const int kFramerate = 24;
6942 
6943   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6944       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6945   // Trigger reconfigure encoder (without resetting the entire instance).
6946   VideoEncoderConfig video_encoder_config;
6947   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config);
6948   video_encoder_config.simulcast_layers[0].max_framerate = kFramerate;
6949   video_encoder_config.max_bitrate_bps = kTargetBitrate.bps();
6950   video_encoder_config.video_stream_factory =
6951       rtc::make_ref_counted<CroppingVideoStreamFactory>();
6952   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
6953                                           kMaxPayloadLength);
6954   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
6955 
6956   video_source_.set_adaptation_enabled(true);
6957 
6958   video_source_.IncomingCapturedFrame(
6959       CreateFrame(1, kFrameWidth, kFrameHeight));
6960   WaitForEncodedFrame(kFrameWidth, kFrameHeight);
6961 
6962   // Trigger CPU overuse, expect the frame to be scaled down to 2/3 (1280x720).
6963   video_stream_encoder_->TriggerCpuOveruse();
6964   video_source_.IncomingCapturedFrame(
6965       CreateFrame(2, kFrameWidth, kFrameHeight));
6966   WaitForEncodedFrame(kAdaptedFrameWidth, kAdaptedFrameHeight);
6967 
6968   video_stream_encoder_->Stop();
6969 }
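
// Quick sanity arithmetic for the constants above (a sketch, not part of the
// adaptation logic): a 2/3 per-dimension downscale of Full HD gives exactly
// the adapted size the test waits for.
static_assert(1920 * 2 / 3 == 1280 && 1080 * 2 / 3 == 720,
              "1920x1080 scaled by 2/3 per dimension is 1280x720.");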
6970 
6971 TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) {
6972   const int kFrameWidth = 1280;
6973   const int kFrameHeight = 720;
6974   const int kLowFps = 2;
6975   const int kHighFps = 30;
6976 
6977   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6978       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6979 
6980   int64_t timestamp_ms = CurrentTimeMs();
6981   max_framerate_ = kLowFps;
6982 
6983   // Insert 2 seconds of 2fps video.
6984   for (int i = 0; i < kLowFps * 2; ++i) {
6985     video_source_.IncomingCapturedFrame(
6986         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6987     WaitForEncodedFrame(timestamp_ms);
6988     timestamp_ms += 1000 / kLowFps;
6989   }
6990 
6991   // Make sure encoder is updated with new target.
6992   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
6993       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
6994   video_source_.IncomingCapturedFrame(
6995       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
6996   WaitForEncodedFrame(timestamp_ms);
6997   timestamp_ms += 1000 / kLowFps;
6998 
6999   EXPECT_EQ(kLowFps, fake_encoder_.GetConfiguredInputFramerate());
7000 
7001   // Insert 30fps frames for just a little more than the forced update period.
7002   const int kVcmTimerIntervalFrames = (kProcessIntervalMs * kHighFps) / 1000;
7003   constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / kHighFps;
7004   max_framerate_ = kHighFps;
7005   for (int i = 0; i < kVcmTimerIntervalFrames + 2; ++i) {
7006     video_source_.IncomingCapturedFrame(
7007         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
7008     // Wait for encoded frame, but skip ahead if it doesn't arrive as it might
7009     // be dropped if the encoder hasn't been updated with the new higher target
7010     // framerate yet, causing it to overshoot the target bitrate and then
7011     // suffer the wrath of the media optimizer.
7012     TimedWaitForEncodedFrame(timestamp_ms, 2 * kFrameInterval);
7013     timestamp_ms += kFrameInterval.ms();
7014   }
7015 
7016   // Don't expect a correct measurement just yet, but it should be higher than
7017   // before.
7018   EXPECT_GT(fake_encoder_.GetConfiguredInputFramerate(), kLowFps);
7019 
7020   video_stream_encoder_->Stop();
7021 }
7022 
7023 TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) {
7024   const int kFrameWidth = 1280;
7025   const int kFrameHeight = 720;
7026   ResetEncoder("FAKE", 1, 1, 1, false,
7027                VideoStreamEncoder::BitrateAllocationCallbackType::
7028                    kVideoBitrateAllocation);
7029 
7030   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7031       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7032   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7033 
7034   // Insert a first video frame; this causes another bitrate update.
7035   int64_t timestamp_ms = CurrentTimeMs();
7036   video_source_.IncomingCapturedFrame(
7037       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
7038   WaitForEncodedFrame(timestamp_ms);
7039   EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1);
7040 
7041   // Next, simulate video suspension due to pacer queue overrun.
7042   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7043       DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 1, 0);
7044 
7045   // Skip ahead until a new periodic parameter update should have occurred.
7046   timestamp_ms += kProcessIntervalMs;
7047   AdvanceTime(TimeDelta::Millis(kProcessIntervalMs));
7048 
7049   // No more allocations have been made.
7050   video_source_.IncomingCapturedFrame(
7051       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
7052   ExpectDroppedFrame();
7053   EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1);
7054 
7055   video_stream_encoder_->Stop();
7056 }
7057 
7058 TEST_F(VideoStreamEncoderTest,
7059        DefaultCpuAdaptationThresholdsForSoftwareEncoder) {
7060   const int kFrameWidth = 1280;
7061   const int kFrameHeight = 720;
7062   const test::ScopedKeyValueConfig kFieldTrials;
7063   const CpuOveruseOptions default_options(kFieldTrials);
7064   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7065       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7066   video_source_.IncomingCapturedFrame(
7067       CreateFrame(1, kFrameWidth, kFrameHeight));
7068   WaitForEncodedFrame(1);
7069   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7070                 .low_encode_usage_threshold_percent,
7071             default_options.low_encode_usage_threshold_percent);
7072   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7073                 .high_encode_usage_threshold_percent,
7074             default_options.high_encode_usage_threshold_percent);
7075   video_stream_encoder_->Stop();
7076 }
7077 
7078 TEST_F(VideoStreamEncoderTest,
7079        HigherCpuAdaptationThresholdsForHardwareEncoder) {
7080   const int kFrameWidth = 1280;
7081   const int kFrameHeight = 720;
7082   const test::ScopedKeyValueConfig kFieldTrials;
7083   CpuOveruseOptions hardware_options(kFieldTrials);
7084   hardware_options.low_encode_usage_threshold_percent = 150;
7085   hardware_options.high_encode_usage_threshold_percent = 200;
7086   fake_encoder_.SetIsHardwareAccelerated(true);
7087 
7088   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7089       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7090   video_source_.IncomingCapturedFrame(
7091       CreateFrame(1, kFrameWidth, kFrameHeight));
7092   WaitForEncodedFrame(1);
7093   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7094                 .low_encode_usage_threshold_percent,
7095             hardware_options.low_encode_usage_threshold_percent);
7096   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7097                 .high_encode_usage_threshold_percent,
7098             hardware_options.high_encode_usage_threshold_percent);
7099   video_stream_encoder_->Stop();
7100 }
7101 
7102 TEST_F(VideoStreamEncoderTest,
7103        CpuAdaptationThresholdsUpdatesWhenHardwareAccelerationChange) {
7104   const int kFrameWidth = 1280;
7105   const int kFrameHeight = 720;
7106 
7107   const test::ScopedKeyValueConfig kFieldTrials;
7108   const CpuOveruseOptions default_options(kFieldTrials);
7109   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7110       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7111   video_source_.IncomingCapturedFrame(
7112       CreateFrame(1, kFrameWidth, kFrameHeight));
7113   WaitForEncodedFrame(1);
7114   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7115                 .low_encode_usage_threshold_percent,
7116             default_options.low_encode_usage_threshold_percent);
7117   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7118                 .high_encode_usage_threshold_percent,
7119             default_options.high_encode_usage_threshold_percent);
7120 
7121   CpuOveruseOptions hardware_options(kFieldTrials);
7122   hardware_options.low_encode_usage_threshold_percent = 150;
7123   hardware_options.high_encode_usage_threshold_percent = 200;
7124   fake_encoder_.SetIsHardwareAccelerated(true);
7125 
7126   video_source_.IncomingCapturedFrame(
7127       CreateFrame(2, kFrameWidth, kFrameHeight));
7128   WaitForEncodedFrame(2);
7129 
7130   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7131                 .low_encode_usage_threshold_percent,
7132             hardware_options.low_encode_usage_threshold_percent);
7133   EXPECT_EQ(video_stream_encoder_->overuse_detector_proxy_->GetOptions()
7134                 .high_encode_usage_threshold_percent,
7135             hardware_options.high_encode_usage_threshold_percent);
7136 
7137   video_stream_encoder_->Stop();
7138 }
7139 
7140 TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) {
7141   const int kFrameWidth = 320;
7142   const int kFrameHeight = 240;
7143   const int kFps = 30;
7144   const DataRate kTargetBitrate = DataRate::KilobitsPerSec(120);
7145   const int kNumFramesInRun = kFps * 5;  // Runs of five seconds.
7146 
7147   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7148       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7149 
7150   int64_t timestamp_ms = CurrentTimeMs();
7151   max_framerate_ = kFps;
7152 
7153   // Insert five seconds of video, verify number of drops with normal bitrate.
7154   fake_encoder_.SimulateOvershoot(1.0);
7155   int num_dropped = 0;
7156   for (int i = 0; i < kNumFramesInRun; ++i) {
7157     video_source_.IncomingCapturedFrame(
7158         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
7159     // Wait up to two frame durations for a frame to arrive.
7160     if (!TimedWaitForEncodedFrame(timestamp_ms,
7161                                   2 * TimeDelta::Seconds(1) / kFps)) {
7162       ++num_dropped;
7163     }
7164     timestamp_ms += 1000 / kFps;
7165   }
7166 
7167   // Framerate should be measured to be near the expected target rate.
7168   EXPECT_NEAR(fake_encoder_.GetLastFramerate(), kFps, 1);
7169 
7170   // Frame drops should be within 5% of expected 0%.
7171   EXPECT_NEAR(num_dropped, 0, 5 * kNumFramesInRun / 100);
7172 
7173   // Make the encoder produce frames at double the expected bitrate during
7174   // five seconds of video, verify number of drops. Rate needs to be slightly
7175   // changed in order to force the rate to be reconfigured.
7176   double overshoot_factor = 2.0;
7177   const RateControlSettings trials =
7178       RateControlSettings::ParseFromFieldTrials();
7179   if (trials.UseEncoderBitrateAdjuster()) {
7180     // With the bitrate adjuster, we need to overshoot even more to trigger
7181     // frame dropping since the adjuster will try to just lower the target
7182     // bitrate rather than drop frames. If network headroom can be used, it
7183     // doesn't push back as hard so we don't need quite as much overshoot.
7184     // These numbers are unfortunately a bit magical but there's no trivial
7185     // way to algebraically infer them.
7186     overshoot_factor = 3.0;
7187   }
7188   fake_encoder_.SimulateOvershoot(overshoot_factor);
7189   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7190       kTargetBitrate + DataRate::KilobitsPerSec(1),
7191       kTargetBitrate + DataRate::KilobitsPerSec(1),
7192       kTargetBitrate + DataRate::KilobitsPerSec(1), 0, 0, 0);
7193   num_dropped = 0;
7194   for (int i = 0; i < kNumFramesInRun; ++i) {
7195     video_source_.IncomingCapturedFrame(
7196         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
7197     // Wait up to two frame durations for a frame to arrive.
7198     if (!TimedWaitForEncodedFrame(timestamp_ms,
7199                                   2 * TimeDelta::Seconds(1) / kFps)) {
7200       ++num_dropped;
7201     }
7202     timestamp_ms += 1000 / kFps;
7203   }
7204 
7205   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7206       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7207 
7208   // Target framerate should still be near the expected target, despite
7209   // the frame drops.
7210   EXPECT_NEAR(fake_encoder_.GetLastFramerate(), kFps, 1);
7211 
7212   // Frame drops should be within 5% of expected 50%.
7213   EXPECT_NEAR(num_dropped, kNumFramesInRun / 2, 5 * kNumFramesInRun / 100);
7214 
7215   video_stream_encoder_->Stop();
7216 }
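
// Back-of-the-envelope check for the expectation above (a sketch; the real
// drop decision is made by the media optimizer, not by this formula): with a
// 2x overshoot at an unchanged target bitrate, roughly every second frame has
// to be dropped to stay on budget, i.e. about half of the 150-frame run.
static_assert(30 * 5 / 2 == 75,
              "Half of a five second run at 30 fps is 75 frames.");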
7217 
7218 TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) {
7219   const int kFrameWidth = 320;
7220   const int kFrameHeight = 240;
7221   const int kActualInputFps = 24;
7222   const DataRate kTargetBitrate = DataRate::KilobitsPerSec(120);
7223 
7224   ASSERT_GT(max_framerate_, kActualInputFps);
7225 
7226   int64_t timestamp_ms = CurrentTimeMs();
7227   max_framerate_ = kActualInputFps;
7228   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7229       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7230 
7231   // Insert 3 seconds of video, with an input fps lower than configured max.
7232   for (int i = 0; i < kActualInputFps * 3; ++i) {
7233     video_source_.IncomingCapturedFrame(
7234         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
7235     // Wait for the encoded frame to arrive.
7236     WaitForEncodedFrame(timestamp_ms);
7237     timestamp_ms += 1000 / kActualInputFps;
7238   }
7239 
7240   EXPECT_NEAR(kActualInputFps, fake_encoder_.GetLastFramerate(), 1);
7241 
7242   video_stream_encoder_->Stop();
7243 }
7244 
7245 TEST_F(VideoStreamEncoderTest, AccumulatesUpdateRectOnDroppedFrames) {
7246   VideoFrame::UpdateRect rect;
7247   test::FrameForwarder source;
7248   video_stream_encoder_->SetSource(&source,
7249                                    DegradationPreference::MAINTAIN_FRAMERATE);
7250   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7251       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7252 
7253   source.IncomingCapturedFrame(CreateFrameWithUpdatedPixel(1, nullptr, 0));
7254   WaitForEncodedFrame(1);
7255   // On the very first frame, a full update should be forced.
7256   rect = fake_encoder_.GetLastUpdateRect();
7257   EXPECT_EQ(rect.offset_x, 0);
7258   EXPECT_EQ(rect.offset_y, 0);
7259   EXPECT_EQ(rect.height, codec_height_);
7260   EXPECT_EQ(rect.width, codec_width_);
7261   // Frame with NTP timestamp 2 will be dropped due to outstanding frames
7262   // scheduled for processing during encoder queue processing of frame 1.
7263   source.IncomingCapturedFrame(CreateFrameWithUpdatedPixel(2, nullptr, 1));
7264   source.IncomingCapturedFrame(CreateFrameWithUpdatedPixel(3, nullptr, 10));
7265   WaitForEncodedFrame(3);
7266   // Updates to pixels 1 and 10 should be accumulated to one 10x1 rect.
7267   rect = fake_encoder_.GetLastUpdateRect();
7268   EXPECT_EQ(rect.offset_x, 1);
7269   EXPECT_EQ(rect.offset_y, 0);
7270   EXPECT_EQ(rect.width, 10);
7271   EXPECT_EQ(rect.height, 1);
7272 
7273   source.IncomingCapturedFrame(CreateFrameWithUpdatedPixel(4, nullptr, 0));
7274   WaitForEncodedFrame(4);
7275   // Previous frame was encoded, so no accumulation should happen.
7276   rect = fake_encoder_.GetLastUpdateRect();
7277   EXPECT_EQ(rect.offset_x, 0);
7278   EXPECT_EQ(rect.offset_y, 0);
7279   EXPECT_EQ(rect.width, 1);
7280   EXPECT_EQ(rect.height, 1);
7281 
7282   video_stream_encoder_->Stop();
7283 }
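
// Sketch of the accumulation checked above (assumed semantics: the update
// rect of a dropped frame is unioned into the next encoded frame's rect):
// single-pixel updates at columns 1 and 10 on row 0 have the bounding box
// offset_x = 1, offset_y = 0, width = 10, height = 1.
static_assert(11 - 1 == 10, "Bounding box of columns [1, 2) and [10, 11).");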
7284 
7285 TEST_F(VideoStreamEncoderTest, SetsFrameTypes) {
7286   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7287       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7288 
7289   // First frame is always keyframe.
7290   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
7291   WaitForEncodedFrame(1);
7292   EXPECT_THAT(
7293       fake_encoder_.LastFrameTypes(),
7294       ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameKey}));
7295 
7296   // Insert delta frame.
7297   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
7298   WaitForEncodedFrame(2);
7299   EXPECT_THAT(
7300       fake_encoder_.LastFrameTypes(),
7301       ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameDelta}));
7302 
7303   // Request next frame be a key-frame.
7304   video_stream_encoder_->SendKeyFrame();
7305   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
7306   WaitForEncodedFrame(3);
7307   EXPECT_THAT(
7308       fake_encoder_.LastFrameTypes(),
7309       ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameKey}));
7310 
7311   video_stream_encoder_->Stop();
7312 }
7313 
7314 TEST_F(VideoStreamEncoderTest, SetsFrameTypesSimulcast) {
7315   // Setup simulcast with three streams.
7316   ResetEncoder("VP8", 3, 1, 1, false);
7317   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7318       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
7319       0, 0, 0);
7320   // Wait for all three layers before triggering event.
7321   sink_.SetNumExpectedLayers(3);
7322 
7323   // First frame is always keyframe.
7324   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
7325   WaitForEncodedFrame(1);
7326   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
7327               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey,
7328                                            VideoFrameType::kVideoFrameKey,
7329                                            VideoFrameType::kVideoFrameKey}));
7330 
7331   // Insert delta frame.
7332   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
7333   WaitForEncodedFrame(2);
7334   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
7335               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameDelta,
7336                                            VideoFrameType::kVideoFrameDelta,
7337                                            VideoFrameType::kVideoFrameDelta}));
7338 
7339   // Request next frame be a key-frame.
7340   // Only first stream is configured to produce key-frame.
7341   video_stream_encoder_->SendKeyFrame();
7342   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
7343   WaitForEncodedFrame(3);
7344 
7345   // TODO(webrtc:10615): Map keyframe request to spatial layer. Currently
7346   // keyframe request on any layer triggers keyframe on all layers.
7347   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
7348               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey,
7349                                            VideoFrameType::kVideoFrameKey,
7350                                            VideoFrameType::kVideoFrameKey}));
7351 
7352   video_stream_encoder_->Stop();
7353 }
7354 
7355 TEST_F(VideoStreamEncoderTest, DoesNotRewriteH264BitstreamWithOptimalSps) {
7356   // SPS contains VUI with restrictions on the maximum number of reordered
7357   // pictures, so there is no need to rewrite the bitstream to enable faster
7358   // decoding.
7359   ResetEncoder("H264", 1, 1, 1, false);
7360 
7361   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7362       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7363   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7364 
7365   fake_encoder_.SetEncodedImageData(
7366       EncodedImageBuffer::Create(kOptimalSps, sizeof(kOptimalSps)));
7367 
7368   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
7369   WaitForEncodedFrame(1);
7370 
7371   EXPECT_THAT(sink_.GetLastEncodedImageData(),
7372               testing::ElementsAreArray(kOptimalSps));
7373 
7374   video_stream_encoder_->Stop();
7375 }
7376 
7377 TEST_F(VideoStreamEncoderTest, RewritesH264BitstreamWithNonOptimalSps) {
7378   // SPS does not contain VUI, so the bitstream will be rewritten with an added
7379   // VUI that restricts the maximum number of reordered pictures to enable
7380   // faster decoding.
7381   uint8_t original_sps[] = {0,    0,    0,    1,    H264::NaluType::kSps,
7382                             0x00, 0x00, 0x03, 0x03, 0xF4,
7383                             0x05, 0x03, 0xC7, 0xC0};
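  // Note on the test vector above (an observation, not a requirement): the
  // first four bytes are an Annex B start code, followed by an SPS NAL unit
  // whose payload carries no VUI parameters.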
7384   ResetEncoder("H264", 1, 1, 1, false);
7385 
7386   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7387       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7388   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7389 
7390   fake_encoder_.SetEncodedImageData(
7391       EncodedImageBuffer::Create(original_sps, sizeof(original_sps)));
7392 
7393   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
7394   WaitForEncodedFrame(1);
7395 
7396   EXPECT_THAT(sink_.GetLastEncodedImageData(),
7397               testing::ElementsAreArray(kOptimalSps));
7398 
7399   video_stream_encoder_->Stop();
7400 }
7401 
7402 TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) {
7403   const int kFrameWidth = 1280;
7404   const int kFrameHeight = 720;
7405   const DataRate kTargetBitrate =
7406       DataRate::KilobitsPerSec(300);  // Too low for HD resolution.
7407 
7408   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7409       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7410   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7411 
7412   // Insert a first video frame. It should be dropped because the resolution
7413   // needs to be downscaled.
7414   int64_t timestamp_ms = CurrentTimeMs();
7415   VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight);
7416   frame.set_rotation(kVideoRotation_270);
7417   video_source_.IncomingCapturedFrame(frame);
7418 
7419   ExpectDroppedFrame();
7420 
7421   // Second frame is downscaled.
7422   timestamp_ms = CurrentTimeMs();
7423   frame = CreateFrame(timestamp_ms, kFrameWidth / 2, kFrameHeight / 2);
7424   frame.set_rotation(kVideoRotation_90);
7425   video_source_.IncomingCapturedFrame(frame);
7426 
7427   WaitForEncodedFrame(timestamp_ms);
7428   sink_.CheckLastFrameRotationMatches(kVideoRotation_90);
7429 
7430   // Insert another frame, also downscaled.
7431   timestamp_ms = CurrentTimeMs();
7432   frame = CreateFrame(timestamp_ms, kFrameWidth / 2, kFrameHeight / 2);
7433   frame.set_rotation(kVideoRotation_180);
7434   video_source_.IncomingCapturedFrame(frame);
7435 
7436   WaitForEncodedFrame(timestamp_ms);
7437   sink_.CheckLastFrameRotationMatches(kVideoRotation_180);
7438 
7439   video_stream_encoder_->Stop();
7440 }
7441 
7442 TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) {
7443   const int kFrameWidth = 320;
7444   const int kFrameHeight = 180;
7445 
7446   // Initial rate.
7447   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7448       /*target_bitrate=*/DataRate::KilobitsPerSec(300),
7449       /*stable_target_bitrate=*/DataRate::KilobitsPerSec(300),
7450       /*link_allocation=*/DataRate::KilobitsPerSec(300),
7451       /*fraction_lost=*/0,
7452       /*round_trip_time_ms=*/0,
7453       /*cwnd_reduce_ratio=*/0);
7454 
7455   // Insert a first video frame so that encoder gets configured.
7456   int64_t timestamp_ms = CurrentTimeMs();
7457   VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight);
7458   frame.set_rotation(kVideoRotation_270);
7459   video_source_.IncomingCapturedFrame(frame);
7460   WaitForEncodedFrame(timestamp_ms);
7461 
7462   // Set a target rate below the minimum allowed by the codec settings.
7463   VideoCodec codec_config = fake_encoder_.config();
7464   DataRate min_rate = DataRate::KilobitsPerSec(codec_config.minBitrate);
7465   DataRate target_rate = min_rate - DataRate::KilobitsPerSec(1);
7466   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7467       /*target_bitrate=*/target_rate,
7468       /*stable_target_bitrate=*/target_rate,
7469       /*link_allocation=*/target_rate,
7470       /*fraction_lost=*/0,
7471       /*round_trip_time_ms=*/0,
7472       /*cwnd_reduce_ratio=*/0);
7473   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7474 
7475   // Target bitrate and bandwidth allocation should both be capped at min_rate.
7476   auto rate_settings = fake_encoder_.GetAndResetLastRateControlSettings();
7477   ASSERT_TRUE(rate_settings.has_value());
7478   DataRate allocation_sum =
7479       DataRate::BitsPerSec(rate_settings->bitrate.get_sum_bps());
7480   EXPECT_EQ(min_rate, allocation_sum);
7481   EXPECT_EQ(rate_settings->bandwidth_allocation, min_rate);
7482 
7483   video_stream_encoder_->Stop();
7484 }
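
// Sketch of the clamping behaviour asserted above (an assumed model of the
// intent, not the encoder's actual code path): a requested rate below the
// codec minimum is lifted to that minimum before reaching the encoder.
constexpr int ClampToCodecMinimumSketch(int requested_bps, int min_bps) {
  return requested_bps < min_bps ? min_bps : requested_bps;
}
static_assert(ClampToCodecMinimumSketch(29000, 30000) == 30000,
              "A request below the minimum is raised to the minimum.");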
7485 
7486 TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) {
7487   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7488       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7489   // Capture a frame and wait for it to synchronize with the encoder thread.
7490   int64_t timestamp_ms = CurrentTimeMs();
7491   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, nullptr));
7492   WaitForEncodedFrame(1);
7493 
7494   auto prev_rate_settings = fake_encoder_.GetAndResetLastRateControlSettings();
7495   ASSERT_TRUE(prev_rate_settings.has_value());
7496   EXPECT_EQ(static_cast<int>(prev_rate_settings->framerate_fps),
7497             kDefaultFramerate);
7498 
7499   // Send 2s of video to ensure the framerate is stable at kDefaultFramerate.
7500   for (int i = 0; i < 2 * kDefaultFramerate; i++) {
7501     timestamp_ms += 1000 / kDefaultFramerate;
7502     video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, nullptr));
7503     WaitForEncodedFrame(timestamp_ms);
7504   }
7505   EXPECT_EQ(static_cast<int>(fake_encoder_.GetLastFramerate()),
7506             kDefaultFramerate);
7507   // Capture larger frame to trigger a reconfigure.
7508   codec_height_ *= 2;
7509   codec_width_ *= 2;
7510   timestamp_ms += 1000 / kDefaultFramerate;
7511   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, nullptr));
7512   WaitForEncodedFrame(timestamp_ms);
7513 
7514   EXPECT_EQ(2, sink_.number_of_reconfigurations());
7515   auto current_rate_settings =
7516       fake_encoder_.GetAndResetLastRateControlSettings();
7517   // Ensure we have actually reconfigured twice.
7518   // The rate settings should have been set again even though
7519   // they haven't changed.
7520   ASSERT_TRUE(current_rate_settings.has_value());
7521   EXPECT_EQ(prev_rate_settings, current_rate_settings);
7522 
7523   video_stream_encoder_->Stop();
7524 }
7525 
7526 struct MockEncoderSwitchRequestCallback : public EncoderSwitchRequestCallback {
7527   MOCK_METHOD(void, RequestEncoderFallback, (), (override));
7528   MOCK_METHOD(void,
7529               RequestEncoderSwitch,
7530               (const webrtc::SdpVideoFormat& format,
7531                bool allow_default_fallback),
7532               (override));
7533 };
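
// The mock above is wired into video_send_config_ by the tests below via
// encoder_settings.encoder_switch_request_callback; expectations on
// RequestEncoderSwitch() then verify which fallback format is requested and
// whether default fallback is allowed.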
7534 
7535 TEST_F(VideoStreamEncoderTest, EncoderSelectorCurrentEncoderIsSignaled) {
7536   constexpr int kDontCare = 100;
7537   StrictMock<MockEncoderSelector> encoder_selector;
7538   auto encoder_factory = std::make_unique<test::VideoEncoderProxyFactory>(
7539       &fake_encoder_, &encoder_selector);
7540   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7541 
7542   // Reset encoder for new configuration to take effect.
7543   ConfigureEncoder(video_encoder_config_.Copy());
7544 
7545   EXPECT_CALL(encoder_selector, OnCurrentEncoder);
7546 
7547   video_source_.IncomingCapturedFrame(
7548       CreateFrame(kDontCare, kDontCare, kDontCare));
7549   AdvanceTime(TimeDelta::Zero());
7550   video_stream_encoder_->Stop();
7551 
7552   // The encoders produced by the VideoEncoderProxyFactory have a pointer back
7553   // to its factory, so in order for the encoder instance in the
7554   // `video_stream_encoder_` to be destroyed before the `encoder_factory` we
7555   // reset the `video_stream_encoder_` here.
7556   video_stream_encoder_.reset();
7557 }
7558 
7559 TEST_F(VideoStreamEncoderTest, EncoderSelectorBitrateSwitch) {
7560   constexpr int kDontCare = 100;
7561 
7562   NiceMock<MockEncoderSelector> encoder_selector;
7563   StrictMock<MockEncoderSwitchRequestCallback> switch_callback;
7564   video_send_config_.encoder_settings.encoder_switch_request_callback =
7565       &switch_callback;
7566   auto encoder_factory = std::make_unique<test::VideoEncoderProxyFactory>(
7567       &fake_encoder_, &encoder_selector);
7568   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7569 
7570   // Reset encoder for new configuration to take effect.
7571   ConfigureEncoder(video_encoder_config_.Copy());
7572 
7573   ON_CALL(encoder_selector, OnAvailableBitrate)
7574       .WillByDefault(Return(SdpVideoFormat("AV1")));
7575   EXPECT_CALL(switch_callback,
7576               RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV1"),
7577                                    /*allow_default_fallback=*/false));
7578 
7579   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7580       /*target_bitrate=*/DataRate::KilobitsPerSec(50),
7581       /*stable_target_bitrate=*/DataRate::KilobitsPerSec(kDontCare),
7582       /*link_allocation=*/DataRate::KilobitsPerSec(kDontCare),
7583       /*fraction_lost=*/0,
7584       /*round_trip_time_ms=*/0,
7585       /*cwnd_reduce_ratio=*/0);
7586   AdvanceTime(TimeDelta::Zero());
7587 
7588   video_stream_encoder_->Stop();
7589 }
7590 
7591 TEST_F(VideoStreamEncoderTest, EncoderSelectorResolutionSwitch) {
7592   NiceMock<MockEncoderSelector> encoder_selector;
7593   StrictMock<MockEncoderSwitchRequestCallback> switch_callback;
7594   video_send_config_.encoder_settings.encoder_switch_request_callback =
7595       &switch_callback;
7596   auto encoder_factory = std::make_unique<test::VideoEncoderProxyFactory>(
7597       &fake_encoder_, &encoder_selector);
7598   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7599 
7600   // Reset encoder for new configuration to take effect.
7601   ConfigureEncoder(video_encoder_config_.Copy());
7602 
7603   EXPECT_CALL(encoder_selector, OnResolutionChange(RenderResolution(640, 480)))
7604       .WillOnce(Return(absl::nullopt));
7605   EXPECT_CALL(encoder_selector, OnResolutionChange(RenderResolution(320, 240)))
7606       .WillOnce(Return(SdpVideoFormat("AV1")));
7607   EXPECT_CALL(switch_callback,
7608               RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV1"),
7609                                    /*allow_default_fallback=*/false));
7610 
7611   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7612       /*target_bitrate=*/DataRate::KilobitsPerSec(800),
7613       /*stable_target_bitrate=*/DataRate::KilobitsPerSec(1000),
7614       /*link_allocation=*/DataRate::KilobitsPerSec(1000),
7615       /*fraction_lost=*/0,
7616       /*round_trip_time_ms=*/0,
7617       /*cwnd_reduce_ratio=*/0);
7618 
7619   video_source_.IncomingCapturedFrame(CreateFrame(1, 640, 480));
7620   video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 480));
7621   video_source_.IncomingCapturedFrame(CreateFrame(3, 320, 240));
7622 
7623   AdvanceTime(TimeDelta::Zero());
7624 
7625   video_stream_encoder_->Stop();
7626 }
7627 
7628 TEST_F(VideoStreamEncoderTest, EncoderSelectorBrokenEncoderSwitch) {
7629   constexpr int kSufficientBitrateToNotDrop = 1000;
7630   constexpr int kDontCare = 100;
7631 
7632   NiceMock<MockVideoEncoder> video_encoder;
7633   NiceMock<MockEncoderSelector> encoder_selector;
7634   StrictMock<MockEncoderSwitchRequestCallback> switch_callback;
7635   video_send_config_.encoder_settings.encoder_switch_request_callback =
7636       &switch_callback;
7637   auto encoder_factory = std::make_unique<test::VideoEncoderProxyFactory>(
7638       &video_encoder, &encoder_selector);
7639   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7640 
7641   // Reset encoder for new configuration to take effect.
7642   ConfigureEncoder(video_encoder_config_.Copy());
7643 
7644   // The VideoStreamEncoder needs some bitrate before it can start encoding,
7645   // so set some bitrate here so that subsequent calls to WaitForEncodedFrame
7646   // do not fail.
7647   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7648       /*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
7649       /*stable_target_bitrate=*/
7650       DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
7651       /*link_allocation=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
7652       /*fraction_lost=*/0,
7653       /*round_trip_time_ms=*/0,
7654       /*cwnd_reduce_ratio=*/0);
7655 
7656   ON_CALL(video_encoder, Encode)
7657       .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE));
7658   ON_CALL(encoder_selector, OnEncoderBroken)
7659       .WillByDefault(Return(SdpVideoFormat("AV2")));
7660 
7661   rtc::Event encode_attempted;
7662   EXPECT_CALL(switch_callback,
7663               RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV2"),
7664                                    /*allow_default_fallback=*/true))
7665       .WillOnce([&encode_attempted]() { encode_attempted.Set(); });
7666 
7667   video_source_.IncomingCapturedFrame(CreateFrame(1, kDontCare, kDontCare));
7668   encode_attempted.Wait(TimeDelta::Seconds(3));
7669 
7670   AdvanceTime(TimeDelta::Zero());
7671 
7672   video_stream_encoder_->Stop();
7673 
7674   // The encoders produced by the VideoEncoderProxyFactory have a pointer back
7675   // to its factory, so in order for the encoder instance in the
7676   // `video_stream_encoder_` to be destroyed before the `encoder_factory` we
7677   // reset the `video_stream_encoder_` here.
7678   video_stream_encoder_.reset();
7679 }
7680 
7681 TEST_F(VideoStreamEncoderTest, SwitchEncoderOnInitFailureWithEncoderSelector) {
7682   NiceMock<MockVideoEncoder> video_encoder;
7683   NiceMock<MockEncoderSelector> encoder_selector;
7684   StrictMock<MockEncoderSwitchRequestCallback> switch_callback;
7685   video_send_config_.encoder_settings.encoder_switch_request_callback =
7686       &switch_callback;
7687   auto encoder_factory = std::make_unique<test::VideoEncoderProxyFactory>(
7688       &video_encoder, &encoder_selector);
7689   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7690 
7691   // Reset encoder for new configuration to take effect.
7692   ConfigureEncoder(video_encoder_config_.Copy());
7693 
7694   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7695       kTargetBitrate, kTargetBitrate, kTargetBitrate, /*fraction_lost=*/0,
7696       /*round_trip_time_ms=*/0,
7697       /*cwnd_reduce_ratio=*/0);
7698   ASSERT_EQ(0, sink_.number_of_reconfigurations());
7699 
7700   ON_CALL(video_encoder, InitEncode(_, _))
7701       .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE));
7702   ON_CALL(encoder_selector, OnEncoderBroken)
7703       .WillByDefault(Return(SdpVideoFormat("AV2")));
7704 
7705   rtc::Event encode_attempted;
7706   EXPECT_CALL(switch_callback,
7707               RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV2"),
7708                                    /*allow_default_fallback=*/true))
7709       .WillOnce([&encode_attempted]() { encode_attempted.Set(); });
7710 
7711   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
7712   encode_attempted.Wait(TimeDelta::Seconds(3));
7713 
7714   AdvanceTime(TimeDelta::Zero());
7715 
7716   video_stream_encoder_->Stop();
7717 
7718   // The encoders produced by the VideoEncoderProxyFactory have a pointer back
7719   // to its factory, so in order for the encoder instance in the
7720   // `video_stream_encoder_` to be destroyed before the `encoder_factory` we
7721   // reset the `video_stream_encoder_` here.
7722   video_stream_encoder_.reset();
7723 }
7724 
7725 TEST_F(VideoStreamEncoderTest,
7726        SwitchEncoderOnInitFailureWithoutEncoderSelector) {
7727   NiceMock<MockVideoEncoder> video_encoder;
7728   StrictMock<MockEncoderSwitchRequestCallback> switch_callback;
7729   video_send_config_.encoder_settings.encoder_switch_request_callback =
7730       &switch_callback;
7731   auto encoder_factory = std::make_unique<test::VideoEncoderProxyFactory>(
7732       &video_encoder, /*encoder_selector=*/nullptr);
7733   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7734 
7735   // Reset encoder for new configuration to take effect.
7736   ConfigureEncoder(video_encoder_config_.Copy());
7737 
7738   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7739       kTargetBitrate, kTargetBitrate, kTargetBitrate, /*fraction_lost=*/0,
7740       /*round_trip_time_ms=*/0,
7741       /*cwnd_reduce_ratio=*/0);
7742   ASSERT_EQ(0, sink_.number_of_reconfigurations());
7743 
7744   ON_CALL(video_encoder, InitEncode(_, _))
7745       .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE));
7746 
7747   rtc::Event encode_attempted;
7748   EXPECT_CALL(switch_callback,
7749               RequestEncoderSwitch(Field(&SdpVideoFormat::name, "VP8"),
7750                                    /*allow_default_fallback=*/true))
7751       .WillOnce([&encode_attempted]() { encode_attempted.Set(); });
7752 
7753   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
7754   encode_attempted.Wait(TimeDelta::Seconds(3));
7755 
7756   AdvanceTime(TimeDelta::Zero());
7757 
7758   video_stream_encoder_->Stop();
7759 
7760   // The encoders produced by the VideoEncoderProxyFactory have a pointer back
7761   // to its factory, so in order for the encoder instance in the
7762   // `video_stream_encoder_` to be destroyed before the `encoder_factory` we
7763   // reset the `video_stream_encoder_` here.
7764   video_stream_encoder_.reset();
7765 }
7766 
7767 TEST_F(VideoStreamEncoderTest, NullEncoderReturnSwitch) {
7768   // A variant of EncoderSelectorBrokenEncoderSwitch: when a null
7769   // VideoEncoder is passed to the encoder_factory, check that the codec
7770   // switch occurs without a crash.
7771   constexpr int kSufficientBitrateToNotDrop = 1000;
7772   constexpr int kDontCare = 100;
7773 
7774   NiceMock<MockEncoderSelector> encoder_selector;
7775   StrictMock<MockEncoderSwitchRequestCallback> switch_callback;
7776   video_send_config_.encoder_settings.encoder_switch_request_callback =
7777       &switch_callback;
7778   auto encoder_factory =
7779       std::make_unique<test::VideoEncoderNullableProxyFactory>(
7780           /*encoder=*/nullptr, &encoder_selector);
7781   video_send_config_.encoder_settings.encoder_factory = encoder_factory.get();
7782 
7783   // Reset encoder for new configuration to take effect.
7784   ConfigureEncoder(video_encoder_config_.Copy());
7785   // The VideoStreamEncoder needs some bitrate before it can start encoding,
7786   // so set some bitrate here so that subsequent calls to WaitForEncodedFrame
7787   // do not fail.
7788   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7789       /*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
7790       /*stable_target_bitrate=*/
7791       DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
7792       /*link_allocation=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop),
7793       /*fraction_lost=*/0,
7794       /*round_trip_time_ms=*/0,
7795       /*cwnd_reduce_ratio=*/0);
7796   ON_CALL(encoder_selector, OnEncoderBroken)
7797       .WillByDefault(Return(SdpVideoFormat("AV2")));
7798   rtc::Event encode_attempted;
7799   EXPECT_CALL(switch_callback,
7800               RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV2"),
7801                                    /*allow_default_fallback=*/_))
7802       .WillOnce([&encode_attempted]() { encode_attempted.Set(); });
7803 
7804   video_source_.IncomingCapturedFrame(CreateFrame(1, kDontCare, kDontCare));
7805   encode_attempted.Wait(TimeDelta::Seconds(3));
7806 
7807   AdvanceTime(TimeDelta::Zero());
7808 
7809   video_stream_encoder_->Stop();
7810 
7811   // The encoders produced by the VideoEncoderProxyFactory have a pointer back
7812   // to its factory, so in order for the encoder instance in the
7813   // `video_stream_encoder_` to be destroyed before the `encoder_factory` we
7814   // reset the `video_stream_encoder_` here.
7815   video_stream_encoder_.reset();
7816 }
7817 
7818 TEST_F(VideoStreamEncoderTest,
7819        AllocationPropagatedToEncoderWhenTargetRateChanged) {
7820   const int kFrameWidth = 320;
7821   const int kFrameHeight = 180;
7822 
7823   // Set initial rate.
7824   auto rate = DataRate::KilobitsPerSec(100);
7825   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7826       /*target_bitrate=*/rate,
7827       /*stable_target_bitrate=*/rate,
7828       /*link_allocation=*/rate,
7829       /*fraction_lost=*/0,
7830       /*round_trip_time_ms=*/0,
7831       /*cwnd_reduce_ratio=*/0);
7832 
7833   // Insert a first video frame so that encoder gets configured.
7834   int64_t timestamp_ms = CurrentTimeMs();
7835   VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight);
7836   frame.set_rotation(kVideoRotation_270);
7837   video_source_.IncomingCapturedFrame(frame);
7838   WaitForEncodedFrame(timestamp_ms);
7839   EXPECT_EQ(1, fake_encoder_.GetNumSetRates());
7840 
7841   // Change of target bitrate propagates to the encoder.
7842   auto new_stable_rate = rate - DataRate::KilobitsPerSec(5);
7843   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7844       /*target_bitrate=*/new_stable_rate,
7845       /*stable_target_bitrate=*/new_stable_rate,
7846       /*link_allocation=*/rate,
7847       /*fraction_lost=*/0,
7848       /*round_trip_time_ms=*/0,
7849       /*cwnd_reduce_ratio=*/0);
7850   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7851   EXPECT_EQ(2, fake_encoder_.GetNumSetRates());
7852   video_stream_encoder_->Stop();
7853 }
7854 
7855 TEST_F(VideoStreamEncoderTest,
7856        AllocationNotPropagatedToEncoderWhenTargetRateUnchanged) {
7857   const int kFrameWidth = 320;
7858   const int kFrameHeight = 180;
7859 
7860   // Set initial rate.
7861   auto rate = DataRate::KilobitsPerSec(100);
7862   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7863       /*target_bitrate=*/rate,
7864       /*stable_target_bitrate=*/rate,
7865       /*link_allocation=*/rate,
7866       /*fraction_lost=*/0,
7867       /*round_trip_time_ms=*/0,
7868       /*cwnd_reduce_ratio=*/0);
7869 
7870   // Insert a first video frame so that encoder gets configured.
7871   int64_t timestamp_ms = CurrentTimeMs();
7872   VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight);
7873   frame.set_rotation(kVideoRotation_270);
7874   video_source_.IncomingCapturedFrame(frame);
7875   WaitForEncodedFrame(timestamp_ms);
7876   EXPECT_EQ(1, fake_encoder_.GetNumSetRates());
7877 
7878   // Lower the stable target rate without changing the target rate or the
7879   // link allocation. Should not reset the encoder's rates.
7880   auto new_stable_rate = rate - DataRate::KilobitsPerSec(5);
7881   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7882       /*target_bitrate=*/rate,
7883       /*stable_target_bitrate=*/new_stable_rate,
7884       /*link_allocation=*/rate,
7885       /*fraction_lost=*/0,
7886       /*round_trip_time_ms=*/0,
7887       /*cwnd_reduce_ratio=*/0);
7888   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
7889   EXPECT_EQ(1, fake_encoder_.GetNumSetRates());
7890   video_stream_encoder_->Stop();
7891 }
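
// Note (a reading of the two tests above, not a full description of the
// forwarding rule): rate updates reach the encoder when the target bitrate
// changes, while a change to the stable target rate alone is absorbed without
// a new SetRates() call.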
7892 
7893 TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
7894   test::ScopedKeyValueConfig field_trials(
7895       field_trials_,
7896       "WebRTC-AutomaticAnimationDetectionScreenshare/"
7897       "enabled:true,min_fps:20,min_duration_ms:1000,min_area_ratio:0.8/");
7898   const int kFramerateFps = 30;
7899   const int kWidth = 1920;
7900   const int kHeight = 1080;
7901   const int kNumFrames = 2 * kFramerateFps;  // >1 seconds of frames.
7902   // Works in screenshare mode.
7903   ResetEncoder("VP8", 1, 1, 1, /*screenshare*/ true);
7904   // We rely on the automatic resolution adaptation, but we handle framerate
7905   // adaptation manually by mocking the stats proxy.
7906   video_source_.set_adaptation_enabled(true);
7907 
7908   // BALANCED degradation preference is required for this feature.
7909   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7910       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7911   video_stream_encoder_->SetSource(&video_source_,
7912                                    webrtc::DegradationPreference::BALANCED);
7913   EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
7914 
7915   VideoFrame frame = CreateFrame(1, kWidth, kHeight);
7916   frame.set_update_rect(VideoFrame::UpdateRect{0, 0, kWidth, kHeight});
7917 
7918   // Pass enough frames with the full update to trigger animation detection.
7919   for (int i = 0; i < kNumFrames; ++i) {
7920     int64_t timestamp_ms = CurrentTimeMs();
7921     frame.set_ntp_time_ms(timestamp_ms);
7922     frame.set_timestamp_us(timestamp_ms * 1000);
7923     video_source_.IncomingCapturedFrame(frame);
7924     WaitForEncodedFrame(timestamp_ms);
7925   }
7926 
7927   // Resolution should be limited.
7928   rtc::VideoSinkWants expected;
7929   expected.max_framerate_fps = kFramerateFps;
7930   expected.max_pixel_count = 1280 * 720 + 1;
7931   EXPECT_THAT(video_source_.sink_wants(), FpsEqResolutionLt(expected));
7932 
7933   // Pass one frame with no known update.
7934   //  Resolution cap should be removed immediately.
7935   int64_t timestamp_ms = CurrentTimeMs();
7936   frame.set_ntp_time_ms(timestamp_ms);
7937   frame.set_timestamp_us(timestamp_ms * 1000);
7938   frame.clear_update_rect();
7939 
7940   video_source_.IncomingCapturedFrame(frame);
7941   WaitForEncodedFrame(timestamp_ms);
7942 
7943   // Resolution should be unlimited now.
7944   EXPECT_THAT(video_source_.sink_wants(),
7945               FpsMatchesResolutionMax(Eq(kFramerateFps)));
7946 
7947   video_stream_encoder_->Stop();
7948 }
7949 
7950 TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) {
7951   const int kWidth = 720;  // 540p adapted down.
7952   const int kHeight = 405;
7953   const int kNumFrames = 3;
7954   // Use screenshare mode.
7955   ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1,
7956                /*num_spatial_layers=*/2, /*screenshare=*/true);
7957 
7958   video_source_.set_adaptation_enabled(true);
7959 
7960   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7961       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
7962 
7963   VideoFrame frame = CreateFrame(1, kWidth, kHeight);
7964 
7965   // Pass a few frames; they should be encoded at the odd resolution.
7966   for (int i = 0; i < kNumFrames; ++i) {
7967     int64_t timestamp_ms = CurrentTimeMs();
7968     frame.set_ntp_time_ms(timestamp_ms);
7969     frame.set_timestamp_us(timestamp_ms * 1000);
7970     video_source_.IncomingCapturedFrame(frame);
7971     WaitForEncodedFrame(timestamp_ms);
7972   }
7973 
7974   video_stream_encoder_->Stop();
7975 }
7976 
7977 TEST_F(VideoStreamEncoderTest, EncoderResetAccordingToParameterChange) {
7978   const float downscale_factors[] = {4.0, 2.0, 1.0};
7979   const int number_layers =
7980       sizeof(downscale_factors) / sizeof(downscale_factors[0]);
7981   VideoEncoderConfig config;
7982   webrtc::VideoEncoder::EncoderInfo encoder_info;
7983   test::FillEncoderConfiguration(kVideoCodecVP8, number_layers, &config);
7984   for (int i = 0; i < number_layers; ++i) {
7985     config.simulcast_layers[i].scale_resolution_down_by = downscale_factors[i];
7986     config.simulcast_layers[i].active = true;
7987   }
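  // With the 1280x720 frames used in this test, the downscale factors above
  // give simulcast layers of 320x180, 640x360 and 1280x720.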
7988   config.video_stream_factory =
7989       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
7990           "VP8", /*max qp*/ 56, /*screencast*/ false,
7991           /*screenshare enabled*/ false, encoder_info);
7992   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
7993       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
7994       0, 0, 0);
7995 
7996   // First initialization.
7997   // Encoder should be initialized. Next frame should be key frame.
7998   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
7999   sink_.SetNumExpectedLayers(number_layers);
8000   int64_t timestamp_ms = kFrameIntervalMs;
8001   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
8002   WaitForEncodedFrame(timestamp_ms);
8003   EXPECT_EQ(1, fake_encoder_.GetNumInitializations());
8004   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
8005               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey,
8006                                            VideoFrameType::kVideoFrameKey,
8007                                            VideoFrameType::kVideoFrameKey}));
8008 
8009   // Disable top layer.
8010   // Encoder shouldn't be re-initialized. Next frame should be delta frame.
8011   config.simulcast_layers[number_layers - 1].active = false;
8012   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8013   sink_.SetNumExpectedLayers(number_layers - 1);
8014   timestamp_ms += kFrameIntervalMs;
8015   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
8016   WaitForEncodedFrame(timestamp_ms);
8017   EXPECT_EQ(1, fake_encoder_.GetNumInitializations());
8018   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
8019               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameDelta,
8020                                            VideoFrameType::kVideoFrameDelta,
8021                                            VideoFrameType::kVideoFrameDelta}));
8022 
8023   // Re-enable top layer.
8024   // Encoder should be re-initialized. Next frame should be key frame.
8025   config.simulcast_layers[number_layers - 1].active = true;
8026   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8027   sink_.SetNumExpectedLayers(number_layers);
8028   timestamp_ms += kFrameIntervalMs;
8029   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
8030   WaitForEncodedFrame(timestamp_ms);
8031   EXPECT_EQ(2, fake_encoder_.GetNumInitializations());
8032   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
8033               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey,
8034                                            VideoFrameType::kVideoFrameKey,
8035                                            VideoFrameType::kVideoFrameKey}));
8036 
8037   // Top layer max rate change.
8038   // Encoder shouldn't be re-initialized. Next frame should be delta frame.
8039   config.simulcast_layers[number_layers - 1].max_bitrate_bps -= 100;
8040   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8041   sink_.SetNumExpectedLayers(number_layers);
8042   timestamp_ms += kFrameIntervalMs;
8043   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
8044   WaitForEncodedFrame(timestamp_ms);
8045   EXPECT_EQ(2, fake_encoder_.GetNumInitializations());
8046   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
8047               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameDelta,
8048                                            VideoFrameType::kVideoFrameDelta,
8049                                            VideoFrameType::kVideoFrameDelta}));
8050 
8051   // Top layer resolution change.
8052   // Encoder should be re-initialized. Next frame should be key frame.
8053   config.simulcast_layers[number_layers - 1].scale_resolution_down_by += 0.1;
8054   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8055   sink_.SetNumExpectedLayers(number_layers);
8056   timestamp_ms += kFrameIntervalMs;
8057   video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
8058   WaitForEncodedFrame(timestamp_ms);
8059   EXPECT_EQ(3, fake_encoder_.GetNumInitializations());
8060   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
8061               ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey,
8062                                            VideoFrameType::kVideoFrameKey,
8063                                            VideoFrameType::kVideoFrameKey}));
8064   video_stream_encoder_->Stop();
8065 }
8066 
8067 TEST_F(VideoStreamEncoderTest, EncoderResolutionsExposedInSinglecast) {
8068   const int kFrameWidth = 1280;
8069   const int kFrameHeight = 720;
8070 
8071   SetUp();
8072   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8073       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8074 
8075   // Capturing a frame should reconfigure the encoder and expose the encoder
8076   // resolution, which is the same as the input frame.
8077   int64_t timestamp_ms = kFrameIntervalMs;
8078   video_source_.IncomingCapturedFrame(
8079       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8080   WaitForEncodedFrame(timestamp_ms);
8081   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8082   EXPECT_THAT(video_source_.sink_wants().resolutions,
8083               ::testing::ElementsAreArray(
8084                   {rtc::VideoSinkWants::FrameSize(kFrameWidth, kFrameHeight)}));
8085 
8086   video_stream_encoder_->Stop();
8087 }
8088 
8089 TEST_F(VideoStreamEncoderTest, EncoderResolutionsExposedInSimulcast) {
8090   // Pick downscale factors such that we never encode at full resolution - this
8091   // is an interesting use case. The frame resolution influences the encoder
8092   // resolutions, but if no layer has `scale_resolution_down_by` == 1 then the
8093 // encoder should not ask for the frame resolution. This allows the video
8094 // source to present frames at one nominal resolution while optimizing its
8095 // internal buffers for what is actually encoded.
8096   const size_t kNumSimulcastLayers = 3u;
8097   const float kDownscaleFactors[] = {8.0, 4.0, 2.0};
8098   const int kFrameWidth = 1280;
8099   const int kFrameHeight = 720;
8100   const rtc::VideoSinkWants::FrameSize kLayer0Size(
8101       kFrameWidth / kDownscaleFactors[0], kFrameHeight / kDownscaleFactors[0]);
8102   const rtc::VideoSinkWants::FrameSize kLayer1Size(
8103       kFrameWidth / kDownscaleFactors[1], kFrameHeight / kDownscaleFactors[1]);
8104   const rtc::VideoSinkWants::FrameSize kLayer2Size(
8105       kFrameWidth / kDownscaleFactors[2], kFrameHeight / kDownscaleFactors[2]);
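  // I.e. the expected encoder resolutions are 160x90, 320x180 and 640x360;
  // none of the layers encodes the full 1280x720 input frame.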
8106 
8107   VideoEncoderConfig config;
8108   webrtc::VideoEncoder::EncoderInfo encoder_info;
8109   test::FillEncoderConfiguration(kVideoCodecVP8, kNumSimulcastLayers, &config);
8110   for (size_t i = 0; i < kNumSimulcastLayers; ++i) {
8111     config.simulcast_layers[i].scale_resolution_down_by = kDownscaleFactors[i];
8112     config.simulcast_layers[i].active = true;
8113   }
8114   config.video_stream_factory =
8115       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
8116           "VP8", /*max qp*/ 56, /*screencast*/ false,
8117           /*screenshare enabled*/ false, encoder_info);
8118   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8119       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
8120       0, 0, 0);
8121 
8122   // Capture a frame with all layers active.
8123   int64_t timestamp_ms = kFrameIntervalMs;
8124   sink_.SetNumExpectedLayers(kNumSimulcastLayers);
8125   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8126   video_source_.IncomingCapturedFrame(
8127       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8128   WaitForEncodedFrame(timestamp_ms);
8129   // Expect encoded resolutions to match the expected simulcast layers.
8130   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8131   EXPECT_THAT(
8132       video_source_.sink_wants().resolutions,
8133       ::testing::ElementsAreArray({kLayer0Size, kLayer1Size, kLayer2Size}));
8134 
8135   // Capture a frame with one of the layers inactive.
8136   timestamp_ms += kFrameIntervalMs;
8137   config.simulcast_layers[2].active = false;
8138   sink_.SetNumExpectedLayers(kNumSimulcastLayers - 1);
8139   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8140   video_source_.IncomingCapturedFrame(
8141       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8142   WaitForEncodedFrame(timestamp_ms);
8143 
8144   // Expect encoded resolutions to match the expected simulcast layers.
8145   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8146   EXPECT_THAT(video_source_.sink_wants().resolutions,
8147               ::testing::ElementsAreArray({kLayer0Size, kLayer1Size}));
8148 
8149   // Capture a frame with all but one layer turned off.
8150   timestamp_ms += kFrameIntervalMs;
8151   config.simulcast_layers[1].active = false;
8152   sink_.SetNumExpectedLayers(kNumSimulcastLayers - 2);
8153   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8154   video_source_.IncomingCapturedFrame(
8155       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8156   WaitForEncodedFrame(timestamp_ms);
8157 
8158   // Expect encoded resolutions to match the expected simulcast layers.
8159   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8160   EXPECT_THAT(video_source_.sink_wants().resolutions,
8161               ::testing::ElementsAreArray({kLayer0Size}));
8162 
8163   video_stream_encoder_->Stop();
8164 }
8165 
8166 TEST_F(VideoStreamEncoderTest, QpPresent_QpKept) {
8167   ResetEncoder("VP8", 1, 1, 1, false);
8168 
8169   // Force encoder reconfig.
8170   video_source_.IncomingCapturedFrame(
8171       CreateFrame(1, codec_width_, codec_height_));
8172   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8173 
8174   // Set QP on encoded frame and pass the frame to encode complete callback.
8175   // Since QP is present QP parsing won't be triggered and the original value
8176   // should be kept.
8177   EncodedImage encoded_image;
8178   encoded_image.qp_ = 123;
8179   encoded_image.SetEncodedData(EncodedImageBuffer::Create(
8180       kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)));
8181   CodecSpecificInfo codec_info;
8182   codec_info.codecType = kVideoCodecVP8;
8183   fake_encoder_.InjectEncodedImage(encoded_image, &codec_info);
8184   EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeout));
8185   EXPECT_EQ(sink_.GetLastEncodedImage().qp_, 123);
8186   video_stream_encoder_->Stop();
8187 }
8188 
8189 TEST_F(VideoStreamEncoderTest, QpAbsent_QpParsed) {
8190   ResetEncoder("VP8", 1, 1, 1, false);
8191 
8192   // Force encoder reconfig.
8193   video_source_.IncomingCapturedFrame(
8194       CreateFrame(1, codec_width_, codec_height_));
8195   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8196 
8197   // Pass an encoded frame without QP to encode complete callback. QP should be
8198   // parsed and set.
8199   EncodedImage encoded_image;
8200   encoded_image.qp_ = -1;
8201   encoded_image.SetEncodedData(EncodedImageBuffer::Create(
8202       kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)));
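  // kCodedFrameVp8Qp25 is a coded VP8 frame whose header carries QP 25, which
  // the parser is expected to recover below.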
8203   CodecSpecificInfo codec_info;
8204   codec_info.codecType = kVideoCodecVP8;
8205   fake_encoder_.InjectEncodedImage(encoded_image, &codec_info);
8206   EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeout));
8207   EXPECT_EQ(sink_.GetLastEncodedImage().qp_, 25);
8208   video_stream_encoder_->Stop();
8209 }
8210 
8211 TEST_F(VideoStreamEncoderTest, QpAbsentParsingDisabled_QpAbsent) {
8212   webrtc::test::ScopedKeyValueConfig field_trials(
8213       field_trials_, "WebRTC-QpParsingKillSwitch/Enabled/");
8214 
8215   ResetEncoder("VP8", 1, 1, 1, false);
8216 
8217   // Force encoder reconfig.
8218   video_source_.IncomingCapturedFrame(
8219       CreateFrame(1, codec_width_, codec_height_));
8220   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8221 
8222   EncodedImage encoded_image;
8223   encoded_image.qp_ = -1;
8224   encoded_image.SetEncodedData(EncodedImageBuffer::Create(
8225       kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)));
8226   CodecSpecificInfo codec_info;
8227   codec_info.codecType = kVideoCodecVP8;
8228   fake_encoder_.InjectEncodedImage(encoded_image, &codec_info);
8229   EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeout));
8230   EXPECT_EQ(sink_.GetLastEncodedImage().qp_, -1);
8231   video_stream_encoder_->Stop();
8232 }
8233 
8234 TEST_F(VideoStreamEncoderTest,
8235        QualityScalingNotAllowed_QualityScalingDisabled) {
8236   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8237 
8238   // Disable scaling settings in encoder info.
8239   fake_encoder_.SetQualityScaling(false);
8240   // Disable quality scaling in encoder config.
8241   video_encoder_config.is_quality_scaling_allowed = false;
8242   ConfigureEncoder(std::move(video_encoder_config));
8243 
8244   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8245       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8246 
8247   test::FrameForwarder source;
8248   video_stream_encoder_->SetSource(
8249       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8250   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8251   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8252 
8253   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8254   WaitForEncodedFrame(1);
8255   video_stream_encoder_->TriggerQualityLow();
8256   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8257 
8258   video_stream_encoder_->Stop();
8259 }
8260 
8261 TEST_F(VideoStreamEncoderTest, QualityScalingNotAllowed_IsQpTrustedSetTrue) {
8262   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8263 
8264   // Disable scaling settings in encoder info.
8265   fake_encoder_.SetQualityScaling(false);
8266   // Set QP trusted in encoder info.
8267   fake_encoder_.SetIsQpTrusted(true);
8268   // Enable quality scaling in encoder config.
8269   // Disable quality scaling in encoder config.
8270   ConfigureEncoder(std::move(video_encoder_config));
8271 
8272   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8273       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8274 
8275   test::FrameForwarder source;
8276   video_stream_encoder_->SetSource(
8277       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8278   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8279   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8280 
8281   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8282   WaitForEncodedFrame(1);
8283   video_stream_encoder_->TriggerQualityLow();
8284   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8285 
8286   video_stream_encoder_->Stop();
8287 }
8288 
8289 TEST_F(VideoStreamEncoderTest,
8290        QualityScalingNotAllowedAndQPIsTrusted_BandwidthScalerDisable) {
8291   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8292 
8293   // Disable scaling settings in encoder info.
8294   fake_encoder_.SetQualityScaling(false);
8295   // Set QP trusted in encoder info.
8296   fake_encoder_.SetIsQpTrusted(true);
8297   // Disable quality scaling in encoder config.
8298   video_encoder_config.is_quality_scaling_allowed = false;
8299   ConfigureEncoder(std::move(video_encoder_config));
8300 
8301   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8302       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8303 
8304   test::FrameForwarder source;
8305   video_stream_encoder_->SetSource(
8306       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8307   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8308   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8309 
8310   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8311   WaitForEncodedFrame(1);
8312   video_stream_encoder_->TriggerQualityLow();
8313   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8314 
8315   video_stream_encoder_->Stop();
8316 }
8317 
8318 TEST_F(VideoStreamEncoderTest,
8319        QualityScalingNotAllowedAndQPIsNotTrusted_BandwidthScalerDisable) {
8320   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8321 
8322   // Disable scaling settings in encoder info.
8323   fake_encoder_.SetQualityScaling(false);
8324   // Set QP not trusted in encoder info.
8325   fake_encoder_.SetIsQpTrusted(false);
8326   // Disable quality scaling in encoder config.
8327   video_encoder_config.is_quality_scaling_allowed = false;
8328   ConfigureEncoder(std::move(video_encoder_config));
8329 
8330   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8331       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8332 
8333   test::FrameForwarder source;
8334   video_stream_encoder_->SetSource(
8335       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8336   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8337   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8338 
8339   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8340   WaitForEncodedFrame(1);
8341   video_stream_encoder_->TriggerQualityLow();
8342   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8343 
8344   video_stream_encoder_->Stop();
8345 }
8346 
8347 TEST_F(VideoStreamEncoderTest, EncoderProvideLimitsWhenQPIsNotTrusted) {
8348   // Set QP not trusted in encoder info.
8349   fake_encoder_.SetIsQpTrusted(false);
8350 
8351   const int MinEncBitrateKbps = 30;
8352   const int MaxEncBitrateKbps = 100;
8353   const int MinStartBitrateKbp = 50;
8354   const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits(
8355       /*frame_size_pixels=*/codec_width_ * codec_height_,
8356       /*min_start_bitrate_bps=*/MinStartBitrateKbp,
8357       /*min_bitrate_bps=*/MinEncBitrateKbps * 1000,
8358       /*max_bitrate_bps=*/MaxEncBitrateKbps * 1000);
8359 
8360   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8361       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8362 
8363   fake_encoder_.SetResolutionBitrateLimits({encoder_bitrate_limits});
8364 
8365   VideoEncoderConfig video_encoder_config;
8366   test::FillEncoderConfiguration(kVideoCodecH264, 1, &video_encoder_config);
8367   video_encoder_config.max_bitrate_bps = MaxEncBitrateKbps * 1000;
8368   video_encoder_config.simulcast_layers[0].min_bitrate_bps =
8369       MinEncBitrateKbps * 1000;
8370   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
8371                                           kMaxPayloadLength);
8372 
8373   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
8374   WaitForEncodedFrame(1);
8375   EXPECT_EQ(
8376       MaxEncBitrateKbps,
8377       static_cast<int>(bitrate_allocator_factory_.codec_config().maxBitrate));
8378   EXPECT_EQ(
8379       MinEncBitrateKbps,
8380       static_cast<int>(bitrate_allocator_factory_.codec_config().minBitrate));
8381 
8382   video_stream_encoder_->Stop();
8383 }
8384 
8385 TEST_F(VideoStreamEncoderTest, EncoderDoesnotProvideLimitsWhenQPIsNotTrusted) {
8386   // Set QP not trusted in encoder info.
8387   fake_encoder_.SetIsQpTrusted(false);
8388 
8389   absl::optional<VideoEncoder::ResolutionBitrateLimits> suitable_bitrate_limit =
8390       EncoderInfoSettings::
8391           GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted(
8392               codec_width_ * codec_height_,
8393               EncoderInfoSettings::
8394                   GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted());
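  // The suitable limit is looked up from the default singlecast table for
  // untrusted QP, keyed by the frame size (codec_width_ * codec_height_).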
8395   EXPECT_TRUE(suitable_bitrate_limit.has_value());
8396 
8397   const int MaxEncBitrate = suitable_bitrate_limit->max_bitrate_bps;
8398   const int MinEncBitrate = suitable_bitrate_limit->min_bitrate_bps;
8399   const int TargetEncBitrate = MaxEncBitrate;
8400   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8401       DataRate::BitsPerSec(TargetEncBitrate),
8402       DataRate::BitsPerSec(TargetEncBitrate),
8403       DataRate::BitsPerSec(TargetEncBitrate), 0, 0, 0);
8404 
8405   VideoEncoderConfig video_encoder_config;
8406   test::FillEncoderConfiguration(kVideoCodecH264, 1, &video_encoder_config);
8407   video_encoder_config.max_bitrate_bps = MaxEncBitrate;
8408   video_encoder_config.simulcast_layers[0].min_bitrate_bps = MinEncBitrate;
8409   video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
8410                                           kMaxPayloadLength);
8411 
8412   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
8413   WaitForEncodedFrame(1);
8414   EXPECT_EQ(
8415       MaxEncBitrate / 1000,
8416       static_cast<int>(bitrate_allocator_factory_.codec_config().maxBitrate));
8417   EXPECT_EQ(
8418       MinEncBitrate / 1000,
8419       static_cast<int>(bitrate_allocator_factory_.codec_config().minBitrate));
8420 
8421   video_stream_encoder_->Stop();
8422 }
8423 
8424 TEST_F(VideoStreamEncoderTest, NormalComplexityWithMoreThanTwoCores) {
8425   ResetEncoder("VP9", /*num_stream=*/1, /*num_temporal_layers=*/1,
8426                /*num_spatial_layers=*/1,
8427                /*screenshare=*/false, /*allocation_callback_type=*/
8428                VideoStreamEncoder::BitrateAllocationCallbackType::
8429                    kVideoBitrateAllocationWhenScreenSharing,
8430                /*num_cores=*/3);
8431 
8432   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8433       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8434   video_source_.IncomingCapturedFrame(
8435       CreateFrame(1, /*width=*/320, /*height=*/180));
8436   WaitForEncodedFrame(1);
8437   EXPECT_EQ(fake_encoder_.LastEncoderComplexity(),
8438             VideoCodecComplexity::kComplexityNormal);
8439   video_stream_encoder_->Stop();
8440 }
8441 
8442 TEST_F(VideoStreamEncoderTest,
8443        NormalComplexityWhenLowTierOptimizationsAreDisabled) {
8444   webrtc::test::ScopedKeyValueConfig field_trials(
8445       field_trials_, "WebRTC-VP9-LowTierOptimizations/Disabled/");
8446 
8447   ResetEncoder("VP9", /*num_stream=*/1, /*num_temporal_layers=*/1,
8448                /*num_spatial_layers=*/1,
8449                /*screenshare=*/false, /*allocation_callback_type=*/
8450                VideoStreamEncoder::BitrateAllocationCallbackType::
8451                    kVideoBitrateAllocationWhenScreenSharing,
8452                /*num_cores=*/2);
8453 
8454   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8455       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8456   video_source_.IncomingCapturedFrame(
8457       CreateFrame(1, /*width=*/320, /*height=*/180));
8458   WaitForEncodedFrame(1);
8459   EXPECT_EQ(fake_encoder_.LastEncoderComplexity(),
8460             VideoCodecComplexity::kComplexityNormal);
8461   video_stream_encoder_->Stop();
8462 }
8463 
8464 TEST_F(VideoStreamEncoderTest, LowComplexityWithTwoCores) {
8465   ResetEncoder("VP9", /*num_stream=*/1, /*num_temporal_layers=*/1,
8466                /*num_spatial_layers=*/1,
8467                /*screenshare=*/false, /*allocation_callback_type=*/
8468                VideoStreamEncoder::BitrateAllocationCallbackType::
8469                    kVideoBitrateAllocationWhenScreenSharing,
8470                /*num_cores=*/2);
8471 
8472   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8473       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8474   video_source_.IncomingCapturedFrame(
8475       CreateFrame(1, /*width=*/320, /*height=*/180));
8476   WaitForEncodedFrame(1);
8477   EXPECT_EQ(fake_encoder_.LastEncoderComplexity(),
8478             VideoCodecComplexity::kComplexityLow);
8479   video_stream_encoder_->Stop();
8480 }
8481 
8482 #if !defined(WEBRTC_IOS)
8483 // TODO(bugs.webrtc.org/12401): Disabled because WebRTC-Video-QualityScaling is
8484 // disabled by default on iOS.
8485 TEST_F(VideoStreamEncoderTest, QualityScalingAllowed_QualityScalingEnabled) {
8486   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8487 
8488   // Disable scaling settings in encoder info.
8489   fake_encoder_.SetQualityScaling(false);
8490   // Enable quality scaling in encoder config.
8491   video_encoder_config.is_quality_scaling_allowed = true;
8492   ConfigureEncoder(std::move(video_encoder_config));
8493 
8494   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8495       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8496 
8497   test::FrameForwarder source;
8498   video_stream_encoder_->SetSource(
8499       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8500   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8501   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8502 
8503   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8504   WaitForEncodedFrame(1);
8505   video_stream_encoder_->TriggerQualityLow();
8506   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
8507 
8508   video_stream_encoder_->Stop();
8509 }
8510 
8511 TEST_F(VideoStreamEncoderTest, QualityScalingAllowed_IsQpTrustedSetTrue) {
8512   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8513 
8514   // Disable scaling settings in encoder info.
8515   fake_encoder_.SetQualityScaling(false);
8516   // Set QP trusted in encoder info.
8517   fake_encoder_.SetIsQpTrusted(true);
8518   // Enable quality scaling in encoder config.
8519   video_encoder_config.is_quality_scaling_allowed = true;
8520   ConfigureEncoder(std::move(video_encoder_config));
8521 
8522   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8523       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8524 
8525   test::FrameForwarder source;
8526   video_stream_encoder_->SetSource(
8527       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8528   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8529   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8530 
8531   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8532   WaitForEncodedFrame(1);
8533   video_stream_encoder_->TriggerQualityLow();
8534   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
8535 
8536   video_stream_encoder_->Stop();
8537 }
8538 
8539 TEST_F(VideoStreamEncoderTest, QualityScalingAllowed_IsQpTrustedSetFalse) {
8540   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8541 
8542   // Disable scaling settings in encoder info.
8543   fake_encoder_.SetQualityScaling(false);
8544   // Set QP not trusted in encoder info.
8545   fake_encoder_.SetIsQpTrusted(false);
8546   // Enable quality scaling in encoder config.
8547   video_encoder_config.is_quality_scaling_allowed = true;
8548   ConfigureEncoder(std::move(video_encoder_config));
8549 
8550   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8551       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8552 
8553   test::FrameForwarder source;
8554   video_stream_encoder_->SetSource(
8555       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8556   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8557   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8558 
8559   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8560   WaitForEncodedFrame(1);
8561   video_stream_encoder_->TriggerQualityLow();
8562   // When the quality scaler is inactive but is_quality_scaling_allowed is true,
8563   // the bandwidth_quality_scaler_ takes over, so bw_limited_resolution is true.
8564   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
8565 
8566   video_stream_encoder_->Stop();
8567 }
8568 
8569 TEST_F(VideoStreamEncoderTest,
8570        QualityScalingAllowedAndQPIsTrusted_BandwidthScalerDisable) {
8571   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8572 
8573   // Disable scaling settings in encoder info.
8574   fake_encoder_.SetQualityScaling(false);
8575   // Set QP trusted in encoder info.
8576   fake_encoder_.SetIsQpTrusted(true);
8577   // Enable quality scaling in encoder config.
8578   video_encoder_config.is_quality_scaling_allowed = true;
8579   ConfigureEncoder(std::move(video_encoder_config));
8580 
8581   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8582       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8583 
8584   test::FrameForwarder source;
8585   video_stream_encoder_->SetSource(
8586       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8587   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8588   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8589 
8590   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8591   WaitForEncodedFrame(1);
8592   video_stream_encoder_->TriggerQualityLow();
8593   // The bandwidth quality scaler is off, but the quality scaler is active.
8594   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
8595 
8596   video_stream_encoder_->Stop();
8597 }
8598 
8599 TEST_F(VideoStreamEncoderTest,
8600        QualityScalingAllowedAndQPIsNotTrusted_BandwidthScalerEnabled) {
8601   VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy();
8602 
8603   // Disable scaling settings in encoder info.
8604   fake_encoder_.SetQualityScaling(false);
8605   // Set QP not trusted in encoder info.
8606   fake_encoder_.SetIsQpTrusted(false);
8607   // Enable quality scaling in encoder config.
8608   video_encoder_config.is_quality_scaling_allowed = true;
8609   ConfigureEncoder(std::move(video_encoder_config));
8610 
8611   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8612       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8613 
8614   test::FrameForwarder source;
8615   video_stream_encoder_->SetSource(
8616       &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
8617   EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
8618   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
8619 
8620   source.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
8621   WaitForEncodedFrame(1);
8622   video_stream_encoder_->TriggerQualityLow();
8623   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
8624 
8625   video_stream_encoder_->Stop();
8626 }
8627 
8628 TEST_F(VideoStreamEncoderTest,
8629        RequestsRefreshFrameAfterEarlyDroppedNativeFrame) {
8630   // Send a native frame before encoder rates have been set. The encoder is
8631   // seen as paused at this time.
8632   rtc::Event frame_destroyed_event;
8633   video_source_.IncomingCapturedFrame(CreateFakeNativeFrame(
8634       /*ntp_time_ms=*/1, &frame_destroyed_event, codec_width_, codec_height_));
8635 
8636   // Frame should be dropped and destroyed.
8637   ExpectDroppedFrame();
8638   EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout));
8639   EXPECT_EQ(video_source_.refresh_frames_requested_, 0);
8640 
8641   // Set bitrates, unpausing the encoder and triggering a request for a refresh
8642   // frame.
8643   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8644       kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8645   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8646   EXPECT_EQ(video_source_.refresh_frames_requested_, 1);
8647 
8648   video_stream_encoder_->Stop();
8649 }
8650 
8651 #endif  // !defined(WEBRTC_IOS)
8652 
8653 // Test parameters: (VideoCodecType codec, bool allow_i420_conversion)
8654 class VideoStreamEncoderWithRealEncoderTest
8655     : public VideoStreamEncoderTest,
8656       public ::testing::WithParamInterface<std::pair<VideoCodecType, bool>> {
8657  public:
8658   VideoStreamEncoderWithRealEncoderTest()
8659       : VideoStreamEncoderTest(),
8660         codec_type_(std::get<0>(GetParam())),
8661         allow_i420_conversion_(std::get<1>(GetParam())) {}
8662 
8663   void SetUp() override {
8664     VideoStreamEncoderTest::SetUp();
8665     std::unique_ptr<VideoEncoder> encoder;
8666     switch (codec_type_) {
8667       case kVideoCodecVP8:
8668         encoder = VP8Encoder::Create();
8669         break;
8670       case kVideoCodecVP9:
8671         encoder = VP9Encoder::Create();
8672         break;
8673       case kVideoCodecAV1:
8674         encoder = CreateLibaomAv1Encoder();
8675         break;
8676       case kVideoCodecH264:
8677         encoder =
8678             H264Encoder::Create(cricket::VideoCodec(cricket::kH264CodecName));
8679         break;
8680       case kVideoCodecMultiplex:
8681         mock_encoder_factory_for_multiplex_ =
8682             std::make_unique<MockVideoEncoderFactory>();
8683         EXPECT_CALL(*mock_encoder_factory_for_multiplex_, Die);
8684         EXPECT_CALL(*mock_encoder_factory_for_multiplex_, CreateVideoEncoder)
8685             .WillRepeatedly([] { return VP8Encoder::Create(); });
8686         encoder = std::make_unique<MultiplexEncoderAdapter>(
8687             mock_encoder_factory_for_multiplex_.get(), SdpVideoFormat("VP8"),
8688             false);
8689         break;
8690       default:
8691         RTC_DCHECK_NOTREACHED();
8692     }
8693     ConfigureEncoderAndBitrate(codec_type_, std::move(encoder));
8694   }
8695 
8696   void TearDown() override {
8697     video_stream_encoder_->Stop();
8698     // Ensure `video_stream_encoder_` is destroyed before
8699     // `encoder_proxy_factory_`.
8700     video_stream_encoder_.reset();
8701     VideoStreamEncoderTest::TearDown();
8702   }
8703 
8704  protected:
8705   void ConfigureEncoderAndBitrate(VideoCodecType codec_type,
8706                                   std::unique_ptr<VideoEncoder> encoder) {
8707     // Configure VSE to use the encoder.
8708     encoder_ = std::move(encoder);
8709     encoder_proxy_factory_ = std::make_unique<test::VideoEncoderProxyFactory>(
8710         encoder_.get(), &encoder_selector_);
8711     video_send_config_.encoder_settings.encoder_factory =
8712         encoder_proxy_factory_.get();
8713     VideoEncoderConfig video_encoder_config;
8714     test::FillEncoderConfiguration(codec_type, 1, &video_encoder_config);
8715     video_encoder_config_ = video_encoder_config.Copy();
8716     ConfigureEncoder(video_encoder_config_.Copy());
8717 
8718     // Set bitrate to ensure frame is not dropped.
8719     video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8720         kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
8721   }
8722 
8723   const VideoCodecType codec_type_;
8724   const bool allow_i420_conversion_;
8725   NiceMock<MockEncoderSelector> encoder_selector_;
8726   std::unique_ptr<test::VideoEncoderProxyFactory> encoder_proxy_factory_;
8727   std::unique_ptr<VideoEncoder> encoder_;
8728   std::unique_ptr<MockVideoEncoderFactory> mock_encoder_factory_for_multiplex_;
8729 };
8730 
8731 TEST_P(VideoStreamEncoderWithRealEncoderTest, EncoderMapsNativeI420) {
8732   auto native_i420_frame = test::CreateMappableNativeFrame(
8733       1, VideoFrameBuffer::Type::kI420, codec_width_, codec_height_);
8734   video_source_.IncomingCapturedFrame(native_i420_frame);
8735   WaitForEncodedFrame(codec_width_, codec_height_);
8736 
8737   auto mappable_native_buffer =
8738       test::GetMappableNativeBufferFromVideoFrame(native_i420_frame);
8739   std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_frame_buffers =
8740       mappable_native_buffer->GetMappedFramedBuffers();
8741   ASSERT_EQ(mapped_frame_buffers.size(), 1u);
8742   EXPECT_EQ(mapped_frame_buffers[0]->width(), codec_width_);
8743   EXPECT_EQ(mapped_frame_buffers[0]->height(), codec_height_);
8744   EXPECT_EQ(mapped_frame_buffers[0]->type(), VideoFrameBuffer::Type::kI420);
8745 }
8746 
8747 TEST_P(VideoStreamEncoderWithRealEncoderTest, EncoderMapsNativeNV12) {
8748   auto native_nv12_frame = test::CreateMappableNativeFrame(
8749       1, VideoFrameBuffer::Type::kNV12, codec_width_, codec_height_);
8750   video_source_.IncomingCapturedFrame(native_nv12_frame);
8751   WaitForEncodedFrame(codec_width_, codec_height_);
8752 
8753   auto mappable_native_buffer =
8754       test::GetMappableNativeBufferFromVideoFrame(native_nv12_frame);
8755   std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_frame_buffers =
8756       mappable_native_buffer->GetMappedFramedBuffers();
8757   ASSERT_EQ(mapped_frame_buffers.size(), 1u);
8758   EXPECT_EQ(mapped_frame_buffers[0]->width(), codec_width_);
8759   EXPECT_EQ(mapped_frame_buffers[0]->height(), codec_height_);
8760   EXPECT_EQ(mapped_frame_buffers[0]->type(), VideoFrameBuffer::Type::kNV12);
8761 
8762   if (!allow_i420_conversion_) {
8763     EXPECT_FALSE(mappable_native_buffer->DidConvertToI420());
8764   }
8765 }
8766 
8767 TEST_P(VideoStreamEncoderWithRealEncoderTest, HandlesLayerToggling) {
8768   if (codec_type_ == kVideoCodecMultiplex) {
8769     // Multiplex codec here uses wrapped mock codecs, ignore for this test.
8770     return;
8771   }
8772 
8773   const size_t kNumSpatialLayers = 3u;
8774   const float kDownscaleFactors[] = {4.0, 2.0, 1.0};
8775   const int kFrameWidth = 1280;
8776   const int kFrameHeight = 720;
8777   const rtc::VideoSinkWants::FrameSize kLayer0Size(
8778       kFrameWidth / kDownscaleFactors[0], kFrameHeight / kDownscaleFactors[0]);
8779   const rtc::VideoSinkWants::FrameSize kLayer1Size(
8780       kFrameWidth / kDownscaleFactors[1], kFrameHeight / kDownscaleFactors[1]);
8781   const rtc::VideoSinkWants::FrameSize kLayer2Size(
8782       kFrameWidth / kDownscaleFactors[2], kFrameHeight / kDownscaleFactors[2]);
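  // With these factors the layer sizes are 320x180, 640x360 and 1280x720, so
  // kLayer2Size equals the full input frame.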
8783 
8784   VideoEncoderConfig config;
8785   webrtc::VideoEncoder::EncoderInfo encoder_info;
8786   if (codec_type_ == VideoCodecType::kVideoCodecVP9) {
8787     test::FillEncoderConfiguration(codec_type_, 1, &config);
8788     config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
8789     VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
8790     vp9_settings.numberOfSpatialLayers = kNumSpatialLayers;
8791     vp9_settings.numberOfTemporalLayers = 3;
8792     vp9_settings.automaticResizeOn = false;
8793     config.encoder_specific_settings =
8794         rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
8795             vp9_settings);
8796     config.spatial_layers = GetSvcConfig(kFrameWidth, kFrameHeight,
8797                                          /*fps=*/30.0,
8798                                          /*first_active_layer=*/0,
8799                                          /*num_spatial_layers=*/3,
8800                                          /*num_temporal_layers=*/3,
8801                                          /*is_screenshare=*/false);
8802   } else if (codec_type_ == VideoCodecType::kVideoCodecAV1) {
8803     test::FillEncoderConfiguration(codec_type_, 1, &config);
8804     config.max_bitrate_bps = kSimulcastTargetBitrate.bps();
8805     config.spatial_layers = GetSvcConfig(kFrameWidth, kFrameHeight,
8806                                          /*fps=*/30.0,
8807                                          /*first_active_layer=*/0,
8808                                          /*num_spatial_layers=*/3,
8809                                          /*num_temporal_layers=*/3,
8810                                          /*is_screenshare=*/false);
8811     config.simulcast_layers[0].scalability_mode = ScalabilityMode::kL3T3_KEY;
8812   } else {
8813     // Simulcast for VP8/H264.
8814     test::FillEncoderConfiguration(codec_type_, kNumSpatialLayers, &config);
8815     for (size_t i = 0; i < kNumSpatialLayers; ++i) {
8816       config.simulcast_layers[i].scale_resolution_down_by =
8817           kDownscaleFactors[i];
8818       config.simulcast_layers[i].active = true;
8819     }
8820     if (codec_type_ == VideoCodecType::kVideoCodecH264) {
8821       // Turn off frame dropping to prevent flakiness.
8822       config.frame_drop_enabled = false;
8823     }
8824   }
8825 
8826   auto set_layer_active = [&](int layer_idx, bool active) {
8827     if (codec_type_ == VideoCodecType::kVideoCodecVP9 ||
8828         codec_type_ == VideoCodecType::kVideoCodecAV1) {
8829       config.spatial_layers[layer_idx].active = active;
8830     } else {
8831       config.simulcast_layers[layer_idx].active = active;
8832     }
8833   };
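  // VP9 and AV1 run as a single SVC stream, so layers are toggled via
  // `spatial_layers`; VP8 and H264 simulcast use `simulcast_layers` instead.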
8834 
8835   config.video_stream_factory =
8836       rtc::make_ref_counted<cricket::EncoderStreamFactory>(
8837           CodecTypeToPayloadString(codec_type_), /*max qp*/ 56,
8838           /*screencast*/ false,
8839           /*screenshare enabled*/ false, encoder_info);
8840   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8841       kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
8842       0, 0, 0);
8843 
8844   // Capture a frame with all layers active.
8845   sink_.SetNumExpectedLayers(kNumSpatialLayers);
8846   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8847   int64_t timestamp_ms = kFrameIntervalMs;
8848   video_source_.IncomingCapturedFrame(
8849       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8850 
8851   WaitForEncodedFrame(kLayer2Size.width, kLayer2Size.height);
8852   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8853 
8854   // Capture a frame with one of the layers inactive.
8855   set_layer_active(2, false);
8856   sink_.SetNumExpectedLayers(kNumSpatialLayers - 1);
8857   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8858   timestamp_ms += kFrameIntervalMs;
8859   video_source_.IncomingCapturedFrame(
8860       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8861   WaitForEncodedFrame(kLayer1Size.width, kLayer1Size.height);
8862 
8863   // New target bitrates signaled based on lower resolution.
8864   DataRate kTwoLayerBitrate = DataRate::KilobitsPerSec(833);
8865   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8866       kTwoLayerBitrate, kTwoLayerBitrate, kTwoLayerBitrate, 0, 0, 0);
8867   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8868 
8869   // Re-enable the top layer.
8870   set_layer_active(2, true);
8871   sink_.SetNumExpectedLayers(kNumSpatialLayers);
8872   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
8873   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8874 
8875   // Bitrate target adjusted back up to enable HD layer...
8876   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8877       DataRate::KilobitsPerSec(1800), DataRate::KilobitsPerSec(1800),
8878       DataRate::KilobitsPerSec(1800), 0, 0, 0);
8879   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8880 
8881   // ...then add a new frame.
8882   timestamp_ms += kFrameIntervalMs;
8883   video_source_.IncomingCapturedFrame(
8884       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
8885   WaitForEncodedFrame(kLayer2Size.width, kLayer2Size.height);
8886   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
8887 
8888   video_stream_encoder_->Stop();
8889 }
8890 
8891 std::string TestParametersVideoCodecAndAllowI420ConversionToString(
8892     testing::TestParamInfo<std::pair<VideoCodecType, bool>> info) {
8893   VideoCodecType codec_type = std::get<0>(info.param);
8894   bool allow_i420_conversion = std::get<1>(info.param);
8895   std::string str;
8896   switch (codec_type) {
8897     case kVideoCodecGeneric:
8898       str = "Generic";
8899       break;
8900     case kVideoCodecVP8:
8901       str = "VP8";
8902       break;
8903     case kVideoCodecVP9:
8904       str = "VP9";
8905       break;
8906     case kVideoCodecAV1:
8907       str = "AV1";
8908       break;
8909     case kVideoCodecH264:
8910       str = "H264";
8911       break;
8912     case kVideoCodecMultiplex:
8913       str = "Multiplex";
8914       break;
8915     default:
8916       RTC_DCHECK_NOTREACHED();
8917   }
8918   str += allow_i420_conversion ? "_AllowToI420" : "_DisallowToI420";
8919   return str;
8920 }
8921 
8922 constexpr std::pair<VideoCodecType, bool> kVP8DisallowConversion =
8923     std::make_pair(kVideoCodecVP8, /*allow_i420_conversion=*/false);
8924 constexpr std::pair<VideoCodecType, bool> kVP9DisallowConversion =
8925     std::make_pair(kVideoCodecVP9, /*allow_i420_conversion=*/false);
8926 constexpr std::pair<VideoCodecType, bool> kAV1AllowConversion =
8927     std::make_pair(kVideoCodecAV1, /*allow_i420_conversion=*/false);
8928 constexpr std::pair<VideoCodecType, bool> kMultiplexDisallowConversion =
8929     std::make_pair(kVideoCodecMultiplex, /*allow_i420_conversion=*/false);
8930 #if defined(WEBRTC_USE_H264)
8931 constexpr std::pair<VideoCodecType, bool> kH264AllowConversion =
8932     std::make_pair(kVideoCodecH264, /*allow_i420_conversion=*/true);
8933 
8934 // The Windows compiler does not tolerate #if statements inside the
8935 // INSTANTIATE_TEST_SUITE_P() macro, so we have to have two definitions (with
8936 // and without H264).
8937 INSTANTIATE_TEST_SUITE_P(
8938     All,
8939     VideoStreamEncoderWithRealEncoderTest,
8940     ::testing::Values(kVP8DisallowConversion,
8941                       kVP9DisallowConversion,
8942                       kAV1AllowConversion,
8943                       kMultiplexDisallowConversion,
8944                       kH264AllowConversion),
8945     TestParametersVideoCodecAndAllowI420ConversionToString);
8946 #else
8947 INSTANTIATE_TEST_SUITE_P(
8948     All,
8949     VideoStreamEncoderWithRealEncoderTest,
8950     ::testing::Values(kVP8DisallowConversion,
8951                       kVP9DisallowConversion,
8952                       kAV1AllowConversion,
8953                       kMultiplexDisallowConversion),
8954     TestParametersVideoCodecAndAllowI420ConversionToString);
8955 #endif
8956 
8957 class ReconfigureEncoderTest : public VideoStreamEncoderTest {
8958  protected:
8959   void RunTest(const std::vector<VideoStream>& configs,
8960                const int expected_num_init_encode) {
8961     ConfigureEncoder(configs[0]);
8962     OnBitrateUpdated(kTargetBitrate);
8963     InsertFrameAndWaitForEncoded();
8964     EXPECT_EQ(1, sink_.number_of_reconfigurations());
8965     ExpectEqual(bitrate_allocator_factory_.codec_config(), configs[0]);
8966     EXPECT_EQ(1, fake_encoder_.GetNumInitializations());
8967     ExpectEqual(fake_encoder_.config(), configs[0]);
8968 
8969     // Reconfigure encoder, the encoder should only be reconfigured if needed.
8970     ConfigureEncoder(configs[1]);
8971     InsertFrameAndWaitForEncoded();
8972     EXPECT_EQ(2, sink_.number_of_reconfigurations());
8973     ExpectEqual(bitrate_allocator_factory_.codec_config(), configs[1]);
8974     EXPECT_EQ(expected_num_init_encode, fake_encoder_.GetNumInitializations());
8975     if (expected_num_init_encode > 1)
8976       ExpectEqual(fake_encoder_.config(), configs[1]);
8977 
8978     video_stream_encoder_->Stop();
8979   }
8980 
8981   void ConfigureEncoder(const VideoStream& stream) {
8982     VideoEncoderConfig config;
8983     webrtc::VideoEncoder::EncoderInfo encoder_info;
8984 
8985     test::FillEncoderConfiguration(kVideoCodecVP8, /*num_streams=*/1, &config);
8986     config.max_bitrate_bps = stream.max_bitrate_bps;
8987     config.simulcast_layers[0] = stream;
8988     config.video_stream_factory =
8989         rtc::make_ref_counted<cricket::EncoderStreamFactory>(
8990             /*codec_name=*/"VP8", /*max_qp=*/0, /*is_screenshare=*/false,
8991             /*conference_mode=*/false, encoder_info);
8992     video_stream_encoder_->ConfigureEncoder(std::move(config),
8993                                             kMaxPayloadLength);
8994   }
8995 
8996   void OnBitrateUpdated(DataRate bitrate) {
8997     video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
8998         bitrate, bitrate, bitrate, 0, 0, 0);
8999   }
9000 
9001   void InsertFrameAndWaitForEncoded() {
9002     timestamp_ms_ += kFrameIntervalMs;
9003     video_source_.IncomingCapturedFrame(
9004         CreateFrame(timestamp_ms_, kWidth, kHeight));
9005     sink_.WaitForEncodedFrame(timestamp_ms_);
9006   }
9007 
9008   void ExpectEqual(const VideoCodec& actual,
9009                    const VideoStream& expected) const {
9010     EXPECT_EQ(actual.numberOfSimulcastStreams, 1);
9011     EXPECT_EQ(actual.simulcastStream[0].maxFramerate, expected.max_framerate);
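    // SimulcastStream bitrates are in kbps while VideoStream uses bps, hence
    // the multiplication by 1000 below.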
9012     EXPECT_EQ(actual.simulcastStream[0].minBitrate * 1000,
9013               static_cast<unsigned int>(expected.min_bitrate_bps));
9014     EXPECT_EQ(actual.simulcastStream[0].maxBitrate * 1000,
9015               static_cast<unsigned int>(expected.max_bitrate_bps));
9016     EXPECT_EQ(actual.simulcastStream[0].width,
9017               kWidth / expected.scale_resolution_down_by);
9018     EXPECT_EQ(actual.simulcastStream[0].height,
9019               kHeight / expected.scale_resolution_down_by);
9020     EXPECT_EQ(actual.simulcastStream[0].numberOfTemporalLayers,
9021               expected.num_temporal_layers);
9022     EXPECT_EQ(actual.GetScalabilityMode(), expected.scalability_mode);
9023   }
9024 
9025   VideoStream DefaultConfig() const {
9026     VideoStream stream;
9027     stream.max_framerate = 25;
9028     stream.min_bitrate_bps = 35000;
9029     stream.max_bitrate_bps = 900000;
9030     stream.scale_resolution_down_by = 1.0;
9031     stream.num_temporal_layers = 1;
9032     stream.bitrate_priority = 1.0;
9033     stream.scalability_mode = absl::nullopt;
9034     return stream;
9035   }
9036 
9037   const int kWidth = 640;
9038   const int kHeight = 360;
9039   int64_t timestamp_ms_ = 0;
9040 };
9041 
9042 TEST_F(ReconfigureEncoderTest, NotReconfiguredIfMaxFramerateChanges) {
9043   VideoStream config1 = DefaultConfig();
9044   VideoStream config2 = config1;
9045   config2.max_framerate++;
9046 
9047   RunTest({config1, config2}, /*expected_num_init_encode=*/1);
9048 }
9049 
9050 TEST_F(ReconfigureEncoderTest, NotReconfiguredIfMinBitrateChanges) {
9051   VideoStream config1 = DefaultConfig();
9052   VideoStream config2 = config1;
9053   config2.min_bitrate_bps += 10000;
9054 
9055   RunTest({config1, config2}, /*expected_num_init_encode=*/1);
9056 }
9057 
9058 TEST_F(ReconfigureEncoderTest, NotReconfiguredIfMaxBitrateChanges) {
9059   VideoStream config1 = DefaultConfig();
9060   VideoStream config2 = config1;
9061   config2.max_bitrate_bps += 100000;
9062 
9063   RunTest({config1, config2}, /*expected_num_init_encode=*/1);
9064 }
9065 
9066 TEST_F(ReconfigureEncoderTest, NotReconfiguredIfBitratePriorityChanges) {
9067   VideoStream config1 = DefaultConfig();
9068   VideoStream config2 = config1;
9069   config2.bitrate_priority = config1.bitrate_priority.value() * 2.0;
9070 
9071   RunTest({config1, config2}, /*expected_num_init_encode=*/1);
9072 }
9073 
9074 TEST_F(ReconfigureEncoderTest, ReconfiguredIfResolutionChanges) {
9075   VideoStream config1 = DefaultConfig();
9076   VideoStream config2 = config1;
9077   config2.scale_resolution_down_by *= 2;
9078 
9079   RunTest({config1, config2}, /*expected_num_init_encode=*/2);
9080 }
9081 
9082 TEST_F(ReconfigureEncoderTest, ReconfiguredIfNumTemporalLayerChanges) {
9083   VideoStream config1 = DefaultConfig();
9084   VideoStream config2 = config1;
9085   config2.num_temporal_layers = config1.num_temporal_layers.value() + 1;
9086 
9087   RunTest({config1, config2}, /*expected_num_init_encode=*/2);
9088 }
9089 
9090 TEST_F(ReconfigureEncoderTest, ReconfiguredIfScalabilityModeChanges) {
9091   VideoStream config1 = DefaultConfig();
9092   VideoStream config2 = config1;
9093   config2.scalability_mode = ScalabilityMode::kL2T1;
9094 
9095   RunTest({config1, config2}, /*expected_num_init_encode=*/2);
9096 }
9097 
9098 TEST_F(ReconfigureEncoderTest,
9099        UpdatesNumTemporalLayersFromScalabilityModeChanges) {
9100   VideoStream config1 = DefaultConfig();
9101   VideoStream config2 = config1;
9102   config2.scalability_mode = ScalabilityMode::kL1T2;
9103   config2.num_temporal_layers = 2;
9104 
9105   RunTest({config1, config2}, /*expected_num_init_encode=*/2);
9106 }
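
// Taken together, the cases above document which VideoStream changes trigger a
// new InitEncode(): resolution (scale_resolution_down_by), the number of
// temporal layers and the scalability mode do, while max_framerate, min/max
// bitrate and bitrate_priority do not. A purely illustrative sketch using this
// fixture's helpers (DefaultConfig() above and RunTest()):
//
//   VideoStream base = DefaultConfig();
//   VideoStream changed = base;
//   changed.scale_resolution_down_by *= 2;  // resolution change
//   RunTest({base, changed}, /*expected_num_init_encode=*/2);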
9107 
9108 // Simple test that just creates and then immediately destroys an encoder.
9109 // The purpose of the test is to make sure that nothing bad happens if the
9110 // initialization step on the encoder queue doesn't run.
9111 TEST(VideoStreamEncoderSimpleTest, CreateDestroy) {
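  // A task queue that silently drops any task posted to it and flags delayed
  // task posting as a test failure. Handing this queue to VideoStreamEncoder
  // means the init task posted during construction never runs, which is the
  // scenario this test exercises.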
9112   class SuperLazyTaskQueue : public webrtc::TaskQueueBase {
9113    public:
9114     SuperLazyTaskQueue() = default;
9115     ~SuperLazyTaskQueue() override = default;
9116 
9117    private:
9118     void Delete() override { delete this; }
9119     void PostTask(absl::AnyInvocable<void() &&> task) override {
9120       // Intentionally drop the task; nothing posted to this queue runs.
9121     }
9122     void PostDelayedTask(absl::AnyInvocable<void() &&> task,
9123                          TimeDelta delay) override {
9124       ASSERT_TRUE(false);
9125     }
9126     void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> task,
9127                                       TimeDelta delay) override {
9128       ADD_FAILURE();
9129     }
9130   };
9131 
9132   // Lots of boilerplate.
9133   test::ScopedKeyValueConfig field_trials;
9134   GlobalSimulatedTimeController time_controller(Timestamp::Zero());
9135   auto stats_proxy = std::make_unique<MockableSendStatisticsProxy>(
9136       time_controller.GetClock(), VideoSendStream::Config(nullptr),
9137       webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, field_trials);
9138   SimpleVideoStreamEncoderFactory::MockFakeEncoder mock_fake_encoder(
9139       time_controller.GetClock());
9140   test::VideoEncoderProxyFactory encoder_factory(&mock_fake_encoder);
9141   std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory =
9142       CreateBuiltinVideoBitrateAllocatorFactory();
9143   VideoStreamEncoderSettings encoder_settings{
9144       VideoEncoder::Capabilities(/*loss_notification=*/false)};
9145   encoder_settings.encoder_factory = &encoder_factory;
9146   encoder_settings.bitrate_allocator_factory = bitrate_allocator_factory.get();
9147 
9148   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9149   EXPECT_CALL((*adapter.get()), Initialize).WillOnce(Return());
9150 
9151   std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter>
9152       encoder_queue(new SuperLazyTaskQueue());
9153 
9154   // Construct a VideoStreamEncoder instance and let it go out of scope without
9155   // doing anything else with it. Not even calling Stop() should be required,
9156   // since the posted init task will simply be deleted.
9157   auto encoder = std::make_unique<VideoStreamEncoder>(
9158       time_controller.GetClock(), 1, stats_proxy.get(), encoder_settings,
9159       std::make_unique<CpuOveruseDetectorProxy>(stats_proxy.get(),
9160                                                 field_trials),
9161       std::move(adapter), std::move(encoder_queue),
9162       VideoStreamEncoder::BitrateAllocationCallbackType::
9163           kVideoBitrateAllocation,
9164       field_trials);
9165 
9166   // Stop the encoder explicitly. This additional step checks that we don't
9167   // hang when calling Stop() after the task queue has been stopped and/or is
9168   // no longer accepting tasks.
9169   encoder->Stop();
9170 }
9171 
9172 TEST(VideoStreamEncoderFrameCadenceTest, ActivatesFrameCadenceOnContentType) {
9173   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9174   auto* adapter_ptr = adapter.get();
9175   SimpleVideoStreamEncoderFactory factory;
9176   FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback =
9177       nullptr;
9178   EXPECT_CALL(*adapter_ptr, Initialize)
9179       .WillOnce(Invoke([&video_stream_encoder_callback](
9180                            FrameCadenceAdapterInterface::Callback* callback) {
9181         video_stream_encoder_callback = callback;
9182       }));
9183   TaskQueueBase* encoder_queue = nullptr;
9184   auto video_stream_encoder =
9185       factory.Create(std::move(adapter), &encoder_queue);
9186 
9187   // First, a call is expected before the frame size is known, and hence before
9188   // the number of simulcast layers can be computed.
9189   EXPECT_CALL(*adapter_ptr, SetZeroHertzModeEnabled(Optional(Field(
9190                                 &FrameCadenceAdapterInterface::
9191                                     ZeroHertzModeParams::num_simulcast_layers,
9192                                 Eq(0u)))));
9193   VideoEncoderConfig config;
9194   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &config);
9195   config.content_type = VideoEncoderConfig::ContentType::kScreen;
9196   video_stream_encoder->ConfigureEncoder(std::move(config), 0);
9197   factory.DepleteTaskQueues();
9198 
9199   // Then, once a frame has been passed and the number of simulcast layers has
9200   // been computed, another call is expected.
9201   EXPECT_CALL(*adapter_ptr, SetZeroHertzModeEnabled(Optional(Field(
9202                                 &FrameCadenceAdapterInterface::
9203                                     ZeroHertzModeParams::num_simulcast_layers,
9204                                 Gt(0u)))));
9205   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/1);
9206   factory.DepleteTaskQueues();
9207   Mock::VerifyAndClearExpectations(adapter_ptr);
9208 
9209   // Expect zero-hertz mode to be disabled after switching to realtime video.
9210   EXPECT_CALL(*adapter_ptr, SetZeroHertzModeEnabled(Eq(absl::nullopt)));
9211   VideoEncoderConfig config2;
9212   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &config2);
9213   config2.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
9214   video_stream_encoder->ConfigureEncoder(std::move(config2), 0);
9215   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/2);
9216   factory.DepleteTaskQueues();
9217 }
9218 
9219 TEST(VideoStreamEncoderFrameCadenceTest,
9220      ForwardsFramesIntoFrameCadenceAdapter) {
9221   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9222   auto* adapter_ptr = adapter.get();
9223   test::FrameForwarder video_source;
9224   SimpleVideoStreamEncoderFactory factory;
9225   auto video_stream_encoder = factory.Create(std::move(adapter));
9226   video_stream_encoder->SetSource(
9227       &video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
9228 
9229   EXPECT_CALL(*adapter_ptr, OnFrame);
9230   auto buffer = rtc::make_ref_counted<NV12Buffer>(/*width=*/16, /*height=*/16);
9231   video_source.IncomingCapturedFrame(
9232       VideoFrame::Builder().set_video_frame_buffer(std::move(buffer)).build());
9233 }
9234 
9235 TEST(VideoStreamEncoderFrameCadenceTest, UsesFrameCadenceAdapterForFrameRate) {
9236   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9237   auto* adapter_ptr = adapter.get();
9238   test::FrameForwarder video_source;
9239   SimpleVideoStreamEncoderFactory factory;
9240   FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback =
9241       nullptr;
9242   EXPECT_CALL(*adapter_ptr, Initialize)
9243       .WillOnce(Invoke([&video_stream_encoder_callback](
9244                            FrameCadenceAdapterInterface::Callback* callback) {
9245         video_stream_encoder_callback = callback;
9246       }));
9247   TaskQueueBase* encoder_queue = nullptr;
9248   auto video_stream_encoder =
9249       factory.Create(std::move(adapter), &encoder_queue);
9250 
9251   // This is just to make the VSE operational. We'll feed a frame directly
9252   // through the callback interface.
9253   video_stream_encoder->SetSource(
9254       &video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
9255 
9256   VideoEncoderConfig video_encoder_config;
9257   test::FillEncoderConfiguration(kVideoCodecGeneric, 1, &video_encoder_config);
9258   video_stream_encoder->ConfigureEncoder(std::move(video_encoder_config),
9259                                          /*max_data_payload_length=*/1000);
9260 
9261   EXPECT_CALL(*adapter_ptr, GetInputFrameRateFps);
9262   EXPECT_CALL(*adapter_ptr, UpdateFrameRate);
9263   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/1);
9264   factory.DepleteTaskQueues();
9265 }
9266 
9267 TEST(VideoStreamEncoderFrameCadenceTest,
9268      DeactivatesActivatesLayersOnBitrateChanges) {
9269   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9270   auto* adapter_ptr = adapter.get();
9271   SimpleVideoStreamEncoderFactory factory;
9272   FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback =
9273       nullptr;
9274   EXPECT_CALL(*adapter_ptr, Initialize)
9275       .WillOnce(Invoke([&video_stream_encoder_callback](
9276                            FrameCadenceAdapterInterface::Callback* callback) {
9277         video_stream_encoder_callback = callback;
9278       }));
9279   TaskQueueBase* encoder_queue = nullptr;
9280   auto video_stream_encoder =
9281       factory.Create(std::move(adapter), &encoder_queue);
9282 
9283   // Configure 2 simulcast layers. FillEncoderConfiguration sets the min
9284   // bitrates to {150000, 450000} bps.
9285   VideoEncoderConfig video_encoder_config;
9286   test::FillEncoderConfiguration(kVideoCodecVP8, 2, &video_encoder_config);
9287   video_stream_encoder->ConfigureEncoder(video_encoder_config.Copy(),
9288                                          kMaxPayloadLength);
9289   // Ensure an encoder is created.
9290   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/1);
9291 
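  // The enable/disable expectations below follow from the min bitrates
  // configured above ({150000, 450000}): at 1000 kbps both layers can be
  // served, at 200 kbps only the lowest layer fits its minimum, and at zero
  // bitrate the stream is suspended so all layers are reported disabled. The
  // exact split between layers is up to the rate allocator; only the per-layer
  // enabled state is asserted here.
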
9292   // Both layers enabled at 1 Mbit/s.
9293   video_stream_encoder->OnBitrateUpdated(
9294       DataRate::KilobitsPerSec(1000), DataRate::KilobitsPerSec(1000),
9295       DataRate::KilobitsPerSec(1000), 0, 0, 0);
9296   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(0, /*enabled=*/true));
9297   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(1, /*enabled=*/true));
9298   factory.DepleteTaskQueues();
9299   Mock::VerifyAndClearExpectations(adapter_ptr);
9300 
9301   // Layer 1 disabled at 200 kbit/s.
9302   video_stream_encoder->OnBitrateUpdated(
9303       DataRate::KilobitsPerSec(200), DataRate::KilobitsPerSec(200),
9304       DataRate::KilobitsPerSec(200), 0, 0, 0);
9305   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(0, /*enabled=*/true));
9306   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(1, /*enabled=*/false));
9307   factory.DepleteTaskQueues();
9308   Mock::VerifyAndClearExpectations(adapter_ptr);
9309 
9310   // All layers are disabled when the video is suspended (zero bitrate).
9311   video_stream_encoder->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(),
9312                                          DataRate::Zero(), 0, 0, 0);
9313   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(0, /*enabled=*/false));
9314   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(1, /*enabled=*/false));
9315   factory.DepleteTaskQueues();
9316   Mock::VerifyAndClearExpectations(adapter_ptr);
9317 
9318   // Both layers enabled again once back at 1 Mbit/s.
9319   video_stream_encoder->OnBitrateUpdated(
9320       DataRate::KilobitsPerSec(1000), DataRate::KilobitsPerSec(1000),
9321       DataRate::KilobitsPerSec(1000), 0, 0, 0);
9322   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(0, /*enabled=*/true));
9323   EXPECT_CALL(*adapter_ptr, UpdateLayerStatus(1, /*enabled=*/true));
9324   factory.DepleteTaskQueues();
9325 }
9326 
9327 TEST(VideoStreamEncoderFrameCadenceTest, UpdatesQualityConvergence) {
9328   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9329   auto* adapter_ptr = adapter.get();
9330   SimpleVideoStreamEncoderFactory factory;
9331   FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback =
9332       nullptr;
9333   EXPECT_CALL(*adapter_ptr, Initialize)
9334       .WillOnce(Invoke([&video_stream_encoder_callback](
9335                            FrameCadenceAdapterInterface::Callback* callback) {
9336         video_stream_encoder_callback = callback;
9337       }));
9338   TaskQueueBase* encoder_queue = nullptr;
9339   auto video_stream_encoder =
9340       factory.Create(std::move(adapter), &encoder_queue);
9341 
9342   // Configure 2 simulcast layers and set up 1 Mbit/s to unpause the encoder.
9343   VideoEncoderConfig video_encoder_config;
9344   test::FillEncoderConfiguration(kVideoCodecVP8, 2, &video_encoder_config);
9345   video_stream_encoder->ConfigureEncoder(video_encoder_config.Copy(),
9346                                          kMaxPayloadLength);
9347   video_stream_encoder->OnBitrateUpdated(
9348       DataRate::KilobitsPerSec(1000), DataRate::KilobitsPerSec(1000),
9349       DataRate::KilobitsPerSec(1000), 0, 0, 0);
9350 
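  // In the expectations below, per-layer quality convergence is derived from
  // the encoded QP produced by the EncodeHook overrides: a QP at
  // kVp8SteadyStateQpThreshold is reported as converged, while a QP above the
  // threshold is not.
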
9351   // Pass a frame whose encoded result has not converged in quality.
9352   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/1);
9353   EXPECT_CALL(factory.GetMockFakeEncoder(), EncodeHook)
9354       .WillRepeatedly(Invoke([](EncodedImage& encoded_image,
9355                                 rtc::scoped_refptr<EncodedImageBuffer> buffer) {
9356         encoded_image.qp_ = kVp8SteadyStateQpThreshold + 1;
9357         CodecSpecificInfo codec_specific;
9358         codec_specific.codecType = kVideoCodecVP8;
9359         return codec_specific;
9360       }));
9361   EXPECT_CALL(*adapter_ptr, UpdateLayerQualityConvergence(0, false));
9362   EXPECT_CALL(*adapter_ptr, UpdateLayerQualityConvergence(1, false));
9363   factory.DepleteTaskQueues();
9364   Mock::VerifyAndClearExpectations(adapter_ptr);
9365   Mock::VerifyAndClearExpectations(&factory.GetMockFakeEncoder());
9366 
9367   // Pass a frame which converges in layer 0 and not in layer 1.
9368   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/2);
9369   EXPECT_CALL(factory.GetMockFakeEncoder(), EncodeHook)
9370       .WillRepeatedly(Invoke([](EncodedImage& encoded_image,
9371                                 rtc::scoped_refptr<EncodedImageBuffer> buffer) {
9372         // This puts spatial index 0 content at target quality, while index 1
9373         // content stays above the threshold.
9374         encoded_image.qp_ = kVp8SteadyStateQpThreshold +
9375                             (encoded_image.SpatialIndex() == 0 ? 0 : 1);
9376         CodecSpecificInfo codec_specific;
9377         codec_specific.codecType = kVideoCodecVP8;
9378         return codec_specific;
9379       }));
9380   EXPECT_CALL(*adapter_ptr, UpdateLayerQualityConvergence(0, true));
9381   EXPECT_CALL(*adapter_ptr, UpdateLayerQualityConvergence(1, false));
9382   factory.DepleteTaskQueues();
9383   Mock::VerifyAndClearExpectations(adapter_ptr);
9384   Mock::VerifyAndClearExpectations(&factory.GetMockFakeEncoder());
9385 }
9386 
9387 TEST(VideoStreamEncoderFrameCadenceTest,
9388      RequestsRefreshFramesWhenCadenceAdapterInstructs) {
9389   auto adapter = std::make_unique<MockFrameCadenceAdapter>();
9390   auto* adapter_ptr = adapter.get();
9391   MockVideoSourceInterface mock_source;
9392   SimpleVideoStreamEncoderFactory factory;
9393   FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback =
9394       nullptr;
9395   EXPECT_CALL(*adapter_ptr, Initialize)
9396       .WillOnce(Invoke([&video_stream_encoder_callback](
9397                            FrameCadenceAdapterInterface::Callback* callback) {
9398         video_stream_encoder_callback = callback;
9399       }));
9400   TaskQueueBase* encoder_queue = nullptr;
9401   auto video_stream_encoder =
9402       factory.Create(std::move(adapter), &encoder_queue);
9403   video_stream_encoder->SetSource(
9404       &mock_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
9405   VideoEncoderConfig config;
9406   config.content_type = VideoEncoderConfig::ContentType::kScreen;
9407   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &config);
9408   video_stream_encoder->ConfigureEncoder(std::move(config), 0);
9409   PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/2);
9410   // Ensure the encoder is set up.
9411   factory.DepleteTaskQueues();
9412 
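  // SendKeyFrame() is forwarded to the adapter's ProcessKeyFrameRequest(); the
  // source only gets a RequestRefreshFrame() when the adapter asks for one
  // (first block below), not unconditionally (second block).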
9413   EXPECT_CALL(*adapter_ptr, ProcessKeyFrameRequest)
9414       .WillOnce(Invoke([video_stream_encoder_callback] {
9415         video_stream_encoder_callback->RequestRefreshFrame();
9416       }));
9417   EXPECT_CALL(mock_source, RequestRefreshFrame);
9418   video_stream_encoder->SendKeyFrame();
9419   factory.DepleteTaskQueues();
9420   Mock::VerifyAndClearExpectations(adapter_ptr);
9421   Mock::VerifyAndClearExpectations(&mock_source);
9422 
9423   EXPECT_CALL(*adapter_ptr, ProcessKeyFrameRequest);
9424   EXPECT_CALL(mock_source, RequestRefreshFrame).Times(0);
9425   video_stream_encoder->SendKeyFrame();
9426   factory.DepleteTaskQueues();
9427 }
9428 
9429 TEST(VideoStreamEncoderFrameCadenceTest,
9430      RequestsRefreshFrameForEarlyZeroHertzKeyFrameRequest) {
9431   SimpleVideoStreamEncoderFactory factory;
9432   auto encoder_queue =
9433       factory.GetTimeController()->GetTaskQueueFactory()->CreateTaskQueue(
9434           "EncoderQueue", TaskQueueFactory::Priority::NORMAL);
9435 
9436   // Enables zero-hertz mode.
9437   test::ScopedKeyValueConfig field_trials(
9438       "WebRTC-ZeroHertzScreenshare/Enabled/");
9439   auto adapter = FrameCadenceAdapterInterface::Create(
9440       factory.GetTimeController()->GetClock(), encoder_queue.get(),
9441       field_trials);
9442   FrameCadenceAdapterInterface* adapter_ptr = adapter.get();
9443 
9444   MockVideoSourceInterface mock_source;
9445   auto video_stream_encoder = factory.CreateWithEncoderQueue(
9446       std::move(adapter), std::move(encoder_queue), &field_trials);
9447 
9448   video_stream_encoder->SetSource(
9449       &mock_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
9450   VideoEncoderConfig config;
9451   config.content_type = VideoEncoderConfig::ContentType::kScreen;
9452   test::FillEncoderConfiguration(kVideoCodecVP8, 1, &config);
9453   video_stream_encoder->ConfigureEncoder(std::move(config), 0);
9454 
9455   // Eventually expect a refresh frame request when a key frame is requested
9456   // before zero-hertz mode has been initialized. This can happen in practice
9457   // because the threads invoking key frame requests and constraints setup
9458   // aren't synchronized.
9459   EXPECT_CALL(mock_source, RequestRefreshFrame);
9460   video_stream_encoder->SendKeyFrame();
9461   constexpr int kMaxFps = 30;
9462   adapter_ptr->OnConstraintsChanged(VideoTrackSourceConstraints{0, kMaxFps});
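  // Advance by what is presumably the discarded-frame refresh period expressed
  // in frame intervals at kMaxFps; this should be long enough for the pending
  // refresh frame request to be serviced.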
9463   factory.GetTimeController()->AdvanceTime(
9464       TimeDelta::Seconds(1) *
9465       FrameCadenceAdapterInterface::kOnDiscardedFrameRefreshFramePeriod /
9466       kMaxFps);
9467 }
9468 
9469 }  // namespace webrtc
9470