/*
 * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "video/video_receive_stream2.h"

#include <stdlib.h>
#include <string.h>

#include <algorithm>
#include <memory>
#include <set>
#include <string>
#include <utility>

#include "absl/algorithm/container.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/crypto/frame_decryptor_interface.h"
#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_image.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "call/rtp_stream_receiver_controller_interface.h"
#include "call/rtx_receive_stream.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/timing/timing.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
#include "video/call_stats2.h"
#include "video/frame_dumping_decoder.h"
#include "video/receive_statistics_proxy2.h"
#include "video/render/incoming_video_stream.h"
#include "video/task_queue_frame_decode_scheduler.h"

namespace webrtc {

namespace internal {

namespace {

// Valid range of the base minimum playout delay that can be requested via
// SetBaseMinimumPlayoutDelayMs(); values outside this range are rejected.
constexpr TimeDelta kMinBaseMinimumDelay = TimeDelta::Zero();
constexpr TimeDelta kMaxBaseMinimumDelay = TimeDelta::Seconds(10);
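// For example, SetBaseMinimumPlayoutDelayMs(250) falls inside this range and
// feeds into UpdatePlayoutDelays(), whereas SetBaseMinimumPlayoutDelayMs(15000)
// exceeds kMaxBaseMinimumDelay and is rejected (the call returns false).
// Illustrative values only.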

// Concrete instance of RecordableEncodedFrame wrapping needed content
// from EncodedFrame.
class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame {
 public:
  explicit WebRtcRecordableEncodedFrame(
      const EncodedFrame& frame,
      RecordableEncodedFrame::EncodedResolution resolution)
      : buffer_(frame.GetEncodedData()),
        render_time_ms_(frame.RenderTime()),
        codec_(frame.CodecSpecific()->codecType),
        is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey),
        resolution_(resolution) {
    if (frame.ColorSpace()) {
      color_space_ = *frame.ColorSpace();
    }
  }

  // VideoEncodedSinkInterface::FrameBuffer
  rtc::scoped_refptr<const EncodedImageBufferInterface> encoded_buffer()
      const override {
    return buffer_;
  }

  absl::optional<webrtc::ColorSpace> color_space() const override {
    return color_space_;
  }

  VideoCodecType codec() const override { return codec_; }

  bool is_key_frame() const override { return is_key_frame_; }

  EncodedResolution resolution() const override { return resolution_; }

  Timestamp render_time() const override {
    return Timestamp::Millis(render_time_ms_);
  }

 private:
  rtc::scoped_refptr<EncodedImageBufferInterface> buffer_;
  int64_t render_time_ms_;
  VideoCodecType codec_;
  bool is_key_frame_;
  EncodedResolution resolution_;
  absl::optional<webrtc::ColorSpace> color_space_;
};

RenderResolution InitialDecoderResolution(const FieldTrialsView& field_trials) {
  FieldTrialOptional<int> width("w");
  FieldTrialOptional<int> height("h");
  ParseFieldTrial({&width, &height},
                  field_trials.Lookup("WebRTC-Video-InitialDecoderResolution"));
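  // For example, a trial value such as "w:1280,h:720" (illustrative values)
  // would make the lookup above yield width=1280 and height=720, so newly
  // created decoders start out sized for 720p until the first frame arrives.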
  if (width && height) {
    return RenderResolution(width.Value(), height.Value());
  }

  return RenderResolution(320, 180);
}

// Video decoder class to be used for unknown codecs. Doesn't support decoding
// but logs messages to LS_ERROR.
class NullVideoDecoder : public webrtc::VideoDecoder {
 public:
  bool Configure(const Settings& settings) override {
    RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder.";
    return true;
  }

  int32_t Decode(const webrtc::EncodedImage& input_image,
                 bool missing_frames,
                 int64_t render_time_ms) override {
    RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding.";
    return WEBRTC_VIDEO_CODEC_OK;
  }

  int32_t RegisterDecodeCompleteCallback(
      webrtc::DecodedImageCallback* callback) override {
    RTC_LOG(LS_ERROR)
        << "Can't register decode complete callback on NullVideoDecoder.";
    return WEBRTC_VIDEO_CODEC_OK;
  }

  int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; }

  const char* ImplementationName() const override {
    return "NullVideoDecoder";
  }
};

bool IsKeyFrameAndUnspecifiedResolution(const EncodedFrame& frame) {
  return frame.FrameType() == VideoFrameType::kVideoFrameKey &&
         frame.EncodedImage()._encodedWidth == 0 &&
         frame.EncodedImage()._encodedHeight == 0;
}

std::string OptionalDelayToLogString(const absl::optional<TimeDelta> opt) {
  return opt.has_value() ? ToLogString(*opt) : "<unset>";
}

}  // namespace

TimeDelta DetermineMaxWaitForFrame(TimeDelta rtp_history, bool is_keyframe) {
  // An (arbitrary) conversion factor between the remotely signalled NACK
  // buffer time (which defaults to 1000 ms if not present) and the maximum
  // time we wait for a remote frame. Chosen so that existing defaults are
  // unchanged when rtx-time is not signalled.
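  // For example, if the remote end signals a 500 ms NACK history and
  // 3 * 500 ms is still below kMaxWaitForFrame, we wait 500 ms for a keyframe
  // and 1500 ms for any other frame; otherwise the kMaxWaitFor* defaults
  // apply (worked example for illustration).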
  const int conversion_factor = 3;
  if (rtp_history > TimeDelta::Zero() &&
      conversion_factor * rtp_history < kMaxWaitForFrame) {
    return is_keyframe ? rtp_history : conversion_factor * rtp_history;
  }
  return is_keyframe ? kMaxWaitForKeyFrame : kMaxWaitForFrame;
}

VideoReceiveStream2::VideoReceiveStream2(
    TaskQueueFactory* task_queue_factory,
    Call* call,
    int num_cpu_cores,
    PacketRouter* packet_router,
    VideoReceiveStreamInterface::Config config,
    CallStats* call_stats,
    Clock* clock,
    std::unique_ptr<VCMTiming> timing,
    NackPeriodicProcessor* nack_periodic_processor,
    DecodeSynchronizer* decode_sync,
    RtcEventLog* event_log)
    : task_queue_factory_(task_queue_factory),
      transport_adapter_(config.rtcp_send_transport),
      config_(std::move(config)),
      num_cpu_cores_(num_cpu_cores),
      call_(call),
      clock_(clock),
      call_stats_(call_stats),
      source_tracker_(clock_),
      stats_proxy_(remote_ssrc(), clock_, call->worker_thread()),
      rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
      timing_(std::move(timing)),
      video_receiver_(clock_, timing_.get(), call->trials()),
      rtp_video_stream_receiver_(call->worker_thread(),
                                 clock_,
                                 &transport_adapter_,
                                 call_stats->AsRtcpRttStats(),
                                 packet_router,
                                 &config_,
                                 rtp_receive_statistics_.get(),
                                 &stats_proxy_,
                                 &stats_proxy_,
                                 nack_periodic_processor,
                                 this,  // OnCompleteFrameCallback
                                 std::move(config_.frame_decryptor),
                                 std::move(config_.frame_transformer),
                                 call->trials(),
                                 event_log),
      rtp_stream_sync_(call->worker_thread(), this),
      max_wait_for_keyframe_(DetermineMaxWaitForFrame(
          TimeDelta::Millis(config_.rtp.nack.rtp_history_ms),
          true)),
      max_wait_for_frame_(DetermineMaxWaitForFrame(
          TimeDelta::Millis(config_.rtp.nack.rtp_history_ms),
          false)),
      decode_queue_(task_queue_factory_->CreateTaskQueue(
          "DecodingQueue",
          TaskQueueFactory::Priority::HIGH)) {
  RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString();

  RTC_DCHECK(call_->worker_thread());
  RTC_DCHECK(config_.renderer);
  RTC_DCHECK(call_stats_);
  packet_sequence_checker_.Detach();

  RTC_DCHECK(!config_.decoders.empty());
  RTC_CHECK(config_.decoder_factory);
  std::set<int> decoder_payload_types;
  for (const Decoder& decoder : config_.decoders) {
    RTC_CHECK(decoder_payload_types.find(decoder.payload_type) ==
              decoder_payload_types.end())
        << "Duplicate payload type (" << decoder.payload_type
        << ") for different decoders.";
    decoder_payload_types.insert(decoder.payload_type);
  }

  timing_->set_render_delay(TimeDelta::Millis(config_.render_delay_ms));

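  // If a DecodeSynchronizer was provided, frame decodes are released in step
  // with its metronome; otherwise each stream schedules its own decode
  // callbacks on the worker thread via TaskQueueFrameDecodeScheduler.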
  std::unique_ptr<FrameDecodeScheduler> scheduler =
      decode_sync ? decode_sync->CreateSynchronizedFrameScheduler()
                  : std::make_unique<TaskQueueFrameDecodeScheduler>(
                        clock, call_->worker_thread());
  buffer_ = std::make_unique<VideoStreamBufferController>(
      clock_, call_->worker_thread(), timing_.get(), &stats_proxy_, this,
      max_wait_for_keyframe_, max_wait_for_frame_, std::move(scheduler),
      call_->trials());

  if (rtx_ssrc()) {
    rtx_receive_stream_ = std::make_unique<RtxReceiveStream>(
        &rtp_video_stream_receiver_,
        std::move(config_.rtp.rtx_associated_payload_types), remote_ssrc(),
        rtp_receive_statistics_.get());
  } else {
    rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc(), true);
  }
}

VideoReceiveStream2::~VideoReceiveStream2() {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString();
  RTC_DCHECK(!media_receiver_);
  RTC_DCHECK(!rtx_receiver_);
  Stop();
}

void VideoReceiveStream2::RegisterWithTransport(
    RtpStreamReceiverControllerInterface* receiver_controller) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  RTC_DCHECK(!media_receiver_);
  RTC_DCHECK(!rtx_receiver_);

  // Register with RtpStreamReceiverController.
  media_receiver_ = receiver_controller->CreateReceiver(
      remote_ssrc(), &rtp_video_stream_receiver_);
  if (rtx_ssrc()) {
    RTC_DCHECK(rtx_receive_stream_);
    rtx_receiver_ = receiver_controller->CreateReceiver(
        rtx_ssrc(), rtx_receive_stream_.get());
  }
}

void VideoReceiveStream2::UnregisterFromTransport() {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  media_receiver_.reset();
  rtx_receiver_.reset();
}

const std::string& VideoReceiveStream2::sync_group() const {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  return config_.sync_group;
}

void VideoReceiveStream2::SignalNetworkState(NetworkState state) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  rtp_video_stream_receiver_.SignalNetworkState(state);
}

bool VideoReceiveStream2::DeliverRtcp(const uint8_t* packet, size_t length) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  return rtp_video_stream_receiver_.DeliverRtcp(packet, length);
}

void VideoReceiveStream2::SetSync(Syncable* audio_syncable) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_stream_sync_.ConfigureSync(audio_syncable);
}

void VideoReceiveStream2::SetLocalSsrc(uint32_t local_ssrc) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  if (config_.rtp.local_ssrc == local_ssrc)
    return;

  // TODO(tommi): Make sure we don't rely on local_ssrc via the config struct.
  const_cast<uint32_t&>(config_.rtp.local_ssrc) = local_ssrc;
  rtp_video_stream_receiver_.OnLocalSsrcChange(local_ssrc);
}

void VideoReceiveStream2::Start() {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);

  if (decoder_running_) {
    return;
  }

  const bool protected_by_fec =
      config_.rtp.protected_by_flexfec ||
      rtp_video_stream_receiver_.ulpfec_payload_type() != -1;

  if (config_.rtp.nack.rtp_history_ms > 0 && protected_by_fec) {
    buffer_->SetProtectionMode(kProtectionNackFEC);
  }

  transport_adapter_.Enable();
  rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr;
  if (config_.enable_prerenderer_smoothing) {
    incoming_video_stream_.reset(new IncomingVideoStream(
        task_queue_factory_, config_.render_delay_ms, this));
    renderer = incoming_video_stream_.get();
  } else {
    renderer = this;
  }

  for (const Decoder& decoder : config_.decoders) {
    VideoDecoder::Settings settings;
    settings.set_codec_type(
        PayloadStringToCodecType(decoder.video_format.name));
    settings.set_max_render_resolution(
        InitialDecoderResolution(call_->trials()));
    settings.set_number_of_cores(num_cpu_cores_);

    const bool raw_payload =
        config_.rtp.raw_payload_types.count(decoder.payload_type) > 0;
    {
      // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
      RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
      rtp_video_stream_receiver_.AddReceiveCodec(
          decoder.payload_type, settings.codec_type(),
          decoder.video_format.parameters, raw_payload);
    }
    video_receiver_.RegisterReceiveCodec(decoder.payload_type, settings);
  }

  RTC_DCHECK(renderer != nullptr);
  video_stream_decoder_.reset(
      new VideoStreamDecoder(&video_receiver_, &stats_proxy_, renderer));

  // Make sure we register as a stats observer *after* we've prepared the
  // `video_stream_decoder_`.
  call_stats_->RegisterStatsObserver(this);

  // Start decoding on task queue.
  stats_proxy_.DecoderThreadStarting();
  decode_queue_.PostTask([this] {
    RTC_DCHECK_RUN_ON(&decode_queue_);
    decoder_stopped_ = false;
  });
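  // The first decode after Start() must begin with a keyframe, hence the
  // `true` (keyframe required) passed to StartNextDecode() below.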
  buffer_->StartNextDecode(true);
  decoder_running_ = true;

  {
    // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
    RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
    rtp_video_stream_receiver_.StartReceive();
  }
}

void VideoReceiveStream2::Stop() {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);

  // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
  // Also call `GetUniqueFramesSeen()` at the same time (since it's a counter
  // that's updated on the network thread).
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_video_stream_receiver_.StopReceive();

  stats_proxy_.OnUniqueFramesCounted(
      rtp_video_stream_receiver_.GetUniqueFramesSeen());

  buffer_->Stop();
  call_stats_->DeregisterStatsObserver(this);

  if (decoder_running_) {
    rtc::Event done;
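    // Block until the decode queue has released its decoder references so
    // that no decoder is used after Stop() returns.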
    decode_queue_.PostTask([this, &done] {
      RTC_DCHECK_RUN_ON(&decode_queue_);
      // Set `decoder_stopped_` before deregistering all decoders. This means
      // that any pending encoded frame will return early without trying to
      // access the decoder database.
      decoder_stopped_ = true;
      for (const Decoder& decoder : config_.decoders) {
        video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type);
      }
      done.Set();
    });
    done.Wait(rtc::Event::kForever);

    decoder_running_ = false;
    stats_proxy_.DecoderThreadStopped();

    UpdateHistograms();
  }

  // TODO(bugs.webrtc.org/11993): Make these calls on the network thread.
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_video_stream_receiver_.RemoveReceiveCodecs();
  video_receiver_.DeregisterReceiveCodecs();

  video_stream_decoder_.reset();
  incoming_video_stream_.reset();
  transport_adapter_.Disable();
}

void VideoReceiveStream2::SetRtpExtensions(
    std::vector<RtpExtension> extensions) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_video_stream_receiver_.SetRtpExtensions(extensions);
  // TODO(tommi): We don't use the `c.rtp.extensions` member in the
  // VideoReceiveStream2 class, so this const_cast<> is a temporary hack to keep
  // things consistent between VideoReceiveStream2 and RtpVideoStreamReceiver2
  // for debugging purposes. The `packet_sequence_checker_` gives us assurances
  // that from a threading perspective, this is still safe. The accessors that
  // give read access to this state, run behind the same check.
  // The alternative to the const_cast<> would be to make `config_` non-const
  // and guarded by `packet_sequence_checker_`. However the scope of that state
  // is huge (the whole Config struct), and would require all methods that touch
  // the struct to abide the needs of the `extensions` member.
  const_cast<std::vector<RtpExtension>&>(config_.rtp.extensions) =
      std::move(extensions);
}

RtpHeaderExtensionMap VideoReceiveStream2::GetRtpExtensionMap() const {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  return rtp_video_stream_receiver_.GetRtpExtensions();
}

bool VideoReceiveStream2::transport_cc() const {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  return config_.rtp.transport_cc;
}

void VideoReceiveStream2::SetTransportCc(bool transport_cc) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  // TODO(tommi): Stop using the config struct for the internal state.
  const_cast<bool&>(config_.rtp.transport_cc) = transport_cc;
}

void VideoReceiveStream2::SetRtcpMode(RtcpMode mode) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  // TODO(tommi): Stop using the config struct for the internal state.
  const_cast<RtcpMode&>(config_.rtp.rtcp_mode) = mode;
  rtp_video_stream_receiver_.SetRtcpMode(mode);
}

void VideoReceiveStream2::SetFlexFecProtection(
    RtpPacketSinkInterface* flexfec_sink) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_video_stream_receiver_.SetPacketSink(flexfec_sink);
  // TODO(tommi): Stop using the config struct for the internal state.
  const_cast<RtpPacketSinkInterface*&>(config_.rtp.packet_sink_) = flexfec_sink;
  const_cast<bool&>(config_.rtp.protected_by_flexfec) =
      (flexfec_sink != nullptr);
}

void VideoReceiveStream2::SetLossNotificationEnabled(bool enabled) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  // TODO(tommi): Stop using the config struct for the internal state.
  const_cast<bool&>(config_.rtp.lntf.enabled) = enabled;
  rtp_video_stream_receiver_.SetLossNotificationEnabled(enabled);
}

void VideoReceiveStream2::SetNackHistory(TimeDelta history) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  RTC_DCHECK_GE(history.ms(), 0);

  if (config_.rtp.nack.rtp_history_ms == history.ms())
    return;

  // TODO(tommi): Stop using the config struct for the internal state.
  const_cast<int&>(config_.rtp.nack.rtp_history_ms) = history.ms();

  const bool protected_by_fec =
      config_.rtp.protected_by_flexfec ||
      rtp_video_stream_receiver_.ulpfec_payload_type() != -1;

  buffer_->SetProtectionMode(history.ms() > 0 && protected_by_fec
                                 ? kProtectionNackFEC
                                 : kProtectionNack);

  rtp_video_stream_receiver_.SetNackHistory(history);
  TimeDelta max_wait_for_keyframe = DetermineMaxWaitForFrame(history, true);
  TimeDelta max_wait_for_frame = DetermineMaxWaitForFrame(history, false);

  max_wait_for_keyframe_ = max_wait_for_keyframe;
  max_wait_for_frame_ = max_wait_for_frame;

  buffer_->SetMaxWaits(max_wait_for_keyframe, max_wait_for_frame);
}

void VideoReceiveStream2::SetProtectionPayloadTypes(int red_payload_type,
                                                    int ulpfec_payload_type) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_video_stream_receiver_.SetProtectionPayloadTypes(red_payload_type,
                                                       ulpfec_payload_type);
}

void VideoReceiveStream2::SetRtcpXr(Config::Rtp::RtcpXr rtcp_xr) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  rtp_video_stream_receiver_.SetReferenceTimeReport(
      rtcp_xr.receiver_reference_time_report);
}

void VideoReceiveStream2::SetAssociatedPayloadTypes(
    std::map<int, int> associated_payload_types) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);

  // For setting the associated payload types after construction, we currently
  // assume that the rtx_ssrc cannot change. In such a case we can know that
  // if the ssrc is non-0, a `rtx_receive_stream_` instance has previously been
  // created and configured (and is referenced by `rtx_receiver_`) and we can
  // simply reconfigure it.
  // If rtx_ssrc is 0 however, we ignore this call.
  if (!rtx_ssrc())
    return;

  rtx_receive_stream_->SetAssociatedPayloadTypes(
      std::move(associated_payload_types));
}

void VideoReceiveStream2::CreateAndRegisterExternalDecoder(
    const Decoder& decoder) {
  TRACE_EVENT0("webrtc",
               "VideoReceiveStream2::CreateAndRegisterExternalDecoder");
  std::unique_ptr<VideoDecoder> video_decoder =
      config_.decoder_factory->CreateVideoDecoder(decoder.video_format);
  // If we still have no valid decoder, we have to create a "Null" decoder
  // that ignores all calls. The reason we can get into this state is that the
  // old decoder factory interface doesn't have a way to query supported
  // codecs.
  if (!video_decoder) {
    video_decoder = std::make_unique<NullVideoDecoder>();
  }

  std::string decoded_output_file =
      call_->trials().Lookup("WebRTC-DecoderDataDumpDirectory");
  // Because '/' can't be used inside a field trial parameter, we use ';'
  // instead. This is only relevant to the WebRTC-DecoderDataDumpDirectory
  // field trial; ';' is chosen arbitrarily. Even though it's a legal character
  // in some file systems, we can sacrifice the ability to use it in the path
  // to the dumped video, since this is a developers-only debugging feature.
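  // For example, a trial value of ";tmp;webrtc_dumps" (illustrative) maps to
  // the directory "/tmp/webrtc_dumps", and this stream would then dump to
  // /tmp/webrtc_dumps/webrtc_receive_stream_<ssrc>-<timestamp>.ivf.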
  absl::c_replace(decoded_output_file, ';', '/');
  if (!decoded_output_file.empty()) {
    char filename_buffer[256];
    rtc::SimpleStringBuilder ssb(filename_buffer);
    ssb << decoded_output_file << "/webrtc_receive_stream_" << remote_ssrc()
        << "-" << rtc::TimeMicros() << ".ivf";
    video_decoder = CreateFrameDumpingDecoderWrapper(
        std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str()));
  }

  video_receiver_.RegisterExternalDecoder(std::move(video_decoder),
                                          decoder.payload_type);
}

VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  VideoReceiveStream2::Stats stats = stats_proxy_.GetStats();
  stats.total_bitrate_bps = 0;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(stats.ssrc);
  if (statistician) {
    stats.rtp_stats = statistician->GetStats();
    stats.total_bitrate_bps = statistician->BitrateReceived();
  }
  if (rtx_ssrc()) {
    StreamStatistician* rtx_statistician =
        rtp_receive_statistics_->GetStatistician(rtx_ssrc());
    if (rtx_statistician)
      stats.total_bitrate_bps += rtx_statistician->BitrateReceived();
  }
  return stats;
}

void VideoReceiveStream2::UpdateHistograms() {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  absl::optional<int> fraction_lost;
  StreamDataCounters rtp_stats;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(remote_ssrc());
  if (statistician) {
    fraction_lost = statistician->GetFractionLostInPercent();
    rtp_stats = statistician->GetReceiveStreamDataCounters();
  }
  if (rtx_ssrc()) {
    StreamStatistician* rtx_statistician =
        rtp_receive_statistics_->GetStatistician(rtx_ssrc());
    if (rtx_statistician) {
      StreamDataCounters rtx_stats =
          rtx_statistician->GetReceiveStreamDataCounters();
      stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, &rtx_stats);
      return;
    }
  }
  stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr);
}

bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  TimeDelta delay = TimeDelta::Millis(delay_ms);
  if (delay < kMinBaseMinimumDelay || delay > kMaxBaseMinimumDelay) {
    return false;
  }

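  // The base minimum delay is combined with the per-frame and sync minimum
  // delays in UpdatePlayoutDelays(); the largest of the three wins.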
  base_minimum_playout_delay_ = delay;
  UpdatePlayoutDelays();
  return true;
}

int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  constexpr TimeDelta kDefaultBaseMinPlayoutDelay = TimeDelta::Millis(-1);
  // Unset must be -1.
  static_assert(-1 == kDefaultBaseMinPlayoutDelay.ms(), "");
  return base_minimum_playout_delay_.value_or(kDefaultBaseMinPlayoutDelay).ms();
}

void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) {
  VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime());

  // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for
  // `video_frame.packet_infos`. But VideoFrame is const qualified here.

  call_->worker_thread()->PostTask(
      SafeTask(task_safety_.flag(), [frame_meta, this]() {
        RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
        int64_t video_playout_ntp_ms;
        int64_t sync_offset_ms;
        double estimated_freq_khz;
        if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
                frame_meta.rtp_timestamp, frame_meta.render_time_ms(),
                &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
          stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
                                           estimated_freq_khz);
        }
        stats_proxy_.OnRenderedFrame(frame_meta);
      }));

  source_tracker_.OnFrameDelivered(video_frame.packet_infos());
  config_.renderer->OnFrame(video_frame);
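  // If a recordable encoded keyframe with unknown resolution is pending (see
  // DecodeAndMaybeDispatchEncodedFrame()), remember this decoded frame's
  // dimensions so the buffered encoded frames can be flushed with them.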
  webrtc::MutexLock lock(&pending_resolution_mutex_);
  if (pending_resolution_.has_value()) {
    if (!pending_resolution_->empty() &&
        (video_frame.width() != static_cast<int>(pending_resolution_->width) ||
         video_frame.height() !=
             static_cast<int>(pending_resolution_->height))) {
      RTC_LOG(LS_WARNING)
          << "Recordable encoded frame stream resolution was reported as "
          << pending_resolution_->width << "x" << pending_resolution_->height
          << " but the stream is now " << video_frame.width() << "x"
          << video_frame.height();
    }
    pending_resolution_ = RecordableEncodedFrame::EncodedResolution{
        static_cast<unsigned>(video_frame.width()),
        static_cast<unsigned>(video_frame.height())};
  }
}

void VideoReceiveStream2::SetFrameDecryptor(
    rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
  rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor));
}

void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer(
    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
  rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer(
      std::move(frame_transformer));
}

void VideoReceiveStream2::RequestKeyFrame(Timestamp now) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  // Called from RtpVideoStreamReceiver (rtp_video_stream_receiver_ is
  // ultimately responsible).
  rtp_video_stream_receiver_.RequestKeyFrame();
  last_keyframe_request_ = now;
}

void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);

  const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_;
  if (playout_delay.min_ms >= 0) {
    frame_minimum_playout_delay_ = TimeDelta::Millis(playout_delay.min_ms);
    UpdatePlayoutDelays();
  }
  if (playout_delay.max_ms >= 0) {
    frame_maximum_playout_delay_ = TimeDelta::Millis(playout_delay.max_ms);
    UpdatePlayoutDelays();
  }

  auto last_continuous_pid = buffer_->InsertFrame(std::move(frame));
  if (last_continuous_pid.has_value()) {
    {
      // TODO(bugs.webrtc.org/11993): Call on the network thread.
      RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
      rtp_video_stream_receiver_.FrameContinuous(*last_continuous_pid);
    }
  }
}

void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  // TODO(bugs.webrtc.org/13757): Replace with TimeDelta.
  buffer_->UpdateRtt(max_rtt_ms);
  rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms);
  stats_proxy_.OnRttUpdate(avg_rtt_ms);
}

uint32_t VideoReceiveStream2::id() const {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  return remote_ssrc();
}

absl::optional<Syncable::Info> VideoReceiveStream2::GetInfo() const {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  absl::optional<Syncable::Info> info =
      rtp_video_stream_receiver_.GetSyncInfo();

  if (!info)
    return absl::nullopt;

  info->current_delay_ms = timing_->TargetVideoDelay().ms();
  return info;
}

bool VideoReceiveStream2::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
                                                 int64_t* time_ms) const {
  RTC_DCHECK_NOTREACHED();
  return false;
}

void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs(
    int64_t ntp_timestamp_ms,
    int64_t time_ms) {
  RTC_DCHECK_NOTREACHED();
}

bool VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  syncable_minimum_playout_delay_ = TimeDelta::Millis(delay_ms);
  UpdatePlayoutDelays();
  return true;
}

void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr<EncodedFrame> frame) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  Timestamp now = clock_->CurrentTime();
  const bool keyframe_request_is_due =
      !last_keyframe_request_ ||
      now >= (*last_keyframe_request_ + max_wait_for_keyframe_);
  const bool received_frame_is_keyframe =
      frame->FrameType() == VideoFrameType::kVideoFrameKey;

  // Current OnPreDecode only cares about QP for VP8.
  int qp = -1;
  if (frame->CodecSpecific()->codecType == kVideoCodecVP8) {
    if (!vp8::GetQp(frame->data(), frame->size(), &qp)) {
      RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame";
    }
  }
  stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp);

  decode_queue_.PostTask([this, now, keyframe_request_is_due,
                          received_frame_is_keyframe, frame = std::move(frame),
                          keyframe_required = keyframe_required_]() mutable {
    RTC_DCHECK_RUN_ON(&decode_queue_);
    if (decoder_stopped_)
      return;
    DecodeFrameResult result = HandleEncodedFrameOnDecodeQueue(
        std::move(frame), keyframe_request_is_due, keyframe_required);

    // TODO(bugs.webrtc.org/11993): Make this PostTask to the network thread.
    call_->worker_thread()->PostTask(
        SafeTask(task_safety_.flag(),
                 [this, now, result = std::move(result),
                  received_frame_is_keyframe, keyframe_request_is_due]() {
                   RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
                   keyframe_required_ = result.keyframe_required;

                   if (result.decoded_frame_picture_id) {
                     rtp_video_stream_receiver_.FrameDecoded(
                         *result.decoded_frame_picture_id);
                   }

                   HandleKeyFrameGeneration(received_frame_is_keyframe, now,
                                            result.force_request_key_frame,
                                            keyframe_request_is_due);
                   buffer_->StartNextDecode(keyframe_required_);
                 }));
  });
}

void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  Timestamp now = clock_->CurrentTime();

  absl::optional<int64_t> last_packet_ms =
      rtp_video_stream_receiver_.LastReceivedPacketMs();

  // To avoid spamming keyframe requests for a stream that is not active we
  // check if we have received a packet within the last 5 seconds.
  constexpr TimeDelta kInactiveDuration = TimeDelta::Seconds(5);
  const bool stream_is_active =
      last_packet_ms &&
      now - Timestamp::Millis(*last_packet_ms) < kInactiveDuration;
  if (!stream_is_active)
    stats_proxy_.OnStreamInactive();

  if (stream_is_active && !IsReceivingKeyFrame(now) &&
      (!config_.crypto_options.sframe.require_frame_encryption ||
       rtp_video_stream_receiver_.IsDecryptable())) {
    RTC_LOG(LS_WARNING) << "No decodable frame in " << wait
                        << ", requesting keyframe.";
    RequestKeyFrame(now);
  }

  buffer_->StartNextDecode(keyframe_required_);
}

VideoReceiveStream2::DecodeFrameResult
VideoReceiveStream2::HandleEncodedFrameOnDecodeQueue(
    std::unique_ptr<EncodedFrame> frame,
    bool keyframe_request_is_due,
    bool keyframe_required) {
  RTC_DCHECK_RUN_ON(&decode_queue_);

  bool force_request_key_frame = false;
  absl::optional<int64_t> decoded_frame_picture_id;

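  // Decoders are created lazily, on the decode queue, the first time a frame
  // with a given payload type needs to be decoded.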
  if (!video_receiver_.IsExternalDecoderRegistered(frame->PayloadType())) {
    // Look for the decoder with this payload type.
    for (const Decoder& decoder : config_.decoders) {
      if (decoder.payload_type == frame->PayloadType()) {
        CreateAndRegisterExternalDecoder(decoder);
        break;
      }
    }
  }

  int64_t frame_id = frame->Id();
  int decode_result = DecodeAndMaybeDispatchEncodedFrame(std::move(frame));
  if (decode_result == WEBRTC_VIDEO_CODEC_OK ||
      decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) {
    keyframe_required = false;
    frame_decoded_ = true;

    decoded_frame_picture_id = frame_id;

    if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME)
      force_request_key_frame = true;
  } else if (!frame_decoded_ || !keyframe_required || keyframe_request_is_due) {
    keyframe_required = true;
    // TODO(philipel): Remove this keyframe request when downstream project
    // has been fixed.
    force_request_key_frame = true;
  }

  return DecodeFrameResult{
      .force_request_key_frame = force_request_key_frame,
      .decoded_frame_picture_id = std::move(decoded_frame_picture_id),
      .keyframe_required = keyframe_required,
  };
}

int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame(
    std::unique_ptr<EncodedFrame> frame) {
  RTC_DCHECK_RUN_ON(&decode_queue_);

  // If `buffered_encoded_frames_` grows out of control (=60 queued frames),
  // maybe due to a stuck decoder, we just halt the process here and log the
  // error.
  const bool encoded_frame_output_enabled =
      encoded_frame_buffer_function_ != nullptr &&
      buffered_encoded_frames_.size() < kBufferedEncodedFramesMaxSize;
  EncodedFrame* frame_ptr = frame.get();
  if (encoded_frame_output_enabled) {
    // If we receive a key frame with unset resolution, hold on dispatching the
    // frame and following ones until we know a resolution of the stream.
    // NOTE: The code below has a race where it can report the wrong
    // resolution for keyframes after an initial keyframe of other resolution.
    // However, the only known consumer of this information is the W3C
    // MediaRecorder and it will only use the resolution in the first encoded
    // keyframe from WebRTC, so misreporting is fine.
    buffered_encoded_frames_.push_back(std::move(frame));
    if (buffered_encoded_frames_.size() == kBufferedEncodedFramesMaxSize)
      RTC_LOG(LS_ERROR) << "About to halt recordable encoded frame output due "
                           "to too many buffered frames.";

    webrtc::MutexLock lock(&pending_resolution_mutex_);
    if (IsKeyFrameAndUnspecifiedResolution(*frame_ptr) &&
        !pending_resolution_.has_value())
      pending_resolution_.emplace();
  }

  int decode_result = video_receiver_.Decode(frame_ptr);
  if (encoded_frame_output_enabled) {
    absl::optional<RecordableEncodedFrame::EncodedResolution>
        pending_resolution;
    {
      // Fish out `pending_resolution_` to avoid taking the mutex on every lap
      // or dispatching under the mutex in the flush loop.
      webrtc::MutexLock lock(&pending_resolution_mutex_);
      if (pending_resolution_.has_value())
        pending_resolution = *pending_resolution_;
    }
    if (!pending_resolution.has_value() || !pending_resolution->empty()) {
      // Flush the buffered frames.
      for (const auto& frame : buffered_encoded_frames_) {
        RecordableEncodedFrame::EncodedResolution resolution{
            frame->EncodedImage()._encodedWidth,
            frame->EncodedImage()._encodedHeight};
        if (IsKeyFrameAndUnspecifiedResolution(*frame)) {
          RTC_DCHECK(!pending_resolution->empty());
          resolution = *pending_resolution;
        }
        encoded_frame_buffer_function_(
            WebRtcRecordableEncodedFrame(*frame, resolution));
      }
      buffered_encoded_frames_.clear();
    }
  }
  return decode_result;
}

void VideoReceiveStream2::HandleKeyFrameGeneration(
    bool received_frame_is_keyframe,
    Timestamp now,
    bool always_request_key_frame,
    bool keyframe_request_is_due) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  bool request_key_frame = always_request_key_frame;

  // Repeat sending keyframe requests if we've requested a keyframe.
  if (keyframe_generation_requested_) {
    if (received_frame_is_keyframe) {
      keyframe_generation_requested_ = false;
    } else if (keyframe_request_is_due) {
      if (!IsReceivingKeyFrame(now)) {
        request_key_frame = true;
      }
    } else {
      // It hasn't been long enough since the last keyframe request, do
      // nothing.
    }
  }

  if (request_key_frame) {
    // HandleKeyFrameGeneration is initiated from the decode thread -
    // RequestKeyFrame() triggers a call back to the decode thread.
    // Perhaps there's a way to avoid that.
    RequestKeyFrame(now);
  }
}

bool VideoReceiveStream2::IsReceivingKeyFrame(Timestamp now) const {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  absl::optional<int64_t> last_keyframe_packet_ms =
      rtp_video_stream_receiver_.LastReceivedKeyframePacketMs();

  // If we recently have been receiving packets belonging to a keyframe then
  // we assume a keyframe is currently being received.
  bool receiving_keyframe = last_keyframe_packet_ms &&
                            now - Timestamp::Millis(*last_keyframe_packet_ms) <
                                max_wait_for_keyframe_;
  return receiving_keyframe;
}

void VideoReceiveStream2::UpdatePlayoutDelays() const {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  const std::initializer_list<absl::optional<TimeDelta>> min_delays = {
      frame_minimum_playout_delay_, base_minimum_playout_delay_,
      syncable_minimum_playout_delay_};

  // Since nullopt < anything, this will return the largest of the minimum
  // delays, or nullopt if all are nullopt.
  absl::optional<TimeDelta> minimum_delay = std::max(min_delays);
  if (minimum_delay) {
    auto num_playout_delays_set =
        absl::c_count_if(min_delays, [](auto opt) { return opt.has_value(); });
    if (num_playout_delays_set > 1 &&
        timing_->min_playout_delay() != minimum_delay) {
      RTC_LOG(LS_WARNING)
          << "Multiple playout delays set. Actual delay value set to "
          << *minimum_delay << " frame min delay="
          << OptionalDelayToLogString(frame_minimum_playout_delay_)
          << " base min delay="
          << OptionalDelayToLogString(base_minimum_playout_delay_)
          << " sync min delay="
          << OptionalDelayToLogString(syncable_minimum_playout_delay_);
    }
    timing_->set_min_playout_delay(*minimum_delay);
    if (frame_minimum_playout_delay_ == TimeDelta::Zero() &&
        frame_maximum_playout_delay_ > TimeDelta::Zero()) {
      // TODO(kron): Estimate frame rate from video stream.
      constexpr Frequency kFrameRate = Frequency::Hertz(60);
      // Convert playout delay in ms to number of frames.
      int max_composition_delay_in_frames =
          std::lrint(*frame_maximum_playout_delay_ * kFrameRate);
      // Subtract frames in buffer.
      max_composition_delay_in_frames =
          std::max(max_composition_delay_in_frames - buffer_->Size(), 0);
      timing_->SetMaxCompositionDelayInFrames(max_composition_delay_in_frames);
    }
  }

  if (frame_maximum_playout_delay_) {
    timing_->set_max_playout_delay(*frame_maximum_playout_delay_);
  }
}

std::vector<webrtc::RtpSource> VideoReceiveStream2::GetSources() const {
  return source_tracker_.GetSources();
}

VideoReceiveStream2::RecordingState
VideoReceiveStream2::SetAndGetRecordingState(RecordingState state,
                                             bool generate_key_frame) {
  RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
  rtc::Event event;
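  // The callback swap below happens on the decode queue; `event` makes this
  // call block until the swap has completed so the returned state is
  // consistent.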

  // Save old state, set the new state.
  RecordingState old_state;

  absl::optional<Timestamp> last_keyframe_request;
  {
    // TODO(bugs.webrtc.org/11993): Post this to the network thread.
    RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
    last_keyframe_request = last_keyframe_request_;
    last_keyframe_request_ =
        generate_key_frame
            ? clock_->CurrentTime()
            : Timestamp::Millis(state.last_keyframe_request_ms.value_or(0));
  }

  decode_queue_.PostTask(
      [this, &event, &old_state, callback = std::move(state.callback),
       last_keyframe_request = std::move(last_keyframe_request)] {
        RTC_DCHECK_RUN_ON(&decode_queue_);
        old_state.callback = std::move(encoded_frame_buffer_function_);
        encoded_frame_buffer_function_ = std::move(callback);

        old_state.last_keyframe_request_ms =
            last_keyframe_request.value_or(Timestamp::Zero()).ms();

        event.Set();
      });

  if (generate_key_frame) {
    rtp_video_stream_receiver_.RequestKeyFrame();
    {
      // TODO(bugs.webrtc.org/11993): Post this to the network thread.
      RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
      keyframe_generation_requested_ = true;
    }
  }

  event.Wait(rtc::Event::kForever);
  return old_state;
}

void VideoReceiveStream2::GenerateKeyFrame() {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  RequestKeyFrame(clock_->CurrentTime());
  keyframe_generation_requested_ = true;
}

}  // namespace internal
}  // namespace webrtc