/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "call/rtp_video_sender.h"

#include <algorithm>
#include <memory>
#include <string>
#include <utility>

#include "absl/algorithm/container.h"
#include "absl/strings/match.h"
#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "api/video_codecs/video_codec.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/utility/maybe_worker_thread.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/trace_event.h"

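// Rough overview of how the pieces in this file fit together (see the class
// declaration in call/rtp_video_sender.h for the authoritative interface):
//  * RtpVideoSender owns one RtpStreamSender per simulcast stream; each
//    RtpStreamSender bundles a ModuleRtpRtcpImpl2, an RTPSenderVideo and an
//    optional VideoFecGenerator (FlexFEC or ULPFEC), built by
//    CreateRtpStreamSenders() below.
//  * Encoded frames arrive through OnEncodedImage() and are routed to the
//    per-stream RTPSenderVideo.
//  * Rate updates arrive through OnBitrateUpdated(), which splits the target
//    into media, packetization overhead and FEC protection.
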
namespace webrtc {

namespace webrtc_internal_rtp_video_sender {

RtpStreamSender::RtpStreamSender(
    std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp,
    std::unique_ptr<RTPSenderVideo> sender_video,
    std::unique_ptr<VideoFecGenerator> fec_generator)
    : rtp_rtcp(std::move(rtp_rtcp)),
      sender_video(std::move(sender_video)),
      fec_generator(std::move(fec_generator)) {}

RtpStreamSender::~RtpStreamSender() = default;

}  // namespace webrtc_internal_rtp_video_sender

namespace {
static const int kMinSendSidePacketHistorySize = 600;
// We don't do MTU discovery, so assume that we have the standard ethernet MTU.
static const size_t kPathMTU = 1500;
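// kPathMTU caps the RTP packet size together with the transport overhead, see
// OnTransportOverheadChanged() below. As an illustration (the 48-byte figure
// is an assumed example, not a value taken from this file): with a reported
// transport overhead of 48 bytes per packet, RTP packets are limited to
// min(rtp_config_.max_packet_size, 1500 - 48), i.e. at most 1452 bytes.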

using webrtc_internal_rtp_video_sender::RtpStreamSender;

bool PayloadTypeSupportsSkippingFecPackets(absl::string_view payload_name,
                                           const FieldTrialsView& trials) {
  const VideoCodecType codecType =
      PayloadStringToCodecType(std::string(payload_name));
  if (codecType == kVideoCodecVP8 || codecType == kVideoCodecVP9) {
    return true;
  }
  if (codecType == kVideoCodecGeneric &&
      absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"), "Enabled")) {
    return true;
  }
  return false;
}

bool ShouldDisableRedAndUlpfec(bool flexfec_enabled,
                               const RtpConfig& rtp_config,
                               const FieldTrialsView& trials) {
  // Consistency of NACK and RED+ULPFEC parameters is checked in this function.
  const bool nack_enabled = rtp_config.nack.rtp_history_ms > 0;

  // Shorthands.
  auto IsRedEnabled = [&]() { return rtp_config.ulpfec.red_payload_type >= 0; };
  auto IsUlpfecEnabled = [&]() {
    return rtp_config.ulpfec.ulpfec_payload_type >= 0;
  };

  bool should_disable_red_and_ulpfec = false;

  if (absl::StartsWith(trials.Lookup("WebRTC-DisableUlpFecExperiment"),
                       "Enabled")) {
    RTC_LOG(LS_INFO) << "Experiment to disable sending ULPFEC is enabled.";
    should_disable_red_and_ulpfec = true;
  }

  // If enabled, FlexFEC takes priority over RED+ULPFEC.
  if (flexfec_enabled) {
    if (IsUlpfecEnabled()) {
      RTC_LOG(LS_INFO)
          << "Both FlexFEC and ULPFEC are configured. Disabling ULPFEC.";
    }
    should_disable_red_and_ulpfec = true;
  }

  // Payload types without picture ID cannot determine that a stream is
  // complete without retransmitting FEC, so using ULPFEC + NACK for H.264
  // (for instance) is a waste of bandwidth since FEC packets still have to be
  // transmitted. Note that this is not the case with FlexFEC.
  if (nack_enabled && IsUlpfecEnabled() &&
      !PayloadTypeSupportsSkippingFecPackets(rtp_config.payload_name, trials)) {
    RTC_LOG(LS_WARNING)
        << "Transmitting payload type without picture ID using "
           "NACK+ULPFEC is a waste of bandwidth since ULPFEC packets "
           "also have to be retransmitted. Disabling ULPFEC.";
    should_disable_red_and_ulpfec = true;
  }

  // Verify payload types.
  if (IsUlpfecEnabled() ^ IsRedEnabled()) {
    RTC_LOG(LS_WARNING)
        << "Only RED or only ULPFEC enabled, but not both. Disabling both.";
    should_disable_red_and_ulpfec = true;
  }

  return should_disable_red_and_ulpfec;
}

// TODO(brandtr): Update this function when we support multistream protection.
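// Selection order in MaybeCreateFecGenerator() below: a configured FlexFEC
// payload type wins over RED+ULPFEC, and RED+ULPFEC is used only when both of
// its payload types are set and ShouldDisableRedAndUlpfec() does not veto it.
// Illustrative (hypothetical) configs, not taken from any real call site:
//   rtp.flexfec = {payload_type: 115, ssrc: 1234, protected_media_ssrcs: {11}}
//     -> FlexfecSender protecting media SSRC 11.
//   rtp.flexfec.payload_type = -1,
//   rtp.ulpfec = {red_payload_type: 118, ulpfec_payload_type: 119}
//     -> UlpfecGenerator.
//   Neither configured -> nullptr (no FEC).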
std::unique_ptr<VideoFecGenerator> MaybeCreateFecGenerator(
    Clock* clock,
    const RtpConfig& rtp,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    int simulcast_index,
    const FieldTrialsView& trials) {
  // If FlexFEC is configured, it takes priority.
  if (rtp.flexfec.payload_type >= 0) {
    RTC_DCHECK_GE(rtp.flexfec.payload_type, 0);
    RTC_DCHECK_LE(rtp.flexfec.payload_type, 127);
    if (rtp.flexfec.ssrc == 0) {
      RTC_LOG(LS_WARNING) << "FlexFEC is enabled, but no FlexFEC SSRC given. "
                             "Therefore disabling FlexFEC.";
      return nullptr;
    }
    if (rtp.flexfec.protected_media_ssrcs.empty()) {
      RTC_LOG(LS_WARNING)
          << "FlexFEC is enabled, but no protected media SSRC given. "
             "Therefore disabling FlexFEC.";
      return nullptr;
    }

    if (rtp.flexfec.protected_media_ssrcs.size() > 1) {
      RTC_LOG(LS_WARNING)
          << "The supplied FlexfecConfig contained multiple protected "
             "media streams, but our implementation currently only "
             "supports protecting a single media stream. "
             "To avoid confusion, disabling FlexFEC completely.";
      return nullptr;
    }

    if (absl::c_find(rtp.flexfec.protected_media_ssrcs,
                     rtp.ssrcs[simulcast_index]) ==
        rtp.flexfec.protected_media_ssrcs.end()) {
      // Media SSRC not among the FlexFEC-protected SSRCs.
      return nullptr;
    }

    const RtpState* rtp_state = nullptr;
    auto it = suspended_ssrcs.find(rtp.flexfec.ssrc);
    if (it != suspended_ssrcs.end()) {
      rtp_state = &it->second;
    }

    RTC_DCHECK_EQ(1U, rtp.flexfec.protected_media_ssrcs.size());
    return std::make_unique<FlexfecSender>(
        rtp.flexfec.payload_type, rtp.flexfec.ssrc,
        rtp.flexfec.protected_media_ssrcs[0], rtp.mid, rtp.extensions,
        RTPSender::FecExtensionSizes(), rtp_state, clock);
  } else if (rtp.ulpfec.red_payload_type >= 0 &&
             rtp.ulpfec.ulpfec_payload_type >= 0 &&
             !ShouldDisableRedAndUlpfec(/*flexfec_enabled=*/false, rtp,
                                        trials)) {
    // FlexFEC is not configured, but ULPFEC is, and it is not disabled.
    return std::make_unique<UlpfecGenerator>(
        rtp.ulpfec.red_payload_type, rtp.ulpfec.ulpfec_payload_type, clock);
  }

  // No FEC scheme is configured.
  return nullptr;
}

std::vector<RtpStreamSender> CreateRtpStreamSenders(
    Clock* clock,
    const RtpConfig& rtp_config,
    const RtpSenderObservers& observers,
    int rtcp_report_interval_ms,
    Transport* send_transport,
    RtcpBandwidthObserver* bandwidth_callback,
    RtpTransportControllerSendInterface* transport,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    RtcEventLog* event_log,
    RateLimiter* retransmission_rate_limiter,
    FrameEncryptorInterface* frame_encryptor,
    const CryptoOptions& crypto_options,
    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
    const FieldTrialsView& trials,
    TaskQueueFactory* task_queue_factory) {
  RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0);
  RTC_DCHECK(task_queue_factory);

  RtpRtcpInterface::Configuration configuration;
  configuration.clock = clock;
  configuration.audio = false;
  configuration.receiver_only = false;
  configuration.outgoing_transport = send_transport;
  configuration.intra_frame_callback = observers.intra_frame_callback;
  configuration.rtcp_loss_notification_observer =
      observers.rtcp_loss_notification_observer;
  configuration.bandwidth_callback = bandwidth_callback;
  configuration.network_state_estimate_observer =
      transport->network_state_estimate_observer();
  configuration.transport_feedback_callback =
      transport->transport_feedback_observer();
  configuration.rtt_stats = observers.rtcp_rtt_stats;
  configuration.rtcp_packet_type_counter_observer =
      observers.rtcp_type_observer;
  configuration.report_block_data_observer =
      observers.report_block_data_observer;
  configuration.paced_sender = transport->packet_sender();
  configuration.send_bitrate_observer = observers.bitrate_observer;
  configuration.send_side_delay_observer = observers.send_delay_observer;
  configuration.send_packet_observer = observers.send_packet_observer;
  configuration.event_log = event_log;
  configuration.retransmission_rate_limiter = retransmission_rate_limiter;
  configuration.rtp_stats_callback = observers.rtp_stats;
  configuration.frame_encryptor = frame_encryptor;
  configuration.require_frame_encryption =
      crypto_options.sframe.require_frame_encryption;
  configuration.extmap_allow_mixed = rtp_config.extmap_allow_mixed;
  configuration.rtcp_report_interval_ms = rtcp_report_interval_ms;
  configuration.field_trials = &trials;

  std::vector<RtpStreamSender> rtp_streams;

  RTC_DCHECK(rtp_config.rtx.ssrcs.empty() ||
             rtp_config.rtx.ssrcs.size() == rtp_config.ssrcs.size());

  // Some streams could have been disabled, but the rids are still there.
  // This will occur when simulcast has been disabled for a codec (e.g. VP9).
  RTC_DCHECK(rtp_config.rids.empty() ||
             rtp_config.rids.size() >= rtp_config.ssrcs.size());

  for (size_t i = 0; i < rtp_config.ssrcs.size(); ++i) {
    RTPSenderVideo::Config video_config;
    configuration.local_media_ssrc = rtp_config.ssrcs[i];

    std::unique_ptr<VideoFecGenerator> fec_generator =
        MaybeCreateFecGenerator(clock, rtp_config, suspended_ssrcs, i, trials);
    configuration.fec_generator = fec_generator.get();

    configuration.rtx_send_ssrc =
        rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]);
    RTC_DCHECK_EQ(configuration.rtx_send_ssrc.has_value(),
                  !rtp_config.rtx.ssrcs.empty());

    configuration.rid = (i < rtp_config.rids.size()) ? rtp_config.rids[i] : "";

    configuration.need_rtp_packet_infos = rtp_config.lntf.enabled;

    std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp(
        ModuleRtpRtcpImpl2::Create(configuration));
    rtp_rtcp->SetSendingStatus(false);
    rtp_rtcp->SetSendingMediaStatus(false);
    rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
    // Set NACK.
    rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize);

    video_config.clock = configuration.clock;
    video_config.rtp_sender = rtp_rtcp->RtpSender();
    video_config.frame_encryptor = frame_encryptor;
    video_config.require_frame_encryption =
        crypto_options.sframe.require_frame_encryption;
    video_config.enable_retransmit_all_layers = false;
    video_config.field_trials = &trials;

    const bool using_flexfec =
        fec_generator &&
        fec_generator->GetFecType() == VideoFecGenerator::FecType::kFlexFec;
    const bool should_disable_red_and_ulpfec =
        ShouldDisableRedAndUlpfec(using_flexfec, rtp_config, trials);
    if (!should_disable_red_and_ulpfec &&
        rtp_config.ulpfec.red_payload_type != -1) {
      video_config.red_payload_type = rtp_config.ulpfec.red_payload_type;
    }
    if (fec_generator) {
      video_config.fec_type = fec_generator->GetFecType();
      video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead();
    }
    video_config.frame_transformer = frame_transformer;
    video_config.task_queue_factory = task_queue_factory;
    auto sender_video = std::make_unique<RTPSenderVideo>(video_config);
    rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video),
                             std::move(fec_generator));
  }
  return rtp_streams;
}

absl::optional<VideoCodecType> GetVideoCodecType(const RtpConfig& config) {
  if (config.raw_payload) {
    return absl::nullopt;
  }
  return PayloadStringToCodecType(config.payload_name);
}
bool TransportSeqNumExtensionConfigured(const RtpConfig& config) {
  return absl::c_any_of(config.extensions, [](const RtpExtension& ext) {
    return ext.uri == RtpExtension::kTransportSequenceNumberUri;
  });
}

// Returns true when some coded video sequence can be decoded starting with
// this frame without requiring any previous frames.
// e.g. it is the same as a key frame when spatial scalability is not used.
// When spatial scalability is used, then it is true for layer frames of
// a key frame without inter-layer dependencies.
bool IsFirstFrameOfACodedVideoSequence(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  if (encoded_image._frameType != VideoFrameType::kVideoFrameKey) {
    return false;
  }

  if (codec_specific_info != nullptr) {
    if (codec_specific_info->generic_frame_info.has_value()) {
      // This function is used before
      // `codec_specific_info->generic_frame_info->frame_diffs` are calculated,
      // so we need to use a more complicated way to check for presence of the
      // dependencies.
      return absl::c_none_of(
          codec_specific_info->generic_frame_info->encoder_buffers,
          [](const CodecBufferUsage& buffer) { return buffer.referenced; });
    }

    if (codec_specific_info->codecType == VideoCodecType::kVideoCodecVP8 ||
        codec_specific_info->codecType == VideoCodecType::kVideoCodecH264 ||
        codec_specific_info->codecType == VideoCodecType::kVideoCodecGeneric) {
      // These codecs do not support intra-picture dependencies, so a frame
      // marked as a key frame should be a key frame.
      return true;
    }
  }

  // Without dependencies described in the generic format, make an educated
  // guess. It might be wrong for VP9 with spatial layer 0 skipped or a higher
  // spatial layer not depending on spatial layer 0. This corner case is
  // unimportant for current usage of this helper function.

  // Use <= to accept both 0 (i.e. the first) and nullopt (i.e. the only).
  return encoded_image.SpatialIndex() <= 0;
}

}  // namespace


RtpVideoSender::RtpVideoSender(
    Clock* clock,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    const std::map<uint32_t, RtpPayloadState>& states,
    const RtpConfig& rtp_config,
    int rtcp_report_interval_ms,
    Transport* send_transport,
    const RtpSenderObservers& observers,
    RtpTransportControllerSendInterface* transport,
    RtcEventLog* event_log,
    RateLimiter* retransmission_limiter,
    std::unique_ptr<FecController> fec_controller,
    FrameEncryptorInterface* frame_encryptor,
    const CryptoOptions& crypto_options,
    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
    const FieldTrialsView& field_trials,
    TaskQueueFactory* task_queue_factory)
    : field_trials_(field_trials),
      use_frame_rate_for_overhead_(absl::StartsWith(
          field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"),
          "Enabled")),
      has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)),
      active_(false),
      fec_controller_(std::move(fec_controller)),
      fec_allowed_(true),
      rtp_streams_(CreateRtpStreamSenders(clock,
                                          rtp_config,
                                          observers,
                                          rtcp_report_interval_ms,
                                          send_transport,
                                          transport->GetBandwidthObserver(),
                                          transport,
                                          suspended_ssrcs,
                                          event_log,
                                          retransmission_limiter,
                                          frame_encryptor,
                                          crypto_options,
                                          std::move(frame_transformer),
                                          field_trials_,
                                          task_queue_factory)),
      rtp_config_(rtp_config),
      codec_type_(GetVideoCodecType(rtp_config)),
      transport_(transport),
      transport_overhead_bytes_per_packet_(0),
      encoder_target_rate_bps_(0),
      frame_counts_(rtp_config.ssrcs.size()),
      frame_count_observer_(observers.frame_count_observer) {
  transport_checker_.Detach();
  RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size());
  if (has_packet_feedback_)
    transport_->IncludeOverheadInPacedSender();
  // SSRCs are assumed to be sorted in the same order as `rtp_modules`.
  for (uint32_t ssrc : rtp_config_.ssrcs) {
    // Restore state if it previously existed.
    const RtpPayloadState* state = nullptr;
    auto it = states.find(ssrc);
    if (it != states.end()) {
      state = &it->second;
      shared_frame_id_ = std::max(shared_frame_id_, state->shared_frame_id);
    }
    params_.push_back(RtpPayloadParams(ssrc, state, field_trials_));
  }

  // RTP/RTCP initialization.

  for (size_t i = 0; i < rtp_config_.extensions.size(); ++i) {
    const std::string& extension = rtp_config_.extensions[i].uri;
    int id = rtp_config_.extensions[i].id;
    RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension));
    for (const RtpStreamSender& stream : rtp_streams_) {
      stream.rtp_rtcp->RegisterRtpHeaderExtension(extension, id);
    }
  }

  ConfigureSsrcs(suspended_ssrcs);

  if (!rtp_config_.mid.empty()) {
    for (const RtpStreamSender& stream : rtp_streams_) {
      stream.rtp_rtcp->SetMid(rtp_config_.mid);
    }
  }

  bool fec_enabled = false;
  for (const RtpStreamSender& stream : rtp_streams_) {
    // Simulcast has one module for each layer. Set the CNAME on all modules.
    stream.rtp_rtcp->SetCNAME(rtp_config_.c_name.c_str());
    stream.rtp_rtcp->SetMaxRtpPacketSize(rtp_config_.max_packet_size);
    stream.rtp_rtcp->RegisterSendPayloadFrequency(rtp_config_.payload_type,
                                                  kVideoPayloadTypeFrequency);
    if (stream.fec_generator != nullptr) {
      fec_enabled = true;
    }
  }
  // Currently, both ULPFEC and FlexFEC use the same FEC rate calculation
  // logic, so enable that logic if either of those FEC schemes is enabled.
  fec_controller_->SetProtectionMethod(fec_enabled, NackEnabled());

  fec_controller_->SetProtectionCallback(this);

  // Construction happens on the worker thread (see Call::CreateVideoSendStream)
  // but subsequent calls to the RTP state will happen on one of two threads:
  // * The pacer thread for actually sending packets.
  // * The transport thread when tearing down and querying GetRtpState().
  // Detach thread checkers.
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->OnPacketSendingThreadSwitched();
  }
}

RtpVideoSender::~RtpVideoSender() {
  // TODO(bugs.webrtc.org/13517): Remove once RtpVideoSender gets deleted on the
  // transport task queue.
  transport_checker_.Detach();

  SetActiveModulesLocked(
      std::vector<bool>(rtp_streams_.size(), /*active=*/false));

  RTC_DCHECK(!registered_for_feedback_);
}

void RtpVideoSender::Stop() {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  if (!active_)
    return;

  const std::vector<bool> active_modules(rtp_streams_.size(), false);
  SetActiveModulesLocked(active_modules);
}

void RtpVideoSender::SetActiveModules(const std::vector<bool>& active_modules) {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  return SetActiveModulesLocked(active_modules);
}

void RtpVideoSender::SetActiveModulesLocked(
    const std::vector<bool>& active_modules) {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size());
  active_ = false;
  for (size_t i = 0; i < active_modules.size(); ++i) {
    if (active_modules[i]) {
      active_ = true;
    }

    RtpRtcpInterface& rtp_module = *rtp_streams_[i].rtp_rtcp;
    const bool was_active = rtp_module.Sending();
    const bool should_be_active = active_modules[i];

    // Sends a kRtcpByeCode when going from true to false.
    rtp_module.SetSendingStatus(active_modules[i]);

    if (was_active && !should_be_active) {
      // Disabling media, remove from packet router map to reduce size and
      // prevent any stray packets in the pacer from asynchronously arriving
      // at a disabled module.
      transport_->packet_router()->RemoveSendRtpModule(&rtp_module);
    }

    // If set to false this module won't send media.
    rtp_module.SetSendingMediaStatus(active_modules[i]);

    if (!was_active && should_be_active) {
      // Turning on media, register with packet router.
      transport_->packet_router()->AddSendRtpModule(&rtp_module,
                                                    /*remb_candidate=*/true);
    }
  }
  if (!active_) {
    auto* feedback_provider = transport_->GetStreamFeedbackProvider();
    if (registered_for_feedback_) {
      feedback_provider->DeRegisterStreamFeedbackObserver(this);
      registered_for_feedback_ = false;
    }
  } else if (!registered_for_feedback_) {
    auto* feedback_provider = transport_->GetStreamFeedbackProvider();
    feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this);
    registered_for_feedback_ = true;
  }
}

bool RtpVideoSender::IsActive() {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  return IsActiveLocked();
}

bool RtpVideoSender::IsActiveLocked() {
  return active_ && !rtp_streams_.empty();
}

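// Per-frame send path. At a high level, OnEncodedImage() below:
//  1. maps the encoded image to a simulcast stream (the spatial index for
//     VP8/H.264/generic),
//  2. computes the on-wire RTP timestamp by adding the stream's start
//     timestamp offset,
//  3. on the first frame of a coded video sequence, installs the
//     FrameDependencyStructure used for the dependency descriptor extension,
//  4. hands the frame to RTPSenderVideo::SendEncodedImage() and updates the
//     key/delta frame counters.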
EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  fec_controller_->UpdateWithEncodedData(encoded_image.size(),
                                         encoded_image._frameType);
  MutexLock lock(&mutex_);
  RTC_DCHECK(!rtp_streams_.empty());
  if (!active_)
    return Result(Result::ERROR_SEND_FAILED);

  shared_frame_id_++;
  size_t stream_index = 0;
  if (codec_specific_info &&
      (codec_specific_info->codecType == kVideoCodecVP8 ||
       codec_specific_info->codecType == kVideoCodecH264 ||
       codec_specific_info->codecType == kVideoCodecGeneric)) {
    // Map spatial index to simulcast.
    stream_index = encoded_image.SpatialIndex().value_or(0);
  }
  RTC_DCHECK_LT(stream_index, rtp_streams_.size());

  uint32_t rtp_timestamp =
      encoded_image.Timestamp() +
      rtp_streams_[stream_index].rtp_rtcp->StartTimestamp();

  // RTCPSender has its own copy of the timestamp offset, added in
  // RTCPSender::BuildSR, hence we must not add in the offset for this call.
  // TODO(nisse): Delete RTCPSender::timestamp_offset_, and see if we can
  // confine knowledge of the offset to a single place.
  if (!rtp_streams_[stream_index].rtp_rtcp->OnSendingRtpFrame(
          encoded_image.Timestamp(), encoded_image.capture_time_ms_,
          rtp_config_.payload_type,
          encoded_image._frameType == VideoFrameType::kVideoFrameKey)) {
    // The payload router could be active but this module isn't sending.
    return Result(Result::ERROR_SEND_FAILED);
  }

  absl::optional<int64_t> expected_retransmission_time_ms;
  if (encoded_image.RetransmissionAllowed()) {
    expected_retransmission_time_ms =
        rtp_streams_[stream_index].rtp_rtcp->ExpectedRetransmissionTimeMs();
  }

  if (IsFirstFrameOfACodedVideoSequence(encoded_image, codec_specific_info)) {
    // In order to use the dependency descriptor RTP header extension:
    // - Pass along any `FrameDependencyStructure` templates produced by the
    //   encoder adapter.
    // - If none were produced, the `RtpPayloadParams::*ToGeneric` for the
    //   particular codec have simulated a dependency structure, so provide a
    //   minimal set of templates.
    // - Otherwise, don't pass along any templates at all, which will disable
    //   the generation of a dependency descriptor.
    RTPSenderVideo& sender_video = *rtp_streams_[stream_index].sender_video;
    if (codec_specific_info && codec_specific_info->template_structure) {
      sender_video.SetVideoStructure(&*codec_specific_info->template_structure);
    } else if (absl::optional<FrameDependencyStructure> structure =
                   params_[stream_index].GenericStructure(
                       codec_specific_info)) {
      sender_video.SetVideoStructure(&*structure);
    } else {
      sender_video.SetVideoStructure(nullptr);
    }
  }

  bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage(
      rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image,
      params_[stream_index].GetRtpVideoHeader(
          encoded_image, codec_specific_info, shared_frame_id_),
      expected_retransmission_time_ms);
  if (frame_count_observer_) {
    FrameCounts& counts = frame_counts_[stream_index];
    if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) {
      ++counts.key_frames;
    } else if (encoded_image._frameType == VideoFrameType::kVideoFrameDelta) {
      ++counts.delta_frames;
    } else {
      RTC_DCHECK(encoded_image._frameType == VideoFrameType::kEmptyFrame);
    }
    frame_count_observer_->FrameCountUpdated(counts,
                                             rtp_config_.ssrcs[stream_index]);
  }
  if (!send_result)
    return Result(Result::ERROR_SEND_FAILED);

  return Result(Result::OK, rtp_timestamp);
}

void RtpVideoSender::OnBitrateAllocationUpdated(
    const VideoBitrateAllocation& bitrate) {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  if (IsActiveLocked()) {
    if (rtp_streams_.size() == 1) {
      // If spatial scalability is enabled, it is covered by a single stream.
      rtp_streams_[0].rtp_rtcp->SetVideoBitrateAllocation(bitrate);
    } else {
      std::vector<absl::optional<VideoBitrateAllocation>> layer_bitrates =
          bitrate.GetSimulcastAllocations();
      // Simulcast is in use, split the VideoBitrateAllocation into one struct
      // per rtp stream, moving over the temporal layer allocation.
      for (size_t i = 0; i < rtp_streams_.size(); ++i) {
        // The next spatial layer could be used if the current one is
        // inactive.
        if (layer_bitrates[i]) {
          rtp_streams_[i].rtp_rtcp->SetVideoBitrateAllocation(
              *layer_bitrates[i]);
        } else {
          // Signal a 0 bitrate on a simulcast stream.
          rtp_streams_[i].rtp_rtcp->SetVideoBitrateAllocation(
              VideoBitrateAllocation());
        }
      }
    }
  }
}
void RtpVideoSender::OnVideoLayersAllocationUpdated(
    const VideoLayersAllocation& allocation) {
  MutexLock lock(&mutex_);
  if (IsActiveLocked()) {
    for (size_t i = 0; i < rtp_streams_.size(); ++i) {
      VideoLayersAllocation stream_allocation = allocation;
      stream_allocation.rtp_stream_index = i;
      rtp_streams_[i].sender_video->SetVideoLayersAllocation(
          std::move(stream_allocation));
      // Only send video frames on the rtp module if the encoder is configured
      // to send. This is to prevent stray frames from being sent after an
      // encoder has been reconfigured.
      rtp_streams_[i].rtp_rtcp->SetSendingMediaStatus(
          absl::c_any_of(allocation.active_spatial_layers,
                         [&i](const VideoLayersAllocation::SpatialLayer layer) {
                           return layer.rtp_stream_index == static_cast<int>(i);
                         }));
    }
  }
}

bool RtpVideoSender::NackEnabled() const {
  const bool nack_enabled = rtp_config_.nack.rtp_history_ms > 0;
  return nack_enabled;
}

uint32_t RtpVideoSender::GetPacketizationOverheadRate() const {
  uint32_t packetization_overhead_bps = 0;
  for (size_t i = 0; i < rtp_streams_.size(); ++i) {
    if (rtp_streams_[i].rtp_rtcp->SendingMedia()) {
      packetization_overhead_bps +=
          rtp_streams_[i].sender_video->PacketizationOverheadBps();
    }
  }
  return packetization_overhead_bps;
}

void RtpVideoSender::DeliverRtcp(const uint8_t* packet, size_t length) {
  // Runs on a network thread.
  for (const RtpStreamSender& stream : rtp_streams_)
    stream.rtp_rtcp->IncomingRtcpPacket(packet, length);
}

void RtpVideoSender::ConfigureSsrcs(
    const std::map<uint32_t, RtpState>& suspended_ssrcs) {
  // Configure regular SSRCs.
  RTC_CHECK(ssrc_to_rtp_module_.empty());
  for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.ssrcs[i];
    RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();

    // Restore RTP state if one previously existed.
    auto it = suspended_ssrcs.find(ssrc);
    if (it != suspended_ssrcs.end())
      rtp_rtcp->SetRtpState(it->second);

    ssrc_to_rtp_module_[ssrc] = rtp_rtcp;
  }

  // Set up RTX if available.
  if (rtp_config_.rtx.ssrcs.empty())
    return;

  RTC_DCHECK_EQ(rtp_config_.rtx.ssrcs.size(), rtp_config_.ssrcs.size());
  for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.rtx.ssrcs[i];
    RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
    auto it = suspended_ssrcs.find(ssrc);
    if (it != suspended_ssrcs.end())
      rtp_rtcp->SetRtxState(it->second);
  }

  // Configure RTX payload types.
  RTC_DCHECK_GE(rtp_config_.rtx.payload_type, 0);
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetRtxSendPayloadType(rtp_config_.rtx.payload_type,
                                           rtp_config_.payload_type);
    stream.rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted |
                                      kRtxRedundantPayloads);
  }
  if (rtp_config_.ulpfec.red_payload_type != -1 &&
      rtp_config_.ulpfec.red_rtx_payload_type != -1) {
    for (const RtpStreamSender& stream : rtp_streams_) {
      stream.rtp_rtcp->SetRtxSendPayloadType(
          rtp_config_.ulpfec.red_rtx_payload_type,
          rtp_config_.ulpfec.red_payload_type);
    }
  }
}

void RtpVideoSender::OnNetworkAvailability(bool network_available) {
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetRTCPStatus(network_available ? rtp_config_.rtcp_mode
                                                     : RtcpMode::kOff);
  }
}

std::map<uint32_t, RtpState> RtpVideoSender::GetRtpStates() const {
  std::map<uint32_t, RtpState> rtp_states;

  for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.ssrcs[i];
    RTC_DCHECK_EQ(ssrc, rtp_streams_[i].rtp_rtcp->SSRC());
    rtp_states[ssrc] = rtp_streams_[i].rtp_rtcp->GetRtpState();

    // Only happens during shutdown, when RTP module is already inactive,
    // so OK to call fec generator here.
    if (rtp_streams_[i].fec_generator) {
      absl::optional<RtpState> fec_state =
          rtp_streams_[i].fec_generator->GetRtpState();
      if (fec_state) {
        uint32_t ssrc = rtp_config_.flexfec.ssrc;
        rtp_states[ssrc] = *fec_state;
      }
    }
  }

  for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.rtx.ssrcs[i];
    rtp_states[ssrc] = rtp_streams_[i].rtp_rtcp->GetRtxState();
  }

  return rtp_states;
}

std::map<uint32_t, RtpPayloadState> RtpVideoSender::GetRtpPayloadStates()
    const {
  MutexLock lock(&mutex_);
  std::map<uint32_t, RtpPayloadState> payload_states;
  for (const auto& param : params_) {
    payload_states[param.ssrc()] = param.state();
    payload_states[param.ssrc()].shared_frame_id = shared_frame_id_;
  }
  return payload_states;
}

void RtpVideoSender::OnTransportOverheadChanged(
    size_t transport_overhead_bytes_per_packet) {
  MutexLock lock(&mutex_);
  transport_overhead_bytes_per_packet_ = transport_overhead_bytes_per_packet;

  size_t max_rtp_packet_size =
      std::min(rtp_config_.max_packet_size,
               kPathMTU - transport_overhead_bytes_per_packet_);
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetMaxRtpPacketSize(max_rtp_packet_size);
  }
}

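// OnBitrateUpdated() below turns the congestion controller's target bitrate
// into an encoder target by peeling off, in order: transport/RTP overhead
// (only when send-side packet feedback is in use), FEC protection via
// fec_controller_->UpdateFecRates(), and packetization overhead (capped at
// half of the remaining rate). A purely illustrative example with assumed
// numbers, not taken from any real configuration: with a 1000 kbps target,
// ~50 kbps of computed overhead, ~100 kbps of FEC and ~20 kbps of
// packetization overhead, the encoder would be given roughly 830 kbps, with
// the remainder split between overhead and `protection_bitrate_bps_`.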
void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update,
                                      int framerate) {
  // Subtract overhead from bitrate.
  MutexLock lock(&mutex_);
  size_t num_active_streams = 0;
  size_t overhead_bytes_per_packet = 0;
  for (const auto& stream : rtp_streams_) {
    if (stream.rtp_rtcp->SendingMedia()) {
      overhead_bytes_per_packet += stream.rtp_rtcp->ExpectedPerPacketOverhead();
      ++num_active_streams;
    }
  }
  if (num_active_streams > 1) {
    overhead_bytes_per_packet /= num_active_streams;
  }

  DataSize packet_overhead = DataSize::Bytes(
      overhead_bytes_per_packet + transport_overhead_bytes_per_packet_);
  DataSize max_total_packet_size = DataSize::Bytes(
      rtp_config_.max_packet_size + transport_overhead_bytes_per_packet_);
  uint32_t payload_bitrate_bps = update.target_bitrate.bps();
  if (has_packet_feedback_) {
    DataRate overhead_rate =
        CalculateOverheadRate(update.target_bitrate, max_total_packet_size,
                              packet_overhead, Frequency::Hertz(framerate));
    // TODO(srte): We probably should not accept 0 payload bitrate here.
    payload_bitrate_bps = rtc::saturated_cast<uint32_t>(payload_bitrate_bps -
                                                        overhead_rate.bps());
  }

  // Get the encoder target rate. It is the estimated network rate -
  // protection overhead.
  // TODO(srte): We should multiply with 255 here.
  encoder_target_rate_bps_ = fec_controller_->UpdateFecRates(
      payload_bitrate_bps, framerate,
      rtc::saturated_cast<uint8_t>(update.packet_loss_ratio * 256),
      loss_mask_vector_, update.round_trip_time.ms());
  if (!fec_allowed_) {
    encoder_target_rate_bps_ = payload_bitrate_bps;
    // fec_controller_->UpdateFecRates() was still called so as to allow
    // `fec_controller_` to update whatever internal state it might have,
    // since `fec_allowed_` may be toggled back on at any moment.
  }

  // Subtract packetization overhead from the encoder target. If the target
  // rate is really low, cap the overhead at 50%. This also avoids the case
  // where `encoder_target_rate_bps_` is 0 due to an encoder pause event while
  // the packetization rate is positive since packets are still flowing.
  uint32_t packetization_rate_bps =
      std::min(GetPacketizationOverheadRate(), encoder_target_rate_bps_ / 2);
  encoder_target_rate_bps_ -= packetization_rate_bps;

  loss_mask_vector_.clear();

  uint32_t encoder_overhead_rate_bps = 0;
  if (has_packet_feedback_) {
    // TODO(srte): The packet size should probably be the same as in the
    // CalculateOverheadRate call above (just max_total_packet_size); it
    // doesn't make sense to use different packet rates for different overhead
    // calculations.
    DataRate encoder_overhead_rate = CalculateOverheadRate(
        DataRate::BitsPerSec(encoder_target_rate_bps_),
        max_total_packet_size - DataSize::Bytes(overhead_bytes_per_packet),
        packet_overhead, Frequency::Hertz(framerate));
    encoder_overhead_rate_bps = std::min(
        encoder_overhead_rate.bps<uint32_t>(),
        update.target_bitrate.bps<uint32_t>() - encoder_target_rate_bps_);
  }
  const uint32_t media_rate = encoder_target_rate_bps_ +
                              encoder_overhead_rate_bps +
                              packetization_rate_bps;
  RTC_DCHECK_GE(update.target_bitrate, DataRate::BitsPerSec(media_rate));
  // `protection_bitrate_bps_` includes overhead.
  protection_bitrate_bps_ = update.target_bitrate.bps() - media_rate;
}

uint32_t RtpVideoSender::GetPayloadBitrateBps() const {
  return encoder_target_rate_bps_;
}

uint32_t RtpVideoSender::GetProtectionBitrateBps() const {
  return protection_bitrate_bps_;
}

std::vector<RtpSequenceNumberMap::Info> RtpVideoSender::GetSentRtpPacketInfos(
    uint32_t ssrc,
    rtc::ArrayView<const uint16_t> sequence_numbers) const {
  for (const auto& rtp_stream : rtp_streams_) {
    if (ssrc == rtp_stream.rtp_rtcp->SSRC()) {
      return rtp_stream.rtp_rtcp->GetSentRtpPacketInfos(sequence_numbers);
    }
  }
  return std::vector<RtpSequenceNumberMap::Info>();
}

int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params,
                                      const FecProtectionParams* key_params,
                                      uint32_t* sent_video_rate_bps,
                                      uint32_t* sent_nack_rate_bps,
                                      uint32_t* sent_fec_rate_bps) {
  *sent_video_rate_bps = 0;
  *sent_nack_rate_bps = 0;
  *sent_fec_rate_bps = 0;
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetFecProtectionParams(*delta_params, *key_params);

    auto send_bitrate = stream.rtp_rtcp->GetSendRates();
    *sent_video_rate_bps += send_bitrate[RtpPacketMediaType::kVideo].bps();
    *sent_fec_rate_bps +=
        send_bitrate[RtpPacketMediaType::kForwardErrorCorrection].bps();
    *sent_nack_rate_bps +=
        send_bitrate[RtpPacketMediaType::kRetransmission].bps();
  }
  return 0;
}

void RtpVideoSender::SetFecAllowed(bool fec_allowed) {
  MutexLock lock(&mutex_);
  fec_allowed_ = fec_allowed;
}

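// Early loss handling in OnPacketFeedbackVector() below: a media packet is
// retransmitted proactively only if the feedback reports it as lost and it is
// not trailed by any received packet on the same SSRC (otherwise the
// receiver's own NACK/jitter-buffer logic can detect the gap). Hypothetical
// example feedback for one SSRC, newest last: seq 10 received, seq 11 lost,
// seq 12 lost -> 11 and 12 are resent here; had seq 13 also been reported
// received, nothing would be resent early.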
void RtpVideoSender::OnPacketFeedbackVector(
    std::vector<StreamPacketInfo> packet_feedback_vector) {
  if (fec_controller_->UseLossVectorMask()) {
    MutexLock lock(&mutex_);
    for (const StreamPacketInfo& packet : packet_feedback_vector) {
      loss_mask_vector_.push_back(!packet.received);
    }
  }

  // Map from SSRC to all acked packets for that RTP module.
  std::map<uint32_t, std::vector<uint16_t>> acked_packets_per_ssrc;
  for (const StreamPacketInfo& packet : packet_feedback_vector) {
    if (packet.received && packet.ssrc) {
      acked_packets_per_ssrc[*packet.ssrc].push_back(
          packet.rtp_sequence_number);
    }
  }

  // Map from SSRC to vector of RTP sequence numbers that are indicated as
  // lost by feedback, without being trailed by any received packets.
  std::map<uint32_t, std::vector<uint16_t>> early_loss_detected_per_ssrc;

  for (const StreamPacketInfo& packet : packet_feedback_vector) {
    // Only include new media packets, not retransmissions/padding/fec.
    if (!packet.received && packet.ssrc && !packet.is_retransmission) {
      // Last known lost packet, might not be detectable as lost by remote
      // jitter buffer.
      early_loss_detected_per_ssrc[*packet.ssrc].push_back(
          packet.rtp_sequence_number);
    } else {
      // Packet received, so any loss prior to this is already detectable.
      early_loss_detected_per_ssrc.erase(*packet.ssrc);
    }
  }

  for (const auto& kv : early_loss_detected_per_ssrc) {
    const uint32_t ssrc = kv.first;
    auto it = ssrc_to_rtp_module_.find(ssrc);
    RTC_CHECK(it != ssrc_to_rtp_module_.end());
    RTPSender* rtp_sender = it->second->RtpSender();
    for (uint16_t sequence_number : kv.second) {
      rtp_sender->ReSendPacket(sequence_number);
    }
  }

  for (const auto& kv : acked_packets_per_ssrc) {
    const uint32_t ssrc = kv.first;
    auto it = ssrc_to_rtp_module_.find(ssrc);
    if (it == ssrc_to_rtp_module_.end()) {
      // No media, likely FEC or padding. Ignore since there's no RTP history
      // to clean up anyway.
      continue;
    }
    rtc::ArrayView<const uint16_t> rtp_sequence_numbers(kv.second);
    it->second->OnPacketsAcknowledged(rtp_sequence_numbers);
  }
}

void RtpVideoSender::SetEncodingData(size_t width,
                                     size_t height,
                                     size_t num_temporal_layers) {
  fec_controller_->SetEncodingData(width, height, num_temporal_layers,
                                   rtp_config_.max_packet_size);
}

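// Worked example for CalculateOverheadRate() below, using assumed numbers
// (1.2 Mbps media rate, 1200-byte packets, 40 bytes of overhead per packet,
// 30 fps), not values taken from any real configuration:
//  * Default path: packet_rate = 1.2 Mbps / 1200 B = 125 packets/s, so the
//    overhead rate is 125 * 40 B = 40 kbps.
//  * With WebRTC-Video-UseFrameRateForOverhead: frame_size = 1.2 Mbps / 30 fps
//    = 5000 B, packets_per_frame = ceil(5000 / 1200) = 5, packet_rate =
//    5 * 30 = 150 packets/s, so the overhead rate is 150 * 40 B = 48 kbps.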
DataRate RtpVideoSender::CalculateOverheadRate(DataRate data_rate,
                                               DataSize packet_size,
                                               DataSize overhead_per_packet,
                                               Frequency framerate) const {
  Frequency packet_rate = data_rate / packet_size;
  if (use_frame_rate_for_overhead_) {
    framerate = std::max(framerate, Frequency::Hertz(1));
    DataSize frame_size = data_rate / framerate;
    int packets_per_frame = ceil(frame_size / packet_size);
    packet_rate = packets_per_frame * framerate;
  }
  return packet_rate.RoundUpTo(Frequency::Hertz(1)) * overhead_per_packet;
}

}  // namespace webrtc