1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 #include "webrtc/modules/video_coding/jitter_buffer.h"
11 
12 #include <assert.h>
13 
14 #include <algorithm>
15 #include <utility>
16 
17 #include "webrtc/base/checks.h"
18 #include "webrtc/base/logging.h"
19 #include "webrtc/base/trace_event.h"
20 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
21 #include "webrtc/modules/video_coding/include/video_coding.h"
22 #include "webrtc/modules/video_coding/frame_buffer.h"
23 #include "webrtc/modules/video_coding/inter_frame_delay.h"
24 #include "webrtc/modules/video_coding/internal_defines.h"
25 #include "webrtc/modules/video_coding/jitter_buffer_common.h"
26 #include "webrtc/modules/video_coding/jitter_estimator.h"
27 #include "webrtc/modules/video_coding/packet.h"
28 #include "webrtc/system_wrappers/include/clock.h"
29 #include "webrtc/system_wrappers/include/critical_section_wrapper.h"
30 #include "webrtc/system_wrappers/include/event_wrapper.h"
31 #include "webrtc/system_wrappers/include/metrics.h"
32 
33 namespace webrtc {
34 
35 // Interval for updating SS data.
36 static const uint32_t kSsCleanupIntervalSec = 60;
37 
38 // Use this RTT if no value has been reported.
39 static const int64_t kDefaultRtt = 200;
40 
41 // Request a keyframe if no continuous frame has been received for this
42 // number of milliseconds and NACKs are disabled.
43 static const int64_t kMaxDiscontinuousFramesTime = 1000;
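// Note: RTP video timestamps use a 90 kHz clock, so comparisons elsewhere in
// this file convert the millisecond constants above into RTP ticks, e.g.
// 90 * kMaxDiscontinuousFramesTime is one second in timestamp units.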
44 
45 typedef std::pair<uint32_t, VCMFrameBuffer*> FrameListPair;
46 
47 bool IsKeyFrame(FrameListPair pair) {
48   return pair.second->FrameType() == kVideoFrameKey;
49 }
50 
51 bool HasNonEmptyState(FrameListPair pair) {
52   return pair.second->GetState() != kStateEmpty;
53 }
54 
55 void FrameList::InsertFrame(VCMFrameBuffer* frame) {
56   insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
57 }
58 
59 VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
60   FrameList::iterator it = find(timestamp);
61   if (it == end())
62     return NULL;
63   VCMFrameBuffer* frame = it->second;
64   erase(it);
65   return frame;
66 }
67 
68 VCMFrameBuffer* FrameList::Front() const {
69   return begin()->second;
70 }
71 
72 VCMFrameBuffer* FrameList::Back() const {
73   return rbegin()->second;
74 }
75 
76 int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
77                                           UnorderedFrameList* free_frames) {
78   int drop_count = 0;
79   FrameList::iterator it = begin();
80   while (!empty()) {
81     // Throw away at least one frame.
82     it->second->Reset();
83     free_frames->push_back(it->second);
84     erase(it++);
85     ++drop_count;
86     if (it != end() && it->second->FrameType() == kVideoFrameKey) {
87       *key_frame_it = it;
88       return drop_count;
89     }
90   }
91   *key_frame_it = end();
92   return drop_count;
93 }
94 
95 void FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
96                                         UnorderedFrameList* free_frames) {
97   while (!empty()) {
98     VCMFrameBuffer* oldest_frame = Front();
99     bool remove_frame = false;
100     if (oldest_frame->GetState() == kStateEmpty && size() > 1) {
101       // This frame is empty, try to update the last decoded state and drop it
102       // if successful.
103       remove_frame = decoding_state->UpdateEmptyFrame(oldest_frame);
104     } else {
105       remove_frame = decoding_state->IsOldFrame(oldest_frame);
106     }
107     if (!remove_frame) {
108       break;
109     }
110     free_frames->push_back(oldest_frame);
111     TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
112                          oldest_frame->TimeStamp());
113     erase(begin());
114   }
115 }
116 
117 void FrameList::Reset(UnorderedFrameList* free_frames) {
118   while (!empty()) {
119     begin()->second->Reset();
120     free_frames->push_back(begin()->second);
121     erase(begin());
122   }
123 }
124 
125 bool Vp9SsMap::Insert(const VCMPacket& packet) {
126   if (!packet.codecSpecificHeader.codecHeader.VP9.ss_data_available)
127     return false;
128 
129   ss_map_[packet.timestamp] = packet.codecSpecificHeader.codecHeader.VP9.gof;
130   return true;
131 }
132 
133 void Vp9SsMap::Reset() {
134   ss_map_.clear();
135 }
136 
137 bool Vp9SsMap::Find(uint32_t timestamp, SsMap::iterator* it_out) {
138   bool found = false;
139   for (SsMap::iterator it = ss_map_.begin(); it != ss_map_.end(); ++it) {
140     if (it->first == timestamp || IsNewerTimestamp(timestamp, it->first)) {
141       *it_out = it;
142       found = true;
143     }
144   }
145   return found;
146 }
147 
148 void Vp9SsMap::RemoveOld(uint32_t timestamp) {
149   if (!TimeForCleanup(timestamp))
150     return;
151 
152   SsMap::iterator it;
153   if (!Find(timestamp, &it))
154     return;
155 
156   ss_map_.erase(ss_map_.begin(), it);
157   AdvanceFront(timestamp);
158 }
159 
160 bool Vp9SsMap::TimeForCleanup(uint32_t timestamp) const {
161   if (ss_map_.empty() || !IsNewerTimestamp(timestamp, ss_map_.begin()->first))
162     return false;
163 
164   uint32_t diff = timestamp - ss_map_.begin()->first;
165   return diff / kVideoPayloadTypeFrequency >= kSsCleanupIntervalSec;
166 }
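// For reference: kVideoPayloadTypeFrequency is the 90 kHz RTP video clock, so
// cleanup triggers once the front entry is at least
// kSsCleanupIntervalSec * 90000 = 5,400,000 timestamp ticks old.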
167 
168 void Vp9SsMap::AdvanceFront(uint32_t timestamp) {
169   RTC_DCHECK(!ss_map_.empty());
170   GofInfoVP9 gof = ss_map_.begin()->second;
171   ss_map_.erase(ss_map_.begin());
172   ss_map_[timestamp] = gof;
173 }
174 
175 // TODO(asapersson): Update according to updates in RTP payload profile.
176 bool Vp9SsMap::UpdatePacket(VCMPacket* packet) {
177   uint8_t gof_idx = packet->codecSpecificHeader.codecHeader.VP9.gof_idx;
178   if (gof_idx == kNoGofIdx)
179     return false;  // No update needed.
180 
181   SsMap::iterator it;
182   if (!Find(packet->timestamp, &it))
183     return false;  // Corresponding SS not yet received.
184 
185   if (gof_idx >= it->second.num_frames_in_gof)
186     return false;  // Assume corresponding SS not yet received.
187 
188   RTPVideoHeaderVP9* vp9 = &packet->codecSpecificHeader.codecHeader.VP9;
189   vp9->temporal_idx = it->second.temporal_idx[gof_idx];
190   vp9->temporal_up_switch = it->second.temporal_up_switch[gof_idx];
191 
192   // TODO(asapersson): Set vp9.ref_picture_id[i] and add usage.
193   vp9->num_ref_pics = it->second.num_ref_pics[gof_idx];
194   for (uint8_t i = 0; i < it->second.num_ref_pics[gof_idx]; ++i) {
195     vp9->pid_diff[i] = it->second.pid_diff[gof_idx][i];
196   }
197   return true;
198 }
199 
200 void Vp9SsMap::UpdateFrames(FrameList* frames) {
201   for (const auto& frame_it : *frames) {
202     uint8_t gof_idx =
203         frame_it.second->CodecSpecific()->codecSpecific.VP9.gof_idx;
204     if (gof_idx == kNoGofIdx) {
205       continue;
206     }
207     SsMap::iterator ss_it;
208     if (Find(frame_it.second->TimeStamp(), &ss_it)) {
209       if (gof_idx >= ss_it->second.num_frames_in_gof) {
210         continue;  // Assume corresponding SS not yet received.
211       }
212       frame_it.second->SetGofInfo(ss_it->second, gof_idx);
213     }
214   }
215 }
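// Illustrative call pattern for Vp9SsMap (a sketch only; the callers live
// elsewhere in the VP9 receive path, not in this file):
//   Vp9SsMap ss_map;
//   if (ss_map.Insert(packet))            // Packet carried an SS header.
//     ss_map.UpdateFrames(&incomplete_frames);
//   ss_map.UpdatePacket(&packet);         // Copy GOF info into a media packet.
//   ss_map.RemoveOld(packet.timestamp);   // Drop entries older than ~60 s.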
216 
217 VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
218                                  rtc::scoped_ptr<EventWrapper> event)
219     : clock_(clock),
220       running_(false),
221       crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
222       frame_event_(std::move(event)),
223       max_number_of_frames_(kStartNumberOfFrames),
224       free_frames_(),
225       decodable_frames_(),
226       incomplete_frames_(),
227       last_decoded_state_(),
228       first_packet_since_reset_(true),
229       stats_callback_(NULL),
230       incoming_frame_rate_(0),
231       incoming_frame_count_(0),
232       time_last_incoming_frame_count_(0),
233       incoming_bit_count_(0),
234       incoming_bit_rate_(0),
235       num_consecutive_old_packets_(0),
236       num_packets_(0),
237       num_duplicated_packets_(0),
238       num_discarded_packets_(0),
239       time_first_packet_ms_(0),
240       jitter_estimate_(clock),
241       inter_frame_delay_(clock_->TimeInMilliseconds()),
242       rtt_ms_(kDefaultRtt),
243       nack_mode_(kNoNack),
244       low_rtt_nack_threshold_ms_(-1),
245       high_rtt_nack_threshold_ms_(-1),
246       missing_sequence_numbers_(SequenceNumberLessThan()),
247       max_nack_list_size_(0),
248       max_packet_age_to_nack_(0),
249       max_incomplete_time_ms_(0),
250       decode_error_mode_(kNoErrors),
251       average_packets_per_frame_(0.0f),
252       frame_counter_(0) {
253   for (int i = 0; i < kStartNumberOfFrames; i++)
254     free_frames_.push_back(new VCMFrameBuffer());
255 }
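// Construction sketch (illustrative; in the real pipeline the jitter buffer is
// created and driven by VCMReceiver):
//   rtc::scoped_ptr<EventWrapper> event(EventWrapper::Create());
//   VCMJitterBuffer jitter_buffer(Clock::GetRealTimeClock(), std::move(event));
//   jitter_buffer.Start();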
256 
257 VCMJitterBuffer::~VCMJitterBuffer() {
258   Stop();
259   for (UnorderedFrameList::iterator it = free_frames_.begin();
260        it != free_frames_.end(); ++it) {
261     delete *it;
262   }
263   for (FrameList::iterator it = incomplete_frames_.begin();
264        it != incomplete_frames_.end(); ++it) {
265     delete it->second;
266   }
267   for (FrameList::iterator it = decodable_frames_.begin();
268        it != decodable_frames_.end(); ++it) {
269     delete it->second;
270   }
271   delete crit_sect_;
272 }
273 
274 void VCMJitterBuffer::UpdateHistograms() {
275   if (num_packets_ <= 0 || !running_) {
276     return;
277   }
278   int64_t elapsed_sec =
279       (clock_->TimeInMilliseconds() - time_first_packet_ms_) / 1000;
280   if (elapsed_sec < metrics::kMinRunTimeInSeconds) {
281     return;
282   }
283 
284   RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.DiscardedPacketsInPercent",
285                                   num_discarded_packets_ * 100 / num_packets_);
286   RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.DuplicatedPacketsInPercent",
287                                   num_duplicated_packets_ * 100 / num_packets_);
288 
289   int total_frames =
290       receive_statistics_.key_frames + receive_statistics_.delta_frames;
291   if (total_frames > 0) {
292     RTC_HISTOGRAM_COUNTS_SPARSE_100(
293         "WebRTC.Video.CompleteFramesReceivedPerSecond",
294         static_cast<int>((total_frames / elapsed_sec) + 0.5f));
295     RTC_HISTOGRAM_COUNTS_SPARSE_1000(
296         "WebRTC.Video.KeyFramesReceivedInPermille",
297         static_cast<int>(
298             (receive_statistics_.key_frames * 1000.0f / total_frames) + 0.5f));
299   }
300 }
301 
302 void VCMJitterBuffer::Start() {
303   CriticalSectionScoped cs(crit_sect_);
304   running_ = true;
305   incoming_frame_count_ = 0;
306   incoming_frame_rate_ = 0;
307   incoming_bit_count_ = 0;
308   incoming_bit_rate_ = 0;
309   time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
310   receive_statistics_ = FrameCounts();
311 
312   num_consecutive_old_packets_ = 0;
313   num_packets_ = 0;
314   num_duplicated_packets_ = 0;
315   num_discarded_packets_ = 0;
316   time_first_packet_ms_ = 0;
317 
318   // Start in a non-signaled state.
319   waiting_for_completion_.frame_size = 0;
320   waiting_for_completion_.timestamp = 0;
321   waiting_for_completion_.latest_packet_time = -1;
322   first_packet_since_reset_ = true;
323   rtt_ms_ = kDefaultRtt;
324   last_decoded_state_.Reset();
325 }
326 
327 void VCMJitterBuffer::Stop() {
328   crit_sect_->Enter();
329   UpdateHistograms();
330   running_ = false;
331   last_decoded_state_.Reset();
332 
333   // Make sure all frames are free and reset.
334   for (FrameList::iterator it = decodable_frames_.begin();
335        it != decodable_frames_.end(); ++it) {
336     free_frames_.push_back(it->second);
337   }
338   for (FrameList::iterator it = incomplete_frames_.begin();
339        it != incomplete_frames_.end(); ++it) {
340     free_frames_.push_back(it->second);
341   }
342   for (UnorderedFrameList::iterator it = free_frames_.begin();
343        it != free_frames_.end(); ++it) {
344     (*it)->Reset();
345   }
346   decodable_frames_.clear();
347   incomplete_frames_.clear();
348   crit_sect_->Leave();
349   // Make sure we wake up any threads waiting on this event.
350   frame_event_->Set();
351 }
352 
353 bool VCMJitterBuffer::Running() const {
354   CriticalSectionScoped cs(crit_sect_);
355   return running_;
356 }
357 
358 void VCMJitterBuffer::Flush() {
359   CriticalSectionScoped cs(crit_sect_);
360   decodable_frames_.Reset(&free_frames_);
361   incomplete_frames_.Reset(&free_frames_);
362   last_decoded_state_.Reset();  // TODO(mikhal): sync reset.
363   num_consecutive_old_packets_ = 0;
364   // Also reset the jitter and delay estimates
365   jitter_estimate_.Reset();
366   inter_frame_delay_.Reset(clock_->TimeInMilliseconds());
367   waiting_for_completion_.frame_size = 0;
368   waiting_for_completion_.timestamp = 0;
369   waiting_for_completion_.latest_packet_time = -1;
370   first_packet_since_reset_ = true;
371   missing_sequence_numbers_.clear();
372 }
373 
374 // Get received key and delta frames
375 FrameCounts VCMJitterBuffer::FrameStatistics() const {
376   CriticalSectionScoped cs(crit_sect_);
377   return receive_statistics_;
378 }
379 
380 int VCMJitterBuffer::num_packets() const {
381   CriticalSectionScoped cs(crit_sect_);
382   return num_packets_;
383 }
384 
385 int VCMJitterBuffer::num_duplicated_packets() const {
386   CriticalSectionScoped cs(crit_sect_);
387   return num_duplicated_packets_;
388 }
389 
390 int VCMJitterBuffer::num_discarded_packets() const {
391   CriticalSectionScoped cs(crit_sect_);
392   return num_discarded_packets_;
393 }
394 
395 // Calculate framerate and bitrate.
396 void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
397                                              unsigned int* bitrate) {
398   assert(framerate);
399   assert(bitrate);
400   CriticalSectionScoped cs(crit_sect_);
401   const int64_t now = clock_->TimeInMilliseconds();
402   int64_t diff = now - time_last_incoming_frame_count_;
403   if (diff < 1000 && incoming_frame_rate_ > 0 && incoming_bit_rate_ > 0) {
404     // Make sure we report something even though less than
405     // 1 second has passed since last update.
406     *framerate = incoming_frame_rate_;
407     *bitrate = incoming_bit_rate_;
408   } else if (incoming_frame_count_ != 0) {
409     // We have received frame(s) since last call to this function
410 
411     // Prepare calculations
412     if (diff <= 0) {
413       diff = 1;
414     }
415     // we add 0.5f for rounding
416     float rate = 0.5f + ((incoming_frame_count_ * 1000.0f) / diff);
417     if (rate < 1.0f) {
418       rate = 1.0f;
419     }
420 
421     // Calculate frame rate
422     // Let r be rate.
423     // r(0) = 1000*framecount/delta_time.
424     // (I.e. frames per second since last calculation.)
425     // frame_rate = r(0)/2 + r(-1)/2
426     // (I.e. fr/s average this and the previous calculation.)
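    // Worked example (illustrative): 24 frames over an 800 ms interval gives
    // r(0) = 0.5 + 24 * 1000 / 800 = 30.5, truncated to 30; with a previous
    // rate of 28 the reported framerate is (28 + 30) / 2 = 29 fps.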
427     *framerate = (incoming_frame_rate_ + static_cast<unsigned int>(rate)) / 2;
428     incoming_frame_rate_ = static_cast<unsigned int>(rate);
429 
430     // Calculate bit rate
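    // 10 * (100 * bits / diff_ms) == 1000 * bits / diff_ms, i.e. bits per
    // second computed with integer arithmetic.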
431     if (incoming_bit_count_ == 0) {
432       *bitrate = 0;
433     } else {
434       *bitrate =
435           10 * ((100 * incoming_bit_count_) / static_cast<unsigned int>(diff));
436     }
437     incoming_bit_rate_ = *bitrate;
438 
439     // Reset count
440     incoming_frame_count_ = 0;
441     incoming_bit_count_ = 0;
442     time_last_incoming_frame_count_ = now;
443 
444   } else {
445     // No frames since last call
446     time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
447     *framerate = 0;
448     *bitrate = 0;
449     incoming_frame_rate_ = 0;
450     incoming_bit_rate_ = 0;
451   }
452 }
453 
454 // Answers the question:
455 // Will the packet sequence be complete if the next frame is grabbed for
456 // decoding right now? That is, have we lost a frame between the last decoded
457 // frame and the next, or is the next frame missing one or more packets?
458 
459 bool VCMJitterBuffer::CompleteSequenceWithNextFrame() {
460   CriticalSectionScoped cs(crit_sect_);
461   // Finding oldest frame ready for decoder, check sequence number and size
462   CleanUpOldOrEmptyFrames();
463   if (!decodable_frames_.empty()) {
464     if (decodable_frames_.Front()->GetState() == kStateComplete) {
465       return true;
466     }
467   } else if (incomplete_frames_.size() <= 1) {
468     // Frame not ready to be decoded.
469     return true;
470   }
471   return false;
472 }
473 
474 // Returns immediately, or waits up to |max_wait_time_ms| ms for a complete
475 // frame to become available; |max_wait_time_ms| is decided by the caller.
476 bool VCMJitterBuffer::NextCompleteTimestamp(uint32_t max_wait_time_ms,
477                                             uint32_t* timestamp) {
478   crit_sect_->Enter();
479   if (!running_) {
480     crit_sect_->Leave();
481     return false;
482   }
483   CleanUpOldOrEmptyFrames();
484 
485   if (decodable_frames_.empty() ||
486       decodable_frames_.Front()->GetState() != kStateComplete) {
487     const int64_t end_wait_time_ms =
488         clock_->TimeInMilliseconds() + max_wait_time_ms;
489     int64_t wait_time_ms = max_wait_time_ms;
490     while (wait_time_ms > 0) {
491       crit_sect_->Leave();
492       const EventTypeWrapper ret =
493           frame_event_->Wait(static_cast<uint32_t>(wait_time_ms));
494       crit_sect_->Enter();
495       if (ret == kEventSignaled) {
496         // Are we shutting down the jitter buffer?
497         if (!running_) {
498           crit_sect_->Leave();
499           return false;
500         }
501         // Finding oldest frame ready for decoder.
502         CleanUpOldOrEmptyFrames();
503         if (decodable_frames_.empty() ||
504             decodable_frames_.Front()->GetState() != kStateComplete) {
505           wait_time_ms = end_wait_time_ms - clock_->TimeInMilliseconds();
506         } else {
507           break;
508         }
509       } else {
510         break;
511       }
512     }
513   }
514   if (decodable_frames_.empty() ||
515       decodable_frames_.Front()->GetState() != kStateComplete) {
516     crit_sect_->Leave();
517     return false;
518   }
519   *timestamp = decodable_frames_.Front()->TimeStamp();
520   crit_sect_->Leave();
521   return true;
522 }
523 
524 bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(uint32_t* timestamp) {
525   CriticalSectionScoped cs(crit_sect_);
526   if (!running_) {
527     return false;
528   }
529   if (decode_error_mode_ == kNoErrors) {
530     // No point to continue, as we are not decoding with errors.
531     return false;
532   }
533 
534   CleanUpOldOrEmptyFrames();
535 
536   VCMFrameBuffer* oldest_frame;
537   if (decodable_frames_.empty()) {
538     if (nack_mode_ != kNoNack || incomplete_frames_.size() <= 1) {
539       return false;
540     }
541     oldest_frame = incomplete_frames_.Front();
542     // Frame will only be removed from buffer if it is complete (or decodable).
543     if (oldest_frame->GetState() < kStateComplete) {
544       return false;
545     }
546   } else {
547     oldest_frame = decodable_frames_.Front();
548     // If we have exactly one frame in the buffer, release it only if it is
549     // complete. We know decodable_frames_ is not empty due to the previous
550     // check.
551     if (decodable_frames_.size() == 1 && incomplete_frames_.empty() &&
552         oldest_frame->GetState() != kStateComplete) {
553       return false;
554     }
555   }
556 
557   *timestamp = oldest_frame->TimeStamp();
558   return true;
559 }
560 
561 VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
562   CriticalSectionScoped cs(crit_sect_);
563   if (!running_) {
564     return NULL;
565   }
566   // Extract the frame with the desired timestamp.
567   VCMFrameBuffer* frame = decodable_frames_.PopFrame(timestamp);
568   bool continuous = true;
569   if (!frame) {
570     frame = incomplete_frames_.PopFrame(timestamp);
571     if (frame)
572       continuous = last_decoded_state_.ContinuousFrame(frame);
573     else
574       return NULL;
575   }
576   TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", timestamp, "Extract");
577   // Frame pulled out from jitter buffer, update the jitter estimate.
578   const bool retransmitted = (frame->GetNackCount() > 0);
579   if (retransmitted) {
580     jitter_estimate_.FrameNacked();
581   } else if (frame->Length() > 0) {
582     // Ignore retransmitted and empty frames.
583     if (waiting_for_completion_.latest_packet_time >= 0) {
584       UpdateJitterEstimate(waiting_for_completion_, true);
585     }
586     if (frame->GetState() == kStateComplete) {
587       UpdateJitterEstimate(*frame, false);
588     } else {
589       // Wait for this one to get complete.
590       waiting_for_completion_.frame_size = frame->Length();
591       waiting_for_completion_.latest_packet_time = frame->LatestPacketTimeMs();
592       waiting_for_completion_.timestamp = frame->TimeStamp();
593     }
594   }
595 
596   // The state must be changed to decoding before cleaning up zero sized
597   // frames to avoid empty frames being cleaned up and then given to the
598   // decoder. Propagates the missing_frame bit.
599   frame->PrepareForDecode(continuous);
600 
601   // We have a frame - update the last decoded state and nack list.
602   last_decoded_state_.SetState(frame);
603   DropPacketsFromNackList(last_decoded_state_.sequence_num());
604 
605   if ((*frame).IsSessionComplete())
606     UpdateAveragePacketsPerFrame(frame->NumPackets());
607 
608   return frame;
609 }
610 
611 // Release frame when done with decoding. Should never be used to release
612 // frames from within the jitter buffer.
613 void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) {
614   CriticalSectionScoped cs(crit_sect_);
615   VCMFrameBuffer* frame_buffer = static_cast<VCMFrameBuffer*>(frame);
616   if (frame_buffer) {
617     free_frames_.push_back(frame_buffer);
618   }
619 }
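// Sketch of the typical consumer sequence (illustrative; the actual driver of
// these calls is the receiver/decoder thread, e.g. VCMReceiver):
//   uint32_t timestamp;
//   if (jitter_buffer.NextCompleteTimestamp(max_wait_time_ms, &timestamp)) {
//     VCMEncodedFrame* frame = jitter_buffer.ExtractAndSetDecode(timestamp);
//     if (frame != NULL) {
//       // ... decode the frame ...
//       jitter_buffer.ReleaseFrame(frame);
//     }
//   }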
620 
621 // Gets frame to use for this timestamp. If no match, get empty frame.
622 VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
623                                              VCMFrameBuffer** frame,
624                                              FrameList** frame_list) {
625   *frame = incomplete_frames_.PopFrame(packet.timestamp);
626   if (*frame != NULL) {
627     *frame_list = &incomplete_frames_;
628     return kNoError;
629   }
630   *frame = decodable_frames_.PopFrame(packet.timestamp);
631   if (*frame != NULL) {
632     *frame_list = &decodable_frames_;
633     return kNoError;
634   }
635 
636   *frame_list = NULL;
637   // No match, return empty frame.
638   *frame = GetEmptyFrame();
639   if (*frame == NULL) {
640     // No free frame! Try to reclaim some...
641     LOG(LS_WARNING) << "Unable to get empty frame; Recycling.";
642     bool found_key_frame = RecycleFramesUntilKeyFrame();
643     *frame = GetEmptyFrame();
644     assert(*frame);
645     if (!found_key_frame) {
646       free_frames_.push_back(*frame);
647       return kFlushIndicator;
648     }
649   }
650   (*frame)->Reset();
651   return kNoError;
652 }
653 
654 int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
655                                         bool* retransmitted) const {
656   assert(retransmitted);
657   CriticalSectionScoped cs(crit_sect_);
658   const VCMFrameBuffer* frame_buffer =
659       static_cast<const VCMFrameBuffer*>(frame);
660   *retransmitted = (frame_buffer->GetNackCount() > 0);
661   return frame_buffer->LatestPacketTimeMs();
662 }
663 
664 VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
665                                                  bool* retransmitted) {
666   CriticalSectionScoped cs(crit_sect_);
667 
668   ++num_packets_;
669   if (num_packets_ == 1) {
670     time_first_packet_ms_ = clock_->TimeInMilliseconds();
671   }
672   // Does this packet belong to an old frame?
673   if (last_decoded_state_.IsOldPacket(&packet)) {
674     // Account only for media packets.
675     if (packet.sizeBytes > 0) {
676       num_discarded_packets_++;
677       num_consecutive_old_packets_++;
678       if (stats_callback_ != NULL)
679         stats_callback_->OnDiscardedPacketsUpdated(num_discarded_packets_);
680     }
681     // Update last decoded sequence number if the packet arrived late and
682     // belongs to a frame with a timestamp equal to the last decoded
683     // timestamp.
684     last_decoded_state_.UpdateOldPacket(&packet);
685     DropPacketsFromNackList(last_decoded_state_.sequence_num());
686 
687     // Also see if this old packet made more incomplete frames continuous.
688     FindAndInsertContinuousFramesWithState(last_decoded_state_);
689 
690     if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
691       LOG(LS_WARNING)
692           << num_consecutive_old_packets_
693           << " consecutive old packets received. Flushing the jitter buffer.";
694       Flush();
695       return kFlushIndicator;
696     }
697     return kOldPacket;
698   }
699 
700   num_consecutive_old_packets_ = 0;
701 
702   VCMFrameBuffer* frame;
703   FrameList* frame_list;
704   const VCMFrameBufferEnum error = GetFrame(packet, &frame, &frame_list);
705   if (error != kNoError)
706     return error;
707 
708   int64_t now_ms = clock_->TimeInMilliseconds();
709   // We are keeping track of the first and latest seq numbers, and
710   // the number of wraps to be able to calculate how many packets we expect.
711   if (first_packet_since_reset_) {
712     // Now it's time to start estimating jitter;
713     // reset the delay estimate.
714     inter_frame_delay_.Reset(now_ms);
715   }
716 
717   // Empty packets may bias the jitter estimate (lacking size component),
718   // therefore don't let empty packets trigger the following updates:
719   if (packet.frameType != kEmptyFrame) {
720     if (waiting_for_completion_.timestamp == packet.timestamp) {
721       // This can get bad if we have a lot of duplicate packets, since we
722       // will then count some packets multiple times.
723       waiting_for_completion_.frame_size += packet.sizeBytes;
724       waiting_for_completion_.latest_packet_time = now_ms;
725     } else if (waiting_for_completion_.latest_packet_time >= 0 &&
726                waiting_for_completion_.latest_packet_time + 2000 <= now_ms) {
727       // A packet should never be more than two seconds late
728       UpdateJitterEstimate(waiting_for_completion_, true);
729       waiting_for_completion_.latest_packet_time = -1;
730       waiting_for_completion_.frame_size = 0;
731       waiting_for_completion_.timestamp = 0;
732     }
733   }
734 
735   VCMFrameBufferStateEnum previous_state = frame->GetState();
736   // Insert packet.
737   FrameData frame_data;
738   frame_data.rtt_ms = rtt_ms_;
739   frame_data.rolling_average_packets_per_frame = average_packets_per_frame_;
740   VCMFrameBufferEnum buffer_state =
741       frame->InsertPacket(packet, now_ms, decode_error_mode_, frame_data);
742 
743   if (previous_state != kStateComplete) {
744     TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->TimeStamp(), "timestamp",
745                              frame->TimeStamp());
746   }
747 
748   if (buffer_state > 0) {
749     incoming_bit_count_ += packet.sizeBytes << 3;
750     if (first_packet_since_reset_) {
751       latest_received_sequence_number_ = packet.seqNum;
752       first_packet_since_reset_ = false;
753     } else {
754       if (IsPacketRetransmitted(packet)) {
755         frame->IncrementNackCount();
756       }
757       if (!UpdateNackList(packet.seqNum) &&
758           packet.frameType != kVideoFrameKey) {
759         buffer_state = kFlushIndicator;
760       }
761 
762       latest_received_sequence_number_ =
763           LatestSequenceNumber(latest_received_sequence_number_, packet.seqNum);
764     }
765   }
766 
767   // Is the frame already in the decodable list?
768   bool continuous = IsContinuous(*frame);
769   switch (buffer_state) {
770     case kGeneralError:
771     case kTimeStampError:
772     case kSizeError: {
773       free_frames_.push_back(frame);
774       break;
775     }
776     case kCompleteSession: {
777       if (previous_state != kStateDecodable &&
778           previous_state != kStateComplete) {
779         CountFrame(*frame);
780         if (continuous) {
781           // Signal that we have a complete session.
782           frame_event_->Set();
783         }
784       }
785       FALLTHROUGH();
786     }
787     // Note: There is no break here - continuing to kDecodableSession.
788     case kDecodableSession: {
789       *retransmitted = (frame->GetNackCount() > 0);
790       if (continuous) {
791         decodable_frames_.InsertFrame(frame);
792         FindAndInsertContinuousFrames(*frame);
793       } else {
794         incomplete_frames_.InsertFrame(frame);
795         // If NACKs are enabled, keyframes are triggered by |GetNackList|.
796         if (nack_mode_ == kNoNack &&
797             NonContinuousOrIncompleteDuration() >
798                 90 * kMaxDiscontinuousFramesTime) {
799           return kFlushIndicator;
800         }
801       }
802       break;
803     }
804     case kIncomplete: {
805       if (frame->GetState() == kStateEmpty &&
806           last_decoded_state_.UpdateEmptyFrame(frame)) {
807         free_frames_.push_back(frame);
808         return kNoError;
809       } else {
810         incomplete_frames_.InsertFrame(frame);
811         // If NACKs are enabled, keyframes are triggered by |GetNackList|.
812         if (nack_mode_ == kNoNack &&
813             NonContinuousOrIncompleteDuration() >
814                 90 * kMaxDiscontinuousFramesTime) {
815           return kFlushIndicator;
816         }
817       }
818       break;
819     }
820     case kNoError:
821     case kOutOfBoundsPacket:
822     case kDuplicatePacket: {
823       // Put back the frame where it came from.
824       if (frame_list != NULL) {
825         frame_list->InsertFrame(frame);
826       } else {
827         free_frames_.push_back(frame);
828       }
829       ++num_duplicated_packets_;
830       break;
831     }
832     case kFlushIndicator:
833       free_frames_.push_back(frame);
834       return kFlushIndicator;
835     default:
836       assert(false);
837   }
838   return buffer_state;
839 }
840 
841 bool VCMJitterBuffer::IsContinuousInState(
842     const VCMFrameBuffer& frame,
843     const VCMDecodingState& decoding_state) const {
844   // Is this frame complete or decodable, and continuous?
845   // kStateDecodable will never be set when decode_error_mode_ is kNoErrors,
846   // as SessionInfo determines this state based on the error mode (and frame
847   // completeness).
848   return (frame.GetState() == kStateComplete ||
849           frame.GetState() == kStateDecodable) &&
850          decoding_state.ContinuousFrame(&frame);
851 }
852 
853 bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
854   if (IsContinuousInState(frame, last_decoded_state_)) {
855     return true;
856   }
857   VCMDecodingState decoding_state;
858   decoding_state.CopyFrom(last_decoded_state_);
859   for (FrameList::const_iterator it = decodable_frames_.begin();
860        it != decodable_frames_.end(); ++it) {
861     VCMFrameBuffer* decodable_frame = it->second;
862     if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
863       break;
864     }
865     decoding_state.SetState(decodable_frame);
866     if (IsContinuousInState(frame, decoding_state)) {
867       return true;
868     }
869   }
870   return false;
871 }
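// Note: continuity is checked not only against the last decoded frame but also
// against frames already in decodable_frames_ with older timestamps, so a
// frame can be classified as continuous before its predecessors have actually
// been decoded.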
872 
873 void VCMJitterBuffer::FindAndInsertContinuousFrames(
874     const VCMFrameBuffer& new_frame) {
875   VCMDecodingState decoding_state;
876   decoding_state.CopyFrom(last_decoded_state_);
877   decoding_state.SetState(&new_frame);
878   FindAndInsertContinuousFramesWithState(decoding_state);
879 }
880 
881 void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
882     const VCMDecodingState& original_decoded_state) {
883   // Copy original_decoded_state so we can move the state forward with each
884   // decodable frame we find.
885   VCMDecodingState decoding_state;
886   decoding_state.CopyFrom(original_decoded_state);
887 
888   // When temporal layers are available, we search for a complete or decodable
889   // frame until we hit one of the following:
890   // 1. Continuous base or sync layer.
891   // 2. The end of the list was reached.
892   for (FrameList::iterator it = incomplete_frames_.begin();
893        it != incomplete_frames_.end();) {
894     VCMFrameBuffer* frame = it->second;
895     if (IsNewerTimestamp(original_decoded_state.time_stamp(),
896                          frame->TimeStamp())) {
897       ++it;
898       continue;
899     }
900     if (IsContinuousInState(*frame, decoding_state)) {
901       decodable_frames_.InsertFrame(frame);
902       incomplete_frames_.erase(it++);
903       decoding_state.SetState(frame);
904     } else if (frame->TemporalId() <= 0) {
905       break;
906     } else {
907       ++it;
908     }
909   }
910 }
911 
912 uint32_t VCMJitterBuffer::EstimatedJitterMs() {
913   CriticalSectionScoped cs(crit_sect_);
914   // Compute RTT multiplier for estimation.
915   // |low_rtt_nack_threshold_ms_| == -1 means no FEC.
916   double rtt_mult = 1.0f;
917   if (low_rtt_nack_threshold_ms_ >= 0 &&
918       rtt_ms_ >= low_rtt_nack_threshold_ms_) {
919     // For RTTs above low_rtt_nack_threshold_ms_ we don't apply extra delay
920     // when waiting for retransmissions.
921     rtt_mult = 0.0f;
922   }
923   return jitter_estimate_.GetJitterEstimate(rtt_mult);
924 }
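// The multiplier controls how much of the current RTT the estimator may add on
// top of the measured network jitter to leave time for retransmissions; with
// rtt_mult == 0.0 no extra retransmission delay is included.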
925 
926 void VCMJitterBuffer::UpdateRtt(int64_t rtt_ms) {
927   CriticalSectionScoped cs(crit_sect_);
928   rtt_ms_ = rtt_ms;
929   jitter_estimate_.UpdateRtt(rtt_ms);
930 }
931 
932 void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
933                                   int64_t low_rtt_nack_threshold_ms,
934                                   int64_t high_rtt_nack_threshold_ms) {
935   CriticalSectionScoped cs(crit_sect_);
936   nack_mode_ = mode;
937   if (mode == kNoNack) {
938     missing_sequence_numbers_.clear();
939   }
940   assert(low_rtt_nack_threshold_ms >= -1 && high_rtt_nack_threshold_ms >= -1);
941   assert(high_rtt_nack_threshold_ms == -1 ||
942          low_rtt_nack_threshold_ms <= high_rtt_nack_threshold_ms);
943   assert(low_rtt_nack_threshold_ms > -1 || high_rtt_nack_threshold_ms == -1);
944   low_rtt_nack_threshold_ms_ = low_rtt_nack_threshold_ms;
945   high_rtt_nack_threshold_ms_ = high_rtt_nack_threshold_ms;
946   // Don't set a high start RTT if high_rtt_nack_threshold_ms_ is used, so
947   // that NACK is not disabled in |kNack| mode.
948   if (rtt_ms_ == kDefaultRtt && high_rtt_nack_threshold_ms_ != -1) {
949     rtt_ms_ = 0;
950   }
951   if (!WaitForRetransmissions()) {
952     jitter_estimate_.ResetNackCount();
953   }
954 }
955 
956 void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
957                                       int max_packet_age_to_nack,
958                                       int max_incomplete_time_ms) {
959   CriticalSectionScoped cs(crit_sect_);
960   assert(max_packet_age_to_nack >= 0);
961   assert(max_incomplete_time_ms >= 0);
962   max_nack_list_size_ = max_nack_list_size;
963   max_packet_age_to_nack_ = max_packet_age_to_nack;
964   max_incomplete_time_ms_ = max_incomplete_time_ms;
965 }
966 
967 VCMNackMode VCMJitterBuffer::nack_mode() const {
968   CriticalSectionScoped cs(crit_sect_);
969   return nack_mode_;
970 }
971 
972 int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
973   if (incomplete_frames_.empty()) {
974     return 0;
975   }
976   uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
977   if (!decodable_frames_.empty()) {
978     start_timestamp = decodable_frames_.Back()->TimeStamp();
979   }
980   return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
981 }
982 
983 uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
984     const VCMFrameBuffer& frame) const {
985   assert(frame.GetLowSeqNum() >= 0);
986   if (frame.HaveFirstPacket())
987     return frame.GetLowSeqNum();
988 
989   // This estimate is not accurate if more than one packet with lower sequence
990   // number is lost.
991   return frame.GetLowSeqNum() - 1;
992 }
993 
994 std::vector<uint16_t> VCMJitterBuffer::GetNackList(bool* request_key_frame) {
995   CriticalSectionScoped cs(crit_sect_);
996   *request_key_frame = false;
997   if (nack_mode_ == kNoNack) {
998     return std::vector<uint16_t>();
999   }
1000   if (last_decoded_state_.in_initial_state()) {
1001     VCMFrameBuffer* next_frame = NextFrame();
1002     const bool first_frame_is_key = next_frame &&
1003                                     next_frame->FrameType() == kVideoFrameKey &&
1004                                     next_frame->HaveFirstPacket();
1005     if (!first_frame_is_key) {
1006       bool have_non_empty_frame =
1007           decodable_frames_.end() != find_if(decodable_frames_.begin(),
1008                                              decodable_frames_.end(),
1009                                              HasNonEmptyState);
1010       if (!have_non_empty_frame) {
1011         have_non_empty_frame =
1012             incomplete_frames_.end() != find_if(incomplete_frames_.begin(),
1013                                                 incomplete_frames_.end(),
1014                                                 HasNonEmptyState);
1015       }
1016       bool found_key_frame = RecycleFramesUntilKeyFrame();
1017       if (!found_key_frame) {
1018         *request_key_frame = have_non_empty_frame;
1019         return std::vector<uint16_t>();
1020       }
1021     }
1022   }
1023   if (TooLargeNackList()) {
1024     *request_key_frame = !HandleTooLargeNackList();
1025   }
1026   if (max_incomplete_time_ms_ > 0) {
1027     int non_continuous_incomplete_duration =
1028         NonContinuousOrIncompleteDuration();
1029     if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
1030       LOG_F(LS_WARNING) << "Too long non-decodable duration: "
1031                         << non_continuous_incomplete_duration << " > "
1032                         << 90 * max_incomplete_time_ms_;
1033       FrameList::reverse_iterator rit = find_if(
1034           incomplete_frames_.rbegin(), incomplete_frames_.rend(), IsKeyFrame);
1035       if (rit == incomplete_frames_.rend()) {
1036         // Request a key frame if we don't have one already.
1037         *request_key_frame = true;
1038         return std::vector<uint16_t>();
1039       } else {
1040         // Skip to the last key frame. If it's incomplete we will start
1041         // NACKing it.
1042         // Note that the estimated low sequence number is correct for VP8
1043         // streams because only the first packet of a key frame is marked.
1044         last_decoded_state_.Reset();
1045         DropPacketsFromNackList(EstimatedLowSequenceNumber(*rit->second));
1046       }
1047     }
1048   }
1049   std::vector<uint16_t> nack_list(missing_sequence_numbers_.begin(),
1050                                   missing_sequence_numbers_.end());
1051   return nack_list;
1052 }
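// The returned sequence numbers are expected to be forwarded to the RTP module
// as NACK feedback; when |*request_key_frame| is set, the caller should request
// a key frame instead (behavioural expectation, not enforced here).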
1053 
1054 void VCMJitterBuffer::SetDecodeErrorMode(VCMDecodeErrorMode error_mode) {
1055   CriticalSectionScoped cs(crit_sect_);
1056   decode_error_mode_ = error_mode;
1057 }
1058 
1059 VCMFrameBuffer* VCMJitterBuffer::NextFrame() const {
1060   if (!decodable_frames_.empty())
1061     return decodable_frames_.Front();
1062   if (!incomplete_frames_.empty())
1063     return incomplete_frames_.Front();
1064   return NULL;
1065 }
1066 
1067 bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
1068   if (nack_mode_ == kNoNack) {
1069     return true;
1070   }
1071   // Make sure we don't add packets which are already too old to be decoded.
1072   if (!last_decoded_state_.in_initial_state()) {
1073     latest_received_sequence_number_ = LatestSequenceNumber(
1074         latest_received_sequence_number_, last_decoded_state_.sequence_num());
1075   }
1076   if (IsNewerSequenceNumber(sequence_number,
1077                             latest_received_sequence_number_)) {
1078     // Push any missing sequence numbers to the NACK list.
1079     for (uint16_t i = latest_received_sequence_number_ + 1;
1080          IsNewerSequenceNumber(sequence_number, i); ++i) {
1081       missing_sequence_numbers_.insert(missing_sequence_numbers_.end(), i);
1082       TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "AddNack",
1083                            "seqnum", i);
1084     }
1085     if (TooLargeNackList() && !HandleTooLargeNackList()) {
1086       LOG(LS_WARNING) << "Requesting key frame due to too large NACK list.";
1087       return false;
1088     }
1089     if (MissingTooOldPacket(sequence_number) &&
1090         !HandleTooOldPackets(sequence_number)) {
1091       LOG(LS_WARNING) << "Requesting key frame due to missing too old packets";
1092       return false;
1093     }
1094   } else {
1095     missing_sequence_numbers_.erase(sequence_number);
1096     TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RemoveNack",
1097                          "seqnum", sequence_number);
1098   }
1099   return true;
1100 }
1101 
1102 bool VCMJitterBuffer::TooLargeNackList() const {
1103   return missing_sequence_numbers_.size() > max_nack_list_size_;
1104 }
1105 
1106 bool VCMJitterBuffer::HandleTooLargeNackList() {
1107   // Recycle frames until the NACK list is small enough. It is likely cheaper to
1108   // request a key frame than to retransmit this many missing packets.
1109   LOG_F(LS_WARNING) << "NACK list has grown too large: "
1110                     << missing_sequence_numbers_.size() << " > "
1111                     << max_nack_list_size_;
1112   bool key_frame_found = false;
1113   while (TooLargeNackList()) {
1114     key_frame_found = RecycleFramesUntilKeyFrame();
1115   }
1116   return key_frame_found;
1117 }
1118 
1119 bool VCMJitterBuffer::MissingTooOldPacket(
1120     uint16_t latest_sequence_number) const {
1121   if (missing_sequence_numbers_.empty()) {
1122     return false;
1123   }
1124   const uint16_t age_of_oldest_missing_packet =
1125       latest_sequence_number - *missing_sequence_numbers_.begin();
1126   // Recycle frames if the NACK list contains too old sequence numbers as
1127   // the packets may have already been dropped by the sender.
1128   return age_of_oldest_missing_packet > max_packet_age_to_nack_;
1129 }
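// Note: the age computation above relies on unsigned 16-bit arithmetic, so it
// remains correct across sequence-number wraparound.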
1130 
1131 bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
1132   bool key_frame_found = false;
1133   const uint16_t age_of_oldest_missing_packet =
1134       latest_sequence_number - *missing_sequence_numbers_.begin();
1135   LOG_F(LS_WARNING) << "NACK list contains too old sequence numbers: "
1136                     << age_of_oldest_missing_packet << " > "
1137                     << max_packet_age_to_nack_;
1138   while (MissingTooOldPacket(latest_sequence_number)) {
1139     key_frame_found = RecycleFramesUntilKeyFrame();
1140   }
1141   return key_frame_found;
1142 }
1143 
1144 void VCMJitterBuffer::DropPacketsFromNackList(
1145     uint16_t last_decoded_sequence_number) {
1146   // Erase all sequence numbers from the NACK list which we won't need any
1147   // longer.
1148   missing_sequence_numbers_.erase(
1149       missing_sequence_numbers_.begin(),
1150       missing_sequence_numbers_.upper_bound(last_decoded_sequence_number));
1151 }
1152 
1153 int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
1154   CriticalSectionScoped cs(crit_sect_);
1155   return last_decoded_state_.time_stamp();
1156 }
1157 
1158 void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
1159                                        uint32_t* timestamp_end) {
1160   CriticalSectionScoped cs(crit_sect_);
1161   CleanUpOldOrEmptyFrames();
1162   *timestamp_start = 0;
1163   *timestamp_end = 0;
1164   if (decodable_frames_.empty()) {
1165     return;
1166   }
1167   *timestamp_start = decodable_frames_.Front()->TimeStamp();
1168   *timestamp_end = decodable_frames_.Back()->TimeStamp();
1169 }
1170 
1171 void VCMJitterBuffer::RegisterStatsCallback(
1172     VCMReceiveStatisticsCallback* callback) {
1173   CriticalSectionScoped cs(crit_sect_);
1174   stats_callback_ = callback;
1175 }
1176 
1177 VCMFrameBuffer* VCMJitterBuffer::GetEmptyFrame() {
1178   if (free_frames_.empty()) {
1179     if (!TryToIncreaseJitterBufferSize()) {
1180       return NULL;
1181     }
1182   }
1183   VCMFrameBuffer* frame = free_frames_.front();
1184   free_frames_.pop_front();
1185   return frame;
1186 }
1187 
1188 bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
1189   if (max_number_of_frames_ >= kMaxNumberOfFrames)
1190     return false;
1191   free_frames_.push_back(new VCMFrameBuffer());
1192   ++max_number_of_frames_;
1193   TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
1194   return true;
1195 }
1196 
1197 // Recycle oldest frames up to a key frame, used if jitter buffer is completely
1198 // full.
1199 bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
1200   // First release incomplete frames, and only release decodable frames if there
1201   // are no incomplete ones.
1202   FrameList::iterator key_frame_it;
1203   bool key_frame_found = false;
1204   int dropped_frames = 0;
1205   dropped_frames += incomplete_frames_.RecycleFramesUntilKeyFrame(
1206       &key_frame_it, &free_frames_);
1207   key_frame_found = key_frame_it != incomplete_frames_.end();
1208   if (dropped_frames == 0) {
1209     dropped_frames += decodable_frames_.RecycleFramesUntilKeyFrame(
1210         &key_frame_it, &free_frames_);
1211     key_frame_found = key_frame_it != decodable_frames_.end();
1212   }
1213   TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
1214   if (key_frame_found) {
1215     LOG(LS_INFO) << "Found key frame while dropping frames.";
1216     // Reset last decoded state to make sure the next frame decoded is a key
1217     // frame, and start NACKing from here.
1218     last_decoded_state_.Reset();
1219     DropPacketsFromNackList(EstimatedLowSequenceNumber(*key_frame_it->second));
1220   } else if (decodable_frames_.empty()) {
1221     // All frames dropped. Reset the decoding state and clear missing sequence
1222     // numbers as we're starting fresh.
1223     last_decoded_state_.Reset();
1224     missing_sequence_numbers_.clear();
1225   }
1226   return key_frame_found;
1227 }
1228 
1229 // Must be called under the critical section |crit_sect_|.
1230 void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
1231   incoming_frame_count_++;
1232 
1233   if (frame.FrameType() == kVideoFrameKey) {
1234     TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
1235                             "KeyComplete");
1236   } else {
1237     TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
1238                             "DeltaComplete");
1239   }
1240 
1241   // Update receive statistics. We count all layers, so when layers are used
1242   // the sum of key and delta frames may differ from the frame count.
1243   if (frame.IsSessionComplete()) {
1244     if (frame.FrameType() == kVideoFrameKey) {
1245       ++receive_statistics_.key_frames;
1246     } else {
1247       ++receive_statistics_.delta_frames;
1248     }
1249     if (stats_callback_ != NULL)
1250       stats_callback_->OnFrameCountsUpdated(receive_statistics_);
1251   }
1252 }
1253 
1254 void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int current_number_packets) {
1255   if (frame_counter_ > kFastConvergeThreshold) {
1256     average_packets_per_frame_ =
1257         average_packets_per_frame_ * (1 - kNormalConvergeMultiplier) +
1258         current_number_packets * kNormalConvergeMultiplier;
1259   } else if (frame_counter_ > 0) {
1260     average_packets_per_frame_ =
1261         average_packets_per_frame_ * (1 - kFastConvergeMultiplier) +
1262         current_number_packets * kFastConvergeMultiplier;
1263     frame_counter_++;
1264   } else {
1265     average_packets_per_frame_ = current_number_packets;
1266     frame_counter_++;
1267   }
1268 }
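// This is an exponential moving average: the first frame seeds the average
// directly, the next kFastConvergeThreshold frames use the faster-adapting
// kFastConvergeMultiplier so the estimate settles quickly after a reset, and
// after that kNormalConvergeMultiplier gives a slower, smoother update.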
1269 
1270 // Must be called under the critical section |crit_sect_|.
1271 void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
1272   decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
1273                                             &free_frames_);
1274   incomplete_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
1275                                              &free_frames_);
1276   if (!last_decoded_state_.in_initial_state()) {
1277     DropPacketsFromNackList(last_decoded_state_.sequence_num());
1278   }
1279 }
1280 
1281 // Must be called from within |crit_sect_|.
1282 bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const {
1283   return missing_sequence_numbers_.find(packet.seqNum) !=
1284          missing_sequence_numbers_.end();
1285 }
1286 
1287 // Must be called under the critical section |crit_sect_|. Should never be
1288 // called with retransmitted frames, they must be filtered out before this
1289 // function is called.
1290 void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
1291                                            bool incomplete_frame) {
1292   if (sample.latest_packet_time == -1) {
1293     return;
1294   }
1295   UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
1296                        sample.frame_size, incomplete_frame);
1297 }
1298 
1299 // Must be called under the critical section crit_sect_. Should never be
1300 // called with retransmitted frames, they must be filtered out before this
1301 // function is called.
1302 void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
1303                                            bool incomplete_frame) {
1304   if (frame.LatestPacketTimeMs() == -1) {
1305     return;
1306   }
1307   // No retransmitted frames should be a part of the jitter
1308   // estimate.
1309   UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
1310                        frame.Length(), incomplete_frame);
1311 }
1312 
1313 // Must be called under the critical section |crit_sect_|. Should never be
1314 // called with retransmitted frames, they must be filtered out before this
1315 // function is called.
1316 void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms,
1317                                            uint32_t timestamp,
1318                                            unsigned int frame_size,
1319                                            bool incomplete_frame) {
1320   if (latest_packet_time_ms == -1) {
1321     return;
1322   }
1323   int64_t frame_delay;
1324   bool not_reordered = inter_frame_delay_.CalculateDelay(
1325       timestamp, &frame_delay, latest_packet_time_ms);
1326   // Filter out frames which have been reordered in time by the network
1327   if (not_reordered) {
1328     // Update the jitter estimate with the new samples
1329     jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame);
1330   }
1331 }
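// CalculateDelay() yields the difference between the frames' wall-clock
// inter-arrival time and their RTP timestamp spacing (converted to ms);
// roughly speaking, the estimator relates that delay to the change in frame
// size to separate queuing jitter from transmission-time effects.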
1332 
1333 bool VCMJitterBuffer::WaitForRetransmissions() {
1334   if (nack_mode_ == kNoNack) {
1335     // NACK disabled -> don't wait for retransmissions.
1336     return false;
1337   }
1338   // If the RTT is higher than |high_rtt_nack_threshold_ms_|, we don't wait
1339   // for retransmissions.
1340   if (high_rtt_nack_threshold_ms_ >= 0 &&
1341       rtt_ms_ >= high_rtt_nack_threshold_ms_) {
1342     return false;
1343   }
1344   return true;
1345 }
1346 }  // namespace webrtc
1347