1 /*
2 * libjingle
3 * Copyright 2004 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28 #ifdef HAVE_WEBRTC_VIDEO
29 #include "talk/media/webrtc/webrtcvideoengine.h"
30
31 #ifdef HAVE_CONFIG_H
32 #include <config.h>
33 #endif
34
35 #include <math.h>
36 #include <set>
37
38 #include "talk/base/basictypes.h"
39 #include "talk/base/buffer.h"
40 #include "talk/base/byteorder.h"
41 #include "talk/base/common.h"
42 #include "talk/base/cpumonitor.h"
43 #include "talk/base/logging.h"
44 #include "talk/base/stringutils.h"
45 #include "talk/base/thread.h"
46 #include "talk/base/timeutils.h"
47 #include "talk/media/base/constants.h"
48 #include "talk/media/base/rtputils.h"
49 #include "talk/media/base/streamparams.h"
50 #include "talk/media/base/videoadapter.h"
51 #include "talk/media/base/videocapturer.h"
52 #include "talk/media/base/videorenderer.h"
53 #include "talk/media/devices/filevideocapturer.h"
54 #include "talk/media/webrtc/webrtcpassthroughrender.h"
55 #include "talk/media/webrtc/webrtctexturevideoframe.h"
56 #include "talk/media/webrtc/webrtcvideocapturer.h"
57 #include "talk/media/webrtc/webrtcvideodecoderfactory.h"
58 #include "talk/media/webrtc/webrtcvideoencoderfactory.h"
59 #include "talk/media/webrtc/webrtcvideoframe.h"
60 #include "talk/media/webrtc/webrtcvie.h"
61 #include "talk/media/webrtc/webrtcvoe.h"
62 #include "talk/media/webrtc/webrtcvoiceengine.h"
63 #include "webrtc/experiments.h"
64 #include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
65
66
67 namespace cricket {
68
69
70 static const int kDefaultLogSeverity = talk_base::LS_WARNING;
71
72 static const int kMinVideoBitrate = 50;
73 static const int kStartVideoBitrate = 300;
74 static const int kMaxVideoBitrate = 2000;
75
76 // Controlled by experiment; try a super-low minimum bitrate for poor connections.
77 static const int kLowerMinBitrate = 30;
78
79 static const int kVideoMtu = 1200;
80
81 static const int kVideoRtpBufferSize = 65536;
82
83 static const char kVp8PayloadName[] = "VP8";
84 static const char kRedPayloadName[] = "red";
85 static const char kFecPayloadName[] = "ulpfec";
86
87 static const int kDefaultNumberOfTemporalLayers = 1; // 1:1
88
89 static const int kExternalVideoPayloadTypeBase = 120;
90
91 static bool BitrateIsSet(int value) {
92 return value > kAutoBandwidth;
93 }
94
95 static int GetBitrate(int value, int deflt) {
96 return BitrateIsSet(value) ? value : deflt;
97 }
98
99 // Static allocation of payload type values for external video codecs.
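// For example, with kExternalVideoPayloadTypeBase of 120, index 0 maps to
// payload type 120, index 1 to 121, and so on; the debug-only ASSERT below
// bounds this at 8 external codecs.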
100 static int GetExternalVideoPayloadType(int index) {
101 #if ENABLE_DEBUG
102 static const int kMaxExternalVideoCodecs = 8;
103 ASSERT(index >= 0 && index < kMaxExternalVideoCodecs);
104 #endif
105 return kExternalVideoPayloadTypeBase + index;
106 }
107
108 static void LogMultiline(talk_base::LoggingSeverity sev, char* text) {
109 const char* delim = "\r\n";
110 // TODO(fbarchard): Fix strtok lint warning.
111 for (char* tok = strtok(text, delim); tok; tok = strtok(NULL, delim)) {
112 LOG_V(sev) << tok;
113 }
114 }
115
116 // Severity is an integer because it is assumed to come from the command line.
117 static int SeverityToFilter(int severity) {
118 int filter = webrtc::kTraceNone;
119 switch (severity) {
120 case talk_base::LS_VERBOSE:
121 filter |= webrtc::kTraceAll;  // Fall through.
122 case talk_base::LS_INFO:
123 filter |= (webrtc::kTraceStateInfo | webrtc::kTraceInfo);  // Fall through.
124 case talk_base::LS_WARNING:
125 filter |= (webrtc::kTraceTerseInfo | webrtc::kTraceWarning);  // Fall through.
126 case talk_base::LS_ERROR:
127 filter |= (webrtc::kTraceError | webrtc::kTraceCritical);
128 }
129 return filter;
130 }
131
132 static const int kCpuMonitorPeriodMs = 2000; // 2 seconds.
133
134 static const bool kNotSending = false;
135
136 // Default video dscp value.
137 // See http://tools.ietf.org/html/rfc2474 for details
138 // See also http://tools.ietf.org/html/draft-jennings-rtcweb-qos-00
139 static const talk_base::DiffServCodePoint kVideoDscpValue =
140 talk_base::DSCP_AF41;
141
142 static bool IsNackEnabled(const VideoCodec& codec) {
143 return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamNack,
144 kParamValueEmpty));
145 }
146
147 // Returns true if Receiver Estimated Max Bitrate is enabled.
148 static bool IsRembEnabled(const VideoCodec& codec) {
149 return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamRemb,
150 kParamValueEmpty));
151 }
152
153 struct FlushBlackFrameData : public talk_base::MessageData {
154 FlushBlackFrameData(uint32 s, int64 t) : ssrc(s), timestamp(t) {
155 }
156 uint32 ssrc;
157 int64 timestamp;
158 };
159
160 class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
161 public:
162 WebRtcRenderAdapter(VideoRenderer* renderer, int channel_id)
163 : renderer_(renderer),
164 channel_id_(channel_id),
165 width_(0),
166 height_(0),
167 capture_start_rtp_time_stamp_(-1),
168 capture_start_ntp_time_ms_(0) {
169 }
170
171 virtual ~WebRtcRenderAdapter() {
172 }
173
174 void SetRenderer(VideoRenderer* renderer) {
175 talk_base::CritScope cs(&crit_);
176 renderer_ = renderer;
177 // FrameSizeChange may have already been called when renderer was not set.
178 // If so we should call SetSize here.
179 // TODO(ronghuawu): Add unit test for this case. This hasn't been done yet
180 // because the WebRtcRenderAdapter is currently hidden in this .cc file, so
181 // there is no good way to access it from the unit test.
182 if (width_ > 0 && height_ > 0 && renderer_ != NULL) {
183 if (!renderer_->SetSize(width_, height_, 0)) {
184 LOG(LS_ERROR)
185 << "WebRtcRenderAdapter (channel " << channel_id_
186 << ") SetRenderer failed to SetSize to: "
187 << width_ << "x" << height_;
188 }
189 }
190 }
191
192 // Implementation of webrtc::ExternalRenderer.
193 virtual int FrameSizeChange(unsigned int width, unsigned int height,
194 unsigned int /*number_of_streams*/) {
195 talk_base::CritScope cs(&crit_);
196 width_ = width;
197 height_ = height;
198 LOG(LS_INFO) << "WebRtcRenderAdapter (channel " << channel_id_
199 << ") frame size changed to: "
200 << width << "x" << height;
201 if (renderer_ == NULL) {
202 LOG(LS_VERBOSE) << "WebRtcRenderAdapter (channel " << channel_id_
203 << ") the renderer has not been set. "
204 << "SetSize will be called later in SetRenderer.";
205 return 0;
206 }
207 return renderer_->SetSize(width_, height_, 0) ? 0 : -1;
208 }
209
210 virtual int DeliverFrame(unsigned char* buffer,
211 int buffer_size,
212 uint32_t rtp_time_stamp,
213 #ifdef USE_WEBRTC_DEV_BRANCH
214 int64_t ntp_time_ms,
215 #endif
216 int64_t render_time,
217 void* handle) {
218 talk_base::CritScope cs(&crit_);
219 if (capture_start_rtp_time_stamp_ < 0) {
220 capture_start_rtp_time_stamp_ = rtp_time_stamp;
221 }
222
223 const int kVideoCodecClockratekHz = cricket::kVideoCodecClockrate / 1000;
224
225 int64 elapsed_time_ms =
226 (rtp_ts_wraparound_handler_.Unwrap(rtp_time_stamp) -
227 capture_start_rtp_time_stamp_) / kVideoCodecClockratekHz;
228 #ifdef USE_WEBRTC_DEV_BRANCH
229 if (ntp_time_ms > 0) {
230 capture_start_ntp_time_ms_ = ntp_time_ms - elapsed_time_ms;
231 }
232 #endif
233 frame_rate_tracker_.Update(1);
234 if (renderer_ == NULL) {
235 return 0;
236 }
237 // Convert elapsed_time_ms to ns timestamp.
238 int64 elapsed_time_ns =
239 elapsed_time_ms * talk_base::kNumNanosecsPerMillisec;
240 // Convert millisecond render time to ns timestamp.
241 int64 render_time_ns = render_time *
242 talk_base::kNumNanosecsPerMillisec;
243 // Note that here we send the |elapsed_time_ns| to renderer as the
244 // cricket::VideoFrame's elapsed_time_ and the |render_time_ns| as the
245 // cricket::VideoFrame's time_stamp_.
246 if (handle == NULL) {
247 return DeliverBufferFrame(buffer, buffer_size, render_time_ns,
248 elapsed_time_ns);
249 } else {
250 return DeliverTextureFrame(handle, render_time_ns,
251 elapsed_time_ns);
252 }
253 }
254
255 virtual bool IsTextureSupported() { return true; }
256
257 int DeliverBufferFrame(unsigned char* buffer, int buffer_size,
258 int64 time_stamp, int64 elapsed_time) {
259 WebRtcVideoFrame video_frame;
260 video_frame.Alias(buffer, buffer_size, width_, height_,
261 1, 1, elapsed_time, time_stamp, 0);
262
263 // Sanity check on decoded frame size.
264 if (buffer_size != static_cast<int>(VideoFrame::SizeOf(width_, height_))) {
265 LOG(LS_WARNING) << "WebRtcRenderAdapter (channel " << channel_id_
266 << ") received a strange frame size: "
267 << buffer_size;
268 }
269
270 int ret = renderer_->RenderFrame(&video_frame) ? 0 : -1;
271 return ret;
272 }
273
274 int DeliverTextureFrame(void* handle, int64 time_stamp, int64 elapsed_time) {
275 WebRtcTextureVideoFrame video_frame(
276 static_cast<webrtc::NativeHandle*>(handle), width_, height_,
277 elapsed_time, time_stamp);
278 return renderer_->RenderFrame(&video_frame);
279 }
280
281 unsigned int width() {
282 talk_base::CritScope cs(&crit_);
283 return width_;
284 }
285
286 unsigned int height() {
287 talk_base::CritScope cs(&crit_);
288 return height_;
289 }
290
291 int framerate() {
292 talk_base::CritScope cs(&crit_);
293 return static_cast<int>(frame_rate_tracker_.units_second());
294 }
295
296 VideoRenderer* renderer() {
297 talk_base::CritScope cs(&crit_);
298 return renderer_;
299 }
300
301 int64 capture_start_ntp_time_ms() {
302 talk_base::CritScope cs(&crit_);
303 return capture_start_ntp_time_ms_;
304 }
305
306 private:
307 talk_base::CriticalSection crit_;
308 VideoRenderer* renderer_;
309 int channel_id_;
310 unsigned int width_;
311 unsigned int height_;
312 talk_base::RateTracker frame_rate_tracker_;
313 talk_base::TimestampWrapAroundHandler rtp_ts_wraparound_handler_;
314 int64 capture_start_rtp_time_stamp_;
315 int64 capture_start_ntp_time_ms_;
316 };
317
318 class WebRtcDecoderObserver : public webrtc::ViEDecoderObserver {
319 public:
320 explicit WebRtcDecoderObserver(int video_channel)
321 : video_channel_(video_channel),
322 framerate_(0),
323 bitrate_(0),
324 decode_ms_(0),
325 max_decode_ms_(0),
326 current_delay_ms_(0),
327 target_delay_ms_(0),
328 jitter_buffer_ms_(0),
329 min_playout_delay_ms_(0),
330 render_delay_ms_(0) {
331 }
332
333 // virtual functions from VieDecoderObserver.
334 virtual void IncomingCodecChanged(const int videoChannel,
335 const webrtc::VideoCodec& videoCodec) {}
336 virtual void IncomingRate(const int videoChannel,
337 const unsigned int framerate,
338 const unsigned int bitrate) {
339 talk_base::CritScope cs(&crit_);
340 ASSERT(video_channel_ == videoChannel);
341 framerate_ = framerate;
342 bitrate_ = bitrate;
343 }
344
345 virtual void DecoderTiming(int decode_ms,
346 int max_decode_ms,
347 int current_delay_ms,
348 int target_delay_ms,
349 int jitter_buffer_ms,
350 int min_playout_delay_ms,
351 int render_delay_ms) {
352 talk_base::CritScope cs(&crit_);
353 decode_ms_ = decode_ms;
354 max_decode_ms_ = max_decode_ms;
355 current_delay_ms_ = current_delay_ms;
356 target_delay_ms_ = target_delay_ms;
357 jitter_buffer_ms_ = jitter_buffer_ms;
358 min_playout_delay_ms_ = min_playout_delay_ms;
359 render_delay_ms_ = render_delay_ms;
360 }
361
362 virtual void RequestNewKeyFrame(const int videoChannel) {}
363
364 // Populate |rinfo| based on previously-set data in |*this|.
365 void ExportTo(VideoReceiverInfo* rinfo) {
366 talk_base::CritScope cs(&crit_);
367 rinfo->framerate_rcvd = framerate_;
368 rinfo->decode_ms = decode_ms_;
369 rinfo->max_decode_ms = max_decode_ms_;
370 rinfo->current_delay_ms = current_delay_ms_;
371 rinfo->target_delay_ms = target_delay_ms_;
372 rinfo->jitter_buffer_ms = jitter_buffer_ms_;
373 rinfo->min_playout_delay_ms = min_playout_delay_ms_;
374 rinfo->render_delay_ms = render_delay_ms_;
375 }
376
377 private:
378 mutable talk_base::CriticalSection crit_;
379 int video_channel_;
380 int framerate_;
381 int bitrate_;
382 int decode_ms_;
383 int max_decode_ms_;
384 int current_delay_ms_;
385 int target_delay_ms_;
386 int jitter_buffer_ms_;
387 int min_playout_delay_ms_;
388 int render_delay_ms_;
389 };
390
391 class WebRtcEncoderObserver : public webrtc::ViEEncoderObserver {
392 public:
393 explicit WebRtcEncoderObserver(int video_channel)
394 : video_channel_(video_channel),
395 framerate_(0),
396 bitrate_(0),
397 suspended_(false) {
398 }
399
400 // virtual functions from VieEncoderObserver.
401 virtual void OutgoingRate(const int videoChannel,
402 const unsigned int framerate,
403 const unsigned int bitrate) {
404 talk_base::CritScope cs(&crit_);
405 ASSERT(video_channel_ == videoChannel);
406 framerate_ = framerate;
407 bitrate_ = bitrate;
408 }
409
410 virtual void SuspendChange(int video_channel, bool is_suspended) {
411 talk_base::CritScope cs(&crit_);
412 ASSERT(video_channel_ == video_channel);
413 suspended_ = is_suspended;
414 }
415
416 int framerate() const {
417 talk_base::CritScope cs(&crit_);
418 return framerate_;
419 }
420 int bitrate() const {
421 talk_base::CritScope cs(&crit_);
422 return bitrate_;
423 }
424 bool suspended() const {
425 talk_base::CritScope cs(&crit_);
426 return suspended_;
427 }
428
429 private:
430 mutable talk_base::CriticalSection crit_;
431 int video_channel_;
432 int framerate_;
433 int bitrate_;
434 bool suspended_;
435 };
436
437 class WebRtcLocalStreamInfo {
438 public:
439 WebRtcLocalStreamInfo()
440 : width_(0), height_(0), elapsed_time_(-1), time_stamp_(-1) {}
441 size_t width() const {
442 talk_base::CritScope cs(&crit_);
443 return width_;
444 }
445 size_t height() const {
446 talk_base::CritScope cs(&crit_);
447 return height_;
448 }
449 int64 elapsed_time() const {
450 talk_base::CritScope cs(&crit_);
451 return elapsed_time_;
452 }
453 int64 time_stamp() const {
454 talk_base::CritScope cs(&crit_);
455 return time_stamp_;
456 }
457 int framerate() {
458 talk_base::CritScope cs(&crit_);
459 return static_cast<int>(rate_tracker_.units_second());
460 }
461 void GetLastFrameInfo(
462 size_t* width, size_t* height, int64* elapsed_time) const {
463 talk_base::CritScope cs(&crit_);
464 *width = width_;
465 *height = height_;
466 *elapsed_time = elapsed_time_;
467 }
468
469 void UpdateFrame(const VideoFrame* frame) {
470 talk_base::CritScope cs(&crit_);
471
472 width_ = frame->GetWidth();
473 height_ = frame->GetHeight();
474 elapsed_time_ = frame->GetElapsedTime();
475 time_stamp_ = frame->GetTimeStamp();
476
477 rate_tracker_.Update(1);
478 }
479
480 private:
481 mutable talk_base::CriticalSection crit_;
482 size_t width_;
483 size_t height_;
484 int64 elapsed_time_;
485 int64 time_stamp_;
486 talk_base::RateTracker rate_tracker_;
487
488 DISALLOW_COPY_AND_ASSIGN(WebRtcLocalStreamInfo);
489 };
490
491 // WebRtcVideoChannelRecvInfo is a container class with members, such as the
492 // renderer and a decoder observer, that are used by receive channels.
493 // It must exist as long as the receive channel is connected to the renderer
494 // or the decoder observer in this class, and its methods should only be
495 // called from the worker thread.
496 class WebRtcVideoChannelRecvInfo {
497 public:
498 typedef std::map<int, webrtc::VideoDecoder*> DecoderMap; // key: payload type
499 explicit WebRtcVideoChannelRecvInfo(int channel_id)
500 : channel_id_(channel_id),
501 render_adapter_(NULL, channel_id),
502 decoder_observer_(channel_id) {
503 }
504 int channel_id() { return channel_id_; }
505 void SetRenderer(VideoRenderer* renderer) {
506 render_adapter_.SetRenderer(renderer);
507 }
508 WebRtcRenderAdapter* render_adapter() { return &render_adapter_; }
509 WebRtcDecoderObserver* decoder_observer() { return &decoder_observer_; }
510 void RegisterDecoder(int pl_type, webrtc::VideoDecoder* decoder) {
511 ASSERT(!IsDecoderRegistered(pl_type));
512 registered_decoders_[pl_type] = decoder;
513 }
514 bool IsDecoderRegistered(int pl_type) {
515 return registered_decoders_.count(pl_type) != 0;
516 }
517 const DecoderMap& registered_decoders() {
518 return registered_decoders_;
519 }
520 void ClearRegisteredDecoders() {
521 registered_decoders_.clear();
522 }
523
524 private:
525 int channel_id_; // Webrtc video channel number.
526 // Renderer for this channel.
527 WebRtcRenderAdapter render_adapter_;
528 WebRtcDecoderObserver decoder_observer_;
529 DecoderMap registered_decoders_;
530 };
531
532 class WebRtcOveruseObserver : public webrtc::CpuOveruseObserver {
533 public:
534 explicit WebRtcOveruseObserver(CoordinatedVideoAdapter* video_adapter)
535 : video_adapter_(video_adapter),
536 enabled_(false) {
537 }
538
539 // TODO(mflodman): Consider sending resolution as part of event, to let
540 // adapter know what resolution the request is based on. Helps eliminate stale
541 // data, race conditions.
542 virtual void OveruseDetected() OVERRIDE {
543 talk_base::CritScope cs(&crit_);
544 if (!enabled_) {
545 return;
546 }
547
548 video_adapter_->OnCpuResolutionRequest(CoordinatedVideoAdapter::DOWNGRADE);
549 }
550
551 virtual void NormalUsage() OVERRIDE {
552 talk_base::CritScope cs(&crit_);
553 if (!enabled_) {
554 return;
555 }
556
557 video_adapter_->OnCpuResolutionRequest(CoordinatedVideoAdapter::UPGRADE);
558 }
559
560 void Enable(bool enable) {
561 LOG(LS_INFO) << "WebRtcOveruseObserver enable: " << enable;
562 talk_base::CritScope cs(&crit_);
563 enabled_ = enable;
564 }
565
566 bool enabled() const { return enabled_; }
567
568 private:
569 CoordinatedVideoAdapter* video_adapter_;
570 bool enabled_;
571 talk_base::CriticalSection crit_;
572 };
573
574
575 class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> {
576 public:
577 typedef std::map<int, webrtc::VideoEncoder*> EncoderMap; // key: payload type
578 WebRtcVideoChannelSendInfo(int channel_id, int capture_id,
579 webrtc::ViEExternalCapture* external_capture,
580 talk_base::CpuMonitor* cpu_monitor)
581 : channel_id_(channel_id),
582 capture_id_(capture_id),
583 sending_(false),
584 muted_(false),
585 video_capturer_(NULL),
586 encoder_observer_(channel_id),
587 external_capture_(external_capture),
588 capturer_updated_(false),
589 interval_(0),
590 cpu_monitor_(cpu_monitor) {
591 }
592
593 int channel_id() const { return channel_id_; }
594 int capture_id() const { return capture_id_; }
595 void set_sending(bool sending) { sending_ = sending; }
596 bool sending() const { return sending_; }
597 void set_muted(bool on) {
598 // TODO(asapersson): add support.
599 // video_adapter_.SetBlackOutput(on);
600 muted_ = on;
601 }
602 bool muted() { return muted_; }
603
604 WebRtcEncoderObserver* encoder_observer() { return &encoder_observer_; }
605 webrtc::ViEExternalCapture* external_capture() { return external_capture_; }
606 const VideoFormat& video_format() const {
607 return video_format_;
608 }
609 void set_video_format(const VideoFormat& video_format) {
610 video_format_ = video_format;
611 if (video_format_ != cricket::VideoFormat()) {
612 interval_ = video_format_.interval;
613 }
614 CoordinatedVideoAdapter* adapter = video_adapter();
615 if (adapter) {
616 adapter->OnOutputFormatRequest(video_format_);
617 }
618 }
619 void set_interval(int64 interval) {
620 if (video_format() == cricket::VideoFormat()) {
621 interval_ = interval;
622 }
623 }
624 int64 interval() { return interval_; }
625
626 int CurrentAdaptReason() const {
627 const CoordinatedVideoAdapter* adapter = video_adapter();
628 if (!adapter) {
629 return CoordinatedVideoAdapter::ADAPTREASON_NONE;
630 }
631 return video_adapter()->adapt_reason();
632 }
633
634 StreamParams* stream_params() { return stream_params_.get(); }
635 void set_stream_params(const StreamParams& sp) {
636 stream_params_.reset(new StreamParams(sp));
637 }
638 void ClearStreamParams() { stream_params_.reset(); }
639 bool has_ssrc(uint32 local_ssrc) const {
640 return !stream_params_ ? false :
641 stream_params_->has_ssrc(local_ssrc);
642 }
643 WebRtcLocalStreamInfo* local_stream_info() {
644 return &local_stream_info_;
645 }
646 VideoCapturer* video_capturer() {
647 return video_capturer_;
648 }
649 void set_video_capturer(VideoCapturer* video_capturer,
650 ViEWrapper* vie_wrapper) {
651 if (video_capturer == video_capturer_) {
652 return;
653 }
654
655 CoordinatedVideoAdapter* old_video_adapter = video_adapter();
656 if (old_video_adapter) {
657 // Disconnect signals from old video adapter.
658 SignalCpuAdaptationUnable.disconnect(old_video_adapter);
659 if (cpu_monitor_) {
660 cpu_monitor_->SignalUpdate.disconnect(old_video_adapter);
661 }
662 }
663
664 capturer_updated_ = true;
665 video_capturer_ = video_capturer;
666
667 vie_wrapper->base()->RegisterCpuOveruseObserver(channel_id_, NULL);
668 if (!video_capturer) {
669 overuse_observer_.reset();
670 return;
671 }
672
673 CoordinatedVideoAdapter* adapter = video_adapter();
674 ASSERT(adapter && "Video adapter should not be null here.");
675
676 UpdateAdapterCpuOptions();
677
678 overuse_observer_.reset(new WebRtcOveruseObserver(adapter));
679 vie_wrapper->base()->RegisterCpuOveruseObserver(channel_id_,
680 overuse_observer_.get());
681 // (Dis)connect the video adapter from the cpu monitor as appropriate.
682 SetCpuOveruseDetection(
683 video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false));
684
685 SignalCpuAdaptationUnable.repeat(adapter->SignalCpuAdaptationUnable);
686 }
687
688 CoordinatedVideoAdapter* video_adapter() {
689 if (!video_capturer_) {
690 return NULL;
691 }
692 return video_capturer_->video_adapter();
693 }
694 const CoordinatedVideoAdapter* video_adapter() const {
695 if (!video_capturer_) {
696 return NULL;
697 }
698 return video_capturer_->video_adapter();
699 }
700
701 void ApplyCpuOptions(const VideoOptions& video_options) {
702 bool cpu_overuse_detection_changed =
703 video_options.cpu_overuse_detection.IsSet() &&
704 (video_options.cpu_overuse_detection.GetWithDefaultIfUnset(false) !=
705 video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false));
706 // Use video_options_.SetAll() instead of assignment so that unset value in
707 // video_options will not overwrite the previous option value.
708 video_options_.SetAll(video_options);
709 UpdateAdapterCpuOptions();
710 if (cpu_overuse_detection_changed) {
711 SetCpuOveruseDetection(
712 video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false));
713 }
714 }
715
716 void UpdateAdapterCpuOptions() {
717 if (!video_capturer_) {
718 return;
719 }
720
721 bool cpu_smoothing, adapt_third;
722 float low, med, high;
723 bool cpu_adapt =
724 video_options_.adapt_input_to_cpu_usage.GetWithDefaultIfUnset(false);
725 bool cpu_overuse_detection =
726 video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false);
727
728 // TODO(thorcarpenter): Have VideoAdapter be responsible for setting
729 // all these video options.
730 CoordinatedVideoAdapter* video_adapter = video_capturer_->video_adapter();
731 if (video_options_.adapt_input_to_cpu_usage.IsSet() ||
732 video_options_.cpu_overuse_detection.IsSet()) {
733 video_adapter->set_cpu_adaptation(cpu_adapt || cpu_overuse_detection);
734 }
735 if (video_options_.adapt_cpu_with_smoothing.Get(&cpu_smoothing)) {
736 video_adapter->set_cpu_smoothing(cpu_smoothing);
737 }
738 if (video_options_.process_adaptation_threshhold.Get(&med)) {
739 video_adapter->set_process_threshold(med);
740 }
741 if (video_options_.system_low_adaptation_threshhold.Get(&low)) {
742 video_adapter->set_low_system_threshold(low);
743 }
744 if (video_options_.system_high_adaptation_threshhold.Get(&high)) {
745 video_adapter->set_high_system_threshold(high);
746 }
747 if (video_options_.video_adapt_third.Get(&adapt_third)) {
748 video_adapter->set_scale_third(adapt_third);
749 }
750 }
751
752 void SetCpuOveruseDetection(bool enable) {
753 if (overuse_observer_) {
754 overuse_observer_->Enable(enable);
755 }
756
757 // The video adapter is signaled by overuse detection if enabled; otherwise
758 // it will be signaled by cpu monitor.
759 CoordinatedVideoAdapter* adapter = video_adapter();
760 if (adapter) {
761 if (cpu_monitor_) {
762 if (enable) {
763 cpu_monitor_->SignalUpdate.disconnect(adapter);
764 } else {
765 cpu_monitor_->SignalUpdate.connect(
766 adapter, &CoordinatedVideoAdapter::OnCpuLoadUpdated);
767 }
768 }
769 }
770 }
771
772 void ProcessFrame(const VideoFrame& original_frame, bool mute,
773 VideoFrame** processed_frame) {
774 if (!mute) {
775 *processed_frame = original_frame.Copy();
776 } else {
777 WebRtcVideoFrame* black_frame = new WebRtcVideoFrame();
778 black_frame->InitToBlack(static_cast<int>(original_frame.GetWidth()),
779 static_cast<int>(original_frame.GetHeight()),
780 1, 1,
781 original_frame.GetElapsedTime(),
782 original_frame.GetTimeStamp());
783 *processed_frame = black_frame;
784 }
785 local_stream_info_.UpdateFrame(*processed_frame);
786 }
787 void RegisterEncoder(int pl_type, webrtc::VideoEncoder* encoder) {
788 ASSERT(!IsEncoderRegistered(pl_type));
789 registered_encoders_[pl_type] = encoder;
790 }
791 bool IsEncoderRegistered(int pl_type) {
792 return registered_encoders_.count(pl_type) != 0;
793 }
794 const EncoderMap& registered_encoders() {
795 return registered_encoders_;
796 }
797 void ClearRegisteredEncoders() {
798 registered_encoders_.clear();
799 }
800
801 sigslot::repeater0<> SignalCpuAdaptationUnable;
802
803 private:
804 int channel_id_;
805 int capture_id_;
806 bool sending_;
807 bool muted_;
808 VideoCapturer* video_capturer_;
809 WebRtcEncoderObserver encoder_observer_;
810 webrtc::ViEExternalCapture* external_capture_;
811 EncoderMap registered_encoders_;
812
813 VideoFormat video_format_;
814
815 talk_base::scoped_ptr<StreamParams> stream_params_;
816
817 WebRtcLocalStreamInfo local_stream_info_;
818
819 bool capturer_updated_;
820
821 int64 interval_;
822
823 talk_base::CpuMonitor* cpu_monitor_;
824 talk_base::scoped_ptr<WebRtcOveruseObserver> overuse_observer_;
825
826 VideoOptions video_options_;
827 };
828
829 const WebRtcVideoEngine::VideoCodecPref
830 WebRtcVideoEngine::kVideoCodecPrefs[] = {
831 {kVp8PayloadName, 100, -1, 0},
832 {kRedPayloadName, 116, -1, 1},
833 {kFecPayloadName, 117, -1, 2},
834 {kRtxCodecName, 96, 100, 3},
835 };
836
837 // The formats are sorted in descending order of width. We use this order to
838 // find the next format for CPU and bandwidth adaptation.
839 const VideoFormatPod WebRtcVideoEngine::kVideoFormats[] = {
840 {1280, 800, FPS_TO_INTERVAL(30), FOURCC_ANY},
841 {1280, 720, FPS_TO_INTERVAL(30), FOURCC_ANY},
842 {960, 600, FPS_TO_INTERVAL(30), FOURCC_ANY},
843 {960, 540, FPS_TO_INTERVAL(30), FOURCC_ANY},
844 {640, 400, FPS_TO_INTERVAL(30), FOURCC_ANY},
845 {640, 360, FPS_TO_INTERVAL(30), FOURCC_ANY},
846 {640, 480, FPS_TO_INTERVAL(30), FOURCC_ANY},
847 {480, 300, FPS_TO_INTERVAL(30), FOURCC_ANY},
848 {480, 270, FPS_TO_INTERVAL(30), FOURCC_ANY},
849 {480, 360, FPS_TO_INTERVAL(30), FOURCC_ANY},
850 {320, 200, FPS_TO_INTERVAL(30), FOURCC_ANY},
851 {320, 180, FPS_TO_INTERVAL(30), FOURCC_ANY},
852 {320, 240, FPS_TO_INTERVAL(30), FOURCC_ANY},
853 {240, 150, FPS_TO_INTERVAL(30), FOURCC_ANY},
854 {240, 135, FPS_TO_INTERVAL(30), FOURCC_ANY},
855 {240, 180, FPS_TO_INTERVAL(30), FOURCC_ANY},
856 {160, 100, FPS_TO_INTERVAL(30), FOURCC_ANY},
857 {160, 90, FPS_TO_INTERVAL(30), FOURCC_ANY},
858 {160, 120, FPS_TO_INTERVAL(30), FOURCC_ANY},
859 };
860
861 const VideoFormatPod WebRtcVideoEngine::kDefaultVideoFormat =
862 {640, 400, FPS_TO_INTERVAL(30), FOURCC_ANY};
863
864 static void UpdateVideoCodec(const cricket::VideoFormat& video_format,
865 webrtc::VideoCodec* target_codec) {
866 if ((target_codec == NULL) || (video_format == cricket::VideoFormat())) {
867 return;
868 }
869 target_codec->width = video_format.width;
870 target_codec->height = video_format.height;
871 target_codec->maxFramerate = cricket::VideoFormat::IntervalToFps(
872 video_format.interval);
873 }
874
875 static bool GetCpuOveruseOptions(const VideoOptions& options,
876 webrtc::CpuOveruseOptions* overuse_options) {
877 int underuse_threshold = 0;
878 int overuse_threshold = 0;
879 if (!options.cpu_underuse_threshold.Get(&underuse_threshold) ||
880 !options.cpu_overuse_threshold.Get(&overuse_threshold)) {
881 return false;
882 }
883 if (underuse_threshold <= 0 || overuse_threshold <= 0) {
884 return false;
885 }
886 // Valid thresholds.
887 bool encode_usage =
888 options.cpu_overuse_encode_usage.GetWithDefaultIfUnset(false);
889 overuse_options->enable_capture_jitter_method = !encode_usage;
890 overuse_options->enable_encode_usage_method = encode_usage;
891 if (encode_usage) {
892 // Use method based on encode usage.
893 overuse_options->low_encode_usage_threshold_percent = underuse_threshold;
894 overuse_options->high_encode_usage_threshold_percent = overuse_threshold;
895 #ifdef USE_WEBRTC_DEV_BRANCH
896 // Set optional thresholds, if configured.
897 int underuse_rsd_threshold = 0;
898 if (options.cpu_underuse_encode_rsd_threshold.Get(
899 &underuse_rsd_threshold)) {
900 overuse_options->low_encode_time_rsd_threshold = underuse_rsd_threshold;
901 }
902 int overuse_rsd_threshold = 0;
903 if (options.cpu_overuse_encode_rsd_threshold.Get(&overuse_rsd_threshold)) {
904 overuse_options->high_encode_time_rsd_threshold = overuse_rsd_threshold;
905 }
906 #endif
907 } else {
908 // Use default method based on capture jitter.
909 overuse_options->low_capture_jitter_threshold_ms =
910 static_cast<float>(underuse_threshold);
911 overuse_options->high_capture_jitter_threshold_ms =
912 static_cast<float>(overuse_threshold);
913 }
914 return true;
915 }
916
917 WebRtcVideoEngine::WebRtcVideoEngine() {
918 Construct(new ViEWrapper(), new ViETraceWrapper(), NULL,
919 new talk_base::CpuMonitor(NULL));
920 }
921
922 WebRtcVideoEngine::WebRtcVideoEngine(WebRtcVoiceEngine* voice_engine,
923 ViEWrapper* vie_wrapper,
924 talk_base::CpuMonitor* cpu_monitor) {
925 Construct(vie_wrapper, new ViETraceWrapper(), voice_engine, cpu_monitor);
926 }
927
928 WebRtcVideoEngine::WebRtcVideoEngine(WebRtcVoiceEngine* voice_engine,
929 ViEWrapper* vie_wrapper,
930 ViETraceWrapper* tracing,
931 talk_base::CpuMonitor* cpu_monitor) {
932 Construct(vie_wrapper, tracing, voice_engine, cpu_monitor);
933 }
934
935 void WebRtcVideoEngine::Construct(ViEWrapper* vie_wrapper,
936 ViETraceWrapper* tracing,
937 WebRtcVoiceEngine* voice_engine,
938 talk_base::CpuMonitor* cpu_monitor) {
939 LOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine";
940 worker_thread_ = NULL;
941 vie_wrapper_.reset(vie_wrapper);
942 vie_wrapper_base_initialized_ = false;
943 tracing_.reset(tracing);
944 voice_engine_ = voice_engine;
945 initialized_ = false;
946 SetTraceFilter(SeverityToFilter(kDefaultLogSeverity));
947 render_module_.reset(new WebRtcPassthroughRender());
948 local_renderer_w_ = local_renderer_h_ = 0;
949 local_renderer_ = NULL;
950 capture_started_ = false;
951 decoder_factory_ = NULL;
952 encoder_factory_ = NULL;
953 cpu_monitor_.reset(cpu_monitor);
954
955 SetTraceOptions("");
956 if (tracing_->SetTraceCallback(this) != 0) {
957 LOG_RTCERR1(SetTraceCallback, this);
958 }
959
960 // Set default quality levels for our supported codecs. We override them here
961 // if we know the cpu performance is low, and they can be updated explicitly
962 // by calling SetDefaultCodec. For example, by a flute preference setting, or
963 // by the server with a jec in response to our reported system info.
964 VideoCodec max_codec(kVideoCodecPrefs[0].payload_type,
965 kVideoCodecPrefs[0].name,
966 kDefaultVideoFormat.width,
967 kDefaultVideoFormat.height,
968 VideoFormat::IntervalToFps(kDefaultVideoFormat.interval),
969 0);
970 if (!SetDefaultCodec(max_codec)) {
971 LOG(LS_ERROR) << "Failed to initialize list of supported codec types";
972 }
973
974
975 // Load our RTP Header extensions.
976 rtp_header_extensions_.push_back(
977 RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension,
978 kRtpTimestampOffsetHeaderExtensionDefaultId));
979 rtp_header_extensions_.push_back(
980 RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
981 kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
982 }
983
984 WebRtcVideoEngine::~WebRtcVideoEngine() {
985 LOG(LS_INFO) << "WebRtcVideoEngine::~WebRtcVideoEngine";
986 if (initialized_) {
987 Terminate();
988 }
989 if (encoder_factory_) {
990 encoder_factory_->RemoveObserver(this);
991 }
992 tracing_->SetTraceCallback(NULL);
993 // Test to see if the media processor was deregistered properly.
994 ASSERT(SignalMediaFrame.is_empty());
995 }
996
997 bool WebRtcVideoEngine::Init(talk_base::Thread* worker_thread) {
998 LOG(LS_INFO) << "WebRtcVideoEngine::Init";
999 worker_thread_ = worker_thread;
1000 ASSERT(worker_thread_ != NULL);
1001
1002 cpu_monitor_->set_thread(worker_thread_);
1003 if (!cpu_monitor_->Start(kCpuMonitorPeriodMs)) {
1004 LOG(LS_ERROR) << "Failed to start CPU monitor.";
1005 cpu_monitor_.reset();
1006 }
1007
1008 bool result = InitVideoEngine();
1009 if (result) {
1010 LOG(LS_INFO) << "VideoEngine Init done";
1011 } else {
1012 LOG(LS_ERROR) << "VideoEngine Init failed, releasing";
1013 Terminate();
1014 }
1015 return result;
1016 }
1017
1018 bool WebRtcVideoEngine::InitVideoEngine() {
1019 LOG(LS_INFO) << "WebRtcVideoEngine::InitVideoEngine";
1020
1021 // Init WebRTC VideoEngine.
1022 if (!vie_wrapper_base_initialized_) {
1023 if (vie_wrapper_->base()->Init() != 0) {
1024 LOG_RTCERR0(Init);
1025 return false;
1026 }
1027 vie_wrapper_base_initialized_ = true;
1028 }
1029
1030 // Log the VideoEngine version info.
1031 char buffer[1024] = "";
1032 if (vie_wrapper_->base()->GetVersion(buffer) != 0) {
1033 LOG_RTCERR0(GetVersion);
1034 return false;
1035 }
1036
1037 LOG(LS_INFO) << "WebRtc VideoEngine Version:";
1038 LogMultiline(talk_base::LS_INFO, buffer);
1039
1040 // Hook up to VoiceEngine for sync purposes, if supplied.
1041 if (!voice_engine_) {
1042 LOG(LS_WARNING) << "NULL voice engine";
1043 } else if ((vie_wrapper_->base()->SetVoiceEngine(
1044 voice_engine_->voe()->engine())) != 0) {
1045 LOG_RTCERR0(SetVoiceEngine);
1046 return false;
1047 }
1048
1049 // Register our custom render module.
1050 if (vie_wrapper_->render()->RegisterVideoRenderModule(
1051 *render_module_.get()) != 0) {
1052 LOG_RTCERR0(RegisterVideoRenderModule);
1053 return false;
1054 }
1055
1056 initialized_ = true;
1057 return true;
1058 }
1059
1060 void WebRtcVideoEngine::Terminate() {
1061 LOG(LS_INFO) << "WebRtcVideoEngine::Terminate";
1062 initialized_ = false;
1063
1064 if (vie_wrapper_->render()->DeRegisterVideoRenderModule(
1065 *render_module_.get()) != 0) {
1066 LOG_RTCERR0(DeRegisterVideoRenderModule);
1067 }
1068
1069 if (vie_wrapper_->base()->SetVoiceEngine(NULL) != 0) {
1070 LOG_RTCERR0(SetVoiceEngine);
1071 }
1072
1073 cpu_monitor_->Stop();
1074 }
1075
1076 int WebRtcVideoEngine::GetCapabilities() {
1077 return VIDEO_RECV | VIDEO_SEND;
1078 }
1079
1080 bool WebRtcVideoEngine::SetOptions(const VideoOptions &options) {
1081 return true;
1082 }
1083
1084 bool WebRtcVideoEngine::SetDefaultEncoderConfig(
1085 const VideoEncoderConfig& config) {
1086 return SetDefaultCodec(config.max_codec);
1087 }
1088
1089 VideoEncoderConfig WebRtcVideoEngine::GetDefaultEncoderConfig() const {
1090 ASSERT(!video_codecs_.empty());
1091 VideoCodec max_codec(kVideoCodecPrefs[0].payload_type,
1092 kVideoCodecPrefs[0].name,
1093 video_codecs_[0].width,
1094 video_codecs_[0].height,
1095 video_codecs_[0].framerate,
1096 0);
1097 return VideoEncoderConfig(max_codec);
1098 }
1099
1100 // SetDefaultCodec may be called while the capturer is running. For example, a
1101 // test call is started in a page with QVGA default codec, and then a real call
1102 // is started in another page with VGA default codec. This is a corner case
1103 // and happens only when a session is started. We ignore this case currently.
1104 bool WebRtcVideoEngine::SetDefaultCodec(const VideoCodec& codec) {
1105 if (!RebuildCodecList(codec)) {
1106 LOG(LS_WARNING) << "Failed to RebuildCodecList";
1107 return false;
1108 }
1109
1110 ASSERT(!video_codecs_.empty());
1111 default_codec_format_ = VideoFormat(
1112 video_codecs_[0].width,
1113 video_codecs_[0].height,
1114 VideoFormat::FpsToInterval(video_codecs_[0].framerate),
1115 FOURCC_ANY);
1116 return true;
1117 }
1118
1119 WebRtcVideoMediaChannel* WebRtcVideoEngine::CreateChannel(
1120 VoiceMediaChannel* voice_channel) {
1121 WebRtcVideoMediaChannel* channel =
1122 new WebRtcVideoMediaChannel(this, voice_channel);
1123 if (!channel->Init()) {
1124 delete channel;
1125 channel = NULL;
1126 }
1127 return channel;
1128 }
1129
1130 bool WebRtcVideoEngine::SetLocalRenderer(VideoRenderer* renderer) {
1131 local_renderer_w_ = local_renderer_h_ = 0;
1132 local_renderer_ = renderer;
1133 return true;
1134 }
1135
1136 const std::vector<VideoCodec>& WebRtcVideoEngine::codecs() const {
1137 return video_codecs_;
1138 }
1139
1140 const std::vector<RtpHeaderExtension>&
1141 WebRtcVideoEngine::rtp_header_extensions() const {
1142 return rtp_header_extensions_;
1143 }
1144
1145 void WebRtcVideoEngine::SetLogging(int min_sev, const char* filter) {
1146 // if min_sev == -1, we keep the current log level.
1147 if (min_sev >= 0) {
1148 SetTraceFilter(SeverityToFilter(min_sev));
1149 }
1150 SetTraceOptions(filter);
1151 }
1152
1153 int WebRtcVideoEngine::GetLastEngineError() {
1154 return vie_wrapper_->error();
1155 }
1156
1157 // Checks to see whether we comprehend and could receive a particular codec.
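// For example, FindCodec(VideoCodec(100, "VP8", 640, 400, 30, 0)) should
// return true: 640x400 is one of kVideoFormats and VP8 matches an entry in
// kVideoCodecPrefs.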
1158 bool WebRtcVideoEngine::FindCodec(const VideoCodec& in) {
1159 for (int i = 0; i < ARRAY_SIZE(kVideoFormats); ++i) {
1160 const VideoFormat fmt(kVideoFormats[i]);
1161 if ((in.width == 0 && in.height == 0) ||
1162 (fmt.width == in.width && fmt.height == in.height)) {
1163 if (encoder_factory_) {
1164 const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
1165 encoder_factory_->codecs();
1166 for (size_t j = 0; j < codecs.size(); ++j) {
1167 VideoCodec codec(GetExternalVideoPayloadType(static_cast<int>(j)),
1168 codecs[j].name, 0, 0, 0, 0);
1169 if (codec.Matches(in))
1170 return true;
1171 }
1172 }
1173 for (size_t j = 0; j < ARRAY_SIZE(kVideoCodecPrefs); ++j) {
1174 VideoCodec codec(kVideoCodecPrefs[j].payload_type,
1175 kVideoCodecPrefs[j].name, 0, 0, 0, 0);
1176 if (codec.Matches(in)) {
1177 return true;
1178 }
1179 }
1180 }
1181 }
1182 return false;
1183 }
1184
1185 // Given the requested codec, returns true if we can send that codec type and
1186 // updates out with the best quality we could send for that codec. If current is
1187 // not empty, we constrain out so that its aspect ratio matches current's.
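// For example, if |requested| is VP8 at 1280x720 and |current| is 640x360,
// |out| is constrained to a 16:9 entry of kVideoFormats that is no larger
// than 1280x720 (and no wider than our local maximum).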
1188 bool WebRtcVideoEngine::CanSendCodec(const VideoCodec& requested,
1189 const VideoCodec& current,
1190 VideoCodec* out) {
1191 if (!out) {
1192 return false;
1193 }
1194
1195 std::vector<VideoCodec>::const_iterator local_max;
1196 for (local_max = video_codecs_.begin();
1197 local_max < video_codecs_.end();
1198 ++local_max) {
1199 // First match codecs by payload type
1200 if (!requested.Matches(*local_max)) {
1201 continue;
1202 }
1203
1204 out->id = requested.id;
1205 out->name = requested.name;
1206 out->preference = requested.preference;
1207 out->params = requested.params;
1208 out->framerate = talk_base::_min(requested.framerate, local_max->framerate);
1209 out->width = 0;
1210 out->height = 0;
1211 out->params = requested.params;
1212 out->feedback_params = requested.feedback_params;
1213
1214 if (0 == requested.width && 0 == requested.height) {
1215 // Special case with resolution 0. The channel should not send frames.
1216 return true;
1217 } else if (0 == requested.width || 0 == requested.height) {
1218 // 0xn and nx0 are invalid resolutions.
1219 return false;
1220 }
1221
1222 // Pick the best quality that is within their and our bounds and has the
1223 // correct aspect ratio.
1224 for (int j = 0; j < ARRAY_SIZE(kVideoFormats); ++j) {
1225 const VideoFormat format(kVideoFormats[j]);
1226
1227 // Skip any format that is larger than the local or remote maximums, or
1228 // smaller than the current best match
1229 if (format.width > requested.width || format.height > requested.height ||
1230 format.width > local_max->width ||
1231 (format.width < out->width && format.height < out->height)) {
1232 continue;
1233 }
1234
1235 bool better = false;
1236
1237 // Check any further constraints on this prospective format
1238 if (!out->width || !out->height) {
1239 // If we don't have any matches yet, this is the best so far.
1240 better = true;
1241 } else if (current.width && current.height) {
1242 // current is set so format must match its ratio exactly.
1243 better =
1244 (format.width * current.height == format.height * current.width);
1245 } else {
1246 // Prefer closer aspect ratios i.e
1247 // format.aspect - requested.aspect < out.aspect - requested.aspect
1248 better = abs(format.width * requested.height * out->height -
1249 requested.width * format.height * out->height) <
1250 abs(out->width * format.height * requested.height -
1251 requested.width * format.height * out->height);
1252 }
1253
1254 if (better) {
1255 out->width = format.width;
1256 out->height = format.height;
1257 }
1258 }
1259 if (out->width > 0) {
1260 return true;
1261 }
1262 }
1263 return false;
1264 }
1265
1266 static void ConvertToCricketVideoCodec(
1267 const webrtc::VideoCodec& in_codec, VideoCodec* out_codec) {
1268 out_codec->id = in_codec.plType;
1269 out_codec->name = in_codec.plName;
1270 out_codec->width = in_codec.width;
1271 out_codec->height = in_codec.height;
1272 out_codec->framerate = in_codec.maxFramerate;
1273 if (BitrateIsSet(in_codec.minBitrate)) {
1274 out_codec->SetParam(kCodecParamMinBitrate, in_codec.minBitrate);
1275 }
1276 if (BitrateIsSet(in_codec.maxBitrate)) {
1277 out_codec->SetParam(kCodecParamMaxBitrate, in_codec.maxBitrate);
1278 }
1279 if (BitrateIsSet(in_codec.startBitrate)) {
1280 out_codec->SetParam(kCodecParamStartBitrate, in_codec.startBitrate);
1281 }
1282 if (in_codec.qpMax) {
1283 out_codec->SetParam(kCodecParamMaxQuantization, in_codec.qpMax);
1284 }
1285 }
1286
1287 bool WebRtcVideoEngine::ConvertFromCricketVideoCodec(
1288 const VideoCodec& in_codec, webrtc::VideoCodec* out_codec) {
1289 bool found = false;
1290 int ncodecs = vie_wrapper_->codec()->NumberOfCodecs();
1291 for (int i = 0; i < ncodecs; ++i) {
1292 if (vie_wrapper_->codec()->GetCodec(i, *out_codec) == 0 &&
1293 _stricmp(in_codec.name.c_str(), out_codec->plName) == 0) {
1294 found = true;
1295 break;
1296 }
1297 }
1298
1299 // If not found, check if this is supported by external encoder factory.
1300 if (!found && encoder_factory_) {
1301 const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
1302 encoder_factory_->codecs();
1303 for (size_t i = 0; i < codecs.size(); ++i) {
1304 if (_stricmp(in_codec.name.c_str(), codecs[i].name.c_str()) == 0) {
1305 out_codec->codecType = codecs[i].type;
1306 out_codec->plType = GetExternalVideoPayloadType(static_cast<int>(i));
1307 talk_base::strcpyn(out_codec->plName, sizeof(out_codec->plName),
1308 codecs[i].name.c_str(), codecs[i].name.length());
1309 found = true;
1310 break;
1311 }
1312 }
1313 }
1314
1315 // Is this an RTX codec? Handled separately here since webrtc doesn't handle
1316 // them as webrtc::VideoCodec internally.
1317 if (!found && _stricmp(in_codec.name.c_str(), kRtxCodecName) == 0) {
1318 talk_base::strcpyn(out_codec->plName, sizeof(out_codec->plName),
1319 in_codec.name.c_str(), in_codec.name.length());
1320 out_codec->plType = in_codec.id;
1321 found = true;
1322 }
1323
1324 if (!found) {
1325 LOG(LS_ERROR) << "invalid codec type";
1326 return false;
1327 }
1328
1329 if (in_codec.id != 0)
1330 out_codec->plType = in_codec.id;
1331
1332 if (in_codec.width != 0)
1333 out_codec->width = in_codec.width;
1334
1335 if (in_codec.height != 0)
1336 out_codec->height = in_codec.height;
1337
1338 if (in_codec.framerate != 0)
1339 out_codec->maxFramerate = in_codec.framerate;
1340
1341 // Convert bitrate parameters.
1342 int max_bitrate = -1;
1343 int min_bitrate = -1;
1344 int start_bitrate = -1;
1345
1346 in_codec.GetParam(kCodecParamMinBitrate, &min_bitrate);
1347 in_codec.GetParam(kCodecParamMaxBitrate, &max_bitrate);
1348 in_codec.GetParam(kCodecParamStartBitrate, &start_bitrate);
1349
1350
1351 out_codec->minBitrate = min_bitrate;
1352 out_codec->startBitrate = start_bitrate;
1353 out_codec->maxBitrate = max_bitrate;
1354
1355 // Convert general codec parameters.
1356 int max_quantization = 0;
1357 if (in_codec.GetParam(kCodecParamMaxQuantization, &max_quantization)) {
1358 if (max_quantization < 0) {
1359 return false;
1360 }
1361 out_codec->qpMax = max_quantization;
1362 }
1363 return true;
1364 }
1365
1366 void WebRtcVideoEngine::RegisterChannel(WebRtcVideoMediaChannel *channel) {
1367 talk_base::CritScope cs(&channels_crit_);
1368 channels_.push_back(channel);
1369 }
1370
1371 void WebRtcVideoEngine::UnregisterChannel(WebRtcVideoMediaChannel *channel) {
1372 talk_base::CritScope cs(&channels_crit_);
1373 channels_.erase(std::remove(channels_.begin(), channels_.end(), channel),
1374 channels_.end());
1375 }
1376
1377 bool WebRtcVideoEngine::SetVoiceEngine(WebRtcVoiceEngine* voice_engine) {
1378 if (initialized_) {
1379 LOG(LS_WARNING) << "SetVoiceEngine can not be called after Init";
1380 return false;
1381 }
1382 voice_engine_ = voice_engine;
1383 return true;
1384 }
1385
1386 bool WebRtcVideoEngine::EnableTimedRender() {
1387 if (initialized_) {
1388 LOG(LS_WARNING) << "EnableTimedRender can not be called after Init";
1389 return false;
1390 }
1391 render_module_.reset(webrtc::VideoRender::CreateVideoRender(0, NULL,
1392 false, webrtc::kRenderExternal));
1393 return true;
1394 }
1395
1396 void WebRtcVideoEngine::SetTraceFilter(int filter) {
1397 tracing_->SetTraceFilter(filter);
1398 }
1399
1400 // See https://sites.google.com/a/google.com/wavelet/
1401 // Home/Magic-Flute--RTC-Engine-/Magic-Flute-Command-Line-Parameters
1402 // for all supported command line settings.
1403 void WebRtcVideoEngine::SetTraceOptions(const std::string& options) {
1404 // Set WebRTC trace file.
1405 std::vector<std::string> opts;
1406 talk_base::tokenize(options, ' ', '"', '"', &opts);
1407 std::vector<std::string>::iterator tracefile =
1408 std::find(opts.begin(), opts.end(), "tracefile");
1409 if (tracefile != opts.end() && ++tracefile != opts.end()) {
1410 // Write WebRTC debug output (at same loglevel) to file
1411 if (tracing_->SetTraceFile(tracefile->c_str()) == -1) {
1412 LOG_RTCERR1(SetTraceFile, *tracefile);
1413 }
1414 }
1415 }
1416
1417 static void AddDefaultFeedbackParams(VideoCodec* codec) {
1418 const FeedbackParam kFir(kRtcpFbParamCcm, kRtcpFbCcmParamFir);
1419 codec->AddFeedbackParam(kFir);
1420 const FeedbackParam kNack(kRtcpFbParamNack, kParamValueEmpty);
1421 codec->AddFeedbackParam(kNack);
1422 const FeedbackParam kPli(kRtcpFbParamNack, kRtcpFbNackParamPli);
1423 codec->AddFeedbackParam(kPli);
1424 const FeedbackParam kRemb(kRtcpFbParamRemb, kParamValueEmpty);
1425 codec->AddFeedbackParam(kRemb);
1426 }
1427
1428 // Rebuilds the codec list to be only those that are less intensive
1429 // than the specified codec. Internal codecs are preferred over external ones
1430 // by giving them a higher preference field.
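// For example, rebuilding around the internal VP8 codec keeps VP8, red, ulpfec
// and rtx from kVideoCodecPrefs, plus any codecs from the external encoder
// factory whose names do not collide with the internal ones.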
1431 bool WebRtcVideoEngine::RebuildCodecList(const VideoCodec& in_codec) {
1432 if (!FindCodec(in_codec))
1433 return false;
1434
1435 video_codecs_.clear();
1436
1437 bool found = false;
1438 std::set<std::string> internal_codec_names;
1439 for (size_t i = 0; i < ARRAY_SIZE(kVideoCodecPrefs); ++i) {
1440 const VideoCodecPref& pref(kVideoCodecPrefs[i]);
1441 if (!found)
1442 found = (in_codec.name == pref.name);
1443 if (found) {
1444 VideoCodec codec(pref.payload_type, pref.name,
1445 in_codec.width, in_codec.height, in_codec.framerate,
1446 static_cast<int>(ARRAY_SIZE(kVideoCodecPrefs) - i));
1447 if (_stricmp(kVp8PayloadName, codec.name.c_str()) == 0) {
1448 AddDefaultFeedbackParams(&codec);
1449 }
1450 if (pref.associated_payload_type != -1) {
1451 codec.SetParam(kCodecParamAssociatedPayloadType,
1452 pref.associated_payload_type);
1453 }
1454 video_codecs_.push_back(codec);
1455 internal_codec_names.insert(codec.name);
1456 }
1457 }
1458 if (encoder_factory_) {
1459 const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
1460 encoder_factory_->codecs();
1461 for (size_t i = 0; i < codecs.size(); ++i) {
1462 bool is_internal_codec = internal_codec_names.find(codecs[i].name) !=
1463 internal_codec_names.end();
1464 if (!is_internal_codec) {
1465 if (!found)
1466 found = (in_codec.name == codecs[i].name);
1467 VideoCodec codec(
1468 GetExternalVideoPayloadType(static_cast<int>(i)),
1469 codecs[i].name,
1470 codecs[i].max_width,
1471 codecs[i].max_height,
1472 codecs[i].max_fps,
1473 // Use negative preference on external codec to ensure the internal
1474 // codec is preferred.
1475 static_cast<int>(0 - i));
1476 AddDefaultFeedbackParams(&codec);
1477 video_codecs_.push_back(codec);
1478 }
1479 }
1480 }
1481 ASSERT(found);
1482 return true;
1483 }
1484
1485 // Ignore spammy trace messages, mostly from the stats API when we haven't
1486 // gotten RTCP info yet from the remote side.
1487 bool WebRtcVideoEngine::ShouldIgnoreTrace(const std::string& trace) {
1488 static const char* const kTracesToIgnore[] = {
1489 NULL
1490 };
1491 for (const char* const* p = kTracesToIgnore; *p; ++p) {
1492 if (trace.find(*p) == 0) {
1493 return true;
1494 }
1495 }
1496 return false;
1497 }
1498
1499 int WebRtcVideoEngine::GetNumOfChannels() {
1500 talk_base::CritScope cs(&channels_crit_);
1501 return static_cast<int>(channels_.size());
1502 }
1503
1504 void WebRtcVideoEngine::Print(webrtc::TraceLevel level, const char* trace,
1505 int length) {
1506 talk_base::LoggingSeverity sev = talk_base::LS_VERBOSE;
1507 if (level == webrtc::kTraceError || level == webrtc::kTraceCritical)
1508 sev = talk_base::LS_ERROR;
1509 else if (level == webrtc::kTraceWarning)
1510 sev = talk_base::LS_WARNING;
1511 else if (level == webrtc::kTraceStateInfo || level == webrtc::kTraceInfo)
1512 sev = talk_base::LS_INFO;
1513 else if (level == webrtc::kTraceTerseInfo)
1514 sev = talk_base::LS_INFO;
1515
1516 // Skip past boilerplate prefix text
1517 if (length < 72) {
1518 std::string msg(trace, length);
1519 LOG(LS_ERROR) << "Malformed webrtc log message: ";
1520 LOG_V(sev) << msg;
1521 } else {
1522 std::string msg(trace + 71, length - 72);
1523 if (!ShouldIgnoreTrace(msg) &&
1524 (!voice_engine_ || !voice_engine_->ShouldIgnoreTrace(msg))) {
1525 LOG_V(sev) << "webrtc: " << msg;
1526 }
1527 }
1528 }
1529
1530 webrtc::VideoDecoder* WebRtcVideoEngine::CreateExternalDecoder(
1531 webrtc::VideoCodecType type) {
1532 if (decoder_factory_ == NULL) {
1533 return NULL;
1534 }
1535 return decoder_factory_->CreateVideoDecoder(type);
1536 }
1537
1538 void WebRtcVideoEngine::DestroyExternalDecoder(webrtc::VideoDecoder* decoder) {
1539 ASSERT(decoder_factory_ != NULL);
1540 if (decoder_factory_ == NULL)
1541 return;
1542 decoder_factory_->DestroyVideoDecoder(decoder);
1543 }
1544
1545 webrtc::VideoEncoder* WebRtcVideoEngine::CreateExternalEncoder(
1546 webrtc::VideoCodecType type) {
1547 if (encoder_factory_ == NULL) {
1548 return NULL;
1549 }
1550 return encoder_factory_->CreateVideoEncoder(type);
1551 }
1552
1553 void WebRtcVideoEngine::DestroyExternalEncoder(webrtc::VideoEncoder* encoder) {
1554 ASSERT(encoder_factory_ != NULL);
1555 if (encoder_factory_ == NULL)
1556 return;
1557 encoder_factory_->DestroyVideoEncoder(encoder);
1558 }
1559
1560 bool WebRtcVideoEngine::IsExternalEncoderCodecType(
1561 webrtc::VideoCodecType type) const {
1562 if (!encoder_factory_)
1563 return false;
1564 const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
1565 encoder_factory_->codecs();
1566 std::vector<WebRtcVideoEncoderFactory::VideoCodec>::const_iterator it;
1567 for (it = codecs.begin(); it != codecs.end(); ++it) {
1568 if (it->type == type)
1569 return true;
1570 }
1571 return false;
1572 }
1573
1574 void WebRtcVideoEngine::SetExternalDecoderFactory(
1575 WebRtcVideoDecoderFactory* decoder_factory) {
1576 decoder_factory_ = decoder_factory;
1577 }
1578
1579 void WebRtcVideoEngine::SetExternalEncoderFactory(
1580 WebRtcVideoEncoderFactory* encoder_factory) {
1581 if (encoder_factory_ == encoder_factory)
1582 return;
1583
1584 if (encoder_factory_) {
1585 encoder_factory_->RemoveObserver(this);
1586 }
1587 encoder_factory_ = encoder_factory;
1588 if (encoder_factory_) {
1589 encoder_factory_->AddObserver(this);
1590 }
1591
1592 // Invoke OnCodecsAvailable() here in case the list of codecs is already
1593 // available when the encoder factory is installed. If not, the encoder
1594 // factory will invoke the callback later when the codecs become available.
1595 OnCodecsAvailable();
1596 }
1597
1598 void WebRtcVideoEngine::OnCodecsAvailable() {
1599 // Rebuild codec list while reapplying the current default codec format.
1600 VideoCodec max_codec(kVideoCodecPrefs[0].payload_type,
1601 kVideoCodecPrefs[0].name,
1602 video_codecs_[0].width,
1603 video_codecs_[0].height,
1604 video_codecs_[0].framerate,
1605 0);
1606 if (!RebuildCodecList(max_codec)) {
1607 LOG(LS_ERROR) << "Failed to initialize list of supported codec types";
1608 }
1609 }
1610
1611 // WebRtcVideoMediaChannel
1612
1613 WebRtcVideoMediaChannel::WebRtcVideoMediaChannel(
1614 WebRtcVideoEngine* engine,
1615 VoiceMediaChannel* channel)
1616 : engine_(engine),
1617 voice_channel_(channel),
1618 vie_channel_(-1),
1619 nack_enabled_(true),
1620 remb_enabled_(false),
1621 render_started_(false),
1622 first_receive_ssrc_(0),
1623 num_unsignalled_recv_channels_(0),
1624 send_rtx_type_(-1),
1625 send_red_type_(-1),
1626 send_fec_type_(-1),
1627 sending_(false),
1628 ratio_w_(0),
1629 ratio_h_(0) {
1630 engine->RegisterChannel(this);
1631 }
1632
1633 bool WebRtcVideoMediaChannel::Init() {
1634 const uint32 ssrc_key = 0;
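// ssrc_key 0 identifies the default channel, which is created here as a
// send/receive channel and reused until explicit streams claim their own.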
1635 return CreateChannel(ssrc_key, MD_SENDRECV, &vie_channel_);
1636 }
1637
1638 WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
1639 const bool send = false;
1640 SetSend(send);
1641 const bool render = false;
1642 SetRender(render);
1643
1644 while (!send_channels_.empty()) {
1645 if (!DeleteSendChannel(send_channels_.begin()->first)) {
1646 LOG(LS_ERROR) << "Unable to delete channel with ssrc key "
1647 << send_channels_.begin()->first;
1648 ASSERT(false);
1649 break;
1650 }
1651 }
1652
1653 // Remove all receive streams and the default channel.
1654 while (!recv_channels_.empty()) {
1655 RemoveRecvStreamInternal(recv_channels_.begin()->first);
1656 }
1657
1658 // Unregister the channel from the engine.
1659 engine()->UnregisterChannel(this);
1660 if (worker_thread()) {
1661 worker_thread()->Clear(this);
1662 }
1663 }
1664
1665 bool WebRtcVideoMediaChannel::SetRecvCodecs(
1666 const std::vector<VideoCodec>& codecs) {
1667 receive_codecs_.clear();
1668 associated_payload_types_.clear();
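// associated_payload_types_ maps a received payload type to its associated
// payload type (kCodecParamAssociatedPayloadType, typically RTX -> primary)
// and is rebuilt from scratch on every call.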
1669 for (std::vector<VideoCodec>::const_iterator iter = codecs.begin();
1670 iter != codecs.end(); ++iter) {
1671 if (engine()->FindCodec(*iter)) {
1672 webrtc::VideoCodec wcodec;
1673 if (engine()->ConvertFromCricketVideoCodec(*iter, &wcodec)) {
1674 receive_codecs_.push_back(wcodec);
1675 int apt;
1676 if (iter->GetParam(cricket::kCodecParamAssociatedPayloadType, &apt)) {
1677 associated_payload_types_[wcodec.plType] = apt;
1678 }
1679 }
1680 } else {
1681 LOG(LS_INFO) << "Unknown codec " << iter->name;
1682 return false;
1683 }
1684 }
1685
1686 for (RecvChannelMap::iterator it = recv_channels_.begin();
1687 it != recv_channels_.end(); ++it) {
1688 if (!SetReceiveCodecs(it->second))
1689 return false;
1690 }
1691 return true;
1692 }
1693
1694 bool WebRtcVideoMediaChannel::SetSendCodecs(
1695 const std::vector<VideoCodec>& codecs) {
1696 // Match with local video codec list.
1697 std::vector<webrtc::VideoCodec> send_codecs;
1698 VideoCodec checked_codec;
1699 VideoCodec current; // defaults to 0x0
1700 if (sending_) {
1701 ConvertToCricketVideoCodec(*send_codec_, &current);
1702 }
1703 std::map<int, int> primary_rtx_pt_mapping;
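// Maps a primary payload type to its RTX payload type; collected from the
// incoming codec list and applied to the selected send codec further below.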
1704 bool nack_enabled = nack_enabled_;
1705 bool remb_enabled = remb_enabled_;
1706 for (std::vector<VideoCodec>::const_iterator iter = codecs.begin();
1707 iter != codecs.end(); ++iter) {
1708 if (_stricmp(iter->name.c_str(), kRedPayloadName) == 0) {
1709 send_red_type_ = iter->id;
1710 } else if (_stricmp(iter->name.c_str(), kFecPayloadName) == 0) {
1711 send_fec_type_ = iter->id;
1712 } else if (_stricmp(iter->name.c_str(), kRtxCodecName) == 0) {
1713 int rtx_type = iter->id;
1714 int rtx_primary_type = -1;
1715 if (iter->GetParam(kCodecParamAssociatedPayloadType, &rtx_primary_type)) {
1716 primary_rtx_pt_mapping[rtx_primary_type] = rtx_type;
1717 }
1718 } else if (engine()->CanSendCodec(*iter, current, &checked_codec)) {
1719 webrtc::VideoCodec wcodec;
1720 if (engine()->ConvertFromCricketVideoCodec(checked_codec, &wcodec)) {
1721 if (send_codecs.empty()) {
1722 nack_enabled = IsNackEnabled(checked_codec);
1723 remb_enabled = IsRembEnabled(checked_codec);
1724 }
1725 send_codecs.push_back(wcodec);
1726 }
1727 } else {
1728 LOG(LS_WARNING) << "Unknown codec " << iter->name;
1729 }
1730 }
1731
1732 // Fail if we don't have a match.
1733 if (send_codecs.empty()) {
1734 LOG(LS_WARNING) << "No matching codecs available";
1735 return false;
1736 }
1737
1738 // Recv protection.
1739 // Do not update if the status is the same as previously configured.
1740 if (nack_enabled_ != nack_enabled) {
1741 for (RecvChannelMap::iterator it = recv_channels_.begin();
1742 it != recv_channels_.end(); ++it) {
1743 int channel_id = it->second->channel_id();
1744 if (!SetNackFec(channel_id, send_red_type_, send_fec_type_,
1745 nack_enabled)) {
1746 return false;
1747 }
1748 if (engine_->vie()->rtp()->SetRembStatus(channel_id,
1749 kNotSending,
1750 remb_enabled_) != 0) {
1751 LOG_RTCERR3(SetRembStatus, channel_id, kNotSending, remb_enabled_);
1752 return false;
1753 }
1754 }
1755 nack_enabled_ = nack_enabled;
1756 }
1757
1758 // Send settings.
1759 // Do not update if the status is the same as previously configured.
1760 if (remb_enabled_ != remb_enabled) {
1761 for (SendChannelMap::iterator iter = send_channels_.begin();
1762 iter != send_channels_.end(); ++iter) {
1763 int channel_id = iter->second->channel_id();
1764 if (!SetNackFec(channel_id, send_red_type_, send_fec_type_,
1765 nack_enabled_)) {
1766 return false;
1767 }
1768 if (engine_->vie()->rtp()->SetRembStatus(channel_id,
1769 remb_enabled,
1770 remb_enabled) != 0) {
1771 LOG_RTCERR3(SetRembStatus, channel_id, remb_enabled, remb_enabled);
1772 return false;
1773 }
1774 }
1775 remb_enabled_ = remb_enabled;
1776 }
1777
1778 // Select the first matched codec.
1779 webrtc::VideoCodec& codec(send_codecs[0]);
1780
1781 // Set the RTX payload type if the primary is now active. This value will be
1782 // used in SetSendCodec.
1783 std::map<int, int>::const_iterator rtx_it =
1784 primary_rtx_pt_mapping.find(static_cast<int>(codec.plType));
1785 if (rtx_it != primary_rtx_pt_mapping.end()) {
1786 send_rtx_type_ = rtx_it->second;
1787 }
1788
1789 if (BitrateIsSet(codec.minBitrate) && BitrateIsSet(codec.maxBitrate) &&
1790 codec.minBitrate > codec.maxBitrate) {
1791 // TODO(pthatcher): This behavior contradicts other behavior in
1792 // this file which will cause min > max to push the min down to
1793 // the max. There are unit tests for both behaviors. We should
1794 // pick one and do that.
1795 LOG(LS_INFO) << "Rejecting codec with min bitrate ("
1796 << codec.minBitrate << ") larger than max ("
1797 << codec.maxBitrate << "). ";
1798 return false;
1799 }
1800
1801 if (!SetSendCodec(codec)) {
1802 return false;
1803 }
1804
1805 LogSendCodecChange("SetSendCodecs()");
1806
1807 return true;
1808 }
1809
1810 bool WebRtcVideoMediaChannel::GetSendCodec(VideoCodec* send_codec) {
1811 if (!send_codec_) {
1812 return false;
1813 }
1814 ConvertToCricketVideoCodec(*send_codec_, send_codec);
1815 return true;
1816 }
1817
1818 bool WebRtcVideoMediaChannel::SetSendStreamFormat(uint32 ssrc,
1819 const VideoFormat& format) {
1820 WebRtcVideoChannelSendInfo* send_channel = GetSendChannel(ssrc);
1821 if (!send_channel) {
1822 LOG(LS_ERROR) << "The specified ssrc " << ssrc << " is not in use.";
1823 return false;
1824 }
1825 send_channel->set_video_format(format);
1826 return true;
1827 }
1828
1829 bool WebRtcVideoMediaChannel::SetRender(bool render) {
1830 if (render == render_started_) {
1831 return true; // no action required
1832 }
1833
1834 bool ret = true;
1835 for (RecvChannelMap::iterator it = recv_channels_.begin();
1836 it != recv_channels_.end(); ++it) {
1837 if (render) {
1838 if (engine()->vie()->render()->StartRender(
1839 it->second->channel_id()) != 0) {
1840 LOG_RTCERR1(StartRender, it->second->channel_id());
1841 ret = false;
1842 }
1843 } else {
1844 if (engine()->vie()->render()->StopRender(
1845 it->second->channel_id()) != 0) {
1846 LOG_RTCERR1(StopRender, it->second->channel_id());
1847 ret = false;
1848 }
1849 }
1850 }
1851 if (ret) {
1852 render_started_ = render;
1853 }
1854
1855 return ret;
1856 }
1857
1858 bool WebRtcVideoMediaChannel::SetSend(bool send) {
1859 if (!HasReadySendChannels() && send) {
1860 LOG(LS_ERROR) << "No stream added";
1861 return false;
1862 }
1863 if (send == sending()) {
1864 return true; // No action required.
1865 }
1866
1867 if (send) {
1868 // We've been asked to start sending.
1869 // SetSendCodecs must have been called already.
1870 if (!send_codec_) {
1871 return false;
1872 }
1873 // Start send now.
1874 if (!StartSend()) {
1875 return false;
1876 }
1877 } else {
1878 // We've been asked to stop sending.
1879 if (!StopSend()) {
1880 return false;
1881 }
1882 }
1883 sending_ = send;
1884
1885 return true;
1886 }
1887
1888 bool WebRtcVideoMediaChannel::AddSendStream(const StreamParams& sp) {
1889 if (sp.first_ssrc() == 0) {
1890 LOG(LS_ERROR) << "AddSendStream with 0 ssrc is not supported.";
1891 return false;
1892 }
1893
1894 LOG(LS_INFO) << "AddSendStream " << sp.ToString();
1895
1896 if (!IsOneSsrcStream(sp) && !IsSimulcastStream(sp)) {
1897 LOG(LS_ERROR) << "AddSendStream: bad local stream parameters";
1898 return false;
1899 }
1900
1901 uint32 ssrc_key;
1902 if (!CreateSendChannelKey(sp.first_ssrc(), &ssrc_key)) {
1903 LOG(LS_ERROR) << "Trying to register duplicate ssrc: " << sp.first_ssrc();
1904 return false;
1905 }
1906 // If the default channel is already used for sending, create a new channel;
1907 // otherwise use the default channel for sending.
1908 int channel_id = -1;
1909 if (send_channels_[0]->stream_params() == NULL) {
1910 channel_id = vie_channel_;
1911 } else {
1912 if (!CreateChannel(ssrc_key, MD_SEND, &channel_id)) {
1913 LOG(LS_ERROR) << "AddSendStream: unable to create channel";
1914 return false;
1915 }
1916 }
1917 WebRtcVideoChannelSendInfo* send_channel = send_channels_[ssrc_key];
1918 // Set the send (local) SSRC.
1919 // If there are multiple send SSRCs, we can only set the first one here; the
1920 // rest of the SSRCs need to be set after SetSendCodec has been called
1921 // (with a codec that requires multiple SSRCs).
1922 if (engine()->vie()->rtp()->SetLocalSSRC(channel_id,
1923 sp.first_ssrc()) != 0) {
1924 LOG_RTCERR2(SetLocalSSRC, channel_id, sp.first_ssrc());
1925 return false;
1926 }
1927
1928 // Set the corresponding RTX SSRC.
1929 if (!SetLocalRtxSsrc(channel_id, sp, sp.first_ssrc(), 0)) {
1930 return false;
1931 }
1932
1933 // Set RTCP CName.
1934 if (engine()->vie()->rtp()->SetRTCPCName(channel_id,
1935 sp.cname.c_str()) != 0) {
1936 LOG_RTCERR2(SetRTCPCName, channel_id, sp.cname.c_str());
1937 return false;
1938 }
1939
1940 // At this point the channel's local SSRC has been updated. If the channel is
1941 // the default channel, make sure that all the receive channels are updated as
1942 // well. Receive channels have to have the same SSRC as the default channel in
1943 // order to send receiver reports with this SSRC.
1944 if (IsDefaultChannel(channel_id)) {
1945 for (RecvChannelMap::const_iterator it = recv_channels_.begin();
1946 it != recv_channels_.end(); ++it) {
1947 WebRtcVideoChannelRecvInfo* info = it->second;
1948 int channel_id = info->channel_id();
1949 if (engine()->vie()->rtp()->SetLocalSSRC(channel_id,
1950 sp.first_ssrc()) != 0) {
1951 LOG_RTCERR1(SetLocalSSRC, it->first);
1952 return false;
1953 }
1954 }
1955 }
1956
1957 send_channel->set_stream_params(sp);
1958
1959 // Reset send codec after stream parameters changed.
1960 if (send_codec_) {
1961 if (!SetSendCodec(send_channel, *send_codec_)) {
1962 return false;
1963 }
1964 LogSendCodecChange("AddSendStream()");
1965 }
1966
1967 if (sending_) {
1968 return StartSend(send_channel);
1969 }
1970 return true;
1971 }
1972
1973 bool WebRtcVideoMediaChannel::RemoveSendStream(uint32 ssrc) {
1974 if (ssrc == 0) {
1975 LOG(LS_ERROR) << "RemoveSendStream with 0 ssrc is not supported.";
1976 return false;
1977 }
1978
1979 uint32 ssrc_key;
1980 if (!GetSendChannelKey(ssrc, &ssrc_key)) {
1981 LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
1982 << " which doesn't exist.";
1983 return false;
1984 }
1985 WebRtcVideoChannelSendInfo* send_channel = send_channels_[ssrc_key];
1986 int channel_id = send_channel->channel_id();
1987 if (IsDefaultChannel(channel_id) && (send_channel->stream_params() == NULL)) {
1988 // Default channel will still exist. However, if stream_params() is NULL
1989 // there is no stream to remove.
1990 return false;
1991 }
1992 if (sending_) {
1993 StopSend(send_channel);
1994 }
1995
1996 const WebRtcVideoChannelSendInfo::EncoderMap& encoder_map =
1997 send_channel->registered_encoders();
1998 for (WebRtcVideoChannelSendInfo::EncoderMap::const_iterator it =
1999 encoder_map.begin(); it != encoder_map.end(); ++it) {
2000 if (engine()->vie()->ext_codec()->DeRegisterExternalSendCodec(
2001 channel_id, it->first) != 0) {
2002 LOG_RTCERR1(DeRegisterExternalSendCodec, channel_id);
2003 }
2004 engine()->DestroyExternalEncoder(it->second);
2005 }
2006 send_channel->ClearRegisteredEncoders();
2007
2008 // The receive channels depend on the default channel, so recycle it instead.
2009 if (IsDefaultChannel(channel_id)) {
2010 SetCapturer(GetDefaultChannelSsrc(), NULL);
2011 send_channel->ClearStreamParams();
2012 } else {
2013 return DeleteSendChannel(ssrc_key);
2014 }
2015 return true;
2016 }
2017
2018 bool WebRtcVideoMediaChannel::AddRecvStream(const StreamParams& sp) {
2019 if (sp.first_ssrc() == 0) {
2020 LOG(LS_ERROR) << "AddRecvStream with 0 ssrc is not supported.";
2021 return false;
2022 }
2023
2024 // TODO(zhurunz) Remove this once BWE works properly across different send
2025 // and receive channels.
2026 // Reuse default channel for recv stream in 1:1 call.
2027 if (!InConferenceMode() && first_receive_ssrc_ == 0) {
2028 LOG(LS_INFO) << "Recv stream " << sp.first_ssrc()
2029 << " reuse default channel #"
2030 << vie_channel_;
2031 first_receive_ssrc_ = sp.first_ssrc();
2032 if (!MaybeSetRtxSsrc(sp, vie_channel_)) {
2033 return false;
2034 }
2035 if (render_started_) {
2036 if (engine()->vie()->render()->StartRender(vie_channel_) !=0) {
2037 LOG_RTCERR1(StartRender, vie_channel_);
2038 }
2039 }
2040 return true;
2041 }
2042
2043 int channel_id = -1;
2044 RecvChannelMap::iterator channel_iterator =
2045 recv_channels_.find(sp.first_ssrc());
2046 if (channel_iterator == recv_channels_.end() &&
2047 first_receive_ssrc_ != sp.first_ssrc()) {
2048 // TODO(perkj): Implement recv media from multiple media SSRCs per stream.
2049 // NOTE: We have two SSRCs per stream when RTX is enabled.
2050 if (!IsOneSsrcStream(sp)) {
2051 LOG(LS_ERROR) << "WebRtcVideoMediaChannel supports one primary SSRC per"
2052 << " stream and one FID SSRC per primary SSRC.";
2053 return false;
2054 }
2055
2056 // Create a new channel for receiving video data.
2057 // In order to get the bandwidth estimation work fine for
2058 // receive only channels, we connect all receiving channels
2059 // to our master send channel.
2060 if (!CreateChannel(sp.first_ssrc(), MD_RECV, &channel_id)) {
2061 return false;
2062 }
2063 } else {
2064 // Already exists.
2065 if (first_receive_ssrc_ == sp.first_ssrc()) {
2066 return false;
2067 }
2068 // Early receive added channel.
2069 channel_id = (*channel_iterator).second->channel_id();
2070 }
2071 channel_iterator = recv_channels_.find(sp.first_ssrc());
2072
2073 if (!MaybeSetRtxSsrc(sp, channel_id)) {
2074 return false;
2075 }
2076
2077 // Get the default renderer.
2078 VideoRenderer* default_renderer = NULL;
2079 if (InConferenceMode()) {
2080 // The recv_channels_ size starts out being 1, so if it is two here this
2081 // is the first receive channel created (vie_channel_ is not used for
2082 // receiving in a conference call). This means that the renderer stored
2083 // inside vie_channel_ should be used for the just created channel.
2084 if (recv_channels_.size() == 2 &&
2085 recv_channels_.find(0) != recv_channels_.end()) {
2086 GetRenderer(0, &default_renderer);
2087 }
2088 }
2089
2090 // The first recv stream reuses the default renderer (if a default renderer
2091 // has been set).
2092 if (default_renderer) {
2093 SetRenderer(sp.first_ssrc(), default_renderer);
2094 }
2095
2096 LOG(LS_INFO) << "New video stream " << sp.first_ssrc()
2097 << " registered to VideoEngine channel #"
2098 << channel_id << " and connected to channel #" << vie_channel_;
2099
2100 return true;
2101 }
2102
2103 bool WebRtcVideoMediaChannel::MaybeSetRtxSsrc(const StreamParams& sp,
2104 int channel_id) {
2105 uint32 rtx_ssrc;
2106 bool has_rtx = sp.GetFidSsrc(sp.first_ssrc(), &rtx_ssrc);
2107 if (has_rtx) {
2108 LOG(LS_INFO) << "Setting rtx ssrc " << rtx_ssrc << " for stream "
2109 << sp.first_ssrc();
2110 if (engine()->vie()->rtp()->SetRemoteSSRCType(
2111 channel_id, webrtc::kViEStreamTypeRtx, rtx_ssrc) != 0) {
2112 LOG_RTCERR3(SetRemoteSSRCType, channel_id, webrtc::kViEStreamTypeRtx,
2113 rtx_ssrc);
2114 return false;
2115 }
2116 rtx_to_primary_ssrc_[rtx_ssrc] = sp.first_ssrc();
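// Record the RTX-to-primary SSRC mapping; it is removed again in
// RemoveRecvStreamInternal() when the primary stream goes away.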
2117 }
2118 return true;
2119 }
2120
2121 bool WebRtcVideoMediaChannel::RemoveRecvStream(uint32 ssrc) {
2122 if (ssrc == 0) {
2123 LOG(LS_ERROR) << "RemoveRecvStream with 0 ssrc is not supported.";
2124 return false;
2125 }
2126 return RemoveRecvStreamInternal(ssrc);
2127 }
2128
2129 bool WebRtcVideoMediaChannel::RemoveRecvStreamInternal(uint32 ssrc) {
2130 RecvChannelMap::iterator it = recv_channels_.find(ssrc);
2131 if (it == recv_channels_.end()) {
2132 // TODO(perkj): Remove this once BWE works properly across different send
2133 // and receive channels.
2134 // The default channel is reused for recv stream in 1:1 call.
2135 if (first_receive_ssrc_ == ssrc) {
2136 first_receive_ssrc_ = 0;
2137 // Need to stop the renderer and remove it since the render window can be
2138 // deleted after this.
2139 if (render_started_) {
2140 if (engine()->vie()->render()->StopRender(vie_channel_) !=0) {
2141 LOG_RTCERR1(StopRender, vie_channel_);
2142 }
2143 }
2144 recv_channels_[0]->SetRenderer(NULL);
2145 return true;
2146 }
2147 return false;
2148 }
2149 WebRtcVideoChannelRecvInfo* info = it->second;
2150
2151 // Remove any RTX SSRC mappings to this stream.
2152 SsrcMap::iterator rtx_it = rtx_to_primary_ssrc_.begin();
2153 while (rtx_it != rtx_to_primary_ssrc_.end()) {
2154 if (rtx_it->second == ssrc) {
2155 rtx_to_primary_ssrc_.erase(rtx_it++);
2156 } else {
2157 ++rtx_it;
2158 }
2159 }
2160
2161 int channel_id = info->channel_id();
2162 if (engine()->vie()->render()->RemoveRenderer(channel_id) != 0) {
2163 LOG_RTCERR1(RemoveRenderer, channel_id);
2164 }
2165
2166 if (engine()->vie()->network()->DeregisterSendTransport(channel_id) !=0) {
2167 LOG_RTCERR1(DeRegisterSendTransport, channel_id);
2168 }
2169
2170 if (engine()->vie()->codec()->DeregisterDecoderObserver(
2171 channel_id) != 0) {
2172 LOG_RTCERR1(DeregisterDecoderObserver, channel_id);
2173 }
2174
2175 const WebRtcVideoChannelRecvInfo::DecoderMap& decoder_map =
2176 info->registered_decoders();
2177 for (WebRtcVideoChannelRecvInfo::DecoderMap::const_iterator it =
2178 decoder_map.begin(); it != decoder_map.end(); ++it) {
2179 if (engine()->vie()->ext_codec()->DeRegisterExternalReceiveCodec(
2180 channel_id, it->first) != 0) {
2181 LOG_RTCERR1(DeRegisterExternalReceiveCodec, channel_id);
2182 }
2183 engine()->DestroyExternalDecoder(it->second);
2184 }
2185 info->ClearRegisteredDecoders();
2186
2187 LOG(LS_INFO) << "Removing video stream " << ssrc
2188 << " with VideoEngine channel #"
2189 << channel_id;
2190 bool ret = true;
2191 if (engine()->vie()->base()->DeleteChannel(channel_id) == -1) {
2192 LOG_RTCERR1(DeleteChannel, channel_id);
2193 ret = false;
2194 }
2195 // Delete the WebRtcVideoChannelRecvInfo pointed to by it->second.
2196 delete info;
2197 recv_channels_.erase(it);
2198 return ret;
2199 }
2200
2201 bool WebRtcVideoMediaChannel::StartSend() {
2202 bool success = true;
2203 for (SendChannelMap::iterator iter = send_channels_.begin();
2204 iter != send_channels_.end(); ++iter) {
2205 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2206 if (!StartSend(send_channel)) {
2207 success = false;
2208 }
2209 }
2210 return success;
2211 }
2212
2213 bool WebRtcVideoMediaChannel::StartSend(
2214 WebRtcVideoChannelSendInfo* send_channel) {
2215 const int channel_id = send_channel->channel_id();
2216 if (engine()->vie()->base()->StartSend(channel_id) != 0) {
2217 LOG_RTCERR1(StartSend, channel_id);
2218 return false;
2219 }
2220
2221 send_channel->set_sending(true);
2222 return true;
2223 }
2224
2225 bool WebRtcVideoMediaChannel::StopSend() {
2226 bool success = true;
2227 for (SendChannelMap::iterator iter = send_channels_.begin();
2228 iter != send_channels_.end(); ++iter) {
2229 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2230 if (!StopSend(send_channel)) {
2231 success = false;
2232 }
2233 }
2234 return success;
2235 }
2236
2237 bool WebRtcVideoMediaChannel::StopSend(
2238 WebRtcVideoChannelSendInfo* send_channel) {
2239 const int channel_id = send_channel->channel_id();
2240 if (engine()->vie()->base()->StopSend(channel_id) != 0) {
2241 LOG_RTCERR1(StopSend, channel_id);
2242 return false;
2243 }
2244 send_channel->set_sending(false);
2245 return true;
2246 }
2247
2248 bool WebRtcVideoMediaChannel::SendIntraFrame() {
2249 bool success = true;
2250 for (SendChannelMap::iterator iter = send_channels_.begin();
2251 iter != send_channels_.end();
2252 ++iter) {
2253 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2254 const int channel_id = send_channel->channel_id();
2255 if (engine()->vie()->codec()->SendKeyFrame(channel_id) != 0) {
2256 LOG_RTCERR1(SendKeyFrame, channel_id);
2257 success = false;
2258 }
2259 }
2260 return success;
2261 }
2262
2263 bool WebRtcVideoMediaChannel::HasReadySendChannels() {
2264 return !send_channels_.empty() &&
2265 ((send_channels_.size() > 1) ||
2266 (send_channels_[0]->stream_params() != NULL));
2267 }
2268
2269 bool WebRtcVideoMediaChannel::GetSendChannelKey(uint32 local_ssrc,
2270 uint32* key) {
2271 *key = 0;
2272 // If a send channel is not ready to send, it will not have local_ssrc
2273 // registered to it.
2274 if (!HasReadySendChannels()) {
2275 return false;
2276 }
2277 // The default channel is stored with key 0. The key therefore does not match
2278 // the SSRC associated with the default channel. Check if the SSRC provided
2279 // corresponds to the default channel's SSRC.
2280 if (local_ssrc == GetDefaultChannelSsrc()) {
2281 return true;
2282 }
2283 if (send_channels_.find(local_ssrc) == send_channels_.end()) {
2284 for (SendChannelMap::iterator iter = send_channels_.begin();
2285 iter != send_channels_.end(); ++iter) {
2286 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2287 if (send_channel->has_ssrc(local_ssrc)) {
2288 *key = iter->first;
2289 return true;
2290 }
2291 }
2292 return false;
2293 }
2294 // The key was found in the above std::map::find call. This means that the
2295 // ssrc is the key.
2296 *key = local_ssrc;
2297 return true;
2298 }
2299
2300 WebRtcVideoChannelSendInfo* WebRtcVideoMediaChannel::GetSendChannel(
2301 uint32 local_ssrc) {
2302 uint32 key;
2303 if (!GetSendChannelKey(local_ssrc, &key)) {
2304 return NULL;
2305 }
2306 return send_channels_[key];
2307 }
2308
2309 bool WebRtcVideoMediaChannel::CreateSendChannelKey(uint32 local_ssrc,
2310 uint32* key) {
2311 if (GetSendChannelKey(local_ssrc, key)) {
2312 // If there is a key corresponding to |local_ssrc|, the SSRC is already in
2313 // use. SSRCs need to be unique in a session and at this point a duplicate
2314 // SSRC has been detected.
2315 return false;
2316 }
2317 if (send_channels_[0]->stream_params() == NULL) {
2318 // The key should be 0 here, as the default channel should be reused whenever
2319 // it is not already in use.
2320 *key = 0;
2321 return true;
2322 }
2323 // SSRC is currently not in use and the default channel is already in use. Use
2324 // the SSRC as key since it is supposed to be unique in a session.
2325 *key = local_ssrc;
2326 return true;
2327 }
2328
2329 int WebRtcVideoMediaChannel::GetSendChannelNum(VideoCapturer* capturer) {
2330 int num = 0;
2331 for (SendChannelMap::iterator iter = send_channels_.begin();
2332 iter != send_channels_.end(); ++iter) {
2333 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2334 if (send_channel->video_capturer() == capturer) {
2335 ++num;
2336 }
2337 }
2338 return num;
2339 }
2340
2341 uint32 WebRtcVideoMediaChannel::GetDefaultChannelSsrc() {
2342 WebRtcVideoChannelSendInfo* send_channel = send_channels_[0];
2343 const StreamParams* sp = send_channel->stream_params();
2344 if (sp == NULL) {
2345 // This happens if no send stream is currently registered.
2346 return 0;
2347 }
2348 return sp->first_ssrc();
2349 }
2350
2351 bool WebRtcVideoMediaChannel::DeleteSendChannel(uint32 ssrc_key) {
2352 if (send_channels_.find(ssrc_key) == send_channels_.end()) {
2353 return false;
2354 }
2355 WebRtcVideoChannelSendInfo* send_channel = send_channels_[ssrc_key];
2356 MaybeDisconnectCapturer(send_channel->video_capturer());
2357 send_channel->set_video_capturer(NULL, engine()->vie());
2358
2359 int channel_id = send_channel->channel_id();
2360 int capture_id = send_channel->capture_id();
2361 if (engine()->vie()->codec()->DeregisterEncoderObserver(
2362 channel_id) != 0) {
2363 LOG_RTCERR1(DeregisterEncoderObserver, channel_id);
2364 }
2365
2366 // Destroy the external capture interface.
2367 if (engine()->vie()->capture()->DisconnectCaptureDevice(
2368 channel_id) != 0) {
2369 LOG_RTCERR1(DisconnectCaptureDevice, channel_id);
2370 }
2371 if (engine()->vie()->capture()->ReleaseCaptureDevice(
2372 capture_id) != 0) {
2373 LOG_RTCERR1(ReleaseCaptureDevice, capture_id);
2374 }
2375
2376 // The default channel is stored in both |send_channels_| and
2377 // |recv_channels_|. To make sure it is only deleted once from ViE, let the
2378 // delete call happen when tearing down |recv_channels_| and not here.
2379 if (!IsDefaultChannel(channel_id)) {
2380 engine_->vie()->base()->DeleteChannel(channel_id);
2381 }
2382 delete send_channel;
2383 send_channels_.erase(ssrc_key);
2384 return true;
2385 }
2386
2387 bool WebRtcVideoMediaChannel::RemoveCapturer(uint32 ssrc) {
2388 WebRtcVideoChannelSendInfo* send_channel = GetSendChannel(ssrc);
2389 if (!send_channel) {
2390 return false;
2391 }
2392 VideoCapturer* capturer = send_channel->video_capturer();
2393 if (capturer == NULL) {
2394 return false;
2395 }
2396 MaybeDisconnectCapturer(capturer);
2397 send_channel->set_video_capturer(NULL, engine()->vie());
2398 const int64 timestamp = send_channel->local_stream_info()->time_stamp();
2399 if (send_codec_) {
2400 QueueBlackFrame(ssrc, timestamp, send_codec_->maxFramerate);
2401 }
2402 return true;
2403 }
2404
2405 bool WebRtcVideoMediaChannel::SetRenderer(uint32 ssrc,
2406 VideoRenderer* renderer) {
2407 if (recv_channels_.find(ssrc) == recv_channels_.end()) {
2408 // TODO(perkj): Remove this once BWE works properly across different send
2409 // and receive channels.
2410 // The default channel is reused for recv stream in 1:1 call.
2411 if (first_receive_ssrc_ == ssrc &&
2412 recv_channels_.find(0) != recv_channels_.end()) {
2413 LOG(LS_INFO) << "SetRenderer " << ssrc
2414 << " reuse default channel #"
2415 << vie_channel_;
2416 recv_channels_[0]->SetRenderer(renderer);
2417 return true;
2418 }
2419 return false;
2420 }
2421
2422 recv_channels_[ssrc]->SetRenderer(renderer);
2423 return true;
2424 }
2425
2426 bool WebRtcVideoMediaChannel::GetStats(const StatsOptions& options,
2427 VideoMediaInfo* info) {
2428 // Get sender statistics and build VideoSenderInfo.
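// The stats are assembled in three passes below: per-send-channel sender
// info, per-receive-channel receiver info, and finally the aggregated
// bandwidth estimation info.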
2429 unsigned int total_bitrate_sent = 0;
2430 unsigned int video_bitrate_sent = 0;
2431 unsigned int fec_bitrate_sent = 0;
2432 unsigned int nack_bitrate_sent = 0;
2433 unsigned int estimated_send_bandwidth = 0;
2434 unsigned int target_enc_bitrate = 0;
2435 if (send_codec_) {
2436 for (SendChannelMap::const_iterator iter = send_channels_.begin();
2437 iter != send_channels_.end(); ++iter) {
2438 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2439 const int channel_id = send_channel->channel_id();
2440 VideoSenderInfo sinfo;
2441 const StreamParams* send_params = send_channel->stream_params();
2442 if (send_params == NULL) {
2443 // This should only happen if the default vie channel is not in use.
2444 // This can happen if no streams have ever been added or the stream
2445 // corresponding to the default channel has been removed. Note that
2446 // there may be non-default vie channels in use when this happens, so
2447 // asserting send_channels_.size() == 1 is not correct and neither is
2448 // breaking out of the loop.
2449 ASSERT(channel_id == vie_channel_);
2450 continue;
2451 }
2452 unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv;
2453 if (engine_->vie()->rtp()->GetRTPStatistics(channel_id, bytes_sent,
2454 packets_sent, bytes_recv,
2455 packets_recv) != 0) {
2456 LOG_RTCERR1(GetRTPStatistics, vie_channel_);
2457 continue;
2458 }
2459 WebRtcLocalStreamInfo* channel_stream_info =
2460 send_channel->local_stream_info();
2461
2462 for (size_t i = 0; i < send_params->ssrcs.size(); ++i) {
2463 sinfo.add_ssrc(send_params->ssrcs[i]);
2464 }
2465 sinfo.codec_name = send_codec_->plName;
2466 sinfo.bytes_sent = bytes_sent;
2467 sinfo.packets_sent = packets_sent;
2468 sinfo.packets_cached = -1;
2469 sinfo.packets_lost = -1;
2470 sinfo.fraction_lost = -1;
2471 sinfo.rtt_ms = -1;
2472
2473 VideoCapturer* video_capturer = send_channel->video_capturer();
2474 if (video_capturer) {
2475 VideoFormat last_captured_frame_format;
2476 video_capturer->GetStats(&sinfo.adapt_frame_drops,
2477 &sinfo.effects_frame_drops,
2478 &sinfo.capturer_frame_time,
2479 &last_captured_frame_format);
2480 sinfo.input_frame_width = last_captured_frame_format.width;
2481 sinfo.input_frame_height = last_captured_frame_format.height;
2482 } else {
2483 sinfo.input_frame_width = 0;
2484 sinfo.input_frame_height = 0;
2485 }
2486
2487 webrtc::VideoCodec vie_codec;
2488 if (!video_capturer || video_capturer->IsMuted()) {
2489 sinfo.send_frame_width = 0;
2490 sinfo.send_frame_height = 0;
2491 } else if (engine()->vie()->codec()->GetSendCodec(channel_id,
2492 vie_codec) == 0) {
2493 sinfo.send_frame_width = vie_codec.width;
2494 sinfo.send_frame_height = vie_codec.height;
2495 } else {
2496 sinfo.send_frame_width = -1;
2497 sinfo.send_frame_height = -1;
2498 LOG_RTCERR1(GetSendCodec, channel_id);
2499 }
2500 sinfo.framerate_input = channel_stream_info->framerate();
2501 sinfo.framerate_sent = send_channel->encoder_observer()->framerate();
2502 sinfo.nominal_bitrate = send_channel->encoder_observer()->bitrate();
2503 if (send_codec_) {
2504 sinfo.preferred_bitrate = GetBitrate(
2505 send_codec_->maxBitrate, kMaxVideoBitrate);
2506 }
2507 sinfo.adapt_reason = send_channel->CurrentAdaptReason();
2508
2509 #ifdef USE_WEBRTC_DEV_BRANCH
2510 webrtc::CpuOveruseMetrics metrics;
2511 engine()->vie()->base()->GetCpuOveruseMetrics(channel_id, &metrics);
2512 sinfo.capture_jitter_ms = metrics.capture_jitter_ms;
2513 sinfo.avg_encode_ms = metrics.avg_encode_time_ms;
2514 sinfo.encode_usage_percent = metrics.encode_usage_percent;
2515 sinfo.encode_rsd = metrics.encode_rsd;
2516 sinfo.capture_queue_delay_ms_per_s = metrics.capture_queue_delay_ms_per_s;
2517 #else
2518 sinfo.capture_jitter_ms = -1;
2519 sinfo.avg_encode_ms = -1;
2520 sinfo.encode_usage_percent = -1;
2521 sinfo.capture_queue_delay_ms_per_s = -1;
2522
2523 int capture_jitter_ms = 0;
2524 int avg_encode_time_ms = 0;
2525 int encode_usage_percent = 0;
2526 int capture_queue_delay_ms_per_s = 0;
2527 if (engine()->vie()->base()->CpuOveruseMeasures(
2528 channel_id,
2529 &capture_jitter_ms,
2530 &avg_encode_time_ms,
2531 &encode_usage_percent,
2532 &capture_queue_delay_ms_per_s) == 0) {
2533 sinfo.capture_jitter_ms = capture_jitter_ms;
2534 sinfo.avg_encode_ms = avg_encode_time_ms;
2535 sinfo.encode_usage_percent = encode_usage_percent;
2536 sinfo.capture_queue_delay_ms_per_s = capture_queue_delay_ms_per_s;
2537 }
2538 #endif
2539
2540 webrtc::RtcpPacketTypeCounter rtcp_sent;
2541 webrtc::RtcpPacketTypeCounter rtcp_received;
2542 if (engine()->vie()->rtp()->GetRtcpPacketTypeCounters(
2543 channel_id, &rtcp_sent, &rtcp_received) == 0) {
2544 sinfo.firs_rcvd = rtcp_received.fir_packets;
2545 sinfo.plis_rcvd = rtcp_received.pli_packets;
2546 sinfo.nacks_rcvd = rtcp_received.nack_packets;
2547 } else {
2548 sinfo.firs_rcvd = -1;
2549 sinfo.plis_rcvd = -1;
2550 sinfo.nacks_rcvd = -1;
2551 LOG_RTCERR1(GetRtcpPacketTypeCounters, channel_id);
2552 }
2553
2554 // Get received RTCP statistics for the sender (reported by the remote
2555 // client in an RTCP packet), if available.
2556 // It's not a fatal error if we can't, since RTCP may not have arrived
2557 // yet.
2558 webrtc::RtcpStatistics outgoing_stream_rtcp_stats;
2559 int outgoing_stream_rtt_ms;
2560
2561 if (engine_->vie()->rtp()->GetSendChannelRtcpStatistics(
2562 channel_id,
2563 outgoing_stream_rtcp_stats,
2564 outgoing_stream_rtt_ms) == 0) {
2565 // Convert Q8 to float.
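// fraction_lost is an 8-bit fixed-point fraction (RTCP report block format),
// so dividing by (1 << 8) maps it into the [0, 1) range.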
2566 sinfo.packets_lost = outgoing_stream_rtcp_stats.cumulative_lost;
2567 sinfo.fraction_lost = static_cast<float>(
2568 outgoing_stream_rtcp_stats.fraction_lost) / (1 << 8);
2569 sinfo.rtt_ms = outgoing_stream_rtt_ms;
2570 }
2571 info->senders.push_back(sinfo);
2572
2573 unsigned int channel_total_bitrate_sent = 0;
2574 unsigned int channel_video_bitrate_sent = 0;
2575 unsigned int channel_fec_bitrate_sent = 0;
2576 unsigned int channel_nack_bitrate_sent = 0;
2577 if (engine_->vie()->rtp()->GetBandwidthUsage(
2578 channel_id, channel_total_bitrate_sent, channel_video_bitrate_sent,
2579 channel_fec_bitrate_sent, channel_nack_bitrate_sent) == 0) {
2580 total_bitrate_sent += channel_total_bitrate_sent;
2581 video_bitrate_sent += channel_video_bitrate_sent;
2582 fec_bitrate_sent += channel_fec_bitrate_sent;
2583 nack_bitrate_sent += channel_nack_bitrate_sent;
2584 } else {
2585 LOG_RTCERR1(GetBandwidthUsage, channel_id);
2586 }
2587
2588 unsigned int target_enc_stream_bitrate = 0;
2589 if (engine_->vie()->codec()->GetCodecTargetBitrate(
2590 channel_id, &target_enc_stream_bitrate) == 0) {
2591 target_enc_bitrate += target_enc_stream_bitrate;
2592 } else {
2593 LOG_RTCERR1(GetCodecTargetBitrate, channel_id);
2594 }
2595 }
2596 if (!send_channels_.empty()) {
2597 // GetEstimatedSendBandwidth returns the estimated bandwidth for all video
2598 // engine channels in a channel group. Any valid channel id will do as it
2599 // is only used to access the right group of channels.
2600 const int channel_id = send_channels_.begin()->second->channel_id();
2601 // Get the send bandwidth available for this MediaChannel.
2602 if (engine_->vie()->rtp()->GetEstimatedSendBandwidth(
2603 channel_id, &estimated_send_bandwidth) != 0) {
2604 LOG_RTCERR1(GetEstimatedSendBandwidth, channel_id);
2605 }
2606 }
2607 } else {
2608 LOG(LS_WARNING) << "GetStats: sender information not ready.";
2609 }
2610
2611 // Get the SSRC and stats for each receiver, based on our own calculations.
2612 for (RecvChannelMap::const_iterator it = recv_channels_.begin();
2613 it != recv_channels_.end(); ++it) {
2614 WebRtcVideoChannelRecvInfo* channel = it->second;
2615
2616 unsigned int ssrc = 0;
2617 // Get receiver statistics and build VideoReceiverInfo, if we have data.
2618 // Skip the default channel (ssrc == 0).
2619 if (engine_->vie()->rtp()->GetRemoteSSRC(
2620 channel->channel_id(), ssrc) != 0 ||
2621 ssrc == 0)
2622 continue;
2623
2624 webrtc::StreamDataCounters sent;
2625 webrtc::StreamDataCounters received;
2626 if (engine_->vie()->rtp()->GetRtpStatistics(channel->channel_id(),
2627 sent, received) != 0) {
2628 LOG_RTCERR1(GetRTPStatistics, channel->channel_id());
2629 return false;
2630 }
2631 VideoReceiverInfo rinfo;
2632 rinfo.add_ssrc(ssrc);
2633 rinfo.bytes_rcvd = received.bytes;
2634 rinfo.packets_rcvd = received.packets;
2635 rinfo.packets_lost = -1;
2636 rinfo.packets_concealed = -1;
2637 rinfo.fraction_lost = -1; // from SentRTCP
2638 rinfo.frame_width = channel->render_adapter()->width();
2639 rinfo.frame_height = channel->render_adapter()->height();
2640 int fps = channel->render_adapter()->framerate();
2641 rinfo.framerate_decoded = fps;
2642 rinfo.framerate_output = fps;
2643 rinfo.capture_start_ntp_time_ms =
2644 channel->render_adapter()->capture_start_ntp_time_ms();
2645 channel->decoder_observer()->ExportTo(&rinfo);
2646
2647 webrtc::RtcpPacketTypeCounter rtcp_sent;
2648 webrtc::RtcpPacketTypeCounter rtcp_received;
2649 if (engine()->vie()->rtp()->GetRtcpPacketTypeCounters(
2650 channel->channel_id(), &rtcp_sent, &rtcp_received) == 0) {
2651 rinfo.firs_sent = rtcp_sent.fir_packets;
2652 rinfo.plis_sent = rtcp_sent.pli_packets;
2653 rinfo.nacks_sent = rtcp_sent.nack_packets;
2654 } else {
2655 rinfo.firs_sent = -1;
2656 rinfo.plis_sent = -1;
2657 rinfo.nacks_sent = -1;
2658 LOG_RTCERR1(GetRtcpPacketTypeCounters, channel->channel_id());
2659 }
2660
2661 // Get our locally created statistics of the received RTP stream.
2662 webrtc::RtcpStatistics incoming_stream_rtcp_stats;
2663 int incoming_stream_rtt_ms;
2664 if (engine_->vie()->rtp()->GetReceiveChannelRtcpStatistics(
2665 channel->channel_id(),
2666 incoming_stream_rtcp_stats,
2667 incoming_stream_rtt_ms) == 0) {
2668 // Convert Q8 to float.
2669 rinfo.packets_lost = incoming_stream_rtcp_stats.cumulative_lost;
2670 rinfo.fraction_lost = static_cast<float>(
2671 incoming_stream_rtcp_stats.fraction_lost) / (1 << 8);
2672 }
2673 info->receivers.push_back(rinfo);
2674 }
2675 unsigned int estimated_recv_bandwidth = 0;
2676 if (!recv_channels_.empty()) {
2677 // GetEstimatedReceiveBandwidth returns the estimated bandwidth for all
2678 // video engine channels in a channel group. Any valid channel id will do as
2679 // it is only used to access the right group of channels.
2680 const int channel_id = recv_channels_.begin()->second->channel_id();
2681 // Gets the estimated receive bandwidth for the MediaChannel.
2682 if (engine_->vie()->rtp()->GetEstimatedReceiveBandwidth(
2683 channel_id, &estimated_recv_bandwidth) != 0) {
2684 LOG_RTCERR1(GetEstimatedReceiveBandwidth, channel_id);
2685 }
2686 }
2687
2688 // Build BandwidthEstimationInfo.
2689 // TODO(zhurunz): Add real unittest for this.
2690 BandwidthEstimationInfo bwe;
2691
2692 // TODO(jiayl): remove the condition when the necessary changes are available
2693 // outside the dev branch.
2694 if (options.include_received_propagation_stats) {
2695 webrtc::ReceiveBandwidthEstimatorStats additional_stats;
2696 // Only call for the default channel because the returned stats are
2697 // collected for all the channels using the same estimator.
2698 if (engine_->vie()->rtp()->GetReceiveBandwidthEstimatorStats(
2699 recv_channels_[0]->channel_id(), &additional_stats) == 0) {
2700 bwe.total_received_propagation_delta_ms =
2701 additional_stats.total_propagation_time_delta_ms;
2702 bwe.recent_received_propagation_delta_ms.swap(
2703 additional_stats.recent_propagation_time_delta_ms);
2704 bwe.recent_received_packet_group_arrival_time_ms.swap(
2705 additional_stats.recent_arrival_time_ms);
2706 }
2707 }
2708
2709 engine_->vie()->rtp()->GetPacerQueuingDelayMs(
2710 recv_channels_[0]->channel_id(), &bwe.bucket_delay);
2711
2712 // Calculations done above per send/receive stream.
2713 bwe.actual_enc_bitrate = video_bitrate_sent;
2714 bwe.transmit_bitrate = total_bitrate_sent;
2715 bwe.retransmit_bitrate = nack_bitrate_sent;
2716 bwe.available_send_bandwidth = estimated_send_bandwidth;
2717 bwe.available_recv_bandwidth = estimated_recv_bandwidth;
2718 bwe.target_enc_bitrate = target_enc_bitrate;
2719
2720 info->bw_estimations.push_back(bwe);
2721
2722 return true;
2723 }
2724
2725 bool WebRtcVideoMediaChannel::SetCapturer(uint32 ssrc,
2726 VideoCapturer* capturer) {
2727 ASSERT(ssrc != 0);
2728 if (!capturer) {
2729 return RemoveCapturer(ssrc);
2730 }
2731 WebRtcVideoChannelSendInfo* send_channel = GetSendChannel(ssrc);
2732 if (!send_channel) {
2733 return false;
2734 }
2735 VideoCapturer* old_capturer = send_channel->video_capturer();
2736 MaybeDisconnectCapturer(old_capturer);
2737
2738 send_channel->set_video_capturer(capturer, engine()->vie());
2739 MaybeConnectCapturer(capturer);
2740 if (!capturer->IsScreencast() && ratio_w_ != 0 && ratio_h_ != 0) {
2741 capturer->UpdateAspectRatio(ratio_w_, ratio_h_);
2742 }
2743 const int64 timestamp = send_channel->local_stream_info()->time_stamp();
2744 if (send_codec_) {
2745 QueueBlackFrame(ssrc, timestamp, send_codec_->maxFramerate);
2746 }
2747 return true;
2748 }
2749
2750 bool WebRtcVideoMediaChannel::RequestIntraFrame() {
2751 // There is no API exposed to the application to request a key frame;
2752 // ViE does this internally when there are errors from the decoder.
2753 return false;
2754 }
2755
2756 void WebRtcVideoMediaChannel::OnPacketReceived(
2757 talk_base::Buffer* packet, const talk_base::PacketTime& packet_time) {
2758 // Pick which channel to send this packet to. If this packet doesn't match
2759 // any multiplexed streams, just send it to the default channel. Otherwise,
2760 // send it to the specific decoder instance for that stream.
2761 uint32 ssrc = 0;
2762 if (!GetRtpSsrc(packet->data(), packet->length(), &ssrc))
2763 return;
2764 int processing_channel = GetRecvChannelNum(ssrc);
2765 if (processing_channel == -1) {
2766 // Allocate an unsignalled recv channel for processing in conference mode.
2767 if (!InConferenceMode()) {
2768 // If we can't find or allocate one, use the default.
2769 processing_channel = video_channel();
2770 } else if (!CreateUnsignalledRecvChannel(ssrc, &processing_channel)) {
2771 // If we can't create an unsignalled recv channel, drop the packet in
2772 // conference mode.
2773 return;
2774 }
2775 }
2776
2777 engine()->vie()->network()->ReceivedRTPPacket(
2778 processing_channel,
2779 packet->data(),
2780 static_cast<int>(packet->length()),
2781 webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
2782 }
2783
2784 void WebRtcVideoMediaChannel::OnRtcpReceived(
2785 talk_base::Buffer* packet, const talk_base::PacketTime& packet_time) {
2786 // Sending channels need all RTCP packets with feedback information.
2787 // Even sender reports can contain attached report blocks.
2788 // Receiving channels need sender reports in order to create
2789 // correct receiver reports.
2790
2791 uint32 ssrc = 0;
2792 if (!GetRtcpSsrc(packet->data(), packet->length(), &ssrc)) {
2793 LOG(LS_WARNING) << "Failed to parse SSRC from received RTCP packet";
2794 return;
2795 }
2796 int type = 0;
2797 if (!GetRtcpType(packet->data(), packet->length(), &type)) {
2798 LOG(LS_WARNING) << "Failed to parse type from received RTCP packet";
2799 return;
2800 }
2801
2802 // If it is a sender report, find the channel that is listening.
2803 if (type == kRtcpTypeSR) {
2804 int which_channel = GetRecvChannelNum(ssrc);
2805 if (which_channel != -1 && !IsDefaultChannel(which_channel)) {
2806 engine_->vie()->network()->ReceivedRTCPPacket(
2807 which_channel,
2808 packet->data(),
2809 static_cast<int>(packet->length()));
2810 }
2811 }
2812 // An SR may contain RRs, and any RR entry may correspond to any one of the
2813 // send channels. So all RTCP packets must be forwarded to all send channels.
2814 // ViE will filter out RRs internally.
2815 for (SendChannelMap::iterator iter = send_channels_.begin();
2816 iter != send_channels_.end(); ++iter) {
2817 WebRtcVideoChannelSendInfo* send_channel = iter->second;
2818 int channel_id = send_channel->channel_id();
2819 engine_->vie()->network()->ReceivedRTCPPacket(
2820 channel_id,
2821 packet->data(),
2822 static_cast<int>(packet->length()));
2823 }
2824 }
2825
2826 void WebRtcVideoMediaChannel::OnReadyToSend(bool ready) {
2827 SetNetworkTransmissionState(ready);
2828 }
2829
2830 bool WebRtcVideoMediaChannel::MuteStream(uint32 ssrc, bool muted) {
2831 WebRtcVideoChannelSendInfo* send_channel = GetSendChannel(ssrc);
2832 if (!send_channel) {
2833 LOG(LS_ERROR) << "The specified ssrc " << ssrc << " is not in use.";
2834 return false;
2835 }
2836 send_channel->set_muted(muted);
2837 return true;
2838 }
2839
2840 bool WebRtcVideoMediaChannel::SetRecvRtpHeaderExtensions(
2841 const std::vector<RtpHeaderExtension>& extensions) {
2842 if (receive_extensions_ == extensions) {
2843 return true;
2844 }
2845
2846 const RtpHeaderExtension* offset_extension =
2847 FindHeaderExtension(extensions, kRtpTimestampOffsetHeaderExtension);
2848 const RtpHeaderExtension* send_time_extension =
2849 FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
2850
2851 // Loop through all receive channels and enable/disable the extensions.
2852 for (RecvChannelMap::iterator channel_it = recv_channels_.begin();
2853 channel_it != recv_channels_.end(); ++channel_it) {
2854 int channel_id = channel_it->second->channel_id();
2855 if (!SetHeaderExtension(
2856 &webrtc::ViERTP_RTCP::SetReceiveTimestampOffsetStatus, channel_id,
2857 offset_extension)) {
2858 return false;
2859 }
2860 if (!SetHeaderExtension(
2861 &webrtc::ViERTP_RTCP::SetReceiveAbsoluteSendTimeStatus, channel_id,
2862 send_time_extension)) {
2863 return false;
2864 }
2865 }
2866
2867 receive_extensions_ = extensions;
2868 return true;
2869 }
2870
2871 bool WebRtcVideoMediaChannel::SetSendRtpHeaderExtensions(
2872 const std::vector<RtpHeaderExtension>& extensions) {
2873 if (send_extensions_ == extensions) {
2874 return true;
2875 }
2876
2877 const RtpHeaderExtension* offset_extension =
2878 FindHeaderExtension(extensions, kRtpTimestampOffsetHeaderExtension);
2879 const RtpHeaderExtension* send_time_extension =
2880 FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
2881
2882 // Loop through all send channels and enable/disable the extensions.
2883 for (SendChannelMap::iterator channel_it = send_channels_.begin();
2884 channel_it != send_channels_.end(); ++channel_it) {
2885 int channel_id = channel_it->second->channel_id();
2886 if (!SetHeaderExtension(
2887 &webrtc::ViERTP_RTCP::SetSendTimestampOffsetStatus, channel_id,
2888 offset_extension)) {
2889 return false;
2890 }
2891 if (!SetHeaderExtension(
2892 &webrtc::ViERTP_RTCP::SetSendAbsoluteSendTimeStatus, channel_id,
2893 send_time_extension)) {
2894 return false;
2895 }
2896 }
2897
2898 if (send_time_extension) {
2899 // For video RTP packets, we would like to update the AbsoluteSendTime header
2900 // extension closer to the network, at the socket level, before sending.
2901 // Push the extension id down to the socket layer.
2902 MediaChannel::SetOption(NetworkInterface::ST_RTP,
2903 talk_base::Socket::OPT_RTP_SENDTIME_EXTN_ID,
2904 send_time_extension->id);
2905 }
2906
2907 send_extensions_ = extensions;
2908 return true;
2909 }
2910
2911 int WebRtcVideoMediaChannel::GetRtpSendTimeExtnId() const {
2912 const RtpHeaderExtension* send_time_extension = FindHeaderExtension(
2913 send_extensions_, kRtpAbsoluteSenderTimeHeaderExtension);
2914 if (send_time_extension) {
2915 return send_time_extension->id;
2916 }
2917 return -1;
2918 }
2919
2920 bool WebRtcVideoMediaChannel::SetStartSendBandwidth(int bps) {
2921 LOG(LS_INFO) << "WebRtcVideoMediaChannel::SetStartSendBandwidth";
2922
2923 if (!send_codec_) {
2924 LOG(LS_INFO) << "The send codec has not been set up yet";
2925 return true;
2926 }
2927
2928 // On success, SetSendCodec() will reset |send_start_bitrate_| to |bps/1000|,
2929 // by calling MaybeChangeBitrates. That method will also clamp the
2930 // start bitrate between min and max, consistent with the override behavior
2931 // in SetMaxSendBandwidth.
2932 webrtc::VideoCodec new_codec = *send_codec_;
2933 if (BitrateIsSet(bps)) {
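// |bps| is in bits per second while webrtc::VideoCodec bitrates are expressed
// in kbps, hence the division by 1000.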
2934 new_codec.startBitrate = bps / 1000;
2935 }
2936 return SetSendCodec(new_codec);
2937 }
2938
2939 bool WebRtcVideoMediaChannel::SetMaxSendBandwidth(int bps) {
2940 LOG(LS_INFO) << "WebRtcVideoMediaChannel::SetMaxSendBandwidth";
2941
2942 if (!send_codec_) {
2943 LOG(LS_INFO) << "The send codec has not been set up yet";
2944 return true;
2945 }
2946
2947 webrtc::VideoCodec new_codec = *send_codec_;
2948 if (BitrateIsSet(bps)) {
2949 new_codec.maxBitrate = bps / 1000;
2950 }
2951 if (!SetSendCodec(new_codec)) {
2952 return false;
2953 }
2954 LogSendCodecChange("SetMaxSendBandwidth()");
2955
2956 return true;
2957 }
2958
2959 bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) {
2960 // Always accept options that are unchanged.
2961 if (options_ == options) {
2962 return true;
2963 }
2964
2965 // Trigger SetSendCodec to set correct noise reduction state if the option has
2966 // changed.
2967 bool denoiser_changed = options.video_noise_reduction.IsSet() &&
2968 (options_.video_noise_reduction != options.video_noise_reduction);
2969
2970 bool leaky_bucket_changed = options.video_leaky_bucket.IsSet() &&
2971 (options_.video_leaky_bucket != options.video_leaky_bucket);
2972
2973 bool buffer_latency_changed = options.buffered_mode_latency.IsSet() &&
2974 (options_.buffered_mode_latency != options.buffered_mode_latency);
2975
2976 bool dscp_option_changed = (options_.dscp != options.dscp);
2977
2978 bool suspend_below_min_bitrate_changed =
2979 options.suspend_below_min_bitrate.IsSet() &&
2980 (options_.suspend_below_min_bitrate != options.suspend_below_min_bitrate);
2981
2982 bool conference_mode_turned_off = false;
2983 if (options_.conference_mode.IsSet() && options.conference_mode.IsSet() &&
2984 options_.conference_mode.GetWithDefaultIfUnset(false) &&
2985 !options.conference_mode.GetWithDefaultIfUnset(false)) {
2986 conference_mode_turned_off = true;
2987 }
2988
2989 bool improved_wifi_bwe_changed =
2990 options.use_improved_wifi_bandwidth_estimator.IsSet() &&
2991 options_.use_improved_wifi_bandwidth_estimator !=
2992 options.use_improved_wifi_bandwidth_estimator;
2993
2994 #ifdef USE_WEBRTC_DEV_BRANCH
2995 bool payload_padding_changed = options.use_payload_padding.IsSet() &&
2996 options_.use_payload_padding != options.use_payload_padding;
2997 #endif
2998
2999
3000 // Save the options, to be interpreted where appropriate.
3001 // Use options_.SetAll() instead of assignment so that unset value in options
3002 // will not overwrite the previous option value.
3003 options_.SetAll(options);
3004
3005 // Set CPU options for all send channels.
3006 for (SendChannelMap::iterator iter = send_channels_.begin();
3007 iter != send_channels_.end(); ++iter) {
3008 WebRtcVideoChannelSendInfo* send_channel = iter->second;
3009 send_channel->ApplyCpuOptions(options_);
3010 }
3011
3012 if (send_codec_) {
3013 bool reset_send_codec_needed = denoiser_changed;
3014 webrtc::VideoCodec new_codec = *send_codec_;
3015
3016 // TODO(pthatcher): Remove this. We don't need 4 ways to set bitrates.
3017 bool lower_min_bitrate;
3018 if (options.lower_min_bitrate.Get(&lower_min_bitrate)) {
3019 new_codec.minBitrate = kLowerMinBitrate;
3020 reset_send_codec_needed = true;
3021 }
3022
3023 if (conference_mode_turned_off) {
3024 // This is a special case for turning conference mode off.
3025 // Max bitrate should go back to the default maximum value instead
3026 // of the current maximum.
3027 new_codec.maxBitrate = kAutoBandwidth;
3028 reset_send_codec_needed = true;
3029 }
3030
3031 // TODO(pthatcher): Remove this. We don't need 4 ways to set bitrates.
3032 int new_start_bitrate;
3033 if (options.video_start_bitrate.Get(&new_start_bitrate)) {
3034 new_codec.startBitrate = new_start_bitrate;
3035 reset_send_codec_needed = true;
3036 }
3037
3038
3039 LOG(LS_INFO) << "Reset send codec needed is enabled? "
3040 << reset_send_codec_needed;
3041 if (reset_send_codec_needed) {
3042 if (!SetSendCodec(new_codec)) {
3043 return false;
3044 }
3045 LogSendCodecChange("SetOptions()");
3046 }
3047 }
3048
3049 if (leaky_bucket_changed) {
3050 bool enable_leaky_bucket =
3051 options_.video_leaky_bucket.GetWithDefaultIfUnset(true);
3052 LOG(LS_INFO) << "Leaky bucket is enabled? " << enable_leaky_bucket;
3053 for (SendChannelMap::iterator it = send_channels_.begin();
3054 it != send_channels_.end(); ++it) {
3055 // TODO(holmer): This API will be removed as we move to the new
3056 // webrtc::Call API. We should clean up this experiment when that is
3057 // happening.
3058 if (engine()->vie()->rtp()->SetTransmissionSmoothingStatus(
3059 it->second->channel_id(), enable_leaky_bucket) != 0) {
3060 LOG_RTCERR2(SetTransmissionSmoothingStatus, it->second->channel_id(),
3061 enable_leaky_bucket);
3062 }
3063 }
3064 }
3065 if (buffer_latency_changed) {
3066 int buffer_latency =
3067 options_.buffered_mode_latency.GetWithDefaultIfUnset(
3068 cricket::kBufferedModeDisabled);
3069 LOG(LS_INFO) << "Buffer latency is " << buffer_latency;
3070 for (SendChannelMap::iterator it = send_channels_.begin();
3071 it != send_channels_.end(); ++it) {
3072 if (engine()->vie()->rtp()->SetSenderBufferingMode(
3073 it->second->channel_id(), buffer_latency) != 0) {
3074 LOG_RTCERR2(SetSenderBufferingMode, it->second->channel_id(),
3075 buffer_latency);
3076 }
3077 }
3078 for (RecvChannelMap::iterator it = recv_channels_.begin();
3079 it != recv_channels_.end(); ++it) {
3080 if (engine()->vie()->rtp()->SetReceiverBufferingMode(
3081 it->second->channel_id(), buffer_latency) != 0) {
3082 LOG_RTCERR2(SetReceiverBufferingMode, it->second->channel_id(),
3083 buffer_latency);
3084 }
3085 }
3086 }
3087 if (dscp_option_changed) {
3088 talk_base::DiffServCodePoint dscp = talk_base::DSCP_DEFAULT;
3089 if (options_.dscp.GetWithDefaultIfUnset(false))
3090 dscp = kVideoDscpValue;
3091 LOG(LS_INFO) << "DSCP is " << dscp;
3092 if (MediaChannel::SetDscp(dscp) != 0) {
3093 LOG(LS_WARNING) << "Failed to set DSCP settings for video channel";
3094 }
3095 }
3096 if (suspend_below_min_bitrate_changed) {
3097 if (options_.suspend_below_min_bitrate.GetWithDefaultIfUnset(false)) {
3098 LOG(LS_INFO) << "Suspend below min bitrate enabled.";
3099 for (SendChannelMap::iterator it = send_channels_.begin();
3100 it != send_channels_.end(); ++it) {
3101 engine()->vie()->codec()->SuspendBelowMinBitrate(
3102 it->second->channel_id());
3103 }
3104 } else {
3105 LOG(LS_WARNING) << "Cannot disable video suspension once it is enabled";
3106 }
3107 }
3108 if (improved_wifi_bwe_changed) {
3109 LOG(LS_INFO) << "Improved WIFI BWE called.";
3110 webrtc::Config config;
3111 config.Set(new webrtc::AimdRemoteRateControl(
3112 options_.use_improved_wifi_bandwidth_estimator
3113 .GetWithDefaultIfUnset(false)));
3114 for (SendChannelMap::iterator it = send_channels_.begin();
3115 it != send_channels_.end(); ++it) {
3116 engine()->vie()->network()->SetBandwidthEstimationConfig(
3117 it->second->channel_id(), config);
3118 }
3119 }
3120 #ifdef USE_WEBRTC_DEV_BRANCH
3121 if (payload_padding_changed) {
3122 LOG(LS_INFO) << "Payload-based padding called.";
3123 for (SendChannelMap::iterator it = send_channels_.begin();
3124 it != send_channels_.end(); ++it) {
3125 engine()->vie()->rtp()->SetPadWithRedundantPayloads(
3126 it->second->channel_id(),
3127 options_.use_payload_padding.GetWithDefaultIfUnset(false));
3128 }
3129 }
3130 #endif
3131 webrtc::CpuOveruseOptions overuse_options;
3132 if (GetCpuOveruseOptions(options_, &overuse_options)) {
3133 for (SendChannelMap::iterator it = send_channels_.begin();
3134 it != send_channels_.end(); ++it) {
3135 if (engine()->vie()->base()->SetCpuOveruseOptions(
3136 it->second->channel_id(), overuse_options) != 0) {
3137 LOG_RTCERR1(SetCpuOveruseOptions, it->second->channel_id());
3138 }
3139 }
3140 }
3141 return true;
3142 }
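// Hedged usage sketch for SetOptions() (illustrative only, not from the
// original source): callers typically toggle a single option and rely on the
// SetAll() merge above to preserve everything else, e.g.:
//   cricket::VideoOptions opts;
//   opts.video_leaky_bucket.Set(false);
//   channel->SetOptions(opts);  // Other previously set options are retained.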
3143
3144 void WebRtcVideoMediaChannel::SetInterface(NetworkInterface* iface) {
3145 MediaChannel::SetInterface(iface);
3146 // Set the RTP receive buffer to a bigger size.
3147 MediaChannel::SetOption(NetworkInterface::ST_RTP,
3148 talk_base::Socket::OPT_RCVBUF,
3149 kVideoRtpBufferSize);
3150
3151 // TODO(sriniv): Remove or re-enable this.
3152 // As part of b/8030474, the send-buffer size is now controlled through
3153 // portallocator flags.
3154 // network_interface_->SetOption(NetworkInterface::ST_RTP,
3155 // talk_base::Socket::OPT_SNDBUF,
3156 // kVideoRtpBufferSize);
3157 }
3158
3159 void WebRtcVideoMediaChannel::UpdateAspectRatio(int ratio_w, int ratio_h) {
3160 ASSERT(ratio_w != 0);
3161 ASSERT(ratio_h != 0);
3162 ratio_w_ = ratio_w;
3163 ratio_h_ = ratio_h;
3164 // For now assume that all streams want the same aspect ratio.
3165 // TODO(hellner): remove the need for this assumption.
3166 for (SendChannelMap::iterator iter = send_channels_.begin();
3167 iter != send_channels_.end(); ++iter) {
3168 WebRtcVideoChannelSendInfo* send_channel = iter->second;
3169 VideoCapturer* capturer = send_channel->video_capturer();
3170 if (capturer) {
3171 capturer->UpdateAspectRatio(ratio_w, ratio_h);
3172 }
3173 }
3174 }
3175
3176 bool WebRtcVideoMediaChannel::GetRenderer(uint32 ssrc,
3177 VideoRenderer** renderer) {
3178 RecvChannelMap::const_iterator it = recv_channels_.find(ssrc);
3179 if (it == recv_channels_.end()) {
3180 if (first_receive_ssrc_ == ssrc &&
3181 recv_channels_.find(0) != recv_channels_.end()) {
3182 LOG(LS_INFO) << " GetRenderer " << ssrc
3183 << " reuse default renderer #"
3184 << vie_channel_;
3185 *renderer = recv_channels_[0]->render_adapter()->renderer();
3186 return true;
3187 }
3188 return false;
3189 }
3190
3191 *renderer = it->second->render_adapter()->renderer();
3192 return true;
3193 }
3194
3195 bool WebRtcVideoMediaChannel::GetVideoAdapter(
3196 uint32 ssrc, CoordinatedVideoAdapter** video_adapter) {
3197 SendChannelMap::iterator it = send_channels_.find(ssrc);
3198 if (it == send_channels_.end()) {
3199 return false;
3200 }
3201 *video_adapter = it->second->video_adapter();
3202 return true;
3203 }
3204
3205 void WebRtcVideoMediaChannel::SendFrame(VideoCapturer* capturer,
3206 const VideoFrame* frame) {
3207 // If the |capturer| is registered to any send channel, then send the frame
3208 // to those send channels.
3209 bool capturer_is_channel_owned = false;
3210 for (SendChannelMap::iterator iter = send_channels_.begin();
3211 iter != send_channels_.end(); ++iter) {
3212 WebRtcVideoChannelSendInfo* send_channel = iter->second;
3213 if (send_channel->video_capturer() == capturer) {
3214 SendFrame(send_channel, frame, capturer->IsScreencast());
3215 capturer_is_channel_owned = true;
3216 }
3217 }
3218 if (capturer_is_channel_owned) {
3219 return;
3220 }
3221
3222 // TODO(hellner): Remove the for loop below once captured frames no longer
3223 // come from the engine, i.e. once the engine no longer owns a capturer.
3224 for (SendChannelMap::iterator iter = send_channels_.begin();
3225 iter != send_channels_.end(); ++iter) {
3226 WebRtcVideoChannelSendInfo* send_channel = iter->second;
3227 if (send_channel->video_capturer() == NULL) {
3228 SendFrame(send_channel, frame, capturer->IsScreencast());
3229 }
3230 }
3231 }
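// Note on the dispatch above (descriptive): a frame from a channel-owned
// capturer is delivered only to the send channels that registered that
// capturer, while frames from an engine-owned capturer fall through to every
// send channel that has no capturer of its own.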
3232
3233 bool WebRtcVideoMediaChannel::SendFrame(
3234 WebRtcVideoChannelSendInfo* send_channel,
3235 const VideoFrame* frame,
3236 bool is_screencast) {
3237 if (!send_channel) {
3238 return false;
3239 }
3240 if (!send_codec_) {
3241 // Send codec has not been set. No reason to process the frame any further.
3242 return false;
3243 }
3244 const VideoFormat& video_format = send_channel->video_format();
3245 // Drop the frame if the video format has been explicitly set to 0x0.
3246 const bool video_format_set = video_format != cricket::VideoFormat();
3247 if (video_format_set &&
3248 (video_format.width == 0 && video_format.height == 0)) {
3249 return true;
3250 }
3251
3252 // Check whether we need to reset the ViE send codec.
3253 if (!MaybeResetVieSendCodec(send_channel,
3254 static_cast<int>(frame->GetWidth()),
3255 static_cast<int>(frame->GetHeight()),
3256 is_screencast, NULL)) {
3257 LOG(LS_ERROR) << "MaybeResetVieSendCodec failed with "
3258 << frame->GetWidth() << "x" << frame->GetHeight();
3259 return false;
3260 }
3261 const VideoFrame* frame_out = frame;
3262 talk_base::scoped_ptr<VideoFrame> processed_frame;
3263 // Disable muting for screencast.
3264 const bool mute = (send_channel->muted() && !is_screencast);
3265 send_channel->ProcessFrame(*frame_out, mute, processed_frame.use());
3266 if (processed_frame) {
3267 frame_out = processed_frame.get();
3268 }
3269
3270 webrtc::ViEVideoFrameI420 frame_i420;
3271 // TODO(ronghuawu): Update the webrtc::ViEVideoFrameI420
3272 // to use const unsigned char*
3273 frame_i420.y_plane = const_cast<unsigned char*>(frame_out->GetYPlane());
3274 frame_i420.u_plane = const_cast<unsigned char*>(frame_out->GetUPlane());
3275 frame_i420.v_plane = const_cast<unsigned char*>(frame_out->GetVPlane());
3276 frame_i420.y_pitch = frame_out->GetYPitch();
3277 frame_i420.u_pitch = frame_out->GetUPitch();
3278 frame_i420.v_pitch = frame_out->GetVPitch();
3279 frame_i420.width = static_cast<uint16>(frame_out->GetWidth());
3280 frame_i420.height = static_cast<uint16>(frame_out->GetHeight());
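// Illustrative note on the I420 mapping above (assumed layout, not asserted
// by this code): for a 640x360 frame the Y plane is 640x360 and the U/V
// planes are 320x180, so y_pitch is typically >= 640 and u_pitch/v_pitch
// >= 320; the pitches come straight from the wrapped frame and may include
// row padding.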
3281
3282 int64 timestamp_ntp_ms = 0;
3283 // TODO(justinlin): Reenable after Windows issues with clock drift are fixed.
3284 // Currently reverted to old behavior of discarding capture timestamp.
3285 #if 0
3286 static const int kTimestampDeltaInSecondsForWarning = 2;
3287
3288 // If the frame timestamp is 0, we will use the deliver time.
3289 const int64 frame_timestamp = frame->GetTimeStamp();
3290 if (frame_timestamp != 0) {
3291 if (abs(time(NULL) - frame_timestamp / talk_base::kNumNanosecsPerSec) >
3292 kTimestampDeltaInSecondsForWarning) {
3293 LOG(LS_WARNING) << "Frame timestamp differs by more than "
3294 << kTimestampDeltaInSecondsForWarning << " seconds from "
3295 << "current Unix timestamp.";
3296 }
3297
3298 timestamp_ntp_ms =
3299 talk_base::UnixTimestampNanosecsToNtpMillisecs(frame_timestamp);
3300 }
3301 #endif
3302
3303 return send_channel->external_capture()->IncomingFrameI420(
3304 frame_i420, timestamp_ntp_ms) == 0;
3305 }
3306
3307 bool WebRtcVideoMediaChannel::CreateChannel(uint32 ssrc_key,
3308 MediaDirection direction,
3309 int* channel_id) {
3310 // There are three types of channels: send-only, receive-only, and
3311 // send-and-receive. The send-and-receive channel is the default channel,
3312 // and there is only one of it. All other channels that are created are
3313 // associated with the default channel, which must exist. The default
3314 // channel id is stored in |vie_channel_|. All channels need to know about
3315 // the default channel to properly handle REMB, which is why there are
3316 // different ViE create channel calls.
3317 // For the default channel the local and remote ssrc key is 0. However, it
3318 // may have a non-zero local and/or remote ssrc depending on whether it is
3319 // currently sending and/or receiving.
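// Summary sketch of the ViE calls chosen below (direction -> call):
//   MD_SENDRECV -> CreateChannel(id)                  (the default channel)
//   MD_SEND     -> CreateChannel(id, vie_channel_)    (tied to the default)
//   MD_RECV     -> CreateReceiveChannel(id, vie_channel_)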
3320 if ((vie_channel_ == -1 || direction == MD_SENDRECV) &&
3321 (!send_channels_.empty() || !recv_channels_.empty())) {
3322 ASSERT(false);
3323 return false;
3324 }
3325
3326 *channel_id = -1;
3327 if (direction == MD_RECV) {
3328 // All receive channels are associated with the default channel |vie_channel_|.
3329 if (engine_->vie()->base()->CreateReceiveChannel(*channel_id,
3330 vie_channel_) != 0) {
3331 LOG_RTCERR2(CreateReceiveChannel, *channel_id, vie_channel_);
3332 return false;
3333 }
3334 } else if (direction == MD_SEND) {
3335 if (engine_->vie()->base()->CreateChannel(*channel_id,
3336 vie_channel_) != 0) {
3337 LOG_RTCERR2(CreateChannel, *channel_id, vie_channel_);
3338 return false;
3339 }
3340 } else {
3341 ASSERT(direction == MD_SENDRECV);
3342 if (engine_->vie()->base()->CreateChannel(*channel_id) != 0) {
3343 LOG_RTCERR1(CreateChannel, *channel_id);
3344 return false;
3345 }
3346 }
3347 if (!ConfigureChannel(*channel_id, direction, ssrc_key)) {
3348 engine_->vie()->base()->DeleteChannel(*channel_id);
3349 *channel_id = -1;
3350 return false;
3351 }
3352
3353 return true;
3354 }
3355
3356 bool WebRtcVideoMediaChannel::CreateUnsignalledRecvChannel(
3357 uint32 ssrc_key, int* out_channel_id) {
3358 int unsignalled_recv_channel_limit =
3359 options_.unsignalled_recv_stream_limit.GetWithDefaultIfUnset(
3360 kNumDefaultUnsignalledVideoRecvStreams);
3361 if (num_unsignalled_recv_channels_ >= unsignalled_recv_channel_limit) {
3362 return false;
3363 }
3364 if (!CreateChannel(ssrc_key, MD_RECV, out_channel_id)) {
3365 return false;
3366 }
3367 // TODO(tvsriram): Support dynamic sizing of unsignalled recv channels.
3368 num_unsignalled_recv_channels_++;
3369 return true;
3370 }
3371
3372 bool WebRtcVideoMediaChannel::ConfigureChannel(int channel_id,
3373 MediaDirection direction,
3374 uint32 ssrc_key) {
3375 const bool receiving = (direction == MD_RECV) || (direction == MD_SENDRECV);
3376 const bool sending = (direction == MD_SEND) || (direction == MD_SENDRECV);
3377 // Register external transport.
3378 if (engine_->vie()->network()->RegisterSendTransport(
3379 channel_id, *this) != 0) {
3380 LOG_RTCERR1(RegisterSendTransport, channel_id);
3381 return false;
3382 }
3383
3384 // Set MTU.
3385 if (engine_->vie()->network()->SetMTU(channel_id, kVideoMtu) != 0) {
3386 LOG_RTCERR2(SetMTU, channel_id, kVideoMtu);
3387 return false;
3388 }
3389 // Turn on RTCP and loss feedback reporting.
3390 if (engine()->vie()->rtp()->SetRTCPStatus(
3391 channel_id, webrtc::kRtcpCompound_RFC4585) != 0) {
3392 LOG_RTCERR2(SetRTCPStatus, channel_id, webrtc::kRtcpCompound_RFC4585);
3393 return false;
3394 }
3395 // Enable pli as key frame request method.
3396 if (engine_->vie()->rtp()->SetKeyFrameRequestMethod(
3397 channel_id, webrtc::kViEKeyFrameRequestPliRtcp) != 0) {
3398 LOG_RTCERR2(SetKeyFrameRequestMethod,
3399 channel_id, webrtc::kViEKeyFrameRequestPliRtcp);
3400 return false;
3401 }
3402 if (!SetNackFec(channel_id, send_red_type_, send_fec_type_, nack_enabled_)) {
3403 // Logged in SetNackFec. Don't spam the logs.
3404 return false;
3405 }
3406 // Note that receiving must always be configured before sending to ensure
3407 // that send and receive channels are configured correctly
3408 // (ConfigureReceiving assumes the channel is not yet sending).
3409 if (receiving) {
3410 if (!ConfigureReceiving(channel_id, ssrc_key)) {
3411 return false;
3412 }
3413 }
3414 if (sending) {
3415 if (!ConfigureSending(channel_id, ssrc_key)) {
3416 return false;
3417 }
3418 }
3419
3420 // Start receiving for both receive and send channels so that we get incoming
3421 // RTP (if receiving) as well as RTCP feedback (if sending).
3422 if (engine()->vie()->base()->StartReceive(channel_id) != 0) {
3423 LOG_RTCERR1(StartReceive, channel_id);
3424 return false;
3425 }
3426
3427 return true;
3428 }
3429
3430 bool WebRtcVideoMediaChannel::ConfigureReceiving(int channel_id,
3431 uint32 remote_ssrc_key) {
3432 // Make sure that an SSRC/key isn't registered more than once.
3433 if (recv_channels_.find(remote_ssrc_key) != recv_channels_.end()) {
3434 return false;
3435 }
3436 // Connect the voice channel, if there is one.
3437 // TODO(perkj): A/V is synchronized by the receiving channel, so we need to
3438 // know the SSRC of the remote audio channel in order to fetch the correct
3439 // webrtc VoiceEngine channel. For now, only sync the default channel used
3440 // in 1-1 calls.
3441 if (remote_ssrc_key == 0 && voice_channel_) {
3442 WebRtcVoiceMediaChannel* voice_channel =
3443 static_cast<WebRtcVoiceMediaChannel*>(voice_channel_);
3444 if (engine_->vie()->base()->ConnectAudioChannel(
3445 vie_channel_, voice_channel->voe_channel()) != 0) {
3446 LOG_RTCERR2(ConnectAudioChannel, channel_id,
3447 voice_channel->voe_channel());
3448 LOG(LS_WARNING) << "A/V not synchronized";
3449 // Not a fatal error.
3450 }
3451 }
3452
3453 talk_base::scoped_ptr<WebRtcVideoChannelRecvInfo> channel_info(
3454 new WebRtcVideoChannelRecvInfo(channel_id));
3455
3456 // Install a render adapter.
3457 if (engine_->vie()->render()->AddRenderer(channel_id,
3458 webrtc::kVideoI420, channel_info->render_adapter()) != 0) {
3459 LOG_RTCERR3(AddRenderer, channel_id, webrtc::kVideoI420,
3460 channel_info->render_adapter());
3461 return false;
3462 }
3463
3464
3465 if (engine_->vie()->rtp()->SetRembStatus(channel_id,
3466 kNotSending,
3467 remb_enabled_) != 0) {
3468 LOG_RTCERR3(SetRembStatus, channel_id, kNotSending, remb_enabled_);
3469 return false;
3470 }
3471
3472 if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetReceiveTimestampOffsetStatus,
3473 channel_id, receive_extensions_, kRtpTimestampOffsetHeaderExtension)) {
3474 return false;
3475 }
3476 if (!SetHeaderExtension(
3477 &webrtc::ViERTP_RTCP::SetReceiveAbsoluteSendTimeStatus, channel_id,
3478 receive_extensions_, kRtpAbsoluteSenderTimeHeaderExtension)) {
3479 return false;
3480 }
3481
3482 if (remote_ssrc_key != 0) {
3483 // Use the same SSRC as our default channel
3484 // (so the RTCP reports are correct).
3485 unsigned int send_ssrc = 0;
3486 webrtc::ViERTP_RTCP* rtp = engine()->vie()->rtp();
3487 if (rtp->GetLocalSSRC(vie_channel_, send_ssrc) == -1) {
3488 LOG_RTCERR2(GetLocalSSRC, vie_channel_, send_ssrc);
3489 return false;
3490 }
3491 if (rtp->SetLocalSSRC(channel_id, send_ssrc) == -1) {
3492 LOG_RTCERR2(SetLocalSSRC, channel_id, send_ssrc);
3493 return false;
3494 }
3495 } // Else this is the default channel and we don't change the SSRC.
3496
3497 // Disable color enhancement since it is a bit too aggressive.
3498 if (engine()->vie()->image()->EnableColorEnhancement(channel_id,
3499 false) != 0) {
3500 LOG_RTCERR1(EnableColorEnhancement, channel_id);
3501 return false;
3502 }
3503
3504 if (!SetReceiveCodecs(channel_info.get())) {
3505 return false;
3506 }
3507
3508 int buffer_latency =
3509 options_.buffered_mode_latency.GetWithDefaultIfUnset(
3510 cricket::kBufferedModeDisabled);
3511 if (buffer_latency != cricket::kBufferedModeDisabled) {
3512 if (engine()->vie()->rtp()->SetReceiverBufferingMode(
3513 channel_id, buffer_latency) != 0) {
3514 LOG_RTCERR2(SetReceiverBufferingMode, channel_id, buffer_latency);
3515 }
3516 }
3517
3518 if (render_started_) {
3519 if (engine_->vie()->render()->StartRender(channel_id) != 0) {
3520 LOG_RTCERR1(StartRender, channel_id);
3521 return false;
3522 }
3523 }
3524
3525 // Register decoder observer for incoming framerate and bitrate.
3526 if (engine()->vie()->codec()->RegisterDecoderObserver(
3527 channel_id, *channel_info->decoder_observer()) != 0) {
3528 LOG_RTCERR1(RegisterDecoderObserver, channel_info->decoder_observer());
3529 return false;
3530 }
3531
3532 recv_channels_[remote_ssrc_key] = channel_info.release();
3533 return true;
3534 }
3535
3536 bool WebRtcVideoMediaChannel::ConfigureSending(int channel_id,
3537 uint32 local_ssrc_key) {
3538 // The ssrc key can be zero or correspond to an SSRC.
3539 // Make sure the default channel isn't configured more than once.
3540 if (local_ssrc_key == 0 && send_channels_.find(0) != send_channels_.end()) {
3541 return false;
3542 }
3543 // Make sure that the SSRC is not already in use.
3544 uint32 dummy_key;
3545 if (GetSendChannelKey(local_ssrc_key, &dummy_key)) {
3546 return false;
3547 }
3548 int vie_capture = 0;
3549 webrtc::ViEExternalCapture* external_capture = NULL;
3550 // Register external capture.
3551 if (engine()->vie()->capture()->AllocateExternalCaptureDevice(
3552 vie_capture, external_capture) != 0) {
3553 LOG_RTCERR0(AllocateExternalCaptureDevice);
3554 return false;
3555 }
3556
3557 // Connect external capture.
3558 if (engine()->vie()->capture()->ConnectCaptureDevice(
3559 vie_capture, channel_id) != 0) {
3560 LOG_RTCERR2(ConnectCaptureDevice, vie_capture, channel_id);
3561 return false;
3562 }
3563 talk_base::scoped_ptr<WebRtcVideoChannelSendInfo> send_channel(
3564 new WebRtcVideoChannelSendInfo(channel_id, vie_capture,
3565 external_capture,
3566 engine()->cpu_monitor()));
3567 send_channel->ApplyCpuOptions(options_);
3568 send_channel->SignalCpuAdaptationUnable.connect(this,
3569 &WebRtcVideoMediaChannel::OnCpuAdaptationUnable);
3570
3571 webrtc::CpuOveruseOptions overuse_options;
3572 if (GetCpuOveruseOptions(options_, &overuse_options)) {
3573 if (engine()->vie()->base()->SetCpuOveruseOptions(channel_id,
3574 overuse_options) != 0) {
3575 LOG_RTCERR1(SetCpuOveruseOptions, channel_id);
3576 }
3577 }
3578
3579 // Register encoder observer for outgoing framerate and bitrate.
3580 if (engine()->vie()->codec()->RegisterEncoderObserver(
3581 channel_id, *send_channel->encoder_observer()) != 0) {
3582 LOG_RTCERR1(RegisterEncoderObserver, send_channel->encoder_observer());
3583 return false;
3584 }
3585
3586 if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetSendTimestampOffsetStatus,
3587 channel_id, send_extensions_, kRtpTimestampOffsetHeaderExtension)) {
3588 return false;
3589 }
3590
3591 if (!SetHeaderExtension(&webrtc::ViERTP_RTCP::SetSendAbsoluteSendTimeStatus,
3592 channel_id, send_extensions_, kRtpAbsoluteSenderTimeHeaderExtension)) {
3593 return false;
3594 }
3595
3596 if (options_.video_leaky_bucket.GetWithDefaultIfUnset(true)) {
3597 if (engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id,
3598 true) != 0) {
3599 LOG_RTCERR2(SetTransmissionSmoothingStatus, channel_id, true);
3600 return false;
3601 }
3602 }
3603
3604 int buffer_latency =
3605 options_.buffered_mode_latency.GetWithDefaultIfUnset(
3606 cricket::kBufferedModeDisabled);
3607 if (buffer_latency != cricket::kBufferedModeDisabled) {
3608 if (engine()->vie()->rtp()->SetSenderBufferingMode(
3609 channel_id, buffer_latency) != 0) {
3610 LOG_RTCERR2(SetSenderBufferingMode, channel_id, buffer_latency);
3611 }
3612 }
3613
3614 if (options_.suspend_below_min_bitrate.GetWithDefaultIfUnset(false)) {
3615 engine()->vie()->codec()->SuspendBelowMinBitrate(channel_id);
3616 }
3617
3618 // The REMB status direction corresponds to the RTP stream (and not the RTCP
3619 // stream). I.e. if send REMB is enabled, the channel is receiving remote
3620 // REMBs and should use them to estimate bandwidth. Receive REMB means that
3621 // REMB packets will be generated and that the channel should be included in
3622 // them. If REMB is enabled, all channels are allowed to contribute to the
3623 // REMB, but only receive channels will ever end up actually contributing.
3624 // This keeps the logic simple.
3625 if (engine_->vie()->rtp()->SetRembStatus(channel_id,
3626 remb_enabled_,
3627 remb_enabled_) != 0) {
3628 LOG_RTCERR3(SetRembStatus, channel_id, remb_enabled_, remb_enabled_);
3629 return false;
3630 }
3631 if (!SetNackFec(channel_id, send_red_type_, send_fec_type_, nack_enabled_)) {
3632 // Logged in SetNackFec. Don't spam the logs.
3633 return false;
3634 }
3635
3636 send_channels_[local_ssrc_key] = send_channel.release();
3637
3638 return true;
3639 }
3640
3641 bool WebRtcVideoMediaChannel::SetNackFec(int channel_id,
3642 int red_payload_type,
3643 int fec_payload_type,
3644 bool nack_enabled) {
3645 bool enable = (red_payload_type != -1 && fec_payload_type != -1 &&
3646 !InConferenceMode());
3647 if (enable) {
3648 if (engine_->vie()->rtp()->SetHybridNACKFECStatus(
3649 channel_id, nack_enabled, red_payload_type, fec_payload_type) != 0) {
3650 LOG_RTCERR4(SetHybridNACKFECStatus,
3651 channel_id, nack_enabled, red_payload_type, fec_payload_type);
3652 return false;
3653 }
3654 LOG(LS_INFO) << "Hybrid NACK/FEC enabled for channel " << channel_id;
3655 } else {
3656 if (engine_->vie()->rtp()->SetNACKStatus(channel_id, nack_enabled) != 0) {
3657 LOG_RTCERR1(SetNACKStatus, channel_id);
3658 return false;
3659 }
3660 std::string enabled = nack_enabled ? "enabled" : "disabled";
3661 LOG(LS_INFO) << "NACK " << enabled << " for channel " << channel_id;
3662 }
3663 return true;
3664 }
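// Example of the decision in SetNackFec() (illustrative payload types): with
// red=116, ulpfec=117 and a non-conference call, hybrid NACK/FEC is enabled
// via SetHybridNACKFECStatus(); if either payload type is -1 or the call is
// in conference mode, plain SetNACKStatus() is used instead and FEC is not
// enabled through this path.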
3665
3666 bool WebRtcVideoMediaChannel::SetSendCodec(const webrtc::VideoCodec& codec) {
3667 bool ret_val = true;
3668 for (SendChannelMap::iterator iter = send_channels_.begin();
3669 iter != send_channels_.end(); ++iter) {
3670 WebRtcVideoChannelSendInfo* send_channel = iter->second;
3671 ret_val = SetSendCodec(send_channel, codec) && ret_val;
3672 }
3673 if (ret_val) {
3674 // All SetSendCodec calls were successful. Update the global state
3675 // accordingly.
3676 send_codec_.reset(new webrtc::VideoCodec(codec));
3677 } else {
3678 // At least one SetSendCodec call failed, rollback.
3679 for (SendChannelMap::iterator iter = send_channels_.begin();
3680 iter != send_channels_.end(); ++iter) {
3681 WebRtcVideoChannelSendInfo* send_channel = iter->second;
3682 if (send_codec_) {
3683 SetSendCodec(send_channel, *send_codec_);
3684 }
3685 }
3686 }
3687 return ret_val;
3688 }
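// Note on the rollback above (descriptive): if any per-channel SetSendCodec()
// call fails, every send channel is re-applied the previously committed
// |send_codec_|, so all channels stay on a consistent configuration and the
// failed |codec| is never stored.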
3689
3690 bool WebRtcVideoMediaChannel::SetSendCodec(
3691 WebRtcVideoChannelSendInfo* send_channel,
3692 const webrtc::VideoCodec& codec) {
3693 if (!send_channel) {
3694 return false;
3695 }
3696
3697 const int channel_id = send_channel->channel_id();
3698 // Make a copy of the codec
3699 webrtc::VideoCodec target_codec = codec;
3700
3701 // Set the default number of temporal layers for VP8.
3702 if (webrtc::kVideoCodecVP8 == codec.codecType) {
3703 target_codec.codecSpecific.VP8.numberOfTemporalLayers =
3704 kDefaultNumberOfTemporalLayers;
3705
3706 // Turn off the VP8 error resilience
3707 target_codec.codecSpecific.VP8.resilience = webrtc::kResilienceOff;
3708
3709 bool enable_denoising =
3710 options_.video_noise_reduction.GetWithDefaultIfUnset(true);
3711 target_codec.codecSpecific.VP8.denoisingOn = enable_denoising;
3712 }
3713
3714 // Register external encoder if codec type is supported by encoder factory.
3715 if (engine()->IsExternalEncoderCodecType(codec.codecType) &&
3716 !send_channel->IsEncoderRegistered(target_codec.plType)) {
3717 webrtc::VideoEncoder* encoder =
3718 engine()->CreateExternalEncoder(codec.codecType);
3719 if (encoder) {
3720 if (engine()->vie()->ext_codec()->RegisterExternalSendCodec(
3721 channel_id, target_codec.plType, encoder, false) == 0) {
3722 send_channel->RegisterEncoder(target_codec.plType, encoder);
3723 } else {
3724 LOG_RTCERR2(RegisterExternalSendCodec, channel_id, target_codec.plName);
3725 engine()->DestroyExternalEncoder(encoder);
3726 }
3727 }
3728 }
3729
3730 // Resolution and framerate may vary for different send channels.
3731 const VideoFormat& video_format = send_channel->video_format();
3732 UpdateVideoCodec(video_format, &target_codec);
3733
3734 if (target_codec.width == 0 && target_codec.height == 0) {
3735 const uint32 ssrc = send_channel->stream_params()->first_ssrc();
3736 LOG(LS_INFO) << "0x0 resolution selected. Captured frames will be dropped "
3737 << "for ssrc: " << ssrc << ".";
3738 } else {
3739 MaybeChangeBitrates(channel_id, &target_codec);
3740 webrtc::VideoCodec current_codec;
3741 if (!engine()->vie()->codec()->GetSendCodec(channel_id, current_codec)) {
3742 // Compare against existing configured send codec.
3743 if (current_codec == target_codec) {
3744 // Codec is already configured on the channel; no need to apply it again.
3745 return true;
3746 }
3747 }
3748
3749 if (0 != engine()->vie()->codec()->SetSendCodec(channel_id, target_codec)) {
3750 LOG_RTCERR2(SetSendCodec, channel_id, target_codec.plName);
3751 return false;
3752 }
3753
3754 // NOTE: SetRtxSendPayloadType must be called after all simulcast SSRCs
3755 // are configured. Otherwise SSRCs configured after this point will use
3756 // the primary payload type for RTX.
3757 if (send_rtx_type_ != -1 &&
3758 engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
3759 send_rtx_type_) != 0) {
3760 LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
3761 return false;
3762 }
3763 }
3764 send_channel->set_interval(
3765 cricket::VideoFormat::FpsToInterval(target_codec.maxFramerate));
3766 return true;
3767 }
3768
3769
3770 static std::string ToString(webrtc::VideoCodecComplexity complexity) {
3771 switch (complexity) {
3772 case webrtc::kComplexityNormal:
3773 return "normal";
3774 case webrtc::kComplexityHigh:
3775 return "high";
3776 case webrtc::kComplexityHigher:
3777 return "higher";
3778 case webrtc::kComplexityMax:
3779 return "max";
3780 default:
3781 return "unknown";
3782 }
3783 }
3784
3785 static std::string ToString(webrtc::VP8ResilienceMode resilience) {
3786 switch (resilience) {
3787 case webrtc::kResilienceOff:
3788 return "off";
3789 case webrtc::kResilientStream:
3790 return "stream";
3791 case webrtc::kResilientFrames:
3792 return "frames";
3793 default:
3794 return "unknown";
3795 }
3796 }
3797
3798 void WebRtcVideoMediaChannel::LogSendCodecChange(const std::string& reason) {
3799 webrtc::VideoCodec vie_codec;
3800 if (engine()->vie()->codec()->GetSendCodec(vie_channel_, vie_codec) != 0) {
3801 LOG_RTCERR1(GetSendCodec, vie_channel_);
3802 return;
3803 }
3804
3805 LOG(LS_INFO) << reason << " : selected video codec "
3806 << vie_codec.plName << "/"
3807 << vie_codec.width << "x" << vie_codec.height << "x"
3808 << static_cast<int>(vie_codec.maxFramerate) << "fps"
3809 << "@" << vie_codec.maxBitrate << "kbps"
3810 << " (min=" << vie_codec.minBitrate << "kbps,"
3811 << " start=" << vie_codec.startBitrate << "kbps)";
3812 LOG(LS_INFO) << "Video max quantization: " << vie_codec.qpMax;
3813 if (webrtc::kVideoCodecVP8 == vie_codec.codecType) {
3814 LOG(LS_INFO) << "VP8 number of temporal layers: "
3815 << static_cast<int>(
3816 vie_codec.codecSpecific.VP8.numberOfTemporalLayers);
3817 LOG(LS_INFO) << "VP8 options : "
3818 << "picture loss indication = "
3819 << vie_codec.codecSpecific.VP8.pictureLossIndicationOn
3820 << ", feedback mode = "
3821 << vie_codec.codecSpecific.VP8.feedbackModeOn
3822 << ", complexity = "
3823 << ToString(vie_codec.codecSpecific.VP8.complexity)
3824 << ", resilience = "
3825 << ToString(vie_codec.codecSpecific.VP8.resilience)
3826 << ", denoising = "
3827 << vie_codec.codecSpecific.VP8.denoisingOn
3828 << ", error concealment = "
3829 << vie_codec.codecSpecific.VP8.errorConcealmentOn
3830 << ", automatic resize = "
3831 << vie_codec.codecSpecific.VP8.automaticResizeOn
3832 << ", frame dropping = "
3833 << vie_codec.codecSpecific.VP8.frameDroppingOn
3834 << ", key frame interval = "
3835 << vie_codec.codecSpecific.VP8.keyFrameInterval;
3836 }
3837
3838 if (send_rtx_type_ != -1) {
3839 LOG(LS_INFO) << "RTX payload type: " << send_rtx_type_;
3840 }
3841 }
3842
3843 bool WebRtcVideoMediaChannel::SetReceiveCodecs(
3844 WebRtcVideoChannelRecvInfo* info) {
3845 int red_type = -1;
3846 int fec_type = -1;
3847 int channel_id = info->channel_id();
3848 // Build a map from payload types to video codecs so that we can easily
3849 // check whether associated payload types refer to valid codecs.
3850 std::map<int, webrtc::VideoCodec*> pt_to_codec;
3851 for (std::vector<webrtc::VideoCodec>::iterator it = receive_codecs_.begin();
3852 it != receive_codecs_.end(); ++it) {
3853 pt_to_codec[it->plType] = &(*it);
3854 }
3855 for (std::vector<webrtc::VideoCodec>::iterator it = receive_codecs_.begin();
3856 it != receive_codecs_.end(); ++it) {
3857 if (it->codecType == webrtc::kVideoCodecRED) {
3858 red_type = it->plType;
3859 } else if (it->codecType == webrtc::kVideoCodecULPFEC) {
3860 fec_type = it->plType;
3861 }
3862 // If this is an RTX codec, we have to verify that it is associated with
3863 // a valid video codec for which we have RTX support.
3864 if (_stricmp(it->plName, kRtxCodecName) == 0) {
3865 std::map<int, int>::iterator apt_it = associated_payload_types_.find(
3866 it->plType);
3867 bool valid_apt = false;
3868 if (apt_it != associated_payload_types_.end()) {
3869 std::map<int, webrtc::VideoCodec*>::iterator codec_it =
3870 pt_to_codec.find(apt_it->second);
3871 // We currently only support RTX associated with VP8 due to limitations
3872 // in webrtc where only one RTX payload type can be registered.
3873 valid_apt = codec_it != pt_to_codec.end() &&
3874 _stricmp(codec_it->second->plName, kVp8PayloadName) == 0;
3875 }
3876 if (!valid_apt) {
3877 LOG(LS_ERROR) << "The RTX codec isn't associated with a known and "
3878 "supported payload type";
3879 return false;
3880 }
3881 if (engine()->vie()->rtp()->SetRtxReceivePayloadType(
3882 channel_id, it->plType) != 0) {
3883 LOG_RTCERR2(SetRtxReceivePayloadType, channel_id, it->plType);
3884 return false;
3885 }
3886 continue;
3887 }
3888 if (engine()->vie()->codec()->SetReceiveCodec(channel_id, *it) != 0) {
3889 LOG_RTCERR2(SetReceiveCodec, channel_id, it->plName);
3890 return false;
3891 }
3892 if (!info->IsDecoderRegistered(it->plType) &&
3893 it->codecType != webrtc::kVideoCodecRED &&
3894 it->codecType != webrtc::kVideoCodecULPFEC) {
3895 webrtc::VideoDecoder* decoder =
3896 engine()->CreateExternalDecoder(it->codecType);
3897 if (decoder) {
3898 if (engine()->vie()->ext_codec()->RegisterExternalReceiveCodec(
3899 channel_id, it->plType, decoder) == 0) {
3900 info->RegisterDecoder(it->plType, decoder);
3901 } else {
3902 LOG_RTCERR2(RegisterExternalReceiveCodec, channel_id, it->plName);
3903 engine()->DestroyExternalDecoder(decoder);
3904 }
3905 }
3906 }
3907 }
3908 return true;
3909 }
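// Illustrative example of the RTX validation above (assumed payload numbers):
// with VP8 negotiated on payload type 100 and an RTX codec on 96, an entry
// associated_payload_types_[96] == 100 must exist and point at VP8 for the
// RTX codec to be accepted and forwarded to SetRtxReceivePayloadType().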
3910
3911 int WebRtcVideoMediaChannel::GetRecvChannelNum(uint32 ssrc) {
3912 if (ssrc == first_receive_ssrc_) {
3913 return vie_channel_;
3914 }
3915 int recv_channel = -1;
3916 RecvChannelMap::iterator it = recv_channels_.find(ssrc);
3917 if (it == recv_channels_.end()) {
3918 // Check if we have an RTX stream registered on this SSRC.
3919 SsrcMap::iterator rtx_it = rtx_to_primary_ssrc_.find(ssrc);
3920 if (rtx_it != rtx_to_primary_ssrc_.end()) {
3921 if (rtx_it->second == first_receive_ssrc_) {
3922 recv_channel = vie_channel_;
3923 } else {
3924 it = recv_channels_.find(rtx_it->second);
3925 assert(it != recv_channels_.end());
3926 recv_channel = it->second->channel_id();
3927 }
3928 }
3929 } else {
3930 recv_channel = it->second->channel_id();
3931 }
3932 return recv_channel;
3933 }
3934
3935 // If the new frame size is different from the send codec size we set on vie,
3936 // we need to reset the send codec on vie.
3937 // The new send codec size should not exceed send_codec_ which is controlled
3938 // only by the 'jec' logic.
3939 // TODO(pthatcher): Get rid of this function, so we only ever set up
3940 // codecs in a single place.
3941 bool WebRtcVideoMediaChannel::MaybeResetVieSendCodec(
3942 WebRtcVideoChannelSendInfo* send_channel,
3943 int new_width,
3944 int new_height,
3945 bool is_screencast,
3946 bool* reset) {
3947 if (reset) {
3948 *reset = false;
3949 }
3950 ASSERT(send_codec_.get() != NULL);
3951
3952 webrtc::VideoCodec target_codec = *send_codec_;
3953 const VideoFormat& video_format = send_channel->video_format();
3954 UpdateVideoCodec(video_format, &target_codec);
3955
3956 // Vie send codec size should not exceed target_codec.
3957 int target_width = new_width;
3958 int target_height = new_height;
3959 if (!is_screencast &&
3960 (new_width > target_codec.width || new_height > target_codec.height)) {
3961 target_width = target_codec.width;
3962 target_height = target_codec.height;
3963 }
3964
3965 // Get current vie codec.
3966 webrtc::VideoCodec vie_codec;
3967 const int channel_id = send_channel->channel_id();
3968 if (engine()->vie()->codec()->GetSendCodec(channel_id, vie_codec) != 0) {
3969 LOG_RTCERR1(GetSendCodec, channel_id);
3970 return false;
3971 }
3972 const int cur_width = vie_codec.width;
3973 const int cur_height = vie_codec.height;
3974
3975 // Only reset send codec when there is a size change. Additionally,
3976 // automatic resize needs to be turned off when screencasting and on when
3977 // not screencasting.
3978 // Don't allow automatic resizing for screencasting.
3979 bool automatic_resize = !is_screencast;
3980 // Turn off VP8 frame dropping when screensharing as the current model does
3981 // not work well at low fps.
3982 bool vp8_frame_dropping = !is_screencast;
3983 // TODO(pbos): Remove |video_noise_reduction| and enable it for all
3984 // non-screencast.
3985 bool enable_denoising =
3986 options_.video_noise_reduction.GetWithDefaultIfUnset(true);
3987 // Disable denoising for screencasting.
3988 if (is_screencast) {
3989 enable_denoising = false;
3990 }
3991 int screencast_min_bitrate =
3992 options_.screencast_min_bitrate.GetWithDefaultIfUnset(0);
3993 bool leaky_bucket = options_.video_leaky_bucket.GetWithDefaultIfUnset(true);
3994 bool reset_send_codec =
3995 target_width != cur_width || target_height != cur_height ||
3996 automatic_resize != vie_codec.codecSpecific.VP8.automaticResizeOn ||
3997 enable_denoising != vie_codec.codecSpecific.VP8.denoisingOn ||
3998 vp8_frame_dropping != vie_codec.codecSpecific.VP8.frameDroppingOn;
3999
4000 if (reset_send_codec) {
4001 // Set the new codec on vie.
4002 vie_codec.width = target_width;
4003 vie_codec.height = target_height;
4004 vie_codec.maxFramerate = target_codec.maxFramerate;
4005 vie_codec.startBitrate = target_codec.startBitrate;
4006 vie_codec.minBitrate = target_codec.minBitrate;
4007 vie_codec.maxBitrate = target_codec.maxBitrate;
4008 vie_codec.targetBitrate = 0;
4009 vie_codec.codecSpecific.VP8.automaticResizeOn = automatic_resize;
4010 vie_codec.codecSpecific.VP8.denoisingOn = enable_denoising;
4011 vie_codec.codecSpecific.VP8.frameDroppingOn = vp8_frame_dropping;
4012 MaybeChangeBitrates(channel_id, &vie_codec);
4013
4014 if (engine()->vie()->codec()->SetSendCodec(channel_id, vie_codec) != 0) {
4015 LOG_RTCERR1(SetSendCodec, channel_id);
4016 return false;
4017 }
4018
4019 if (is_screencast) {
4020 engine()->vie()->rtp()->SetMinTransmitBitrate(channel_id,
4021 screencast_min_bitrate);
4022 // If screencast and min bitrate set, force enable pacer.
4023 if (screencast_min_bitrate > 0) {
4024 engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id,
4025 true);
4026 }
4027 } else {
4028 // In case of switching from screencast to regular capture, set
4029 // min bitrate padding and pacer back to defaults.
4030 engine()->vie()->rtp()->SetMinTransmitBitrate(channel_id, 0);
4031 engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id,
4032 leaky_bucket);
4033 }
4034 if (reset) {
4035 *reset = true;
4036 }
4037 LogSendCodecChange("Capture size changed");
4038 }
4039
4040 return true;
4041 }
4042
4043 void WebRtcVideoMediaChannel::MaybeChangeBitrates(
4044 int channel_id, webrtc::VideoCodec* codec) {
4045 codec->minBitrate = GetBitrate(codec->minBitrate, kMinVideoBitrate);
4046 codec->startBitrate = GetBitrate(codec->startBitrate, kStartVideoBitrate);
4047 codec->maxBitrate = GetBitrate(codec->maxBitrate, kMaxVideoBitrate);
4048
4049 if (codec->minBitrate > codec->maxBitrate) {
4050 LOG(LS_INFO) << "Decreasing codec min bitrate to the max ("
4051 << codec->maxBitrate << ") because the min ("
4052 << codec->minBitrate << ") exceeds the max.";
4053 codec->minBitrate = codec->maxBitrate;
4054 }
4055 if (codec->startBitrate < codec->minBitrate) {
4056 LOG(LS_INFO) << "Increasing codec start bitrate to the min ("
4057 << codec->minBitrate << ") because the start ("
4058 << codec->startBitrate << ") is less than the min.";
4059 codec->startBitrate = codec->minBitrate;
4060 } else if (codec->startBitrate > codec->maxBitrate) {
4061 LOG(LS_INFO) << "Decreasing codec start bitrate to the max ("
4062 << codec->maxBitrate << ") because the start ("
4063 << codec->startBitrate << ") exceeds the max.";
4064 codec->startBitrate = codec->maxBitrate;
4065 }
4066
4067 // Use a previous target bitrate, if there is one.
4068 unsigned int current_target_bitrate = 0;
4069 if (engine()->vie()->codec()->GetCodecTargetBitrate(
4070 channel_id, &current_target_bitrate) == 0) {
4071 // Convert to kbps.
4072 current_target_bitrate /= 1000;
4073 if (current_target_bitrate > codec->maxBitrate) {
4074 current_target_bitrate = codec->maxBitrate;
4075 }
4076 if (current_target_bitrate > codec->startBitrate) {
4077 codec->startBitrate = current_target_bitrate;
4078 }
4079 }
4080 }
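// Worked example of the clamping above (illustrative numbers): suppose the
// codec comes out as min=50, start=300, max=2000 kbps and
// GetCodecTargetBitrate() reports a previous target of 2,500,000 bps. The
// previous target converts to 2500 kbps, is capped to the max (2000 kbps),
// and, since 2000 > 300, startBitrate is raised to 2000 so the encoder
// resumes near its previous rate instead of ramping up from 300 kbps again.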
4081
4082 void WebRtcVideoMediaChannel::OnMessage(talk_base::Message* msg) {
4083 FlushBlackFrameData* black_frame_data =
4084 static_cast<FlushBlackFrameData*>(msg->pdata);
4085 FlushBlackFrame(black_frame_data->ssrc, black_frame_data->timestamp);
4086 delete black_frame_data;
4087 }
4088
4089 int WebRtcVideoMediaChannel::SendPacket(int channel, const void* data,
4090 int len) {
4091 talk_base::Buffer packet(data, len, kMaxRtpPacketLen);
4092 return MediaChannel::SendPacket(&packet) ? len : -1;
4093 }
4094
4095 int WebRtcVideoMediaChannel::SendRTCPPacket(int channel,
4096 const void* data,
4097 int len) {
4098 talk_base::Buffer packet(data, len, kMaxRtpPacketLen);
4099 return MediaChannel::SendRtcp(&packet) ? len : -1;
4100 }
4101
4102 void WebRtcVideoMediaChannel::QueueBlackFrame(uint32 ssrc, int64 timestamp,
4103 int framerate) {
4104 if (timestamp) {
4105 FlushBlackFrameData* black_frame_data = new FlushBlackFrameData(
4106 ssrc,
4107 timestamp);
4108 const int delay_ms = static_cast<int>(
4109 2 * cricket::VideoFormat::FpsToInterval(framerate) *
4110 talk_base::kNumMillisecsPerSec / talk_base::kNumNanosecsPerSec);
4111 worker_thread()->PostDelayed(delay_ms, this, 0, black_frame_data);
4112 }
4113 }
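// Example of the delay computation above (assuming FpsToInterval() returns
// the frame interval in nanoseconds, as the unit conversion suggests): at
// 30 fps the interval is ~33.3 ms, so the black frame is flushed roughly
// 2 * 33.3 ms = 67 ms after the last real frame, unless a newer frame has
// updated the stream's timestamp by then.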
4114
4115 void WebRtcVideoMediaChannel::FlushBlackFrame(uint32 ssrc, int64 timestamp) {
4116 WebRtcVideoChannelSendInfo* send_channel = GetSendChannel(ssrc);
4117 if (!send_channel) {
4118 return;
4119 }
4120 talk_base::scoped_ptr<const VideoFrame> black_frame_ptr;
4121
4122 const WebRtcLocalStreamInfo* channel_stream_info =
4123 send_channel->local_stream_info();
4124 int64 last_frame_time_stamp = channel_stream_info->time_stamp();
4125 if (last_frame_time_stamp == timestamp) {
4126 size_t last_frame_width = 0;
4127 size_t last_frame_height = 0;
4128 int64 last_frame_elapsed_time = 0;
4129 channel_stream_info->GetLastFrameInfo(&last_frame_width, &last_frame_height,
4130 &last_frame_elapsed_time);
4131 if (!last_frame_width || !last_frame_height) {
4132 return;
4133 }
4134 WebRtcVideoFrame black_frame;
4135 // Black frame is not screencast.
4136 const bool screencasting = false;
4137 const int64 timestamp_delta = send_channel->interval();
4138 if (!black_frame.InitToBlack(send_codec_->width, send_codec_->height, 1, 1,
4139 last_frame_elapsed_time + timestamp_delta,
4140 last_frame_time_stamp + timestamp_delta) ||
4141 !SendFrame(send_channel, &black_frame, screencasting)) {
4142 LOG(LS_ERROR) << "Failed to send black frame.";
4143 }
4144 }
4145 }
4146
4147 void WebRtcVideoMediaChannel::OnCpuAdaptationUnable() {
4148 // The ssrc is hardcoded to 0. This message reflects a system-wide issue,
4149 // so finding out which ssrc caused it doesn't matter.
4150 SignalMediaError(0, VideoMediaChannel::ERROR_REC_CPU_MAX_CANT_DOWNGRADE);
4151 }
4152
4153 void WebRtcVideoMediaChannel::SetNetworkTransmissionState(
4154 bool is_transmitting) {
4155 LOG(LS_INFO) << "SetNetworkTransmissionState: " << is_transmitting;
4156 for (SendChannelMap::iterator iter = send_channels_.begin();
4157 iter != send_channels_.end(); ++iter) {
4158 WebRtcVideoChannelSendInfo* send_channel = iter->second;
4159 int channel_id = send_channel->channel_id();
4160 engine_->vie()->network()->SetNetworkTransmissionState(channel_id,
4161 is_transmitting);
4162 }
4163 }
4164
4165 bool WebRtcVideoMediaChannel::SetHeaderExtension(ExtensionSetterFunction setter,
4166 int channel_id, const RtpHeaderExtension* extension) {
4167 bool enable = false;
4168 int id = 0;
4169 if (extension) {
4170 enable = true;
4171 id = extension->id;
4172 }
4173 if ((engine_->vie()->rtp()->*setter)(channel_id, enable, id) != 0) {
4174 LOG_RTCERR4(*setter, extension ? extension->uri : "", channel_id, enable, id);
4175 return false;
4176 }
4177 return true;
4178 }
4179
4180 bool WebRtcVideoMediaChannel::SetHeaderExtension(ExtensionSetterFunction setter,
4181 int channel_id, const std::vector<RtpHeaderExtension>& extensions,
4182 const char header_extension_uri[]) {
4183 const RtpHeaderExtension* extension = FindHeaderExtension(extensions,
4184 header_extension_uri);
4185 return SetHeaderExtension(setter, channel_id, extension);
4186 }
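// Illustrative usage of the helpers above (see ConfigureReceiving and
// ConfigureSending): the caller passes a ViERTP_RTCP member-function pointer
// together with the negotiated extension list, and the extension is enabled
// with its negotiated id only if its URI (e.g.
// kRtpTimestampOffsetHeaderExtension) appears in that list; otherwise the
// setter is called with enable=false and id=0 to keep it disabled.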
4187
4188 bool WebRtcVideoMediaChannel::SetLocalRtxSsrc(int channel_id,
4189 const StreamParams& send_params,
4190 uint32 primary_ssrc,
4191 int stream_idx) {
4192 uint32 rtx_ssrc = 0;
4193 bool has_rtx = send_params.GetFidSsrc(primary_ssrc, &rtx_ssrc);
4194 if (has_rtx && engine()->vie()->rtp()->SetLocalSSRC(
4195 channel_id, rtx_ssrc, webrtc::kViEStreamTypeRtx, stream_idx) != 0) {
4196 LOG_RTCERR4(SetLocalSSRC, channel_id, rtx_ssrc,
4197 webrtc::kViEStreamTypeRtx, stream_idx);
4198 return false;
4199 }
4200 return true;
4201 }
4202
4203 void WebRtcVideoMediaChannel::MaybeConnectCapturer(VideoCapturer* capturer) {
4204 if (capturer != NULL && GetSendChannelNum(capturer) == 1) {
4205 capturer->SignalVideoFrame.connect(this,
4206 &WebRtcVideoMediaChannel::SendFrame);
4207 }
4208 }
4209
4210 void WebRtcVideoMediaChannel::MaybeDisconnectCapturer(VideoCapturer* capturer) {
4211 if (capturer != NULL && GetSendChannelNum(capturer) == 1) {
4212 capturer->SignalVideoFrame.disconnect(this);
4213 }
4214 }
4215
4216 } // namespace cricket
4217
4218 #endif // HAVE_WEBRTC_VIDEO
4219