/*
 * libjingle
 * Copyright 2012, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/app/webrtc/mediastreamsignaling.h"

#include <vector>

#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastreamproxy.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/bytebuffer.h"
#include "talk/base/stringutils.h"
#include "talk/media/sctp/sctpdataengine.h"

static const char kDefaultStreamLabel[] = "default";
static const char kDefaultAudioTrackLabel[] = "defaulta0";
static const char kDefaultVideoTrackLabel[] = "defaultv0";

namespace webrtc {

using talk_base::scoped_ptr;
using talk_base::scoped_refptr;

// Supported MediaConstraints.
const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
    "OfferToReceiveAudio";
const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
    "OfferToReceiveVideo";
const char MediaConstraintsInterface::kIceRestart[] =
    "IceRestart";
const char MediaConstraintsInterface::kUseRtpMux[] =
    "googUseRtpMUX";
const char MediaConstraintsInterface::kVoiceActivityDetection[] =
    "VoiceActivityDetection";

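// Translates the audio/video/bundle/VAD/ICE-restart MediaConstraints in
// |constraints| into flags on |options|. Returns true if all mandatory
// constraints were recognized and applied, false otherwise.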
static bool ParseConstraints(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options, bool is_answer) {
  bool value;
  size_t mandatory_constraints_satisfied = 0;

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveAudio,
                     &value, &mandatory_constraints_satisfied)) {
    // |options->has_audio| can only change from false to
    // true, but never change from true to false. This is to make sure
    // CreateOffer / CreateAnswer doesn't remove a media content
    // description that has been created.
    options->has_audio |= value;
  } else {
    // kOfferToReceiveAudio defaults to true according to spec.
    options->has_audio = true;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveVideo,
                     &value, &mandatory_constraints_satisfied)) {
    // |options->has_video| can only change from false to
    // true, but never change from true to false. This is to make sure
    // CreateOffer / CreateAnswer doesn't remove a media content
    // description that has been created.
    options->has_video |= value;
  } else {
    // kOfferToReceiveVideo defaults to false according to spec. But
    // if it is an answer and video is offered, we should still accept video
    // by default.
    options->has_video |= is_answer;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kVoiceActivityDetection,
                     &value, &mandatory_constraints_satisfied)) {
    options->vad_enabled = value;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kUseRtpMux,
                     &value, &mandatory_constraints_satisfied)) {
    options->bundle_enabled = value;
  } else {
    // kUseRtpMux defaults to true according to spec.
    options->bundle_enabled = true;
  }
  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kIceRestart,
                     &value, &mandatory_constraints_satisfied)) {
    options->transport_options.ice_restart = value;
  } else {
    // kIceRestart defaults to false according to spec.
    options->transport_options.ice_restart = false;
  }

  if (!constraints) {
    return true;
  }
  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
}

// Returns true if at least one media content is present and
// |options.bundle_enabled| is true.
// Bundle will be enabled by default if at least one media content is present
// and the constraint kUseRtpMux has not disabled bundle.
static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) {
  return options.bundle_enabled &&
      (options.has_audio || options.has_video || options.has_data());
}

// Factory class for creating remote MediaStreams and MediaStreamTracks.
class RemoteMediaStreamFactory {
 public:
  explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread,
                                    cricket::ChannelManager* channel_manager)
      : signaling_thread_(signaling_thread),
        channel_manager_(channel_manager) {
  }

  talk_base::scoped_refptr<MediaStreamInterface> CreateMediaStream(
      const std::string& stream_label) {
    return MediaStreamProxy::Create(
        signaling_thread_, MediaStream::Create(stream_label));
  }

  AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
        stream, track_id, static_cast<AudioSourceInterface*>(NULL));
  }

  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
        stream, track_id, VideoSource::Create(channel_manager_,
                                              new RemoteVideoCapturer(),
                                              NULL).get());
  }

 private:
  template <typename TI, typename T, typename TP, typename S>
  TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id,
               S* source) {
    talk_base::scoped_refptr<TI> track(
        TP::Create(signaling_thread_, T::Create(track_id, source)));
    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
    if (stream->AddTrack(track)) {
      return track;
    }
    return NULL;
  }

  talk_base::Thread* signaling_thread_;
  cricket::ChannelManager* channel_manager_;
};

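// Note: |last_allocated_sctp_even_sid_| and |last_allocated_sctp_odd_sid_| are
// initialized to -2 and -1 so that AllocateSctpSid(), which pre-increments by
// 2, hands out 0 as the first even sid and 1 as the first odd sid.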
MediaStreamSignaling::MediaStreamSignaling(
    talk_base::Thread* signaling_thread,
    MediaStreamSignalingObserver* stream_observer,
    cricket::ChannelManager* channel_manager)
    : signaling_thread_(signaling_thread),
      data_channel_factory_(NULL),
      stream_observer_(stream_observer),
      local_streams_(StreamCollection::Create()),
      remote_streams_(StreamCollection::Create()),
      remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread,
                                                          channel_manager)),
      last_allocated_sctp_even_sid_(-2),
      last_allocated_sctp_odd_sid_(-1) {
  options_.has_video = false;
  options_.has_audio = false;
}

MediaStreamSignaling::~MediaStreamSignaling() {
}

void MediaStreamSignaling::TearDown() {
  OnAudioChannelClose();
  OnVideoChannelClose();
  OnDataChannelClose();
}

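// Returns true if |sid| is in the valid SCTP sid range and is not already in
// use by one of the existing SCTP data channels.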
bool MediaStreamSignaling::IsSctpSidAvailable(int sid) const {
  if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid))
    return false;
  for (SctpDataChannels::const_iterator iter = sctp_data_channels_.begin();
       iter != sctp_data_channels_.end();
       ++iter) {
    if ((*iter)->id() == sid) {
      return false;
    }
  }
  return true;
}

// Gets the first unused odd/even id based on the DTLS role. If |role| is
// SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
// otherwise, the id starts from 1 and takes odd numbers. Returns false if no
// id can be allocated.
bool MediaStreamSignaling::AllocateSctpSid(talk_base::SSLRole role, int* sid) {
  int& last_id = (role == talk_base::SSL_CLIENT) ?
      last_allocated_sctp_even_sid_ : last_allocated_sctp_odd_sid_;

  do {
    last_id += 2;
  } while (last_id <= static_cast<int>(cricket::kMaxSctpSid) &&
           !IsSctpSidAvailable(last_id));

  if (last_id > static_cast<int>(cricket::kMaxSctpSid)) {
    return false;
  }

  *sid = last_id;
  return true;
}

bool MediaStreamSignaling::HasDataChannels() const {
  return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
}

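// Registers a local DataChannel with the signaling object. RTP data channels
// are keyed by label and must be unique; SCTP data channels are simply
// appended to the list.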
bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) {
  ASSERT(data_channel != NULL);
  if (data_channel->data_channel_type() == cricket::DCT_RTP) {
    if (rtp_data_channels_.find(data_channel->label()) !=
        rtp_data_channels_.end()) {
      LOG(LS_ERROR) << "DataChannel with label " << data_channel->label()
                    << " already exists.";
      return false;
    }
    rtp_data_channels_[data_channel->label()] = data_channel;
  } else {
    ASSERT(data_channel->data_channel_type() == cricket::DCT_SCTP);
    sctp_data_channels_.push_back(data_channel);
  }
  return true;
}

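// Creates an SCTP DataChannel in response to an OPEN control message received
// from the remote peer and notifies the observer. Fails if no
// |data_channel_factory_| has been set.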
bool MediaStreamSignaling::AddDataChannelFromOpenMessage(
    const std::string& label,
    const DataChannelInit& config) {
  if (!data_channel_factory_) {
    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
                    << "are not supported.";
    return false;
  }
  scoped_refptr<DataChannel> channel(
      data_channel_factory_->CreateDataChannel(label, &config));
  if (!channel.get()) {
    LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
    return false;
  }
  sctp_data_channels_.push_back(channel);
  stream_observer_->OnAddDataChannel(channel);
  return true;
}

bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) {
  if (local_streams_->find(local_stream->label()) != NULL) {
    LOG(LS_WARNING) << "MediaStream with label " << local_stream->label()
                    << " already exists.";
    return false;
  }
  local_streams_->AddStream(local_stream);

  // Find tracks that have already been configured in SDP. This can occur if a
  // local session description that contains the MSID of these tracks is set
  // before AddLocalStream is called. It can also occur if the local session
  // description is not changed and RemoveLocalStream
  // is called and later AddLocalStream is called again with the same stream.
  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
       it != audio_tracks.end(); ++it) {
    TrackInfos::const_iterator track_info_it =
        local_audio_tracks_.find((*it)->id());
    if (track_info_it != local_audio_tracks_.end()) {
      const TrackInfo& info = track_info_it->second;
      OnLocalTrackSeen(info.stream_label, info.track_id, info.ssrc,
                       cricket::MEDIA_TYPE_AUDIO);
    }
  }

  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
  for (VideoTrackVector::const_iterator it = video_tracks.begin();
       it != video_tracks.end(); ++it) {
    TrackInfos::const_iterator track_info_it =
        local_video_tracks_.find((*it)->id());
    if (track_info_it != local_video_tracks_.end()) {
      const TrackInfo& info = track_info_it->second;
      OnLocalTrackSeen(info.stream_label, info.track_id, info.ssrc,
                       cricket::MEDIA_TYPE_VIDEO);
    }
  }
  return true;
}

void MediaStreamSignaling::RemoveLocalStream(
    MediaStreamInterface* local_stream) {
  local_streams_->RemoveStream(local_stream);
  stream_observer_->OnRemoveLocalStream(local_stream);
}

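// Fills |options| with the MediaSessionOptions to use when creating an offer,
// based on the current local streams and data channels and on |constraints|.
// Returns false if a mandatory constraint could not be satisfied.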
bool MediaStreamSignaling::GetOptionsForOffer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options) {
  UpdateSessionOptions();
  if (!ParseConstraints(constraints, &options_, false)) {
    return false;
  }
  options_.bundle_enabled = EvaluateNeedForBundle(options_);
  *options = options_;
  return true;
}

bool MediaStreamSignaling::GetOptionsForAnswer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options) {
  UpdateSessionOptions();

  // Copy |options_| so that the flags MediaSessionOptions::has_audio and
  // MediaSessionOptions::has_video do not affect subsequent offers.
  cricket::MediaSessionOptions current_options = options_;
  if (!ParseConstraints(constraints, &current_options, true)) {
    return false;
  }
  current_options.bundle_enabled = EvaluateNeedForBundle(current_options);
  *options = current_options;
  return true;
}

// Updates or creates remote MediaStream objects given a
// remote SessionDescription.
// If the remote SessionDescription contains new remote MediaStreams
// the observer OnAddStream method is called. If a remote MediaStream is missing
// from the remote SessionDescription OnRemoveStream is called.
void MediaStreamSignaling::OnRemoteDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::SessionDescription* remote_desc = desc->description();
  talk_base::scoped_refptr<StreamCollection> new_streams(
      StreamCollection::Create());

  // Find all audio rtp streams and create corresponding remote AudioTracks
  // and MediaStreams.
  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
  if (audio_content) {
    const cricket::AudioContentDescription* desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    remote_info_.default_audio_track_needed =
        desc->direction() == cricket::MD_SENDRECV && desc->streams().empty();
  }

  // Find all video rtp streams and create corresponding remote VideoTracks
  // and MediaStreams.
  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
  if (video_content) {
    const cricket::VideoContentDescription* desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    remote_info_.default_video_track_needed =
        desc->direction() == cricket::MD_SENDRECV && desc->streams().empty();
  }

  // Update the DataChannels with the information from the remote peer.
  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (talk_base::starts_with(
            data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) {
      UpdateRemoteRtpDataChannels(data_desc->streams());
    }
  }

  // Iterate new_streams and notify the observer about new MediaStreams.
  for (size_t i = 0; i < new_streams->count(); ++i) {
    MediaStreamInterface* new_stream = new_streams->at(i);
    stream_observer_->OnAddRemoteStream(new_stream);
  }

  // Find removed MediaStreams.
  if (remote_info_.IsDefaultMediaStreamNeeded() &&
      remote_streams_->find(kDefaultStreamLabel) != NULL) {
    // The default media stream already exists. No need to do anything.
  } else {
    UpdateEndedRemoteMediaStreams();
    remote_info_.msid_supported |= remote_streams_->count() > 0;
  }
  MaybeCreateDefaultStream();
}

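// Updates local tracks and local RTP data channels from a new local
// SessionDescription, and marks remote tracks as ended for any media content
// that has been rejected.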
void MediaStreamSignaling::OnLocalDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::ContentInfo* audio_content =
      GetFirstAudioContent(desc->description());
  if (audio_content) {
    if (audio_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
    }
    const cricket::AudioContentDescription* audio_desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
  }

  const cricket::ContentInfo* video_content =
      GetFirstVideoContent(desc->description());
  if (video_content) {
    if (video_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
    }
    const cricket::VideoContentDescription* video_desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateLocalTracks(video_desc->streams(), video_desc->type());
  }

  const cricket::ContentInfo* data_content =
      GetFirstDataContent(desc->description());
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (talk_base::starts_with(
            data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) {
      UpdateLocalRtpDataChannels(data_desc->streams());
    }
  }
}

void MediaStreamSignaling::OnAudioChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
}

void MediaStreamSignaling::OnVideoChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
}

void MediaStreamSignaling::OnDataChannelClose() {
  RtpDataChannels::iterator it1 = rtp_data_channels_.begin();
  for (; it1 != rtp_data_channels_.end(); ++it1) {
    it1->second->OnDataEngineClose();
  }
  SctpDataChannels::iterator it2 = sctp_data_channels_.begin();
  for (; it2 != sctp_data_channels_.end(); ++it2) {
    (*it2)->OnDataEngineClose();
  }
}

bool MediaStreamSignaling::GetRemoteAudioTrackSsrc(
    const std::string& track_id, uint32* ssrc) const {
  TrackInfos::const_iterator it = remote_audio_tracks_.find(track_id);
  if (it == remote_audio_tracks_.end()) {
    return false;
  }

  *ssrc = it->second.ssrc;
  return true;
}

bool MediaStreamSignaling::GetRemoteVideoTrackSsrc(
    const std::string& track_id, uint32* ssrc) const {
  TrackInfos::const_iterator it = remote_video_tracks_.find(track_id);
  if (it == remote_video_tracks_.end()) {
    return false;
  }

  *ssrc = it->second.ssrc;
  return true;
}

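// Rebuilds |options_.streams| from the tracks of the current local
// MediaStreams and from the open/connecting RTP data channels, and updates
// the has_audio/has_video flags accordingly.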
void MediaStreamSignaling::UpdateSessionOptions() {
  options_.streams.clear();
  if (local_streams_ != NULL) {
    for (size_t i = 0; i < local_streams_->count(); ++i) {
      MediaStreamInterface* stream = local_streams_->at(i);

      AudioTrackVector audio_tracks(stream->GetAudioTracks());
      if (!audio_tracks.empty()) {
        options_.has_audio = true;
      }

      // For each audio track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < audio_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
        options_.AddStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
                           stream->label());
      }

      VideoTrackVector video_tracks(stream->GetVideoTracks());
      if (!video_tracks.empty()) {
        options_.has_video = true;
      }
      // For each video track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < video_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
        options_.AddStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
                           stream->label());
      }
    }
  }

  // Check for data channels.
  RtpDataChannels::const_iterator data_channel_it = rtp_data_channels_.begin();
  for (; data_channel_it != rtp_data_channels_.end(); ++data_channel_it) {
    const DataChannel* channel = data_channel_it->second;
    if (channel->state() == DataChannel::kConnecting ||
        channel->state() == DataChannel::kOpen) {
      // |streamid| and |sync_label| are both set to the DataChannel label
      // here so they can be signaled the same way as MediaStreams and Tracks.
      // For MediaStreams, the sync_label is the MediaStream label and the
      // track label is the same as |streamid|.
      const std::string& streamid = channel->label();
      const std::string& sync_label = channel->label();
      options_.AddStream(cricket::MEDIA_TYPE_DATA, streamid, sync_label);
    }
  }
}

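// Reconciles the remote StreamParams of |media_type| with the currently known
// remote tracks: tracks that no longer appear are removed, new tracks (and
// their MediaStreams, if needed) are created, and any newly created
// MediaStream is added to |new_streams|.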
void MediaStreamSignaling::UpdateRemoteStreamsList(
    const cricket::StreamParamsVec& streams,
    cricket::MediaType media_type,
    StreamCollection* new_streams) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);

  // Find removed tracks, i.e. tracks where the track id or ssrc doesn't match
  // the new StreamParams.
  TrackInfos::iterator track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    TrackInfo info = track_it->second;
    cricket::StreamParams params;
    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
        params.id != info.track_id) {
      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
      current_tracks->erase(track_it++);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = it->sync_label;
    const std::string& track_id = it->id;
    uint32 ssrc = it->first_ssrc();

    talk_base::scoped_refptr<MediaStreamInterface> stream =
        remote_streams_->find(stream_label);
    if (!stream) {
      // This is a new MediaStream. Create a new remote MediaStream.
      stream = remote_stream_factory_->CreateMediaStream(stream_label);
      remote_streams_->AddStream(stream);
      new_streams->AddStream(stream);
    }

    TrackInfos::iterator track_it = current_tracks->find(track_id);
    if (track_it == current_tracks->end()) {
      (*current_tracks)[track_id] =
          TrackInfo(stream_label, track_id, ssrc);
      OnRemoteTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
    }
  }
}

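// Creates a remote audio or video track with |track_id| in the remote
// MediaStream identified by |stream_label| and notifies the observer.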
void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label,
                                             const std::string& track_id,
                                             uint32 ssrc,
                                             cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track =
        remote_stream_factory_->AddAudioTrack(stream, track_id);
    stream_observer_->OnAddRemoteAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track =
        remote_stream_factory_->AddVideoTrack(stream, track_id);
    stream_observer_->OnAddRemoteVideoTrack(stream, video_track, ssrc);
  } else {
    ASSERT(false && "Invalid media type");
  }
}

void MediaStreamSignaling::OnRemoteTrackRemoved(
    const std::string& stream_label,
    const std::string& track_id,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    talk_base::scoped_refptr<AudioTrackInterface> audio_track =
        stream->FindAudioTrack(track_id);
    if (audio_track) {
      audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      stream->RemoveTrack(audio_track);
      stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track);
    }
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    talk_base::scoped_refptr<VideoTrackInterface> video_track =
        stream->FindVideoTrack(track_id);
    if (video_track) {
      video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      stream->RemoveTrack(video_track);
      stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track);
    }
  } else {
    ASSERT(false && "Invalid media type");
  }
}

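// Marks all remote tracks of |media_type| as ended. Used when the
// corresponding media content is rejected or the channel is closed.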
void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);
  for (TrackInfos::iterator track_it = current_tracks->begin();
       track_it != current_tracks->end(); ++track_it) {
    TrackInfo info = track_it->second;
    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by javascript.
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by javascript.
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
  }
}

void MediaStreamSignaling::UpdateEndedRemoteMediaStreams() {
  std::vector<scoped_refptr<MediaStreamInterface> > streams_to_remove;
  for (size_t i = 0; i < remote_streams_->count(); ++i) {
    MediaStreamInterface* stream = remote_streams_->at(i);
    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
      streams_to_remove.push_back(stream);
    }
  }

  std::vector<scoped_refptr<MediaStreamInterface> >::const_iterator it;
  for (it = streams_to_remove.begin(); it != streams_to_remove.end(); ++it) {
    remote_streams_->RemoveStream(*it);
    stream_observer_->OnRemoveRemoteStream(*it);
  }
}

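// Creates the default remote MediaStream (and default audio/video tracks in
// it) when the remote description signals send media without any MSID
// information, and notifies the observer if the stream was newly created.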
void MediaStreamSignaling::MaybeCreateDefaultStream() {
  if (!remote_info_.IsDefaultMediaStreamNeeded())
    return;

  bool default_created = false;

  scoped_refptr<MediaStreamInterface> default_remote_stream =
      remote_streams_->find(kDefaultStreamLabel);
  if (default_remote_stream == NULL) {
    default_created = true;
    default_remote_stream =
        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
    remote_streams_->AddStream(default_remote_stream);
  }
  if (remote_info_.default_audio_track_needed &&
      default_remote_stream->GetAudioTracks().size() == 0) {
    remote_audio_tracks_[kDefaultAudioTrackLabel] =
        TrackInfo(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0);
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
                      cricket::MEDIA_TYPE_AUDIO);
  }
  if (remote_info_.default_video_track_needed &&
      default_remote_stream->GetVideoTracks().size() == 0) {
    remote_video_tracks_[kDefaultVideoTrackLabel] =
        TrackInfo(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0);
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
                      cricket::MEDIA_TYPE_VIDEO);
  }
  if (default_created) {
    stream_observer_->OnAddRemoteStream(default_remote_stream);
  }
}

MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetRemoteTracks(
    cricket::MediaType type) {
  if (type == cricket::MEDIA_TYPE_AUDIO)
    return &remote_audio_tracks_;
  else if (type == cricket::MEDIA_TYPE_VIDEO)
    return &remote_video_tracks_;
  ASSERT(false && "Unknown MediaType");
  return NULL;
}

MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetLocalTracks(
    cricket::MediaType media_type) {
  ASSERT(media_type == cricket::MEDIA_TYPE_AUDIO ||
         media_type == cricket::MEDIA_TYPE_VIDEO);

  return (media_type == cricket::MEDIA_TYPE_AUDIO) ?
      &local_audio_tracks_ : &local_video_tracks_;
}

void MediaStreamSignaling::UpdateLocalTracks(
    const std::vector<cricket::StreamParams>& streams,
    cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetLocalTracks(media_type);

  // Find removed tracks, i.e. tracks where the track id or ssrc doesn't match
  // the new StreamParams.
  TrackInfos::iterator track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    TrackInfo info = track_it->second;
    cricket::StreamParams params;
    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
        params.id != info.track_id) {
      OnLocalTrackRemoved(info.stream_label, info.track_id, media_type);
      current_tracks->erase(track_it++);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = it->sync_label;
    const std::string& track_id = it->id;
    uint32 ssrc = it->first_ssrc();
    TrackInfos::iterator track_it = current_tracks->find(track_id);
    if (track_it == current_tracks->end()) {
      (*current_tracks)[track_id] =
          TrackInfo(stream_label, track_id, ssrc);
      OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(),
                       media_type);
    }
  }
}

void MediaStreamSignaling::OnLocalTrackSeen(
    const std::string& stream_label,
    const std::string& track_id,
    uint32 ssrc,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = local_streams_->find(stream_label);
  if (!stream) {
    LOG(LS_WARNING) << "An unknown local MediaStream with label "
                    << stream_label << " has been configured.";
    return;
  }

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
    if (!audio_track) {
      LOG(LS_WARNING) << "An unknown local AudioTrack with id "
                      << track_id << " has been configured.";
      return;
    }
    stream_observer_->OnAddLocalAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
    if (!video_track) {
      LOG(LS_WARNING) << "An unknown local VideoTrack with id "
                      << track_id << " has been configured.";
      return;
    }
    stream_observer_->OnAddLocalVideoTrack(stream, video_track, ssrc);
  } else {
    ASSERT(false && "Invalid media type");
  }
}

void MediaStreamSignaling::OnLocalTrackRemoved(
    const std::string& stream_label,
    const std::string& track_id,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = local_streams_->find(stream_label);
  if (!stream) {
    // This is the normal case, i.e. RemoveLocalStream has been called and the
    // SessionDescription has been renegotiated.
    return;
  }
  // A track has been removed from the SessionDescription but the MediaStream
  // is still associated with MediaStreamSignaling. This only occurs if the SDP
  // doesn't match with the calls to AddLocalStream and RemoveLocalStream.

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
    if (!audio_track) {
      return;
    }
    stream_observer_->OnRemoveLocalAudioTrack(stream, audio_track);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
    if (!video_track) {
      return;
    }
    stream_observer_->OnRemoveLocalVideoTrack(stream, video_track);
  } else {
    ASSERT(false && "Invalid media type.");
  }
}

void MediaStreamSignaling::UpdateLocalRtpDataChannels(
    const cricket::StreamParamsVec& streams) {
  std::vector<std::string> existing_channels;

  // Find new and active data channels.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // |it->sync_label| is actually the data channel label. The reason is that
    // we use the same naming of data channels as we do for
    // MediaStreams and Tracks.
    // For MediaStreams, the sync_label is the MediaStream label and the
    // track label is the same as |streamid|.
    const std::string& channel_label = it->sync_label;
    RtpDataChannels::iterator data_channel_it =
        rtp_data_channels_.find(channel_label);
    if (!VERIFY(data_channel_it != rtp_data_channels_.end())) {
      continue;
    }
    // Set the SSRC the data channel should use for sending.
    data_channel_it->second->SetSendSsrc(it->first_ssrc());
    existing_channels.push_back(data_channel_it->first);
  }

  UpdateClosingDataChannels(existing_channels, true);
}

void MediaStreamSignaling::UpdateRemoteRtpDataChannels(
    const cricket::StreamParamsVec& streams) {
  std::vector<std::string> existing_channels;

  // Find new and active data channels.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The data channel label is either the mslabel or the SSRC if the mslabel
    // does not exist, e.g. a=ssrc:444330170 mslabel:test1.
    std::string label = it->sync_label.empty() ?
        talk_base::ToString(it->first_ssrc()) : it->sync_label;
    RtpDataChannels::iterator data_channel_it =
        rtp_data_channels_.find(label);
    if (data_channel_it == rtp_data_channels_.end()) {
      // This is a new data channel.
      CreateRemoteDataChannel(label, it->first_ssrc());
    } else {
      data_channel_it->second->SetReceiveSsrc(it->first_ssrc());
    }
    existing_channels.push_back(label);
  }

  UpdateClosingDataChannels(existing_channels, false);
}

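// Closes the RTP data channels that are no longer listed in
// |active_channels|. For a local update the send SSRC is cleared; for a
// remote update the channel is asked to close. Channels that reach the
// kClosed state are removed from |rtp_data_channels_|.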
void MediaStreamSignaling::UpdateClosingDataChannels(
    const std::vector<std::string>& active_channels, bool is_local_update) {
  RtpDataChannels::iterator it = rtp_data_channels_.begin();
  while (it != rtp_data_channels_.end()) {
    DataChannel* data_channel = it->second;
    if (std::find(active_channels.begin(), active_channels.end(),
                  data_channel->label()) != active_channels.end()) {
      ++it;
      continue;
    }

    if (is_local_update)
      data_channel->SetSendSsrc(0);
    else
      data_channel->RemotePeerRequestClose();

    if (data_channel->state() == DataChannel::kClosed) {
      rtp_data_channels_.erase(it);
      it = rtp_data_channels_.begin();
    } else {
      ++it;
    }
  }
}

void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label,
                                                   uint32 remote_ssrc) {
  if (!data_channel_factory_) {
    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
                    << "are not supported.";
    return;
  }
  scoped_refptr<DataChannel> channel(
      data_channel_factory_->CreateDataChannel(label, NULL));
  if (!channel.get()) {
    LOG(LS_WARNING) << "Remote peer requested a DataChannel but "
                    << "CreateDataChannel failed.";
    return;
  }
  channel->SetReceiveSsrc(remote_ssrc);
  stream_observer_->OnAddDataChannel(channel);
}

void MediaStreamSignaling::OnDataTransportCreatedForSctp() {
  SctpDataChannels::iterator it = sctp_data_channels_.begin();
  for (; it != sctp_data_channels_.end(); ++it) {
    (*it)->OnTransportChannelCreated();
  }
}

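// Called once the DTLS role is known. Assigns an SCTP sid, based on |role|,
// to every SCTP data channel that has not yet been given one.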
void MediaStreamSignaling::OnDtlsRoleReadyForSctp(talk_base::SSLRole role) {
  SctpDataChannels::iterator it = sctp_data_channels_.begin();
  for (; it != sctp_data_channels_.end(); ++it) {
    if ((*it)->id() < 0) {
      int sid;
      if (!AllocateSctpSid(role, &sid)) {
        LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
        continue;
      }
      (*it)->SetSctpSid(sid);
    }
  }
}

}  // namespace webrtc