// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_impl.h"

#include <utility>

#include "base/logging.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream_audio_renderer.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/media_stream_dispatcher.h"
#include "content/renderer/media/media_stream_extra_data.h"
#include "content/renderer/media/media_stream_source_extra_data.h"
#include "content/renderer/media/rtc_video_renderer.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
#include "content/renderer/media/webrtc_audio_renderer.h"
#include "content/renderer/media/webrtc_local_audio_renderer.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/render_thread_impl.h"
#include "media/base/audio_hardware_config.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"

namespace content {
namespace {

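// Copies the mandatory and optional constraints from |constraints| into the
// corresponding StreamOptions::Constraints lists.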
void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
                           StreamOptions::Constraints* mandatory,
                           StreamOptions::Constraints* optional) {
  blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
  constraints.getMandatoryConstraints(mandatory_constraints);
  for (size_t i = 0; i < mandatory_constraints.size(); i++) {
    mandatory->push_back(StreamOptions::Constraint(
        mandatory_constraints[i].m_name.utf8(),
        mandatory_constraints[i].m_value.utf8()));
  }

  blink::WebVector<blink::WebMediaConstraint> optional_constraints;
  constraints.getOptionalConstraints(optional_constraints);
  for (size_t i = 0; i < optional_constraints.size(); i++) {
    optional->push_back(StreamOptions::Constraint(
        optional_constraints[i].m_name.utf8(),
        optional_constraints[i].m_value.utf8()));
  }
}

static int g_next_request_id = 0;

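// Returns the native webrtc::MediaStreamInterface attached to |web_stream|,
// or NULL if the stream has no extra data.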
webrtc::MediaStreamInterface* GetNativeMediaStream(
    const blink::WebMediaStream& web_stream) {
  content::MediaStreamExtraData* extra_data =
      static_cast<content::MediaStreamExtraData*>(web_stream.extraData());
  if (!extra_data)
    return NULL;
  return extra_data->stream().get();
}

void GetDefaultOutputDeviceParams(
    int* output_sample_rate, int* output_buffer_size) {
  // Fetch the default audio output hardware config.
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  *output_sample_rate = hardware_config->GetOutputSampleRate();
  *output_buffer_size = hardware_config->GetOutputBufferSize();
}

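// Removes the entry in |sources| whose id matches |source|, if any.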
void RemoveSource(const blink::WebMediaStreamSource& source,
                  std::vector<blink::WebMediaStreamSource>* sources) {
  for (std::vector<blink::WebMediaStreamSource>::iterator it =
           sources->begin();
       it != sources->end(); ++it) {
    if (source.id() == it->id()) {
      sources->erase(it);
      return;
    }
  }
}

}  // namespace

MediaStreamImpl::MediaStreamImpl(
    RenderView* render_view,
    MediaStreamDispatcher* media_stream_dispatcher,
    MediaStreamDependencyFactory* dependency_factory)
    : RenderViewObserver(render_view),
      dependency_factory_(dependency_factory),
      media_stream_dispatcher_(media_stream_dispatcher) {
}

MediaStreamImpl::~MediaStreamImpl() {
}

void MediaStreamImpl::requestUserMedia(
    const blink::WebUserMediaRequest& user_media_request) {
  // Save histogram data so we can see how much GetUserMedia is used.
  // The histogram counts the number of calls to the JS API
  // webGetUserMedia.
  UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
  DCHECK(CalledOnValidThread());
  int request_id = g_next_request_id++;
  StreamOptions options;
  blink::WebFrame* frame = NULL;
  GURL security_origin;
  bool enable_automatic_output_device_selection = false;

  // |user_media_request| can't be mocked. So in order to test at all we check
  // if it isNull.
  if (user_media_request.isNull()) {
    // We are in a test.
    options.audio_requested = true;
    options.video_requested = true;
  } else {
    if (user_media_request.audio()) {
      options.audio_requested = true;
      CopyStreamConstraints(user_media_request.audioConstraints(),
                            &options.mandatory_audio,
                            &options.optional_audio);

      // Check if this input device should be used to select a matching output
      // device for audio rendering.
      std::string enable;
      if (options.GetFirstAudioConstraintByName(
              kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
          LowerCaseEqualsASCII(enable, "true")) {
        enable_automatic_output_device_selection = true;
      }
    }
    if (user_media_request.video()) {
      options.video_requested = true;
      CopyStreamConstraints(user_media_request.videoConstraints(),
                            &options.mandatory_video,
                            &options.optional_video);
    }

    security_origin = GURL(user_media_request.securityOrigin().toString());
    // Get the WebFrame that requested a MediaStream.
    // The frame is needed to tell the MediaStreamDispatcher when a stream goes
    // out of scope.
    frame = user_media_request.ownerDocument().frame();
    DCHECK(frame);
  }

  DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
           << "audio=" << (options.audio_requested)
           << " select associated sink: "
           << enable_automatic_output_device_selection
           << ", video=" << (options.video_requested) << " ], "
           << security_origin.spec() << ")";

  std::string audio_device_id;
  bool mandatory_audio;
  options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
                                        &audio_device_id, &mandatory_audio);
  std::string video_device_id;
  bool mandatory_video;
  options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
                                        &video_device_id, &mandatory_video);

  WebRtcLogMessage(base::StringPrintf(
      "MSI::requestUserMedia. request_id=%d"
      ", audio source id=%s mandatory= %s "
      ", video source id=%s mandatory= %s",
      request_id,
      audio_device_id.c_str(),
      mandatory_audio ? "true":"false",
      video_device_id.c_str(),
      mandatory_video ? "true":"false"));

  user_media_requests_.push_back(
      new UserMediaRequestInfo(request_id, frame, user_media_request,
          enable_automatic_output_device_selection));

  media_stream_dispatcher_->GenerateStream(
      request_id,
      AsWeakPtr(),
      options,
      security_origin);
}

void MediaStreamImpl::cancelUserMediaRequest(
    const blink::WebUserMediaRequest& user_media_request) {
  DCHECK(CalledOnValidThread());
  UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
  if (request) {
    // We can't abort the stream generation process.
    // Instead, erase the request. Once the stream is generated we will stop the
    // stream if the request does not exist.
    DeleteUserMediaRequestInfo(request);
  }
}

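// Returns the blink::WebMediaStream that is registered for |url| in the
// WebKit media stream registry.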
blink::WebMediaStream MediaStreamImpl::GetMediaStream(
    const GURL& url) {
  return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
}

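// Returns true if |url| maps to a registered stream whose native
// representation has at least one audio or video track.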
bool MediaStreamImpl::IsMediaStream(const GURL& url) {
  blink::WebMediaStream web_stream(
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url));

  if (web_stream.isNull() || !web_stream.extraData())
    return false;  // This is not a valid stream.

  webrtc::MediaStreamInterface* stream = GetNativeMediaStream(web_stream);
  return (stream &&
      (!stream->GetVideoTracks().empty() || !stream->GetAudioTracks().empty()));
}

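// Creates a renderer for the first video track of the stream registered for
// |url|. Returns NULL if the stream is invalid or has no video tracks.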
scoped_refptr<VideoFrameProvider>
MediaStreamImpl::GetVideoFrameProvider(
    const GURL& url,
    const base::Closure& error_cb,
    const VideoFrameProvider::RepaintCB& repaint_cb) {
  DCHECK(CalledOnValidThread());
  blink::WebMediaStream web_stream(GetMediaStream(url));

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
           << UTF16ToUTF8(web_stream.id());

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream.videoTracks(video_tracks);
  if (video_tracks.isEmpty())
    return NULL;

  return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
}

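// Creates an audio renderer for the stream registered for |url|: a
// WebRtcLocalAudioRenderer for local streams, or a shared WebRtcAudioRenderer
// proxy for remote streams. Returns NULL if the stream is invalid or has no
// audio tracks.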
scoped_refptr<MediaStreamAudioRenderer>
MediaStreamImpl::GetAudioRenderer(const GURL& url) {
  DCHECK(CalledOnValidThread());
  blink::WebMediaStream web_stream(GetMediaStream(url));

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:"
           << UTF16ToUTF8(web_stream.id());

  MediaStreamExtraData* extra_data =
      static_cast<MediaStreamExtraData*>(web_stream.extraData());

  if (extra_data->is_local()) {
    // Create the local audio renderer if the stream contains audio tracks.
    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    web_stream.audioTracks(audio_tracks);
    if (audio_tracks.isEmpty())
      return NULL;

    // TODO(xians): Add support for the case that the media stream contains
    // multiple audio tracks.
    return CreateLocalAudioRenderer(audio_tracks[0]);
  }

  webrtc::MediaStreamInterface* stream = extra_data->stream().get();
  if (!stream || stream->GetAudioTracks().empty())
    return NULL;

  // This is a remote media stream.
  WebRtcAudioDeviceImpl* audio_device =
      dependency_factory_->GetWebRtcAudioDevice();

  // Share the existing renderer if any, otherwise create a new one.
  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
  if (!renderer.get()) {
    renderer = CreateRemoteAudioRenderer(extra_data->stream().get());

    if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
      renderer = NULL;
  }

  return renderer.get() ? renderer->CreateSharedAudioRendererProxy() : NULL;
}

// Callback from MediaStreamDispatcher.
// The requested stream has been generated by the MediaStreamDispatcher.
void MediaStreamImpl::OnStreamGenerated(
    int request_id,
    const std::string& label,
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;

  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    // Only stop the device if the device is not used in another MediaStream.
    for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
         device_it != audio_array.end(); ++device_it) {
      if (!FindLocalSource(*device_it))
        media_stream_dispatcher_->StopStreamDevice(*device_it);
    }

    for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
         device_it != video_array.end(); ++device_it) {
      if (!FindLocalSource(*device_it))
        media_stream_dispatcher_->StopStreamDevice(*device_it);
    }

    DVLOG(1) << "Request ID not found";
    return;
  }
  request_info->generated = true;

  blink::WebVector<blink::WebMediaStreamSource> audio_source_vector(
      audio_array.size());

  // Log the device names for this request.
  for (StreamDeviceInfoArray::const_iterator it = audio_array.begin();
       it != audio_array.end(); ++it) {
    WebRtcLogMessage(base::StringPrintf(
        "Generated media stream for request id %d contains audio device name"
        " \"%s\"",
        request_id,
        it->device.name.c_str()));
  }

  StreamDeviceInfoArray overridden_audio_array = audio_array;
  if (!request_info->enable_automatic_output_device_selection) {
    // If the GetUserMedia request did not explicitly set the constraint
    // kMediaStreamRenderToAssociatedSink, the output device parameters must
    // be removed.
    for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
         it != overridden_audio_array.end(); ++it) {
      it->device.matched_output_device_id = "";
      it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
    }
  }
  CreateWebKitSourceVector(label, overridden_audio_array,
                           blink::WebMediaStreamSource::TypeAudio,
                           request_info->frame,
                           audio_source_vector);

  blink::WebVector<blink::WebMediaStreamSource> video_source_vector(
      video_array.size());
  CreateWebKitSourceVector(label, video_array,
                           blink::WebMediaStreamSource::TypeVideo,
                           request_info->frame,
                           video_source_vector);
  blink::WebUserMediaRequest* request = &(request_info->request);
  blink::WebString webkit_id = UTF8ToUTF16(label);
  blink::WebMediaStream* web_stream = &(request_info->web_stream);

  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
      audio_array.size());
  for (size_t i = 0; i < audio_track_vector.size(); ++i) {
    audio_track_vector[i].initialize(audio_source_vector[i]);
    request_info->sources.push_back(audio_source_vector[i]);
  }

  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
      video_array.size());
  for (size_t i = 0; i < video_track_vector.size(); ++i) {
    video_track_vector[i].initialize(video_source_vector[i]);
    request_info->sources.push_back(video_source_vector[i]);
  }

  web_stream->initialize(webkit_id, audio_track_vector,
                         video_track_vector);

  // WebUserMediaRequest doesn't have an implementation in unit tests.
  // Therefore we need to check for isNull here.
  blink::WebMediaConstraints audio_constraints = request->isNull() ?
      blink::WebMediaConstraints() : request->audioConstraints();
  blink::WebMediaConstraints video_constraints = request->isNull() ?
      blink::WebMediaConstraints() : request->videoConstraints();

  dependency_factory_->CreateNativeMediaSources(
      RenderViewObserver::routing_id(),
      audio_constraints, video_constraints, web_stream,
      base::Bind(&MediaStreamImpl::OnCreateNativeSourcesComplete, AsWeakPtr()));
}

// Callback from MediaStreamDispatcher.
// The requested stream failed to be generated.
void MediaStreamImpl::OnStreamGenerationFailed(int request_id) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
           << request_id << ")";
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    DVLOG(1) << "Request ID not found";
    return;
  }
  CompleteGetUserMediaRequest(request_info->web_stream,
                              &request_info->request,
                              false);
  DeleteUserMediaRequestInfo(request_info);
}

// Callback from MediaStreamDispatcher.
// The browser process has stopped a device used by a MediaStream.
void MediaStreamImpl::OnDeviceStopped(
    const std::string& label,
    const StreamDeviceInfo& device_info) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnDeviceStopped("
           << "{device_id = " << device_info.device.id << "})";

  const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
  if (!source_ptr) {
    // This happens if the same device is used in several guM requests, or
    // if a user happens to stop a track from JS at the same time
    // as the underlying media device is unplugged from the system.
    return;
  }
  // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
  // object is valid during the cleanup.
  blink::WebMediaStreamSource source(*source_ptr);
  StopLocalSource(source, false);

  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      local_sources_.erase(device_it);
      break;
    }
  }

  // Remove the reference to this source from all |user_media_requests_|.
  // TODO(perkj): The below is not necessary once we don't need to support
  // MediaStream::Stop().
  UserMediaRequests::iterator it = user_media_requests_.begin();
  while (it != user_media_requests_.end()) {
    RemoveSource(source, &(*it)->sources);
    if ((*it)->sources.empty()) {
      it = user_media_requests_.erase(it);
    } else {
      ++it;
    }
  }
}

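// Creates one blink::WebMediaStreamSource per device in |devices| and stores
// them in |webkit_sources|. Sources that are already known are reused; new
// sources are added to |local_sources_|.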
void MediaStreamImpl::CreateWebKitSourceVector(
    const std::string& label,
    const StreamDeviceInfoArray& devices,
    blink::WebMediaStreamSource::Type type,
    blink::WebFrame* frame,
    blink::WebVector<blink::WebMediaStreamSource>& webkit_sources) {
  CHECK_EQ(devices.size(), webkit_sources.size());
  for (size_t i = 0; i < devices.size(); ++i) {
    const blink::WebMediaStreamSource* existing_source =
        FindLocalSource(devices[i]);
    if (existing_source) {
      webkit_sources[i] = *existing_source;
      DVLOG(1) << "Source already exists. Reusing source with id "
               << webkit_sources[i].id().utf8();
      continue;
    }
    webkit_sources[i].initialize(
        UTF8ToUTF16(devices[i].device.id),
        type,
        UTF8ToUTF16(devices[i].device.name));
    MediaStreamSourceExtraData* source_extra_data(
        new content::MediaStreamSourceExtraData(
            devices[i],
            base::Bind(&MediaStreamImpl::OnLocalSourceStop, AsWeakPtr())));
    // |source_extra_data| is owned by webkit_sources[i].
    webkit_sources[i].setExtraData(source_extra_data);
    local_sources_.push_back(LocalStreamSource(frame, webkit_sources[i]));
  }
}

// Callback from MediaStreamDependencyFactory when the sources in |web_stream|
// have been generated.
void MediaStreamImpl::OnCreateNativeSourcesComplete(
    blink::WebMediaStream* web_stream,
    bool request_succeeded) {
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(web_stream);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDependencyFactory is creating the sources.
    DVLOG(1) << "Request ID not found";
    return;
  }

  // Create a native representation of the stream.
  if (request_succeeded) {
    dependency_factory_->CreateNativeLocalMediaStream(
        web_stream,
        base::Bind(&MediaStreamImpl::OnLocalMediaStreamStop, AsWeakPtr()));
  }
  DVLOG(1) << "MediaStreamImpl::OnCreateNativeSourcesComplete("
           << "{request_id = " << request_info->request_id << "} "
           << "{request_succeeded = " << request_succeeded << "})";
  CompleteGetUserMediaRequest(request_info->web_stream, &request_info->request,
                              request_succeeded);
  if (!request_succeeded) {
    // TODO(perkj): Once we don't support MediaStream::Stop the |request_info|
    // can be deleted even if the request succeeds.
    DeleteUserMediaRequestInfo(request_info);
    StopUnreferencedSources(true);
  }
}

void MediaStreamImpl::OnDevicesEnumerated(
    int request_id,
    const StreamDeviceInfoArray& device_array) {
  DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated("
           << request_id << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::OnDeviceOpened(
    int request_id,
    const std::string& label,
    const StreamDeviceInfo& video_device) {
  DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
           << request_id << ", " << label << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
  DVLOG(1) << "MediaStreamImpl::VideoDeviceOpenFailed("
           << request_id << ")";
  NOTIMPLEMENTED();
}

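// Signals success or failure of the getUserMedia request back to WebKit.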
void MediaStreamImpl::CompleteGetUserMediaRequest(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest* request_info,
    bool request_succeeded) {
  if (request_succeeded) {
    request_info->requestSucceeded(stream);
  } else {
    request_info->requestFailed();
  }
}

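// Returns the local source that matches |device| on device id, device type
// and session id, or NULL if no such source exists.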
const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
    const StreamDeviceInfo& device) const {
  for (LocalStreamSources::const_iterator it = local_sources_.begin();
       it != local_sources_.end(); ++it) {
    MediaStreamSourceExtraData* extra_data =
        static_cast<MediaStreamSourceExtraData*>(
            it->source.extraData());
    const StreamDeviceInfo& active_device = extra_data->device_info();
    if (active_device.device.id == device.device.id &&
        active_device.device.type == device.device.type &&
        active_device.session_id == device.session_id) {
      return &it->source;
    }
  }
  return NULL;
}

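// Returns true if |source| is still referenced by any entry in
// |user_media_requests_|.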
bool MediaStreamImpl::FindSourceInRequests(
    const blink::WebMediaStreamSource& source) const {
  for (UserMediaRequests::const_iterator req_it = user_media_requests_.begin();
       req_it != user_media_requests_.end(); ++req_it) {
    const std::vector<blink::WebMediaStreamSource>& sources =
        (*req_it)->sources;
    for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
             sources.begin();
         source_it != sources.end(); ++source_it) {
      if (source_it->id() == source.id()) {
        return true;
      }
    }
  }
  return false;
}

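// The FindUserMediaRequestInfo() overloads below look up an entry in
// |user_media_requests_| by request id, by the originating
// WebUserMediaRequest, by the generated stream label, or by the
// WebMediaStream it produced. They return NULL if no match is found.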
MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request_id == request_id)
      return (*it);
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(
    const blink::WebUserMediaRequest& request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request == request)
      return (*it);
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(const std::string& label) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->generated && (*it)->web_stream.id() == UTF8ToUTF16(label))
      return (*it);
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(
    blink::WebMediaStream* web_stream) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if (&((*it)->web_stream) == web_stream)
      return (*it);
  }
  return NULL;
}

void MediaStreamImpl::DeleteUserMediaRequestInfo(
    UserMediaRequestInfo* request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it) == request) {
      user_media_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}

void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) {
  // Do the same thing as FrameWillClose.
  FrameWillClose(frame);
}

void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
  // Loop through all UserMediaRequests and find the requests that belong to
  // the frame that is being closed.
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    if ((*request_it)->frame == frame) {
      DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
               << "Cancel user media request " << (*request_it)->request_id;
      // If the request is not generated, it means that a request
      // has been sent to the MediaStreamDispatcher to generate a stream
      // but MediaStreamDispatcher has not yet responded and we need to cancel
      // the request.
      if (!(*request_it)->generated) {
        media_stream_dispatcher_->CancelGenerateStream(
            (*request_it)->request_id, AsWeakPtr());
      }
      request_it = user_media_requests_.erase(request_it);
    } else {
      ++request_it;
    }
  }

  // Loop through all current local sources and stop the sources that were
  // created by the frame that will be closed.
  LocalStreamSources::iterator sources_it = local_sources_.begin();
  while (sources_it != local_sources_.end()) {
    if (sources_it->frame == frame) {
      StopLocalSource(sources_it->source, true);
      sources_it = local_sources_.erase(sources_it);
    } else {
      ++sources_it;
    }
  }
}

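// Called when a generated local media stream identified by |label| is
// stopped. Removes the corresponding request and stops any local sources
// that are no longer referenced by a request.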
void MediaStreamImpl::OnLocalMediaStreamStop(
    const std::string& label) {
  DVLOG(1) << "MediaStreamImpl::OnLocalMediaStreamStop(" << label << ")";

  UserMediaRequestInfo* user_media_request = FindUserMediaRequestInfo(label);
  if (user_media_request) {
    DeleteUserMediaRequestInfo(user_media_request);
  }
  StopUnreferencedSources(true);
}

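// Called when a single local source is stopped. Stops the source, removes it
// from |local_sources_| and drops the reference to it from all
// |user_media_requests_|.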
void MediaStreamImpl::OnLocalSourceStop(
    const blink::WebMediaStreamSource& source) {
  DCHECK(CalledOnValidThread());

  StopLocalSource(source, true);

  bool device_found = false;
  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      device_found = true;
      local_sources_.erase(device_it);
      break;
    }
  }
  CHECK(device_found);

  // Remove the reference to this source from all |user_media_requests_|.
  // TODO(perkj): The below is not necessary once we don't need to support
  // MediaStream::Stop().
  UserMediaRequests::iterator it = user_media_requests_.begin();
  while (it != user_media_requests_.end()) {
    RemoveSource(source, &(*it)->sources);
    if ((*it)->sources.empty()) {
      it = user_media_requests_.erase(it);
    } else {
      ++it;
    }
  }
}

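// Stops the capturer behind |source|, optionally notifies the
// MediaStreamDispatcher that the device is no longer in use, and marks the
// source as ended.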
void MediaStreamImpl::StopLocalSource(
    const blink::WebMediaStreamSource& source,
    bool notify_dispatcher) {
  MediaStreamSourceExtraData* extra_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());
  CHECK(extra_data);
  DVLOG(1) << "MediaStreamImpl::StopLocalSource("
           << "{device_id = " << extra_data->device_info().device.id << "})";

  if (source.type() == blink::WebMediaStreamSource::TypeAudio) {
    if (extra_data->GetAudioCapturer()) {
      extra_data->GetAudioCapturer()->Stop();
    }
  }

  if (notify_dispatcher)
    media_stream_dispatcher_->StopStreamDevice(extra_data->device_info());

  blink::WebMediaStreamSource writable_source(source);
  writable_source.setReadyState(
      blink::WebMediaStreamSource::ReadyStateEnded);
  writable_source.setExtraData(NULL);
}

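// Stops and removes every local source that is no longer referenced by any
// user media request.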
void MediaStreamImpl::StopUnreferencedSources(bool notify_dispatcher) {
  LocalStreamSources::iterator source_it = local_sources_.begin();
  while (source_it != local_sources_.end()) {
    if (!FindSourceInRequests(source_it->source)) {
      StopLocalSource(source_it->source, notify_dispatcher);
      source_it = local_sources_.erase(source_it);
    } else {
      ++source_it;
    }
  }
}

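// Creates a WebRtcAudioRenderer for a remote media stream, using the
// authorized output device parameters if available and the default output
// device parameters otherwise.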
scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream) {
  if (stream->GetAudioTracks().empty())
    return NULL;

  DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
           << stream->label();

  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId.  Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  return new WebRtcAudioRenderer(RenderViewObserver::routing_id(),
      session_id, sample_rate, buffer_size);
}

scoped_refptr<WebRtcLocalAudioRenderer>
MediaStreamImpl::CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track) {
  DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";

  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as its
  // source.
  return new WebRtcLocalAudioRenderer(
      audio_track,
      RenderViewObserver::routing_id(),
      session_id,
      buffer_size);
}

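// Returns true and fills in |session_id|, |output_sample_rate| and
// |output_frames_per_buffer| if the default audio capturer has paired output
// device parameters; returns false otherwise.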
bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer(
    int* session_id,
    int* output_sample_rate,
    int* output_frames_per_buffer) {
  DCHECK(CalledOnValidThread());

  WebRtcAudioDeviceImpl* audio_device =
      dependency_factory_->GetWebRtcAudioDevice();
  if (!audio_device)
    return false;

  if (!audio_device->GetDefaultCapturer())
    return false;

  return audio_device->GetDefaultCapturer()->GetPairedOutputParameters(
      session_id,
      output_sample_rate,
      output_frames_per_buffer);
}

MediaStreamSourceExtraData::MediaStreamSourceExtraData(
    const StreamDeviceInfo& device_info,
    const SourceStopCallback& stop_callback)
    : device_info_(device_info),
      stop_callback_(stop_callback) {
}

MediaStreamSourceExtraData::MediaStreamSourceExtraData() {
}

MediaStreamSourceExtraData::~MediaStreamSourceExtraData() {}

void MediaStreamSourceExtraData::OnLocalSourceStop() {
  if (!stop_callback_.is_null())
    stop_callback_.Run(owner());
}

MediaStreamExtraData::MediaStreamExtraData(
    webrtc::MediaStreamInterface* stream, bool is_local)
    : stream_(stream),
      is_local_(is_local) {
}

MediaStreamExtraData::~MediaStreamExtraData() {
}

void MediaStreamExtraData::SetLocalStreamStopCallback(
    const StreamStopCallback& stop_callback) {
  stream_stop_callback_ = stop_callback;
}

void MediaStreamExtraData::OnLocalStreamStop() {
  if (!stream_stop_callback_.is_null())
    stream_stop_callback_.Run(stream_->label());
}

MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    int request_id,
    blink::WebFrame* frame,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      generated(false),
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      frame(frame),
      request(request) {
}

MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
}

}  // namespace content