// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_impl.h"

#include <utility>

#include "base/hash.h"
#include "base/logging.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_dispatcher.h"
#include "content/renderer/media/media_stream_video_capturer_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_tracker.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/render_thread_impl.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaDeviceInfo.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"

namespace content {
namespace {

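// Copies every mandatory and optional constraint from |constraints| into the
// corresponding StreamOptions::Constraints list as raw UTF-8 name/value
// pairs. No validation or interpretation happens here; for example
// (illustrative only), a constraint named "foo" with value "bar" is copied
// verbatim whether or not any later stage understands it.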
void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
                           StreamOptions::Constraints* mandatory,
                           StreamOptions::Constraints* optional) {
  blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
  constraints.getMandatoryConstraints(mandatory_constraints);
  for (size_t i = 0; i < mandatory_constraints.size(); i++) {
    mandatory->push_back(StreamOptions::Constraint(
        mandatory_constraints[i].m_name.utf8(),
        mandatory_constraints[i].m_value.utf8()));
  }

  blink::WebVector<blink::WebMediaConstraint> optional_constraints;
  constraints.getOptionalConstraints(optional_constraints);
  for (size_t i = 0; i < optional_constraints.size(); i++) {
    optional->push_back(StreamOptions::Constraint(
        optional_constraints[i].m_name.utf8(),
        optional_constraints[i].m_value.utf8()));
  }
}

static int g_next_request_id = 0;

}  // namespace

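// Bookkeeping for one WebMediaDevicesRequest. A single request fans out into
// three MediaStreamDispatcher enumerations (audio input, video input and
// audio output); the results are collected here and joined in
// OnDevicesEnumerated() once all three have returned.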
struct MediaStreamImpl::MediaDevicesRequestInfo {
  MediaDevicesRequestInfo(const blink::WebMediaDevicesRequest& request,
                          int audio_input_request_id,
                          int video_input_request_id,
                          int audio_output_request_id)
      : request(request),
        audio_input_request_id(audio_input_request_id),
        video_input_request_id(video_input_request_id),
        audio_output_request_id(audio_output_request_id),
        has_audio_input_returned(false),
        has_video_input_returned(false),
        has_audio_output_returned(false) {}

  blink::WebMediaDevicesRequest request;
  int audio_input_request_id;
  int video_input_request_id;
  int audio_output_request_id;
  bool has_audio_input_returned;
  bool has_video_input_returned;
  bool has_audio_output_returned;
  StreamDeviceInfoArray audio_input_devices;
  StreamDeviceInfoArray video_input_devices;
  StreamDeviceInfoArray audio_output_devices;
};

MediaStreamImpl::MediaStreamImpl(
    RenderView* render_view,
    MediaStreamDispatcher* media_stream_dispatcher,
    PeerConnectionDependencyFactory* dependency_factory)
    : RenderViewObserver(render_view),
      dependency_factory_(dependency_factory),
      media_stream_dispatcher_(media_stream_dispatcher) {
}

MediaStreamImpl::~MediaStreamImpl() {
}

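// Handles a getUserMedia() request coming from Blink: the audio/video
// constraints are copied into a StreamOptions, the request is remembered in
// |user_media_requests_| and MediaStreamDispatcher is asked to generate a
// stream. The result arrives asynchronously in OnStreamGenerated() or
// OnStreamGenerationFailed().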
void MediaStreamImpl::requestUserMedia(
    const blink::WebUserMediaRequest& user_media_request) {
  // Save histogram data so we can see how much GetUserMedia is used.
  // The histogram counts the number of calls to the JS API
  // webGetUserMedia.
  UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
  DCHECK(CalledOnValidThread());

  if (RenderThreadImpl::current()) {
    RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia(
        user_media_request);
  }

  int request_id = g_next_request_id++;
  StreamOptions options;
  blink::WebLocalFrame* frame = NULL;
  GURL security_origin;
  bool enable_automatic_output_device_selection = false;

  // |user_media_request| can't be mocked. So in order to test at all we check
  // if it isNull.
  if (user_media_request.isNull()) {
    // We are in a test.
    options.audio_requested = true;
    options.video_requested = true;
  } else {
    if (user_media_request.audio()) {
      options.audio_requested = true;
      CopyStreamConstraints(user_media_request.audioConstraints(),
                            &options.mandatory_audio,
                            &options.optional_audio);

      // Check if this input device should be used to select a matching output
      // device for audio rendering.
      std::string enable;
      if (options.GetFirstAudioConstraintByName(
              kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
          LowerCaseEqualsASCII(enable, "true")) {
        enable_automatic_output_device_selection = true;
      }
    }
    if (user_media_request.video()) {
      options.video_requested = true;
      CopyStreamConstraints(user_media_request.videoConstraints(),
                            &options.mandatory_video,
                            &options.optional_video);
    }

    security_origin = GURL(user_media_request.securityOrigin().toString());
    // Get the WebFrame that requested a MediaStream.
    // The frame is needed to tell the MediaStreamDispatcher when a stream goes
    // out of scope.
    frame = user_media_request.ownerDocument().frame();
    DCHECK(frame);
  }

  DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
           << "audio=" << (options.audio_requested)
           << " select associated sink: "
           << enable_automatic_output_device_selection
           << ", video=" << (options.video_requested) << " ], "
           << security_origin.spec() << ")";

  std::string audio_device_id;
  bool mandatory_audio = false;
  options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
                                        &audio_device_id, &mandatory_audio);
  std::string video_device_id;
  bool mandatory_video = false;
  options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
                                        &video_device_id, &mandatory_video);

  WebRtcLogMessage(base::StringPrintf(
      "MSI::requestUserMedia. request_id=%d"
      ", audio source id=%s mandatory= %s "
      ", video source id=%s mandatory= %s",
      request_id,
      audio_device_id.c_str(),
      mandatory_audio ? "true" : "false",
      video_device_id.c_str(),
      mandatory_video ? "true" : "false"));

  user_media_requests_.push_back(
      new UserMediaRequestInfo(request_id, frame, user_media_request,
                               enable_automatic_output_device_selection));

  media_stream_dispatcher_->GenerateStream(
      request_id,
      AsWeakPtr(),
      options,
      security_origin);
}

void MediaStreamImpl::cancelUserMediaRequest(
    const blink::WebUserMediaRequest& user_media_request) {
  DCHECK(CalledOnValidThread());
  UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
  if (request) {
    // We can't abort the stream generation process.
    // Instead, erase the request. Once the stream is generated we will stop
    // the stream if the request does not exist.
    DeleteUserMediaRequestInfo(request);
  }
}

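// Handles an enumerateDevices()-style request from Blink: three parallel
// device enumerations (audio input, video input, audio output) are started
// via MediaStreamDispatcher and joined later in OnDevicesEnumerated().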
void MediaStreamImpl::requestMediaDevices(
    const blink::WebMediaDevicesRequest& media_devices_request) {
  UpdateWebRTCMethodCount(WEBKIT_GET_MEDIA_DEVICES);
  DCHECK(CalledOnValidThread());

  int audio_input_request_id = g_next_request_id++;
  int video_input_request_id = g_next_request_id++;
  int audio_output_request_id = g_next_request_id++;

  // |media_devices_request| can't be mocked, so in tests it will be empty (the
  // underlying pointer is null). In order to use this function in a test we
  // need to check if it isNull.
  GURL security_origin;
  if (!media_devices_request.isNull())
    security_origin = GURL(media_devices_request.securityOrigin().toString());

  DVLOG(1) << "MediaStreamImpl::requestMediaDevices(" << audio_input_request_id
           << ", " << video_input_request_id << ", " << audio_output_request_id
           << ", " << security_origin.spec() << ")";

  media_devices_requests_.push_back(new MediaDevicesRequestInfo(
      media_devices_request,
      audio_input_request_id,
      video_input_request_id,
      audio_output_request_id));

  media_stream_dispatcher_->EnumerateDevices(
      audio_input_request_id,
      AsWeakPtr(),
      MEDIA_DEVICE_AUDIO_CAPTURE,
      security_origin,
      true);

  media_stream_dispatcher_->EnumerateDevices(
      video_input_request_id,
      AsWeakPtr(),
      MEDIA_DEVICE_VIDEO_CAPTURE,
      security_origin,
      true);

  media_stream_dispatcher_->EnumerateDevices(
      audio_output_request_id,
      AsWeakPtr(),
      MEDIA_DEVICE_AUDIO_OUTPUT,
      security_origin,
      true);
}

void MediaStreamImpl::cancelMediaDevicesRequest(
    const blink::WebMediaDevicesRequest& media_devices_request) {
  DCHECK(CalledOnValidThread());
  MediaDevicesRequestInfo* request =
      FindMediaDevicesRequestInfo(media_devices_request);
  if (!request)
    return;

  // Cancel device enumeration.
  media_stream_dispatcher_->StopEnumerateDevices(
      request->audio_input_request_id,
      AsWeakPtr());
  media_stream_dispatcher_->StopEnumerateDevices(
      request->video_input_request_id,
      AsWeakPtr());
  media_stream_dispatcher_->StopEnumerateDevices(
      request->audio_output_request_id,
      AsWeakPtr());
  DeleteMediaDevicesRequestInfo(request);
}

// Callback from MediaStreamDispatcher.
// The requested stream has been generated by the MediaStreamDispatcher.
void MediaStreamImpl::OnStreamGenerated(
    int request_id,
    const std::string& label,
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;

  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    // Only stop the device if the device is not used in another MediaStream.
    for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
         device_it != audio_array.end(); ++device_it) {
      if (!FindLocalSource(*device_it))
        media_stream_dispatcher_->StopStreamDevice(*device_it);
    }

    for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
         device_it != video_array.end(); ++device_it) {
      if (!FindLocalSource(*device_it))
        media_stream_dispatcher_->StopStreamDevice(*device_it);
    }

    DVLOG(1) << "Request ID not found";
    return;
  }
  request_info->generated = true;

  // WebUserMediaRequest doesn't have an implementation in unit tests.
  // Therefore we need to check for isNull here and initialize the
  // constraints.
  blink::WebUserMediaRequest* request = &(request_info->request);
  blink::WebMediaConstraints audio_constraints;
  blink::WebMediaConstraints video_constraints;
  if (request->isNull()) {
    audio_constraints.initialize();
    video_constraints.initialize();
  } else {
    audio_constraints = request->audioConstraints();
    video_constraints = request->videoConstraints();
  }

  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
      audio_array.size());
  CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
                    request_info);

  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
      video_array.size());
  CreateVideoTracks(video_array, video_constraints, &video_track_vector,
                    request_info);

  blink::WebString webkit_id = base::UTF8ToUTF16(label);
  blink::WebMediaStream* web_stream = &(request_info->web_stream);

  web_stream->initialize(webkit_id, audio_track_vector,
                         video_track_vector);
  web_stream->setExtraData(
      new MediaStream(
          *web_stream));

  // Wait for the tracks to be started successfully or to fail.
  request_info->CallbackOnTracksStarted(
      base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr()));
}

// Callback from MediaStreamDispatcher.
// The requested stream failed to be generated.
void MediaStreamImpl::OnStreamGenerationFailed(
    int request_id,
    content::MediaStreamRequestResult result) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
           << request_id << ")";
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    DVLOG(1) << "Request ID not found";
    return;
  }

  GetUserMediaRequestFailed(&request_info->request, result);
  DeleteUserMediaRequestInfo(request_info);
}

// Callback from MediaStreamDispatcher.
// The browser process has stopped a device used by a MediaStream.
void MediaStreamImpl::OnDeviceStopped(
    const std::string& label,
    const StreamDeviceInfo& device_info) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnDeviceStopped("
           << "{device_id = " << device_info.device.id << "})";

  const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
  if (!source_ptr) {
    // This happens if the same device is used in several guM requests or
    // if a user happens to stop a track from JS at the same time
    // as the underlying media device is unplugged from the system.
    return;
  }
  // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
  // object is valid during the cleanup.
  blink::WebMediaStreamSource source(*source_ptr);
  StopLocalSource(source, false);

  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      local_sources_.erase(device_it);
      break;
    }
  }
}

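// Returns, via |webkit_source|, the blink::WebMediaStreamSource to use for
// |device|. A source already cached in |local_sources_| is reused; otherwise
// a new source is initialized, backed by a MediaStreamVideoCapturerSource or
// MediaStreamAudioSource, and added to the cache.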
void MediaStreamImpl::InitializeSourceObject(
    const StreamDeviceInfo& device,
    blink::WebMediaStreamSource::Type type,
    const blink::WebMediaConstraints& constraints,
    blink::WebFrame* frame,
    blink::WebMediaStreamSource* webkit_source) {
  const blink::WebMediaStreamSource* existing_source =
      FindLocalSource(device);
  if (existing_source) {
    *webkit_source = *existing_source;
    DVLOG(1) << "Source already exists. Reusing source with id "
             << webkit_source->id().utf8();
    return;
  }

  webkit_source->initialize(
      base::UTF8ToUTF16(device.device.id),
      type,
      base::UTF8ToUTF16(device.device.name));

  DVLOG(1) << "Initialize source object :"
           << "id = " << webkit_source->id().utf8()
           << ", name = " << webkit_source->name().utf8();

  if (type == blink::WebMediaStreamSource::TypeVideo) {
    webkit_source->setExtraData(
        CreateVideoSource(
            device,
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr())));
  } else {
    DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
    MediaStreamAudioSource* audio_source(
        new MediaStreamAudioSource(
            RenderViewObserver::routing_id(),
            device,
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
            dependency_factory_));
    webkit_source->setExtraData(audio_source);
  }
  local_sources_.push_back(LocalStreamSource(frame, *webkit_source));
}

MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource(
    const StreamDeviceInfo& device,
    const MediaStreamSource::SourceStoppedCallback& stop_callback) {
  return new content::MediaStreamVideoCapturerSource(
      device,
      stop_callback,
      new VideoCapturerDelegate(device));
}

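// Creates one blink::WebMediaStreamTrack per video device: the source object
// is found or created via InitializeSourceObject() and the track is created
// and started through the request's CreateAndStartVideoTrack().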
void MediaStreamImpl::CreateVideoTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());

  for (size_t i = 0; i < devices.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(devices[i],
                           blink::WebMediaStreamSource::TypeVideo,
                           constraints,
                           request->frame,
                           &webkit_source);
    (*webkit_tracks)[i] =
        request->CreateAndStartVideoTrack(webkit_source, constraints);
  }
}

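// Creates and starts one audio track per device. Unless the request asked for
// automatic output device selection (kMediaStreamRenderToAssociatedSink), any
// matched output device parameters are stripped from the device info first.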
void MediaStreamImpl::CreateAudioTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());

  // Log the device names for this request.
  for (StreamDeviceInfoArray::const_iterator it = devices.begin();
       it != devices.end(); ++it) {
    WebRtcLogMessage(base::StringPrintf(
        "Generated media stream for request id %d contains audio device name"
        " \"%s\"",
        request->request_id,
        it->device.name.c_str()));
  }

  StreamDeviceInfoArray overridden_audio_array = devices;
  if (!request->enable_automatic_output_device_selection) {
    // If the GetUserMedia request did not explicitly set the constraint
    // kMediaStreamRenderToAssociatedSink, the output device parameters must
    // be removed.
    for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
         it != overridden_audio_array.end(); ++it) {
      it->device.matched_output_device_id = "";
      it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
    }
  }

  for (size_t i = 0; i < overridden_audio_array.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(overridden_audio_array[i],
                           blink::WebMediaStreamSource::TypeAudio,
                           constraints,
                           request->frame,
                           &webkit_source);
    (*webkit_tracks)[i].initialize(webkit_source);
    request->StartAudioTrack((*webkit_tracks)[i], constraints);
  }
}

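// Called (via UserMediaRequestInfo::CallbackOnTracksStarted) once every track
// of the request has either started or failed. Reports success or failure to
// Blink and deletes the request info.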
void MediaStreamImpl::OnCreateNativeTracksCompleted(
    UserMediaRequestInfo* request,
    content::MediaStreamRequestResult result) {
  DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksCompleted("
           << "{request_id = " << request->request_id << "} "
           << "{result = " << result << "})";
  if (result == content::MEDIA_DEVICE_OK)
    GetUserMediaRequestSucceeded(request->web_stream, &request->request);
  else
    GetUserMediaRequestFailed(&request->request, result);

  DeleteUserMediaRequestInfo(request);
}

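// Callback from MediaStreamDispatcher.
// One of the three enumerations issued by requestMediaDevices() has returned.
// Once all three are in, the combined device list is handed back to Blink.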
void MediaStreamImpl::OnDevicesEnumerated(
    int request_id,
    const StreamDeviceInfoArray& device_array) {
  DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated(" << request_id << ")";

  MediaDevicesRequestInfo* request = FindMediaDevicesRequestInfo(request_id);
  DCHECK(request);

  if (request_id == request->audio_input_request_id) {
    request->has_audio_input_returned = true;
    DCHECK(request->audio_input_devices.empty());
    request->audio_input_devices = device_array;
  } else if (request_id == request->video_input_request_id) {
    request->has_video_input_returned = true;
    DCHECK(request->video_input_devices.empty());
    request->video_input_devices = device_array;
  } else {
    DCHECK_EQ(request->audio_output_request_id, request_id);
    request->has_audio_output_returned = true;
    DCHECK(request->audio_output_devices.empty());
    request->audio_output_devices = device_array;
  }

  if (!request->has_audio_input_returned ||
      !request->has_video_input_returned ||
      !request->has_audio_output_returned) {
    // Wait for the rest of the devices to complete.
    return;
  }

  // All devices are ready for copying. We use a hashed audio output device id
  // as the group id for input and output audio devices. If an input device
  // doesn't have an associated output device, we use the input device's own
  // id. We don't support group ids for video devices, so that field is left
  // empty.
  blink::WebVector<blink::WebMediaDeviceInfo>
      devices(request->audio_input_devices.size() +
              request->video_input_devices.size() +
              request->audio_output_devices.size());
  for (size_t i = 0; i < request->audio_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_CAPTURE);
    std::string group_id = base::UintToString(base::Hash(
        !device.matched_output_device_id.empty() ?
            device.matched_output_device_id :
            device.id));
    devices[i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindAudioInput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString::fromUTF8(group_id));
  }
  size_t offset = request->audio_input_devices.size();
  for (size_t i = 0; i < request->video_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->video_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_VIDEO_CAPTURE);
    devices[offset + i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindVideoInput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString());
  }
  offset += request->video_input_devices.size();
  for (size_t i = 0; i < request->audio_output_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_output_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_OUTPUT);
    devices[offset + i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindAudioOutput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString::fromUTF8(base::UintToString(base::Hash(device.id))));
  }

  EnumerateDevicesSucceded(&request->request, devices);

  // Cancel device enumeration.
  media_stream_dispatcher_->StopEnumerateDevices(
      request->audio_input_request_id,
      AsWeakPtr());
  media_stream_dispatcher_->StopEnumerateDevices(
      request->video_input_request_id,
      AsWeakPtr());
  media_stream_dispatcher_->StopEnumerateDevices(
      request->audio_output_request_id,
      AsWeakPtr());

  DeleteMediaDevicesRequestInfo(request);
}

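// Callbacks from MediaStreamDispatcher for device open requests. This class
// does not issue such requests, hence NOTIMPLEMENTED() below.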
void MediaStreamImpl::OnDeviceOpened(
    int request_id,
    const std::string& label,
    const StreamDeviceInfo& video_device) {
  DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
           << request_id << ", " << label << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
  DVLOG(1) << "MediaStreamImpl::VideoDeviceOpenFailed("
           << request_id << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::GetUserMediaRequestSucceeded(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest* request_info) {
  DVLOG(1) << "MediaStreamImpl::GetUserMediaRequestSucceeded";
  request_info->requestSucceeded(stream);
}

void MediaStreamImpl::GetUserMediaRequestFailed(
    blink::WebUserMediaRequest* request_info,
    content::MediaStreamRequestResult result) {
  switch (result) {
    case MEDIA_DEVICE_OK:
      NOTREACHED();
      break;
    case MEDIA_DEVICE_PERMISSION_DENIED:
      request_info->requestDenied();
      break;
    case MEDIA_DEVICE_PERMISSION_DISMISSED:
      request_info->requestFailedUASpecific("PermissionDismissedError");
      break;
    case MEDIA_DEVICE_INVALID_STATE:
      request_info->requestFailedUASpecific("InvalidStateError");
      break;
    case MEDIA_DEVICE_NO_HARDWARE:
      request_info->requestFailedUASpecific("DevicesNotFoundError");
      break;
    case MEDIA_DEVICE_INVALID_SECURITY_ORIGIN:
      request_info->requestFailedUASpecific("InvalidSecurityOriginError");
      break;
    case MEDIA_DEVICE_TAB_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("TabCaptureError");
      break;
    case MEDIA_DEVICE_SCREEN_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("ScreenCaptureError");
      break;
    case MEDIA_DEVICE_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("DeviceCaptureError");
      break;
    case MEDIA_DEVICE_TRACK_START_FAILURE:
      request_info->requestFailedUASpecific("TrackStartError");
      break;
    default:
      request_info->requestFailed();
      break;
  }
}

void MediaStreamImpl::EnumerateDevicesSucceded(
    blink::WebMediaDevicesRequest* request,
    blink::WebVector<blink::WebMediaDeviceInfo>& devices) {
  request->requestSucceeded(devices);
}

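// Returns the cached local source whose device id, device type and session id
// all match |device|, or NULL if no such source exists.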
const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
    const StreamDeviceInfo& device) const {
  for (LocalStreamSources::const_iterator it = local_sources_.begin();
       it != local_sources_.end(); ++it) {
    MediaStreamSource* source =
        static_cast<MediaStreamSource*>(it->source.extraData());
    const StreamDeviceInfo& active_device = source->device_info();
    if (active_device.device.id == device.device.id &&
        active_device.device.type == device.device.type &&
        active_device.session_id == device.session_id) {
      return &it->source;
    }
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request_id == request_id)
      return (*it);
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(
    const blink::WebUserMediaRequest& request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request == request)
      return (*it);
  }
  return NULL;
}

void MediaStreamImpl::DeleteUserMediaRequestInfo(
    UserMediaRequestInfo* request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it) == request) {
      user_media_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}

MediaStreamImpl::MediaDevicesRequestInfo*
MediaStreamImpl::FindMediaDevicesRequestInfo(
    int request_id) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it)->audio_input_request_id == request_id ||
        (*it)->video_input_request_id == request_id ||
        (*it)->audio_output_request_id == request_id) {
      return (*it);
    }
  }
  return NULL;
}

MediaStreamImpl::MediaDevicesRequestInfo*
MediaStreamImpl::FindMediaDevicesRequestInfo(
    const blink::WebMediaDevicesRequest& request) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it)->request == request)
      return (*it);
  }
  return NULL;
}

void MediaStreamImpl::DeleteMediaDevicesRequestInfo(
    MediaDevicesRequestInfo* request) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it) == request) {
      media_devices_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}

void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) {
  // Do the same thing as FrameWillClose.
  FrameWillClose(frame);
}

void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
  // Loop through all UserMediaRequests and find the requests that belong to
  // the frame that is being closed.
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    if ((*request_it)->frame == frame) {
      DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
               << "Cancel user media request " << (*request_it)->request_id;
      // If the request has not been generated yet, it has been sent to the
      // MediaStreamDispatcher but not yet answered, so we need to cancel it.
      if (!(*request_it)->generated) {
        media_stream_dispatcher_->CancelGenerateStream(
            (*request_it)->request_id, AsWeakPtr());
      }
      request_it = user_media_requests_.erase(request_it);
    } else {
      ++request_it;
    }
  }

  // Loop through all current local sources and stop the sources that were
  // created by the frame that will be closed.
  LocalStreamSources::iterator sources_it = local_sources_.begin();
  while (sources_it != local_sources_.end()) {
    if (sources_it->frame == frame) {
      StopLocalSource(sources_it->source, true);
      sources_it = local_sources_.erase(sources_it);
    } else {
      ++sources_it;
    }
  }
}

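// Invoked by a MediaStreamSource (via the stop callback bound in
// InitializeSourceObject) when a local source stops itself. Removes the
// source from |local_sources_| and tells MediaStreamDispatcher to stop the
// device.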
void MediaStreamImpl::OnLocalSourceStopped(
    const blink::WebMediaStreamSource& source) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnLocalSourceStopped";

  bool device_found = false;
  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      device_found = true;
      local_sources_.erase(device_it);
      break;
    }
  }
  CHECK(device_found);

  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*>(source.extraData());
  media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
}

void MediaStreamImpl::StopLocalSource(
    const blink::WebMediaStreamSource& source,
    bool notify_dispatcher) {
  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*>(source.extraData());
  DVLOG(1) << "MediaStreamImpl::StopLocalSource("
           << "{device_id = " << source_impl->device_info().device.id << "})";

  if (notify_dispatcher)
    media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());

  source_impl->ResetSourceStoppedCallback();
  source_impl->StopSource();
}

MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    int request_id,
    blink::WebFrame* frame,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      generated(false),
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      frame(frame),
      request(request),
      request_failed_(false) {
}

MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
  DVLOG(1) << "~UserMediaRequestInfo";
}

void MediaStreamImpl::UserMediaRequestInfo::StartAudioTrack(
    const blink::WebMediaStreamTrack& track,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* native_source =
      static_cast<MediaStreamAudioSource*>(track.source().extraData());
  DCHECK(native_source);

  sources_.push_back(track.source());
  sources_waiting_for_callback_.push_back(native_source);
  native_source->AddTrack(
      track, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()));
}

blink::WebMediaStreamTrack
MediaStreamImpl::UserMediaRequestInfo::CreateAndStartVideoTrack(
    const blink::WebMediaStreamSource& source,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo);
  MediaStreamVideoSource* native_source =
      MediaStreamVideoSource::GetVideoSource(source);
  DCHECK(native_source);
  sources_.push_back(source);
  sources_waiting_for_callback_.push_back(native_source);
  return MediaStreamVideoTrack::CreateVideoTrack(
      native_source, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()),
      true);
}

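// Track-start bookkeeping: each source added by StartAudioTrack() or
// CreateAndStartVideoTrack() reports back through OnTrackStarted(); once
// |sources_waiting_for_callback_| is empty, CheckAllTracksStarted() runs the
// ready callback with MEDIA_DEVICE_OK or MEDIA_DEVICE_TRACK_START_FAILURE.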
void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
    const ResourcesReady& callback) {
  DCHECK(ready_callback_.is_null());
  ready_callback_ = callback;
  CheckAllTracksStarted();
}

void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted(
    MediaStreamSource* source, bool success) {
  DVLOG(1) << "OnTrackStarted result " << success;
  std::vector<MediaStreamSource*>::iterator it =
      std::find(sources_waiting_for_callback_.begin(),
                sources_waiting_for_callback_.end(),
                source);
  DCHECK(it != sources_waiting_for_callback_.end());
  sources_waiting_for_callback_.erase(it);
  // All tracks must be started successfully. Otherwise the request is a
  // failure.
  if (!success)
    request_failed_ = true;
  CheckAllTracksStarted();
}

void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() {
  if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) {
    ready_callback_.Run(
        this,
        request_failed_ ? MEDIA_DEVICE_TRACK_START_FAILURE : MEDIA_DEVICE_OK);
  }
}

bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed(
    const blink::WebMediaStreamSource& source) const {
  for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
           sources_.begin();
       source_it != sources_.end(); ++source_it) {
    if (source_it->id() == source.id())
      return true;
  }
  return false;
}

void MediaStreamImpl::UserMediaRequestInfo::RemoveSource(
    const blink::WebMediaStreamSource& source) {
  for (std::vector<blink::WebMediaStreamSource>::iterator it =
           sources_.begin();
       it != sources_.end(); ++it) {
    if (source.id() == it->id()) {
      sources_.erase(it);
      return;
    }
  }
}

}  // namespace content