// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"

#include <vector>

#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "content/common/media/media_stream_messages.h"
#include "content/public/common/content_switches.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_processor.h"
#include "content/renderer/media/media_stream_audio_processor_options.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_identity_service.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_decoder_factory.h"
#include "content/renderer/media/rtc_video_encoder_factory.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/ipc_socket_factory.h"
#include "content/renderer/p2p/port_allocator.h"
#include "content/renderer/render_thread_impl.h"
#include "jingle/glue/thread_wrapper.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"

#if defined(USE_OPENSSL)
#include "third_party/webrtc/base/ssladapter.h"
#else
#include "net/socket/nss_ssl_util.h"
#endif

#if defined(OS_ANDROID)
#include "media/base/android/media_codec_bridge.h"
#endif

namespace content {

// Map of corresponding media constraints and platform effects.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};

// If any platform effects are available, check them against the constraints.
// Disable effects to match false constraints, but if a constraint is true, set
// the constraint to false to later disable the software effect.
//
// This function may modify both |constraints| and |effects|.
void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
                                    int* effects) {
  if (*effects != media::AudioParameters::NO_EFFECTS) {
    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
      bool value;
      size_t is_mandatory = 0;
      if (!webrtc::FindConstraint(constraints,
                                  kConstraintEffectMap[i].constraint,
                                  &value,
                                  &is_mandatory) || !value) {
        // If the constraint is false, or does not exist, disable the platform
        // effect.
        *effects &= ~kConstraintEffectMap[i].effect;
        DVLOG(1) << "Disabling platform effect: "
                 << kConstraintEffectMap[i].effect;
      } else if (*effects & kConstraintEffectMap[i].effect) {
        // If the constraint is true, leave the platform effect enabled, and
        // set the constraint to false to later disable the software effect.
        if (is_mandatory) {
          constraints->AddMandatory(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        } else {
          constraints->AddOptional(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        }
        DVLOG(1) << "Disabling constraint: "
                 << kConstraintEffectMap[i].constraint;
      } else if (kConstraintEffectMap[i].effect ==
                 media::AudioParameters::DUCKING && value && !is_mandatory) {
        // Special handling of the DUCKING flag that sets the optional
        // constraint to |false| to match what the device will support.
        constraints->AddOptional(kConstraintEffectMap[i].constraint,
            webrtc::MediaConstraintsInterface::kValueFalse, true);
        // No need to modify |effects| since the ducking flag is already off.
        DCHECK((*effects & media::AudioParameters::DUCKING) == 0);
      }
    }
  }
}

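// Implements webrtc::PortAllocatorFactoryInterface. Builds a P2PPortAllocator
// for each PeerConnection, translating the supplied STUN/TURN configurations
// into a P2PPortAllocator::Config backed by Chrome's IPC socket dispatcher
// and packet socket factory. TURN servers are also reused as STUN servers.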
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
 public:
  P2PPortAllocatorFactory(
      P2PSocketDispatcher* socket_dispatcher,
      rtc::NetworkManager* network_manager,
      rtc::PacketSocketFactory* socket_factory,
      blink::WebFrame* web_frame)
      : socket_dispatcher_(socket_dispatcher),
        network_manager_(network_manager),
        socket_factory_(socket_factory),
        web_frame_(web_frame) {
  }

  virtual cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_servers,
      const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
    CHECK(web_frame_);
    P2PPortAllocator::Config config;
    for (size_t i = 0; i < stun_servers.size(); ++i) {
      config.stun_servers.insert(rtc::SocketAddress(
          stun_servers[i].server.hostname(),
          stun_servers[i].server.port()));
    }
    config.legacy_relay = false;
    for (size_t i = 0; i < turn_configurations.size(); ++i) {
      P2PPortAllocator::Config::RelayServerConfig relay_config;
      relay_config.server_address = turn_configurations[i].server.hostname();
      relay_config.port = turn_configurations[i].server.port();
      relay_config.username = turn_configurations[i].username;
      relay_config.password = turn_configurations[i].password;
      relay_config.transport_type = turn_configurations[i].transport_type;
      relay_config.secure = turn_configurations[i].secure;
      config.relays.push_back(relay_config);

      // Use turn servers as stun servers.
      config.stun_servers.insert(rtc::SocketAddress(
          turn_configurations[i].server.hostname(),
          turn_configurations[i].server.port()));
    }

    return new P2PPortAllocator(
        web_frame_, socket_dispatcher_.get(), network_manager_,
        socket_factory_, config);
  }

 protected:
  virtual ~P2PPortAllocatorFactory() {}

 private:
  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
  // |network_manager_| and |socket_factory_| are weak references, owned by
  // PeerConnectionDependencyFactory.
  rtc::NetworkManager* network_manager_;
  rtc::PacketSocketFactory* socket_factory_;
  // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
  blink::WebFrame* web_frame_;
};

PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}

PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  CleanupPeerConnectionFactory();
  if (aec_dump_message_filter_.get())
    aec_dump_message_filter_->RemoveDelegate(this);
}

blink::WebRTCPeerConnectionHandler*
PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
    blink::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webKitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  return new RTCPeerConnectionHandler(client, this);
}

bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSource()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

  StreamDeviceInfo device_info = source_data->device_info();
  RTCMediaConstraints constraints = native_audio_constraints;
  // May modify both |constraints| and |effects|.
  HarmonizeConstraintsAndEffects(&constraints,
                                 &device_info.device.input.effects);

  scoped_refptr<WebRtcAudioCapturer> capturer(
      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
                          source_data));
  if (!capturer.get()) {
    const std::string log_string =
        "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
    WebRtcLogMessage(log_string);
    DVLOG(1) << log_string;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(capturer.get());

  // Creates a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(&constraints).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source.get());
  return true;
}

WebRtcVideoCapturerAdapter*
PeerConnectionDependencyFactory::CreateVideoCapturer(
    bool is_screencast) {
  // We need to make sure the libjingle thread wrappers have been created
  // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
  // since the base class of WebRtcVideoCapturerAdapter is a
  // cricket::VideoCapturer and it uses the libjingle thread wrappers.
  if (!GetPcFactory().get())
    return NULL;
  return new WebRtcVideoCapturerAdapter(is_screencast);
}

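// Creates a libjingle video source that wraps |capturer| and applies the
// given constraints; local video tracks are built from such sources.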
scoped_refptr<webrtc::VideoSourceInterface>
PeerConnectionDependencyFactory::CreateVideoSource(
    cricket::VideoCapturer* capturer,
    const blink::WebMediaConstraints& constraints) {
  RTCMediaConstraints webrtc_constraints(constraints);
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
  return source;
}

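// Returns the shared PeerConnectionFactory, creating it on first use.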
const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
PeerConnectionDependencyFactory::GetPcFactory() {
  if (!pc_factory_.get())
    CreatePeerConnectionFactory();
  CHECK(pc_factory_.get());
  return pc_factory_;
}

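// One-time setup for the factory: wraps the current message loop as the
// libjingle signaling thread, starts |chrome_worker_thread_| as the worker
// thread, creates the IPC-backed network manager and socket factory,
// initializes SSL, optionally hooks up hardware video decoder/encoder
// factories, and finally creates the libjingle PeerConnectionFactory.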
void PeerConnectionDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "PeerConnectionDependencyFactory::CreatePeerConnectionFactory()";

  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
  CHECK(signaling_thread_);

  CHECK(chrome_worker_thread_.Start());

  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));
  start_worker_event.Wait();
  CHECK(worker_thread_);

  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));
  create_network_manager_event.Wait();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!rtc::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories =
      RenderThreadImpl::current()->GetGpuFactories();
  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
    if (gpu_factories.get())
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
  }

  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
    if (gpu_factories.get())
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  EnsureWebRtcAudioDeviceImpl();

  scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread_,
                                          signaling_thread_,
                                          audio_device_.get(),
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  CHECK(factory.get());

  pc_factory_ = factory;
  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  // TODO(xians): Remove the following code after kDisableAudioTrackProcessing
  // is removed.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) {
    aec_dump_message_filter_ = AecDumpMessageFilter::Get();
    // In unit tests that do not create a message filter,
    // |aec_dump_message_filter_| will be NULL. We can just ignore that.
    // Other unit tests and browser tests ensure that we do get the filter
    // when we should.
    if (aec_dump_message_filter_.get())
      aec_dump_message_filter_->AddDelegate(this);
  }
}

bool PeerConnectionDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}

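// Creates a libjingle PeerConnection for |web_frame|, wiring it to a port
// allocator factory backed by Chrome's IPC sockets and to an identity
// service scoped to the frame's origin.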
scoped_refptr<webrtc::PeerConnectionInterface>
PeerConnectionDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::RTCConfiguration& config,
    const webrtc::MediaConstraintsInterface* constraints,
    blink::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  CHECK(web_frame);
  CHECK(observer);
  if (!GetPcFactory().get())
    return NULL;

  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
      new rtc::RefCountedObject<P2PPortAllocatorFactory>(
          p2p_socket_dispatcher_.get(),
          network_manager_,
          socket_factory_.get(),
          web_frame);

  PeerConnectionIdentityService* identity_service =
      new PeerConnectionIdentityService(
          GURL(web_frame->document().url().spec()).GetOrigin());

  return GetPcFactory()->CreatePeerConnection(config,
                                              constraints,
                                              pa_factory.get(),
                                              identity_service,
                                              observer).get();
}

scoped_refptr<webrtc::MediaStreamInterface>
PeerConnectionDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return GetPcFactory()->CreateLocalMediaStream(label).get();
}

scoped_refptr<webrtc::AudioSourceInterface>
PeerConnectionDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  scoped_refptr<webrtc::AudioSourceInterface> source =
      GetPcFactory()->CreateAudioSource(constraints).get();
  return source;
}

void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack(
      adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get()));

  StartLocalAudioTrack(audio_track.get());

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}

void PeerConnectionDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Add the WebRtcAudioDevice as the sink to the local audio track.
  // TODO(xians): Remove the following line of code after the APM in WebRTC is
  // completely deprecated. See http://crbug/365672.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled())
    audio_track->AddSink(GetWebRtcAudioDevice());

  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}

scoped_refptr<WebAudioCapturerSource>
PeerConnectionDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource());
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return GetPcFactory()->CreateVideoTrack(id, source).get();
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id, cricket::VideoCapturer* capturer) {
  if (!capturer) {
    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
    return NULL;
  }

  // Create video source from the |capturer|.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, NULL).get();

  // Create native track from the source.
  return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
}

webrtc::SessionDescriptionInterface*
PeerConnectionDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}

webrtc::IceCandidateInterface*
PeerConnectionDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}

WebRtcAudioDeviceImpl*
PeerConnectionDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}

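// Posted to |chrome_worker_thread_|: wraps that thread's message loop with a
// JingleThreadWrapper so libjingle can use it as its worker thread, then
// signals |event| so the calling thread can continue.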
void PeerConnectionDependencyFactory::InitializeWorkerThread(
    rtc::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}

void PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}

void PeerConnectionDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}

void PeerConnectionDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created on, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &PeerConnectionDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish before
      // letting the function continue to avoid any potential race issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}

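// Creates a WebRtcAudioCapturer for the given render view and capture device,
// using the shared WebRtcAudioDeviceImpl.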
scoped_refptr<WebRtcAudioCapturer>
PeerConnectionDependencyFactory::CreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints,
    MediaStreamAudioSource* audio_source) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  EnsureWebRtcAudioDeviceImpl();
  DCHECK(GetWebRtcAudioDevice());
  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                             constraints,
                                             GetWebRtcAudioDevice(),
                                             audio_source);
}

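// Wraps |native_track| in a content::MediaStreamTrack and attaches it to the
// blink track as its extra data, so blink can reach the native track later.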
void PeerConnectionDependencyFactory::AddNativeAudioTrackToBlinkTrack(
    webrtc::MediaStreamTrackInterface* native_track,
    const blink::WebMediaStreamTrack& webkit_track,
    bool is_local_track) {
  DCHECK(!webkit_track.isNull() && !webkit_track.extraData());
  DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio,
            webkit_track.source().type());
  blink::WebMediaStreamTrack track = webkit_track;

  DVLOG(1) << "AddNativeAudioTrackToBlinkTrack() audio";
  track.setExtraData(
      new MediaStreamTrack(
          static_cast<webrtc::AudioTrackInterface*>(native_track),
          is_local_track));
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}

void PeerConnectionDependencyFactory::OnAecDumpFile(
    const IPC::PlatformFileForTransit& file_handle) {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  DCHECK(PeerConnectionFactoryCreated());

  base::File file = IPC::PlatformFileForTransitToFile(file_handle);
  DCHECK(file.IsValid());

  // |pc_factory_| always takes ownership of |file|. If StartAecDump()
  // fails, |file| will be closed.
  if (!GetPcFactory()->StartAecDump(file.TakePlatformFile()))
    VLOG(1) << "Could not start AEC dump.";
}

void PeerConnectionDependencyFactory::OnDisableAecDump() {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  // Do nothing. We never disable AEC dump for the non-track-processing case.
}

void PeerConnectionDependencyFactory::OnIpcClosing() {
  DCHECK(CalledOnValidThread());
  aec_dump_message_filter_ = NULL;
}

void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
  if (audio_device_.get())
    return;

  audio_device_ = new WebRtcAudioDeviceImpl();
}

}  // namespace content