/*
 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "examples/unityplugin/simple_peer_connection.h"

#include <utility>

#include "absl/memory/memory.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
#include "media/engine/internal_decoder_factory.h"
#include "media/engine/internal_encoder_factory.h"
#include "media/engine/multiplex_codec_factory.h"
#include "modules/audio_device/include/audio_device.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/video_capture/video_capture_factory.h"
#include "pc/video_track_source.h"
#include "test/vcm_capturer.h"

#if defined(WEBRTC_ANDROID)
#include "examples/unityplugin/class_reference_holder.h"
#include "modules/utility/include/helpers_android.h"
#include "sdk/android/src/jni/android_video_track_source.h"
#include "sdk/android/src/jni/jni_helpers.h"
#endif

// Names used for media stream ids.
const char kAudioLabel[] = "audio_label";
const char kVideoLabel[] = "video_label";
const char kStreamId[] = "stream_id";

namespace {
static int g_peer_count = 0;
static std::unique_ptr<rtc::Thread> g_worker_thread;
static std::unique_ptr<rtc::Thread> g_signaling_thread;
static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
    g_peer_connection_factory;
#if defined(WEBRTC_ANDROID)
// Android case: the video track does not own the capturer, and it
// relies on the app to dispose the capturer when the peerconnection
// shuts down.
static jobject g_camera = nullptr;
#else
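// Non-Android case: wraps a test::VcmCapturer in a VideoTrackSource so the
// capturer's lifetime is owned by the track source itself.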
class CapturerTrackSource : public webrtc::VideoTrackSource {
 public:
  static rtc::scoped_refptr<CapturerTrackSource> Create() {
    const size_t kWidth = 640;
    const size_t kHeight = 480;
    const size_t kFps = 30;
    const size_t kDeviceIndex = 0;
    std::unique_ptr<webrtc::test::VcmCapturer> capturer = absl::WrapUnique(
        webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, kDeviceIndex));
    if (!capturer) {
      return nullptr;
    }
    return new rtc::RefCountedObject<CapturerTrackSource>(std::move(capturer));
  }

 protected:
  explicit CapturerTrackSource(
      std::unique_ptr<webrtc::test::VcmCapturer> capturer)
      : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {}

 private:
  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
    return capturer_.get();
  }
  std::unique_ptr<webrtc::test::VcmCapturer> capturer_;
};

#endif

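// Returns the value of |env_var_name|, or |default_value| when the variable
// is unset or empty.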
std::string GetEnvVarOrDefault(const char* env_var_name,
                               const char* default_value) {
  std::string value;
  const char* env_var = getenv(env_var_name);
  if (env_var)
    value = env_var;

  if (value.empty())
    value = default_value;

  return value;
}

std::string GetPeerConnectionString() {
  return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302");
}

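// SetSessionDescription results are only logged; the plugin does not need to
// react to them beyond that.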
class DummySetSessionDescriptionObserver
    : public webrtc::SetSessionDescriptionObserver {
 public:
  static DummySetSessionDescriptionObserver* Create() {
    return new rtc::RefCountedObject<DummySetSessionDescriptionObserver>();
  }
  virtual void OnSuccess() { RTC_LOG(INFO) << __FUNCTION__; }
  virtual void OnFailure(webrtc::RTCError error) {
    RTC_LOG(INFO) << __FUNCTION__ << " " << ToString(error.type()) << ": "
                  << error.message();
  }

 protected:
  DummySetSessionDescriptionObserver() {}
  ~DummySetSessionDescriptionObserver() {}
};

}  // namespace

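// The PeerConnectionFactory, its worker thread and its signaling thread are
// shared by all SimplePeerConnection instances. They are created lazily on
// first use and released in DeletePeerConnection() once g_peer_count drops
// back to zero.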
bool SimplePeerConnection::InitializePeerConnection(const char** turn_urls,
                                                    const int no_of_urls,
                                                    const char* username,
                                                    const char* credential,
                                                    bool is_receiver) {
  RTC_DCHECK(peer_connection_.get() == nullptr);

  if (g_peer_connection_factory == nullptr) {
    g_worker_thread = rtc::Thread::Create();
    g_worker_thread->Start();
    g_signaling_thread = rtc::Thread::Create();
    g_signaling_thread->Start();

    g_peer_connection_factory = webrtc::CreatePeerConnectionFactory(
        g_worker_thread.get(), g_worker_thread.get(), g_signaling_thread.get(),
        nullptr, webrtc::CreateBuiltinAudioEncoderFactory(),
        webrtc::CreateBuiltinAudioDecoderFactory(),
        std::unique_ptr<webrtc::VideoEncoderFactory>(
            new webrtc::MultiplexEncoderFactory(
                std::make_unique<webrtc::InternalEncoderFactory>())),
        std::unique_ptr<webrtc::VideoDecoderFactory>(
            new webrtc::MultiplexDecoderFactory(
                std::make_unique<webrtc::InternalDecoderFactory>())),
        nullptr, nullptr);
  }
  if (!g_peer_connection_factory.get()) {
    DeletePeerConnection();
    return false;
  }

  g_peer_count++;
  if (!CreatePeerConnection(turn_urls, no_of_urls, username, credential)) {
    DeletePeerConnection();
    return false;
  }

  mandatory_receive_ = is_receiver;
  return peer_connection_.get() != nullptr;
}

bool SimplePeerConnection::CreatePeerConnection(const char** turn_urls,
                                                const int no_of_urls,
                                                const char* username,
                                                const char* credential) {
  RTC_DCHECK(g_peer_connection_factory.get() != nullptr);
  RTC_DCHECK(peer_connection_.get() == nullptr);

  local_video_observer_.reset(new VideoObserver());
  remote_video_observer_.reset(new VideoObserver());

  // Add the TURN server.
  if (turn_urls != nullptr) {
    if (no_of_urls > 0) {
      webrtc::PeerConnectionInterface::IceServer turn_server;
      for (int i = 0; i < no_of_urls; i++) {
        std::string url(turn_urls[i]);
        if (url.length() > 0)
          turn_server.urls.push_back(turn_urls[i]);
      }

      std::string user_name(username);
      if (user_name.length() > 0)
        turn_server.username = username;

      std::string password(credential);
      if (password.length() > 0)
        turn_server.password = credential;

      config_.servers.push_back(turn_server);
    }
  }

  // Add the STUN server.
  webrtc::PeerConnectionInterface::IceServer stun_server;
  stun_server.uri = GetPeerConnectionString();
  config_.servers.push_back(stun_server);
  config_.enable_rtp_data_channel = true;
  config_.enable_dtls_srtp = false;

  peer_connection_ = g_peer_connection_factory->CreatePeerConnection(
      config_, nullptr, nullptr, this);

  return peer_connection_.get() != nullptr;
}

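// Tears down this connection and, once the last SimplePeerConnection is gone,
// releases the shared factory and threads. On Android the camera capturer
// created in AddStreams() is stopped through the UnityUtility Java helper.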
void SimplePeerConnection::DeletePeerConnection() {
  g_peer_count--;

#if defined(WEBRTC_ANDROID)
  if (g_camera) {
    JNIEnv* env = webrtc::jni::GetEnv();
    jclass pc_factory_class =
        unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
    jmethodID stop_camera_method = webrtc::GetStaticMethodID(
        env, pc_factory_class, "StopCamera", "(Lorg/webrtc/VideoCapturer;)V");

    env->CallStaticVoidMethod(pc_factory_class, stop_camera_method, g_camera);
    CHECK_EXCEPTION(env);

    g_camera = nullptr;
  }
#endif

  CloseDataChannel();
  peer_connection_ = nullptr;
  active_streams_.clear();

  if (g_peer_count == 0) {
    g_peer_connection_factory = nullptr;
    g_signaling_thread.reset();
    g_worker_thread.reset();
  }
}

bool SimplePeerConnection::CreateOffer() {
  if (!peer_connection_.get())
    return false;

  webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
  if (mandatory_receive_) {
    options.offer_to_receive_audio = true;
    options.offer_to_receive_video = true;
  }
  peer_connection_->CreateOffer(this, options);
  return true;
}

bool SimplePeerConnection::CreateAnswer() {
  if (!peer_connection_.get())
    return false;

  webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
  if (mandatory_receive_) {
    options.offer_to_receive_audio = true;
    options.offer_to_receive_video = true;
  }
  peer_connection_->CreateAnswer(this, options);
  return true;
}

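// CreateSessionDescriptionObserver callback: apply the freshly created local
// description and hand the SDP string back to the registered host callback.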
void SimplePeerConnection::OnSuccess(
    webrtc::SessionDescriptionInterface* desc) {
  peer_connection_->SetLocalDescription(
      DummySetSessionDescriptionObserver::Create(), desc);

  std::string sdp;
  desc->ToString(&sdp);

  if (OnLocalSdpReady)
    OnLocalSdpReady(desc->type().c_str(), sdp.c_str());
}

void SimplePeerConnection::OnFailure(webrtc::RTCError error) {
  RTC_LOG(LERROR) << ToString(error.type()) << ": " << error.message();

  // TODO(hta): include error.type in the message
  if (OnFailureMessage)
    OnFailureMessage(error.message());
}

void SimplePeerConnection::OnIceCandidate(
    const webrtc::IceCandidateInterface* candidate) {
  RTC_LOG(INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index();

  std::string sdp;
  if (!candidate->ToString(&sdp)) {
    RTC_LOG(LS_ERROR) << "Failed to serialize candidate";
    return;
  }

  if (OnIceCandiateReady)
    OnIceCandiateReady(sdp.c_str(), candidate->sdp_mline_index(),
                       candidate->sdp_mid().c_str());
}

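// Callback registration: stores the function pointers provided by the host;
// they are invoked for decoded video frames, data-channel events, recorded
// audio, the local SDP and local ICE candidates.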
void SimplePeerConnection::RegisterOnLocalI420FrameReady(
    I420FRAMEREADY_CALLBACK callback) {
  if (local_video_observer_)
    local_video_observer_->SetVideoCallback(callback);
}

void SimplePeerConnection::RegisterOnRemoteI420FrameReady(
    I420FRAMEREADY_CALLBACK callback) {
  if (remote_video_observer_)
    remote_video_observer_->SetVideoCallback(callback);
}

void SimplePeerConnection::RegisterOnLocalDataChannelReady(
    LOCALDATACHANNELREADY_CALLBACK callback) {
  OnLocalDataChannelReady = callback;
}

void SimplePeerConnection::RegisterOnDataFromDataChannelReady(
    DATAFROMEDATECHANNELREADY_CALLBACK callback) {
  OnDataFromDataChannelReady = callback;
}

void SimplePeerConnection::RegisterOnFailure(FAILURE_CALLBACK callback) {
  OnFailureMessage = callback;
}

void SimplePeerConnection::RegisterOnAudioBusReady(
    AUDIOBUSREADY_CALLBACK callback) {
  OnAudioReady = callback;
}

void SimplePeerConnection::RegisterOnLocalSdpReadytoSend(
    LOCALSDPREADYTOSEND_CALLBACK callback) {
  OnLocalSdpReady = callback;
}

void SimplePeerConnection::RegisterOnIceCandiateReadytoSend(
    ICECANDIDATEREADYTOSEND_CALLBACK callback) {
  OnIceCandiateReady = callback;
}

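// Signaling input: the host feeds remote SDP and remote ICE candidates,
// received over its own signaling channel, into the two methods below.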
bool SimplePeerConnection::SetRemoteDescription(const char* type,
                                                const char* sdp) {
  if (!peer_connection_)
    return false;

  std::string remote_desc(sdp);
  std::string sdp_type(type);
  webrtc::SdpParseError error;
  webrtc::SessionDescriptionInterface* session_description(
      webrtc::CreateSessionDescription(sdp_type, remote_desc, &error));
  if (!session_description) {
    RTC_LOG(WARNING) << "Can't parse received session description message. "
                        "SdpParseError was: "
                     << error.description;
    return false;
  }
  RTC_LOG(INFO) << "Received session description: " << remote_desc;
  peer_connection_->SetRemoteDescription(
      DummySetSessionDescriptionObserver::Create(), session_description);

  return true;
}

bool SimplePeerConnection::AddIceCandidate(const char* candidate,
                                           const int sdp_mlineindex,
                                           const char* sdp_mid) {
  if (!peer_connection_)
    return false;

  webrtc::SdpParseError error;
  std::unique_ptr<webrtc::IceCandidateInterface> ice_candidate(
      webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, candidate, &error));
  if (!ice_candidate.get()) {
    RTC_LOG(WARNING) << "Can't parse received candidate message. "
                        "SdpParseError was: "
                     << error.description;
    return false;
  }
  if (!peer_connection_->AddIceCandidate(ice_candidate.get())) {
    RTC_LOG(WARNING) << "Failed to apply the received candidate";
    return false;
  }
  RTC_LOG(INFO) << "Received candidate: " << candidate;
  return true;
}

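// Caches the requested mute/record flags and applies them to the current
// remote stream, if one has already been received.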
void SimplePeerConnection::SetAudioControl(bool is_mute, bool is_record) {
  is_mute_audio_ = is_mute;
  is_record_audio_ = is_record;

  SetAudioControl();
}

void SimplePeerConnection::SetAudioControl() {
  if (!remote_stream_)
    return;
  webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks();
  if (tracks.empty())
    return;

  webrtc::AudioTrackInterface* audio_track = tracks[0];
  std::string id = audio_track->id();
  if (is_record_audio_)
    audio_track->AddSink(this);
  else
    audio_track->RemoveSink(this);

  for (auto& track : tracks) {
    if (is_mute_audio_)
      track->set_enabled(false);
    else
      track->set_enabled(true);
  }
}

void SimplePeerConnection::OnAddStream(
    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
  RTC_LOG(INFO) << __FUNCTION__ << " " << stream->id();
  remote_stream_ = stream;
  if (remote_video_observer_ && !remote_stream_->GetVideoTracks().empty()) {
    remote_stream_->GetVideoTracks()[0]->AddOrUpdateSink(
        remote_video_observer_.get(), rtc::VideoSinkWants());
  }
  SetAudioControl();
}

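// Creates the local media stream: always an audio track, plus a video track
// unless |audio_only| is set. On Android the camera capturer is obtained via
// the org/webrtc/UnityUtility Java helpers; elsewhere it comes from the
// CapturerTrackSource defined above.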
void SimplePeerConnection::AddStreams(bool audio_only) {
  if (active_streams_.find(kStreamId) != active_streams_.end())
    return;  // Already added.

  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
      g_peer_connection_factory->CreateLocalMediaStream(kStreamId);

  rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
      g_peer_connection_factory->CreateAudioTrack(
          kAudioLabel, g_peer_connection_factory->CreateAudioSource(
                           cricket::AudioOptions())));
  std::string id = audio_track->id();
  stream->AddTrack(audio_track);

  if (!audio_only) {
#if defined(WEBRTC_ANDROID)
    JNIEnv* env = webrtc::jni::GetEnv();
    jclass pc_factory_class =
        unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
    jmethodID load_texture_helper_method = webrtc::GetStaticMethodID(
        env, pc_factory_class, "LoadSurfaceTextureHelper",
        "()Lorg/webrtc/SurfaceTextureHelper;");
    jobject texture_helper = env->CallStaticObjectMethod(
        pc_factory_class, load_texture_helper_method);
    CHECK_EXCEPTION(env);
    RTC_DCHECK(texture_helper != nullptr)
        << "Cannot get the Surface Texture Helper.";

    rtc::scoped_refptr<webrtc::jni::AndroidVideoTrackSource> source(
        new rtc::RefCountedObject<webrtc::jni::AndroidVideoTrackSource>(
            g_signaling_thread.get(), env, /* is_screencast= */ false,
            /* align_timestamps= */ true));

    // Link with the VideoCapturer (camera).
    jmethodID link_camera_method = webrtc::GetStaticMethodID(
        env, pc_factory_class, "LinkCamera",
        "(JLorg/webrtc/SurfaceTextureHelper;)Lorg/webrtc/VideoCapturer;");
    jobject camera_tmp =
        env->CallStaticObjectMethod(pc_factory_class, link_camera_method,
                                    (jlong)source.get(), texture_helper);
    CHECK_EXCEPTION(env);
    g_camera = (jobject)env->NewGlobalRef(camera_tmp);

    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        g_peer_connection_factory->CreateVideoTrack(kVideoLabel,
                                                    source.release()));
    stream->AddTrack(video_track);
#else
    rtc::scoped_refptr<CapturerTrackSource> video_device =
        CapturerTrackSource::Create();
    if (video_device) {
      rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
          g_peer_connection_factory->CreateVideoTrack(kVideoLabel,
                                                      video_device));

      stream->AddTrack(video_track);
    }
#endif
    if (local_video_observer_ && !stream->GetVideoTracks().empty()) {
      stream->GetVideoTracks()[0]->AddOrUpdateSink(local_video_observer_.get(),
                                                   rtc::VideoSinkWants());
    }
  }

  if (!peer_connection_->AddStream(stream)) {
    RTC_LOG(LS_ERROR) << "Adding stream to PeerConnection failed";
  }

  typedef std::pair<std::string,
                    rtc::scoped_refptr<webrtc::MediaStreamInterface>>
      MediaStreamPair;
  active_streams_.insert(MediaStreamPair(stream->id(), stream));
}

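// Creates an ordered, reliable data channel labeled "Hello" and registers
// this object as its observer.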
bool SimplePeerConnection::CreateDataChannel() {
  struct webrtc::DataChannelInit init;
  init.ordered = true;
  init.reliable = true;
  data_channel_ = peer_connection_->CreateDataChannel("Hello", &init);
  if (data_channel_.get()) {
    data_channel_->RegisterObserver(this);
    RTC_LOG(LS_INFO) << "Succeeded to create data channel";
    return true;
  } else {
    RTC_LOG(LS_INFO) << "Failed to create data channel";
    return false;
  }
}

void SimplePeerConnection::CloseDataChannel() {
  if (data_channel_.get()) {
    data_channel_->UnregisterObserver();
    data_channel_->Close();
  }
  data_channel_ = nullptr;
}

bool SimplePeerConnection::SendDataViaDataChannel(const std::string& data) {
  if (!data_channel_.get()) {
    RTC_LOG(LS_INFO) << "Data channel is not established";
    return false;
  }
  webrtc::DataBuffer buffer(data);
  data_channel_->Send(buffer);
  return true;
}

// PeerConnectionObserver callback.
void SimplePeerConnection::OnDataChannel(
    rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
  channel->RegisterObserver(this);
}

void SimplePeerConnection::OnStateChange() {
  if (data_channel_) {
    webrtc::DataChannelInterface::DataState state = data_channel_->state();
    if (state == webrtc::DataChannelInterface::kOpen) {
      if (OnLocalDataChannelReady)
        OnLocalDataChannelReady();
      RTC_LOG(LS_INFO) << "Data channel is open";
    }
  }
}

// A data buffer was successfully received.
void SimplePeerConnection::OnMessage(const webrtc::DataBuffer& buffer) {
  size_t size = buffer.data.size();
  char* msg = new char[size + 1];
  memcpy(msg, buffer.data.data(), size);
  msg[size] = 0;
  if (OnDataFromDataChannelReady)
    OnDataFromDataChannelReady(msg);
  delete[] msg;
}

// AudioTrackSinkInterface implementation.
void SimplePeerConnection::OnData(const void* audio_data,
                                  int bits_per_sample,
                                  int sample_rate,
                                  size_t number_of_channels,
                                  size_t number_of_frames) {
  if (OnAudioReady)
    OnAudioReady(audio_data, bits_per_sample, sample_rate,
                 static_cast<int>(number_of_channels),
                 static_cast<int>(number_of_frames));
}

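// Returns the SSRCs reported by the remote audio RTP receivers.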
std::vector<uint32_t> SimplePeerConnection::GetRemoteAudioTrackSsrcs() {
  std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> receivers =
      peer_connection_->GetReceivers();

  std::vector<uint32_t> ssrcs;
  for (const auto& receiver : receivers) {
    if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO)
      continue;

    std::vector<webrtc::RtpEncodingParameters> params =
        receiver->GetParameters().encodings;

    for (const auto& param : params) {
      uint32_t ssrc = param.ssrc.value_or(0);
      if (ssrc > 0)
        ssrcs.push_back(ssrc);
    }
  }

  return ssrcs;
}