/*
 *  Copyright 2014 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.appspot.apprtc;

import android.content.Context;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.support.annotation.Nullable;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.RecordedAudioToFileController;
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.CandidatePairChangeEvent;
import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.MediaStreamTrack;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnection.IceConnectionState;
import org.webrtc.PeerConnection.PeerConnectionState;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RtpParameters;
import org.webrtc.RtpReceiver;
import org.webrtc.RtpSender;
import org.webrtc.RtpTransceiver;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.SoftwareVideoDecoderFactory;
import org.webrtc.SoftwareVideoEncoderFactory;
import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoSink;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;

/**
 * Peer connection client implementation.
 *
 * <p>All public methods are routed to a single executor thread.
 * All PeerConnectionEvents callbacks are invoked from that same thread.
 * The executor is shared by all instances of this class.
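 *
 * <p>Illustrative usage sketch (caller-supplied names such as {@code eglBase}, {@code events},
 * {@code localRender}, {@code remoteSink}, {@code videoCapturer} and {@code signalingParams}
 * are placeholders, not part of this class):
 * <pre>{@code
 * PeerConnectionClient client =
 *     new PeerConnectionClient(appContext, eglBase, peerConnectionParameters, events);
 * client.createPeerConnectionFactory(new PeerConnectionFactory.Options());
 * client.createPeerConnection(localRender, remoteSink, videoCapturer, signalingParams);
 * client.createOffer(); // or createAnswer() on the answering side.
 * // ... exchange SDP and ICE candidates via the signaling channel ...
 * client.close();
 * }</pre>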
 */
public class PeerConnectionClient {
  public static final String VIDEO_TRACK_ID = "ARDAMSv0";
  public static final String AUDIO_TRACK_ID = "ARDAMSa0";
  public static final String VIDEO_TRACK_TYPE = "video";
  private static final String TAG = "PCRTCClient";
  private static final String VIDEO_CODEC_VP8 = "VP8";
  private static final String VIDEO_CODEC_VP9 = "VP9";
  private static final String VIDEO_CODEC_H264 = "H264";
  private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
  private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
  private static final String AUDIO_CODEC_OPUS = "opus";
  private static final String AUDIO_CODEC_ISAC = "ISAC";
  private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
  private static final String VIDEO_FLEXFEC_FIELDTRIAL =
      "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
  private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/";
  private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
      "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
  private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
  private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
  private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
  private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
  private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
  private static final int HD_VIDEO_WIDTH = 1280;
  private static final int HD_VIDEO_HEIGHT = 720;
  private static final int BPS_IN_KBPS = 1000;
  private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log";

  // The executor thread is started once as a static field and is used for all
  // peer connection API calls to ensure that a new peer connection factory is
  // created on the same thread as the previously destroyed factory.
  private static final ExecutorService executor = Executors.newSingleThreadExecutor();

  private final PCObserver pcObserver = new PCObserver();
  private final SDPObserver sdpObserver = new SDPObserver();
  private final Timer statsTimer = new Timer();
  private final EglBase rootEglBase;
  private final Context appContext;
  private final PeerConnectionParameters peerConnectionParameters;
  private final PeerConnectionEvents events;

  @Nullable
  private PeerConnectionFactory factory;
  @Nullable
  private PeerConnection peerConnection;
  @Nullable
  private AudioSource audioSource;
  @Nullable private SurfaceTextureHelper surfaceTextureHelper;
  @Nullable private VideoSource videoSource;
  private boolean preferIsac;
  private boolean videoCapturerStopped;
  private boolean isError;
  @Nullable
  private VideoSink localRender;
  @Nullable private List<VideoSink> remoteSinks;
  private SignalingParameters signalingParameters;
  private int videoWidth;
  private int videoHeight;
  private int videoFps;
  private MediaConstraints audioConstraints;
  private MediaConstraints sdpMediaConstraints;
  // Queued remote ICE candidates are consumed only after both local and
  // remote descriptions are set. Similarly, local ICE candidates are sent to
  // the remote peer after both local and remote descriptions are set.
  @Nullable
  private List<IceCandidate> queuedRemoteCandidates;
  private boolean isInitiator;
  @Nullable
  private SessionDescription localSdp; // either offer or answer SDP
  @Nullable
  private VideoCapturer videoCapturer;
  // renderVideo is set to true if video should be rendered and sent.
  private boolean renderVideo = true;
  @Nullable
  private VideoTrack localVideoTrack;
  @Nullable
  private VideoTrack remoteVideoTrack;
  @Nullable
  private RtpSender localVideoSender;
  // enableAudio is set to true if audio should be sent.
  private boolean enableAudio = true;
  @Nullable
  private AudioTrack localAudioTrack;
  @Nullable
  private DataChannel dataChannel;
  private final boolean dataChannelEnabled;
  // Enable RtcEventLog.
  @Nullable
  private RtcEventLog rtcEventLog;
  // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
  // recorded audio samples to an output file.
  @Nullable private RecordedAudioToFileController saveRecordedAudioToFile;

  /**
   * Data channel parameters.
   */
  public static class DataChannelParameters {
    public final boolean ordered;
    public final int maxRetransmitTimeMs;
    public final int maxRetransmits;
    public final String protocol;
    public final boolean negotiated;
    public final int id;

    public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
        String protocol, boolean negotiated, int id) {
      this.ordered = ordered;
      this.maxRetransmitTimeMs = maxRetransmitTimeMs;
      this.maxRetransmits = maxRetransmits;
      this.protocol = protocol;
      this.negotiated = negotiated;
      this.id = id;
    }
  }

  /**
   * Peer connection parameters.
   */
  public static class PeerConnectionParameters {
    public final boolean videoCallEnabled;
    public final boolean loopback;
    public final boolean tracing;
    public final int videoWidth;
    public final int videoHeight;
    public final int videoFps;
    public final int videoMaxBitrate;
    public final String videoCodec;
    public final boolean videoCodecHwAcceleration;
    public final boolean videoFlexfecEnabled;
    public final int audioStartBitrate;
    public final String audioCodec;
    public final boolean noAudioProcessing;
    public final boolean aecDump;
    public final boolean saveInputAudioToFile;
    public final boolean useOpenSLES;
    public final boolean disableBuiltInAEC;
    public final boolean disableBuiltInAGC;
    public final boolean disableBuiltInNS;
    public final boolean disableWebRtcAGCAndHPF;
    public final boolean enableRtcEventLog;
    private final DataChannelParameters dataChannelParameters;

    public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
        int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
        boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
        String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile,
        boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
        boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog,
        DataChannelParameters dataChannelParameters) {
      this.videoCallEnabled = videoCallEnabled;
      this.loopback = loopback;
      this.tracing = tracing;
      this.videoWidth = videoWidth;
      this.videoHeight = videoHeight;
      this.videoFps = videoFps;
      this.videoMaxBitrate = videoMaxBitrate;
      this.videoCodec = videoCodec;
      this.videoFlexfecEnabled = videoFlexfecEnabled;
      this.videoCodecHwAcceleration = videoCodecHwAcceleration;
      this.audioStartBitrate = audioStartBitrate;
      this.audioCodec = audioCodec;
      this.noAudioProcessing = noAudioProcessing;
      this.aecDump = aecDump;
      this.saveInputAudioToFile = saveInputAudioToFile;
      this.useOpenSLES = useOpenSLES;
      this.disableBuiltInAEC = disableBuiltInAEC;
      this.disableBuiltInAGC = disableBuiltInAGC;
      this.disableBuiltInNS = disableBuiltInNS;
      this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF;
      this.enableRtcEventLog = enableRtcEventLog;
      this.dataChannelParameters = dataChannelParameters;
    }
  }

  /**
   * Peer connection events.
   */
  public interface PeerConnectionEvents {
    /**
     * Callback fired once local SDP is created and set.
     */
    void onLocalDescription(final SessionDescription sdp);

    /**
     * Callback fired once local Ice candidate is generated.
     */
    void onIceCandidate(final IceCandidate candidate);

    /**
     * Callback fired once local ICE candidates are removed.
     */
    void onIceCandidatesRemoved(final IceCandidate[] candidates);

    /**
     * Callback fired once connection is established (IceConnectionState is
     * CONNECTED).
     */
    void onIceConnected();

    /**
     * Callback fired once connection is disconnected (IceConnectionState is
     * DISCONNECTED).
     */
    void onIceDisconnected();

    /**
     * Callback fired once DTLS connection is established (PeerConnectionState
     * is CONNECTED).
     */
    void onConnected();

    /**
     * Callback fired once DTLS connection is disconnected (PeerConnectionState
     * is DISCONNECTED).
     */
    void onDisconnected();

    /**
     * Callback fired once peer connection is closed.
     */
    void onPeerConnectionClosed();

310     /**
311      * Callback fired once peer connection statistics is ready.
312      */
onPeerConnectionStatsReady(final StatsReport[] reports)313     void onPeerConnectionStatsReady(final StatsReport[] reports);
314 
315     /**
316      * Callback fired once peer connection error happened.
317      */
onPeerConnectionError(final String description)318     void onPeerConnectionError(final String description);
319   }
320 
  /**
   * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
   * ownership of |eglBase|.
   */
  public PeerConnectionClient(Context appContext, EglBase eglBase,
      PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {
    this.rootEglBase = eglBase;
    this.appContext = appContext;
    this.events = events;
    this.peerConnectionParameters = peerConnectionParameters;
    this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;

    Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters));

    final String fieldTrials = getFieldTrials(peerConnectionParameters);
    executor.execute(() -> {
      Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials);
      PeerConnectionFactory.initialize(
          PeerConnectionFactory.InitializationOptions.builder(appContext)
              .setFieldTrials(fieldTrials)
              .setEnableInternalTracer(true)
              .createInitializationOptions());
    });
  }

  /**
   * This function should only be called once.
   */
  public void createPeerConnectionFactory(PeerConnectionFactory.Options options) {
    if (factory != null) {
      throw new IllegalStateException("PeerConnectionFactory has already been constructed");
    }
    executor.execute(() -> createPeerConnectionFactoryInternal(options));
  }

  public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink,
      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
    if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
      Log.w(TAG, "Video call enabled but no video capturer provided.");
    }
    createPeerConnection(
        localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters);
  }

  public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks,
      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
    if (peerConnectionParameters == null) {
      Log.e(TAG, "Creating peer connection without initializing factory.");
      return;
    }
    this.localRender = localRender;
    this.remoteSinks = remoteSinks;
    this.videoCapturer = videoCapturer;
    this.signalingParameters = signalingParameters;
    executor.execute(() -> {
      try {
        createMediaConstraintsInternal();
        createPeerConnectionInternal();
        maybeCreateAndStartRtcEventLog();
      } catch (Exception e) {
        reportError("Failed to create peer connection: " + e.getMessage());
        throw e;
      }
    });
  }

  public void close() {
    executor.execute(this::closeInternal);
  }

  private boolean isVideoCallEnabled() {
    return peerConnectionParameters.videoCallEnabled && videoCapturer != null;
  }

  private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) {
    isError = false;

    if (peerConnectionParameters.tracing) {
      PeerConnectionFactory.startInternalTracingCapture(
          Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
          + "webrtc-trace.txt");
    }

    // Check if ISAC is used by default.
    preferIsac = peerConnectionParameters.audioCodec != null
        && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);

    // It is possible to save a copy in raw PCM format to a file by checking
    // the "Save input audio to file" checkbox in the Settings UI. A callback
    // interface is set when this flag is enabled. As a result, a copy of the
    // recorded audio samples is provided to this client directly from the
    // native audio layer in Java.
    if (peerConnectionParameters.saveInputAudioToFile) {
      if (!peerConnectionParameters.useOpenSLES) {
        Log.d(TAG, "Enable recording of microphone input audio to file");
        saveRecordedAudioToFile = new RecordedAudioToFileController(executor);
      } else {
        // TODO(henrika): ensure that the UI reflects that if OpenSL ES is selected,
        // then the "Save input audio to file" option shall be grayed out.
        Log.e(TAG, "Recording of input audio is not supported for OpenSL ES");
      }
    }

    final AudioDeviceModule adm = createJavaAudioDevice();

    // Create peer connection factory.
    if (options != null) {
      Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
    }
    final boolean enableH264HighProfile =
        VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec);
    final VideoEncoderFactory encoderFactory;
    final VideoDecoderFactory decoderFactory;

    if (peerConnectionParameters.videoCodecHwAcceleration) {
      encoderFactory = new DefaultVideoEncoderFactory(
          rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
      decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
    } else {
      encoderFactory = new SoftwareVideoEncoderFactory();
      decoderFactory = new SoftwareVideoDecoderFactory();
    }

    factory = PeerConnectionFactory.builder()
                  .setOptions(options)
                  .setAudioDeviceModule(adm)
                  .setVideoEncoderFactory(encoderFactory)
                  .setVideoDecoderFactory(decoderFactory)
                  .createPeerConnectionFactory();
    Log.d(TAG, "Peer connection factory created.");
    adm.release();
  }

  AudioDeviceModule createJavaAudioDevice() {
    // Enable/disable OpenSL ES playback.
    if (!peerConnectionParameters.useOpenSLES) {
      Log.w(TAG, "External OpenSLES ADM not implemented yet.");
      // TODO(magjed): Add support for external OpenSLES ADM.
    }

    // Set audio record error callbacks.
    AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() {
      @Override
      public void onWebRtcAudioRecordInitError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioRecordStartError(
          JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) {
        Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioRecordError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
        reportError(errorMessage);
      }
    };

    AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() {
      @Override
      public void onWebRtcAudioTrackInitError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackStartError(
          JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) {
        Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
        reportError(errorMessage);
      }
    };

    // Set audio record state callbacks.
    AudioRecordStateCallback audioRecordStateCallback = new AudioRecordStateCallback() {
      @Override
      public void onWebRtcAudioRecordStart() {
        Log.i(TAG, "Audio recording starts");
      }

      @Override
      public void onWebRtcAudioRecordStop() {
        Log.i(TAG, "Audio recording stops");
      }
    };

    // Set audio track state callbacks.
    AudioTrackStateCallback audioTrackStateCallback = new AudioTrackStateCallback() {
      @Override
      public void onWebRtcAudioTrackStart() {
        Log.i(TAG, "Audio playout starts");
      }

      @Override
      public void onWebRtcAudioTrackStop() {
        Log.i(TAG, "Audio playout stops");
      }
    };

    return JavaAudioDeviceModule.builder(appContext)
        .setSamplesReadyCallback(saveRecordedAudioToFile)
        .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
        .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
        .setAudioRecordErrorCallback(audioRecordErrorCallback)
        .setAudioTrackErrorCallback(audioTrackErrorCallback)
        .setAudioRecordStateCallback(audioRecordStateCallback)
        .setAudioTrackStateCallback(audioTrackStateCallback)
        .createAudioDeviceModule();
  }

  private void createMediaConstraintsInternal() {
    // Create video constraints if video call is enabled.
    if (isVideoCallEnabled()) {
      videoWidth = peerConnectionParameters.videoWidth;
      videoHeight = peerConnectionParameters.videoHeight;
      videoFps = peerConnectionParameters.videoFps;

      // If video resolution is not specified, default to HD.
      if (videoWidth == 0 || videoHeight == 0) {
        videoWidth = HD_VIDEO_WIDTH;
        videoHeight = HD_VIDEO_HEIGHT;
      }

      // If fps is not specified, default to 30.
      if (videoFps == 0) {
        videoFps = 30;
      }
      Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
    }

    // Create audio constraints.
    audioConstraints = new MediaConstraints();
    // added for audio performance measurements
    if (peerConnectionParameters.noAudioProcessing) {
      Log.d(TAG, "Disabling audio processing");
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
    }
    // Create SDP constraints.
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
        "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled())));
  }

  private void createPeerConnectionInternal() {
    if (factory == null || isError) {
      Log.e(TAG, "Peerconnection factory is not created");
      return;
    }
    Log.d(TAG, "Create peer connection.");

    queuedRemoteCandidates = new ArrayList<>();

    PeerConnection.RTCConfiguration rtcConfig =
        new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
    // Use ECDSA encryption.
    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
    // Enable DTLS for normal calls and disable for loopback calls.
    rtcConfig.enableDtlsSrtp = !peerConnectionParameters.loopback;
    rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;

    peerConnection = factory.createPeerConnection(rtcConfig, pcObserver);

    if (dataChannelEnabled) {
      DataChannel.Init init = new DataChannel.Init();
      init.ordered = peerConnectionParameters.dataChannelParameters.ordered;
      init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated;
      init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits;
      init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs;
      init.id = peerConnectionParameters.dataChannelParameters.id;
      init.protocol = peerConnectionParameters.dataChannelParameters.protocol;
      dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init);
    }
    isInitiator = false;

    // Set INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

    List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
    if (isVideoCallEnabled()) {
      peerConnection.addTrack(createVideoTrack(videoCapturer), mediaStreamLabels);
      // We can add the renderers right away because we don't need to wait for an
      // answer to get the remote track.
      remoteVideoTrack = getRemoteVideoTrack();
      remoteVideoTrack.setEnabled(renderVideo);
      for (VideoSink remoteSink : remoteSinks) {
        remoteVideoTrack.addSink(remoteSink);
      }
    }
    peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
    if (isVideoCallEnabled()) {
      findVideoSender();
    }

    if (peerConnectionParameters.aecDump) {
      try {
        ParcelFileDescriptor aecDumpFileDescriptor =
            ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
                                          + File.separator + "Download/audio.aecdump"),
                ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
                    | ParcelFileDescriptor.MODE_TRUNCATE);
        factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1);
      } catch (IOException e) {
        Log.e(TAG, "Can not open aecdump file", e);
      }
    }

    if (saveRecordedAudioToFile != null) {
      if (saveRecordedAudioToFile.start()) {
        Log.d(TAG, "Recording input audio to file is activated");
      }
    }
    Log.d(TAG, "Peer connection created.");
  }

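  // Note (illustrative): with the "yyyyMMdd_hhmm_ss" pattern below, a log created on
  // Jan 1, 2014 at 01:30:00 would be named "event_log_20140101_0130_00.log" and stored
  // in the app-private "rtc_event_log" directory.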
  private File createRtcEventLogOutputFile() {
    DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault());
    Date date = new Date();
    final String outputFileName = "event_log_" + dateFormat.format(date) + ".log";
    return new File(
        appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName);
  }

  private void maybeCreateAndStartRtcEventLog() {
    if (appContext == null || peerConnection == null) {
      return;
    }
    if (!peerConnectionParameters.enableRtcEventLog) {
      Log.d(TAG, "RtcEventLog is disabled.");
      return;
    }
    rtcEventLog = new RtcEventLog(peerConnection);
    rtcEventLog.start(createRtcEventLogOutputFile());
  }

  private void closeInternal() {
    if (factory != null && peerConnectionParameters.aecDump) {
      factory.stopAecDump();
    }
    Log.d(TAG, "Closing peer connection.");
    statsTimer.cancel();
    if (dataChannel != null) {
      dataChannel.dispose();
      dataChannel = null;
    }
    if (rtcEventLog != null) {
      // RtcEventLog should stop before the peer connection is disposed.
      rtcEventLog.stop();
      rtcEventLog = null;
    }
    if (peerConnection != null) {
      peerConnection.dispose();
      peerConnection = null;
    }
    Log.d(TAG, "Closing audio source.");
    if (audioSource != null) {
      audioSource.dispose();
      audioSource = null;
    }
    Log.d(TAG, "Stopping capture.");
    if (videoCapturer != null) {
      try {
        videoCapturer.stopCapture();
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
      videoCapturerStopped = true;
      videoCapturer.dispose();
      videoCapturer = null;
    }
    Log.d(TAG, "Closing video source.");
    if (videoSource != null) {
      videoSource.dispose();
      videoSource = null;
    }
    if (surfaceTextureHelper != null) {
      surfaceTextureHelper.dispose();
      surfaceTextureHelper = null;
    }
    if (saveRecordedAudioToFile != null) {
      Log.d(TAG, "Closing audio file for recorded input audio.");
      saveRecordedAudioToFile.stop();
      saveRecordedAudioToFile = null;
    }
    localRender = null;
    remoteSinks = null;
    Log.d(TAG, "Closing peer connection factory.");
    if (factory != null) {
      factory.dispose();
      factory = null;
    }
    rootEglBase.release();
    Log.d(TAG, "Closing peer connection done.");
    events.onPeerConnectionClosed();
    PeerConnectionFactory.stopInternalTracingCapture();
    PeerConnectionFactory.shutdownInternalTracer();
  }

  public boolean isHDVideo() {
    return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720;
  }

  @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated.
  private void getStats() {
    if (peerConnection == null || isError) {
      return;
    }
    boolean success = peerConnection.getStats(new StatsObserver() {
      @Override
      public void onComplete(final StatsReport[] reports) {
        events.onPeerConnectionStatsReady(reports);
      }
    }, null);
    if (!success) {
      Log.e(TAG, "getStats() returns false!");
    }
  }

  public void enableStatsEvents(boolean enable, int periodMs) {
    if (enable) {
      try {
        statsTimer.schedule(new TimerTask() {
          @Override
          public void run() {
            executor.execute(() -> getStats());
          }
        }, 0, periodMs);
      } catch (Exception e) {
        Log.e(TAG, "Can not schedule statistics timer", e);
      }
    } else {
      statsTimer.cancel();
    }
  }

  public void setAudioEnabled(final boolean enable) {
    executor.execute(() -> {
      enableAudio = enable;
      if (localAudioTrack != null) {
        localAudioTrack.setEnabled(enableAudio);
      }
    });
  }

  public void setVideoEnabled(final boolean enable) {
    executor.execute(() -> {
      renderVideo = enable;
      if (localVideoTrack != null) {
        localVideoTrack.setEnabled(renderVideo);
      }
      if (remoteVideoTrack != null) {
        remoteVideoTrack.setEnabled(renderVideo);
      }
    });
  }

  public void createOffer() {
    executor.execute(() -> {
      if (peerConnection != null && !isError) {
        Log.d(TAG, "PC Create OFFER");
        isInitiator = true;
        peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
      }
    });
  }

  public void createAnswer() {
    executor.execute(() -> {
      if (peerConnection != null && !isError) {
        Log.d(TAG, "PC create ANSWER");
        isInitiator = false;
        peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
      }
    });
  }

  public void addRemoteIceCandidate(final IceCandidate candidate) {
    executor.execute(() -> {
      if (peerConnection != null && !isError) {
        if (queuedRemoteCandidates != null) {
          queuedRemoteCandidates.add(candidate);
        } else {
          peerConnection.addIceCandidate(candidate);
        }
      }
    });
  }

  public void removeRemoteIceCandidates(final IceCandidate[] candidates) {
    executor.execute(() -> {
      if (peerConnection == null || isError) {
        return;
      }
      // Drain the queued remote candidates if there are any so that
      // they are processed in the proper order.
      drainCandidates();
      peerConnection.removeIceCandidates(candidates);
    });
  }

  public void setRemoteDescription(final SessionDescription sdp) {
    executor.execute(() -> {
      if (peerConnection == null || isError) {
        return;
      }
      String sdpDescription = sdp.description;
      if (preferIsac) {
        sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
      }
      if (isVideoCallEnabled()) {
        sdpDescription =
            preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false);
      }
      if (peerConnectionParameters.audioStartBitrate > 0) {
        sdpDescription = setStartBitrate(
            AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
      }
      Log.d(TAG, "Set remote SDP.");
      SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
      peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
    });
  }

  public void stopVideoSource() {
    executor.execute(() -> {
      if (videoCapturer != null && !videoCapturerStopped) {
        Log.d(TAG, "Stop video source.");
        try {
          videoCapturer.stopCapture();
        } catch (InterruptedException e) {
        }
        videoCapturerStopped = true;
      }
    });
  }

  public void startVideoSource() {
    executor.execute(() -> {
      if (videoCapturer != null && videoCapturerStopped) {
        Log.d(TAG, "Restart video source.");
        videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
        videoCapturerStopped = false;
      }
    });
  }

  public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps) {
    executor.execute(() -> {
      if (peerConnection == null || localVideoSender == null || isError) {
        return;
      }
      Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps);
      if (localVideoSender == null) {
        Log.w(TAG, "Sender is not ready.");
        return;
      }

      RtpParameters parameters = localVideoSender.getParameters();
      if (parameters.encodings.size() == 0) {
        Log.w(TAG, "RtpParameters are not ready.");
        return;
      }

      for (RtpParameters.Encoding encoding : parameters.encodings) {
        // Null value means no limit.
        encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
      }
      if (!localVideoSender.setParameters(parameters)) {
        Log.e(TAG, "RtpSender.setParameters failed.");
      }
      Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps);
    });
  }

  private void reportError(final String errorMessage) {
    Log.e(TAG, "Peerconnection error: " + errorMessage);
    executor.execute(() -> {
      if (!isError) {
        events.onPeerConnectionError(errorMessage);
        isError = true;
      }
    });
  }

  @Nullable
  private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
  }

  @Nullable
  private VideoTrack createVideoTrack(VideoCapturer capturer) {
    surfaceTextureHelper =
        SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
    videoSource = factory.createVideoSource(capturer.isScreencast());
    capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addSink(localRender);
    return localVideoTrack;
  }

  private void findVideoSender() {
    for (RtpSender sender : peerConnection.getSenders()) {
      if (sender.track() != null) {
        String trackType = sender.track().kind();
        if (trackType.equals(VIDEO_TRACK_TYPE)) {
          Log.d(TAG, "Found video sender.");
          localVideoSender = sender;
        }
      }
    }
  }

  // Returns the remote VideoTrack, assuming there is only one.
  private @Nullable VideoTrack getRemoteVideoTrack() {
    for (RtpTransceiver transceiver : peerConnection.getTransceivers()) {
      MediaStreamTrack track = transceiver.getReceiver().track();
      if (track instanceof VideoTrack) {
        return (VideoTrack) track;
      }
    }
    return null;
  }

  private static String getSdpVideoCodecName(PeerConnectionParameters parameters) {
    switch (parameters.videoCodec) {
      case VIDEO_CODEC_VP8:
        return VIDEO_CODEC_VP8;
      case VIDEO_CODEC_VP9:
        return VIDEO_CODEC_VP9;
      case VIDEO_CODEC_H264_HIGH:
      case VIDEO_CODEC_H264_BASELINE:
        return VIDEO_CODEC_H264;
      default:
        return VIDEO_CODEC_VP8;
    }
  }

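  // Illustrative note: field trials are concatenated as "Name/Value/" pairs into a single
  // string. For example, with videoFlexfecEnabled and disableWebRtcAGCAndHPF both set, the
  // resulting string would be
  //   "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"
  //       + "WebRTC-IntelVP8/Enabled/"
  //       + "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/".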
  private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) {
    String fieldTrials = "";
    if (peerConnectionParameters.videoFlexfecEnabled) {
      fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL;
      Log.d(TAG, "Enable FlexFEC field trial.");
    }
    fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL;
    if (peerConnectionParameters.disableWebRtcAGCAndHPF) {
      fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
      Log.d(TAG, "Disable WebRTC AGC field trial.");
    }
    return fieldTrials;
  }

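  // Illustrative example (hypothetical SDP, payload type 111 assumed): for codec "opus" and
  // bitrateKbps = 32, the rtpmap line
  //   a=rtpmap:111 opus/48000/2
  // is located first; an existing "a=fmtp:111 ..." line would get "; maxaveragebitrate=32000"
  // appended, otherwise a new line "a=fmtp:111 maxaveragebitrate=32000" is inserted right
  // after the rtpmap line.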
  @SuppressWarnings("StringSplitter")
  private static String setStartBitrate(
      String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
    String[] lines = sdpDescription.split("\r\n");
    int rtpmapLineIndex = -1;
    boolean sdpFormatUpdated = false;
    String codecRtpMap = null;
    // Search for codec rtpmap in format
    // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
    String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
    Pattern codecPattern = Pattern.compile(regex);
    for (int i = 0; i < lines.length; i++) {
      Matcher codecMatcher = codecPattern.matcher(lines[i]);
      if (codecMatcher.matches()) {
        codecRtpMap = codecMatcher.group(1);
        rtpmapLineIndex = i;
        break;
      }
    }
    if (codecRtpMap == null) {
      Log.w(TAG, "No rtpmap for " + codec + " codec");
      return sdpDescription;
    }
    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);

    // Check if an a=fmtp string already exists in the remote SDP for this codec and
    // update it with the new bitrate parameter.
    regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$";
    codecPattern = Pattern.compile(regex);
    for (int i = 0; i < lines.length; i++) {
      Matcher codecMatcher = codecPattern.matcher(lines[i]);
      if (codecMatcher.matches()) {
        Log.d(TAG, "Found " + codec + " " + lines[i]);
        if (isVideoCodec) {
          lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
        } else {
          lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
        }
        Log.d(TAG, "Update remote SDP line: " + lines[i]);
        sdpFormatUpdated = true;
        break;
      }
    }

    StringBuilder newSdpDescription = new StringBuilder();
    for (int i = 0; i < lines.length; i++) {
      newSdpDescription.append(lines[i]).append("\r\n");
      // Append a new a=fmtp line if no such line exists for the codec.
      if (!sdpFormatUpdated && i == rtpmapLineIndex) {
        String bitrateSet;
        if (isVideoCodec) {
          bitrateSet =
              "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
        } else {
          bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
              + (bitrateKbps * 1000);
        }
        Log.d(TAG, "Add remote SDP line: " + bitrateSet);
        newSdpDescription.append(bitrateSet).append("\r\n");
      }
    }
    return newSdpDescription.toString();
  }

  /** Returns the line number containing "m=audio|video", or -1 if no such line exists. */
  private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) {
    final String mediaDescription = isAudio ? "m=audio " : "m=video ";
    for (int i = 0; i < sdpLines.length; ++i) {
      if (sdpLines[i].startsWith(mediaDescription)) {
        return i;
      }
    }
    return -1;
  }

  private static String joinString(
      Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd) {
    Iterator<? extends CharSequence> iter = s.iterator();
    if (!iter.hasNext()) {
      return "";
    }
    StringBuilder buffer = new StringBuilder(iter.next());
    while (iter.hasNext()) {
      buffer.append(delimiter).append(iter.next());
    }
    if (delimiterAtEnd) {
      buffer.append(delimiter);
    }
    return buffer.toString();
  }

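  // Illustrative example (hypothetical m-line): given
  //   m=video 9 UDP/TLS/RTP/SAVPF 96 98 100
  // and preferredPayloadTypes = ["100"], movePayloadTypesToFront() keeps the first three
  // header fields and reorders the payload types, producing
  //   m=video 9 UDP/TLS/RTP/SAVPF 100 96 98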
  private static @Nullable String movePayloadTypesToFront(
      List<String> preferredPayloadTypes, String mLine) {
    // The format of the media description line should be: m=<media> <port> <proto> <fmt> ...
    final List<String> origLineParts = Arrays.asList(mLine.split(" "));
    if (origLineParts.size() <= 3) {
      Log.e(TAG, "Wrong SDP media description format: " + mLine);
      return null;
    }
    final List<String> header = origLineParts.subList(0, 3);
    final List<String> unpreferredPayloadTypes =
        new ArrayList<>(origLineParts.subList(3, origLineParts.size()));
    unpreferredPayloadTypes.removeAll(preferredPayloadTypes);
    // Reconstruct the line with |preferredPayloadTypes| moved to the beginning of the payload
    // types.
    final List<String> newLineParts = new ArrayList<>();
    newLineParts.addAll(header);
    newLineParts.addAll(preferredPayloadTypes);
    newLineParts.addAll(unpreferredPayloadTypes);
    return joinString(newLineParts, " ", false /* delimiterAtEnd */);
  }

  private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
    final String[] lines = sdpDescription.split("\r\n");
    final int mLineIndex = findMediaDescriptionLine(isAudio, lines);
    if (mLineIndex == -1) {
      Log.w(TAG, "No mediaDescription line, so can't prefer " + codec);
      return sdpDescription;
    }
    // A list with all the payload types with name |codec|. The payload types are integers in the
    // range 96-127, but they are stored as strings here.
    final List<String> codecPayloadTypes = new ArrayList<>();
    // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
    final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$");
    for (String line : lines) {
      Matcher codecMatcher = codecPattern.matcher(line);
      if (codecMatcher.matches()) {
        codecPayloadTypes.add(codecMatcher.group(1));
      }
    }
    if (codecPayloadTypes.isEmpty()) {
      Log.w(TAG, "No payload types with name " + codec);
      return sdpDescription;
    }

    final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]);
    if (newMLine == null) {
      return sdpDescription;
    }
    Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine);
    lines[mLineIndex] = newMLine;
    return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */);
  }

  private void drainCandidates() {
    if (queuedRemoteCandidates != null) {
      Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
      for (IceCandidate candidate : queuedRemoteCandidates) {
        peerConnection.addIceCandidate(candidate);
      }
      queuedRemoteCandidates = null;
    }
  }

  private void switchCameraInternal() {
    if (videoCapturer instanceof CameraVideoCapturer) {
      if (!isVideoCallEnabled() || isError) {
        Log.e(TAG,
            "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError);
        return; // No video is sent or only one camera is available or error happened.
      }
      Log.d(TAG, "Switch camera");
      CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
      cameraVideoCapturer.switchCamera(null);
    } else {
      Log.d(TAG, "Will not switch camera, video capturer is not a camera");
    }
  }

  public void switchCamera() {
    executor.execute(this::switchCameraInternal);
  }

  public void changeCaptureFormat(final int width, final int height, final int framerate) {
    executor.execute(() -> changeCaptureFormatInternal(width, height, framerate));
  }

  private void changeCaptureFormatInternal(int width, int height, int framerate) {
    if (!isVideoCallEnabled() || isError || videoCapturer == null) {
      Log.e(TAG,
          "Failed to change capture format. Video: " + isVideoCallEnabled()
              + ". Error : " + isError);
      return;
    }
    Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
    videoSource.adaptOutputFormat(width, height, framerate);
  }

  // Implementation detail: observe ICE & stream changes and react accordingly.
  private class PCObserver implements PeerConnection.Observer {
    @Override
    public void onIceCandidate(final IceCandidate candidate) {
      executor.execute(() -> events.onIceCandidate(candidate));
    }

    @Override
    public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
      executor.execute(() -> events.onIceCandidatesRemoved(candidates));
    }

    @Override
    public void onSignalingChange(PeerConnection.SignalingState newState) {
      Log.d(TAG, "SignalingState: " + newState);
    }

    @Override
    public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
      executor.execute(() -> {
        Log.d(TAG, "IceConnectionState: " + newState);
        if (newState == IceConnectionState.CONNECTED) {
          events.onIceConnected();
        } else if (newState == IceConnectionState.DISCONNECTED) {
          events.onIceDisconnected();
        } else if (newState == IceConnectionState.FAILED) {
          reportError("ICE connection failed.");
        }
      });
    }

    @Override
    public void onConnectionChange(final PeerConnection.PeerConnectionState newState) {
      executor.execute(() -> {
        Log.d(TAG, "PeerConnectionState: " + newState);
        if (newState == PeerConnectionState.CONNECTED) {
          events.onConnected();
        } else if (newState == PeerConnectionState.DISCONNECTED) {
          events.onDisconnected();
        } else if (newState == PeerConnectionState.FAILED) {
          reportError("DTLS connection failed.");
        }
      });
    }

    @Override
    public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
      Log.d(TAG, "IceGatheringState: " + newState);
    }

    @Override
    public void onIceConnectionReceivingChange(boolean receiving) {
      Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
    }

    @Override
    public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {
      Log.d(TAG, "Selected candidate pair changed because: " + event);
    }

    @Override
    public void onAddStream(final MediaStream stream) {}

    @Override
    public void onRemoveStream(final MediaStream stream) {}

    @Override
    public void onDataChannel(final DataChannel dc) {
      Log.d(TAG, "New Data channel " + dc.label());

      if (!dataChannelEnabled)
        return;

      dc.registerObserver(new DataChannel.Observer() {
        @Override
        public void onBufferedAmountChange(long previousAmount) {
          Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state());
        }

        @Override
        public void onStateChange() {
          Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
        }

        @Override
        public void onMessage(final DataChannel.Buffer buffer) {
          if (buffer.binary) {
            Log.d(TAG, "Received binary msg over " + dc);
            return;
          }
          ByteBuffer data = buffer.data;
          final byte[] bytes = new byte[data.capacity()];
          data.get(bytes);
          String strData = new String(bytes, Charset.forName("UTF-8"));
          Log.d(TAG, "Got msg: " + strData + " over " + dc);
        }
      });
    }

    @Override
    public void onRenegotiationNeeded() {
      // No need to do anything; AppRTC follows a pre-agreed-upon
      // signaling/negotiation protocol.
    }

    @Override
    public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {}
  }

  // Implementation detail: handle offer creation/signaling and answer setting,
  // as well as adding remote ICE candidates once the answer SDP is set.
  private class SDPObserver implements SdpObserver {
    @Override
    public void onCreateSuccess(final SessionDescription origSdp) {
      if (localSdp != null) {
        reportError("Multiple SDP create.");
        return;
      }
      String sdpDescription = origSdp.description;
      if (preferIsac) {
        sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
      }
      if (isVideoCallEnabled()) {
        sdpDescription =
            preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false);
      }
      final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
      localSdp = sdp;
      executor.execute(() -> {
        if (peerConnection != null && !isError) {
          Log.d(TAG, "Set local SDP from " + sdp.type);
          peerConnection.setLocalDescription(sdpObserver, sdp);
        }
      });
    }

    @Override
    public void onSetSuccess() {
      executor.execute(() -> {
        if (peerConnection == null || isError) {
          return;
        }
        if (isInitiator) {
          // For offering peer connection we first create offer and set
          // local SDP, then after receiving answer set remote SDP.
          if (peerConnection.getRemoteDescription() == null) {
            // We've just set our local SDP so time to send it.
            Log.d(TAG, "Local SDP set successfully");
            events.onLocalDescription(localSdp);
          } else {
            // We've just set remote description, so drain remote
            // and send local ICE candidates.
            Log.d(TAG, "Remote SDP set successfully");
            drainCandidates();
          }
        } else {
          // For answering peer connection we set remote SDP and then
          // create answer and set local SDP.
          if (peerConnection.getLocalDescription() != null) {
            // We've just set our local SDP so time to send it, drain
            // remote and send local ICE candidates.
            Log.d(TAG, "Local SDP set successfully");
            events.onLocalDescription(localSdp);
            drainCandidates();
          } else {
            // We've just set remote SDP - do nothing for now -
            // answer will be created soon.
            Log.d(TAG, "Remote SDP set successfully");
          }
        }
      });
    }

    @Override
    public void onCreateFailure(final String error) {
      reportError("createSDP error: " + error);
    }

    @Override
    public void onSetFailure(final String error) {
      reportError("setSDP error: " + error);
    }
  }
}