/*
 *  Copyright 2014 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.appspot.apprtc.test;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.os.Build;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import android.util.Log;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.PeerConnectionClient;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;

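/**
 * Instrumentation tests for {@link PeerConnectionClient}. Most tests run the client in loopback
 * mode: the locally created offer is relabeled as an answer and applied as the remote description,
 * and local ICE candidates are fed back as remote candidates, so media flows without a signaling
 * server. Video delivery is verified by counting frames delivered to {@link MockSink} renderers.
 */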
@RunWith(AndroidJUnit4.class)
public class PeerConnectionClientTest implements PeerConnectionEvents {
  private static final String TAG = "RTCClientTest";
  private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000;
  private static final int WAIT_TIMEOUT = 7000;
  private static final int CAMERA_SWITCH_ATTEMPTS = 3;
  private static final int VIDEO_RESTART_ATTEMPTS = 3;
  private static final int CAPTURE_FORMAT_CHANGE_ATTEMPTS = 3;
  private static final int VIDEO_RESTART_TIMEOUT = 500;
  private static final int EXPECTED_VIDEO_FRAMES = 10;
  private static final String VIDEO_CODEC_VP8 = "VP8";
  private static final String VIDEO_CODEC_VP9 = "VP9";
  private static final String VIDEO_CODEC_H264 = "H264";
  private static final int AUDIO_RUN_TIMEOUT = 1000;
  private static final String LOCAL_RENDERER_NAME = "Local renderer";
  private static final String REMOTE_RENDERER_NAME = "Remote renderer";

  private static final int MAX_VIDEO_FPS = 30;
  private static final int WIDTH_VGA = 640;
  private static final int HEIGHT_VGA = 480;
  private static final int WIDTH_QVGA = 320;
  private static final int HEIGHT_QVGA = 240;

  // The peer connection client is assumed to be thread safe in itself; the
  // reference is written by the test thread and read by worker threads.
  private volatile PeerConnectionClient pcClient;
  private volatile boolean loopback;

  // These are protected by their respective event objects.
  private ExecutorService signalingExecutor;
  private boolean isClosed;
  private boolean isIceConnected;
  private SessionDescription localSdp;
  private List<IceCandidate> iceCandidates = new ArrayList<>();
  private final Object localSdpEvent = new Object();
  private final Object iceCandidateEvent = new Object();
  private final Object iceConnectedEvent = new Object();
  private final Object closeEvent = new Object();

  // Mock VideoSink implementation.
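  // Counts delivered frames with a CountDownLatch so tests can block in
  // waitForFramesRendered() until the expected number of frames has arrived.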
  private static class MockSink implements VideoSink {
    // These are protected by 'this' since we get called from worker threads.
    private String rendererName;
    private boolean renderFrameCalled;

    // Thread-safe in itself.
    private CountDownLatch doneRendering;

    public MockSink(int expectedFrames, String rendererName) {
      this.rendererName = rendererName;
      reset(expectedFrames);
    }

    // Resets the renderer to wait for a new number of video frames.
    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
    @SuppressWarnings("NoSynchronizedMethodCheck")
    public synchronized void reset(int expectedFrames) {
      renderFrameCalled = false;
      doneRendering = new CountDownLatch(expectedFrames);
    }

    @Override
    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
    @SuppressWarnings("NoSynchronizedMethodCheck")
    public synchronized void onFrame(VideoFrame frame) {
      if (!renderFrameCalled) {
        if (rendererName != null) {
          Log.d(TAG,
              rendererName + " render frame: " + frame.getRotatedWidth() + " x "
                  + frame.getRotatedHeight());
        } else {
          Log.d(TAG, "Render frame: " + frame.getRotatedWidth() + " x " + frame.getRotatedHeight());
        }
      }
      renderFrameCalled = true;
      doneRendering.countDown();
    }

    // This method shouldn't hold any locks or touch member variables since it
    // blocks.
    public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
      doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
      return (doneRendering.getCount() <= 0);
    }
  }

  // Peer connection events implementation.
  @Override
  public void onLocalDescription(SessionDescription sdp) {
    Log.d(TAG, "LocalSDP type: " + sdp.type);
    synchronized (localSdpEvent) {
      localSdp = sdp;
      localSdpEvent.notifyAll();
    }
  }

  @Override
  public void onIceCandidate(final IceCandidate candidate) {
    synchronized (iceCandidateEvent) {
      Log.d(TAG, "IceCandidate #" + iceCandidates.size() + " : " + candidate.toString());
      if (loopback) {
        // Loopback local ICE candidate in a separate thread to avoid adding
        // remote ICE candidate in a local ICE candidate callback.
        signalingExecutor.execute(new Runnable() {
          @Override
          public void run() {
            pcClient.addRemoteIceCandidate(candidate);
          }
        });
      }
      iceCandidates.add(candidate);
      iceCandidateEvent.notifyAll();
    }
  }

  @Override
  public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
    // TODO(honghaiz): Add this for tests.
  }

  @Override
  public void onIceConnected() {
    Log.d(TAG, "ICE Connected");
    synchronized (iceConnectedEvent) {
      isIceConnected = true;
      iceConnectedEvent.notifyAll();
    }
  }

  @Override
  public void onIceDisconnected() {
    Log.d(TAG, "ICE Disconnected");
    synchronized (iceConnectedEvent) {
      isIceConnected = false;
      iceConnectedEvent.notifyAll();
    }
  }

  @Override
  public void onConnected() {
    Log.d(TAG, "DTLS Connected");
  }

  @Override
  public void onDisconnected() {
    Log.d(TAG, "DTLS Disconnected");
  }

  @Override
  public void onPeerConnectionClosed() {
    Log.d(TAG, "PeerConnection closed");
    synchronized (closeEvent) {
      isClosed = true;
      closeEvent.notifyAll();
    }
  }

  @Override
  public void onPeerConnectionError(String description) {
    fail("PC Error: " + description);
  }

  @Override
  public void onPeerConnectionStatsReady(StatsReport[] reports) {}

  // Helper wait functions.
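  // Each helper below uses the same idiom: compute an absolute deadline, loop while the condition
  // is unmet, and re-check the remaining time on every pass so that spurious wakeups or unrelated
  // notifications do not extend the wait past the requested timeout.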
  private boolean waitForLocalSDP(int timeoutMs) throws InterruptedException {
    synchronized (localSdpEvent) {
      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
      while (localSdp == null) {
        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
        if (waitTimeMs < 0) {
          return false;
        }
        localSdpEvent.wait(waitTimeMs);
      }
      return true;
    }
  }

  private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException {
    synchronized (iceCandidateEvent) {
      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
      while (iceCandidates.size() == 0) {
        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
        if (waitTimeMs < 0) {
          return false;
        }
        iceCandidateEvent.wait(waitTimeMs);
      }
      return true;
    }
  }

  private boolean waitForIceConnected(int timeoutMs) throws InterruptedException {
    synchronized (iceConnectedEvent) {
      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
      while (!isIceConnected) {
        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
        if (waitTimeMs < 0) {
          Log.e(TAG, "ICE connection failure");
          return false;
        }
        iceConnectedEvent.wait(waitTimeMs);
      }
      return true;
    }
  }

  private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException {
    synchronized (closeEvent) {
      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
      while (!isClosed) {
        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
        if (waitTimeMs < 0) {
          return false;
        }
        closeEvent.wait(waitTimeMs);
      }
      return true;
    }
  }

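  // Creates a PeerConnectionClient wired for these tests: no ICE servers, this class as the
  // events callback, network monitoring disabled, and an offer created immediately.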
  PeerConnectionClient createPeerConnectionClient(MockSink localRenderer, MockSink remoteRenderer,
      PeerConnectionParameters peerConnectionParameters, VideoCapturer videoCapturer) {
    List<PeerConnection.IceServer> iceServers = new ArrayList<>();
    SignalingParameters signalingParameters =
        new SignalingParameters(iceServers, true, // iceServers, initiator.
            null, null, null, // clientId, wssUrl, wssPostUrl.
            null, null); // offerSdp, iceCandidates.

    final EglBase eglBase = EglBase.create();
    PeerConnectionClient client =
        new PeerConnectionClient(InstrumentationRegistry.getTargetContext(), eglBase,
            peerConnectionParameters, this /* PeerConnectionEvents */);
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    options.networkIgnoreMask = 0;
    options.disableNetworkMonitor = true;
    client.createPeerConnectionFactory(options);
    client.createPeerConnection(localRenderer, remoteRenderer, videoCapturer, signalingParameters);
    client.createOffer();
    return client;
  }

291 
createParametersForAudioCall()292   private PeerConnectionParameters createParametersForAudioCall() {
293     return new PeerConnectionParameters(false, /* videoCallEnabled */
294         true, /* loopback */
295         false, /* tracing */
296         // Video codec parameters.
297         0, /* videoWidth */
298         0, /* videoHeight */
299         0, /* videoFps */
300         0, /* videoStartBitrate */
301         "", /* videoCodec */
302         true, /* videoCodecHwAcceleration */
303         false, /* videoFlexfecEnabled */
304         // Audio codec parameters.
305         0, /* audioStartBitrate */
306         "OPUS", /* audioCodec */
307         false, /* noAudioProcessing */
308         false, /* aecDump */
309         false, /* saveInputAudioToFile */
310         false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
311         false /* disableBuiltInNS */, false /* disableWebRtcAGC */, false /* enableRtcEventLog */,
312         null /* dataChannelParameters */);
313   }
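  // Picks a capturer implementation: Camera2 is used only when capturing to a texture and the
  // device supports it; otherwise the Camera1 API is used. The first enumerated device is opened.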
314 
createCameraCapturer(boolean captureToTexture)315   private VideoCapturer createCameraCapturer(boolean captureToTexture) {
316     final boolean useCamera2 = captureToTexture
317         && Camera2Enumerator.isSupported(InstrumentationRegistry.getTargetContext());
318 
319     CameraEnumerator enumerator;
320     if (useCamera2) {
321       enumerator = new Camera2Enumerator(InstrumentationRegistry.getTargetContext());
322     } else {
323       enumerator = new Camera1Enumerator(captureToTexture);
324     }
325     String deviceName = enumerator.getDeviceNames()[0];
326     return enumerator.createCapturer(deviceName, null);
327   }

  private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
    return new PeerConnectionParameters(true, /* videoCallEnabled */
        true, /* loopback */
        false, /* tracing */
        // Video codec parameters.
        0, /* videoWidth */
        0, /* videoHeight */
        0, /* videoFps */
        0, /* videoStartBitrate */
        videoCodec, /* videoCodec */
        true, /* videoCodecHwAcceleration */
        false, /* videoFlexfecEnabled */
        // Audio codec parameters.
        0, /* audioStartBitrate */
        "OPUS", /* audioCodec */
        false, /* noAudioProcessing */
        false, /* aecDump */
        false, /* saveInputAudioToFile */
        false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
        false /* disableBuiltInNS */, false /* disableWebRtcAGC */, false /* enableRtcEventLog */,
        null /* dataChannelParameters */);
  }

  @Before
  public void setUp() {
    signalingExecutor = Executors.newSingleThreadExecutor();
  }

  @After
  public void tearDown() {
    signalingExecutor.shutdown();
  }

  @Test
  @SmallTest
  public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException {
    Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
    MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    pcClient = createPeerConnectionClient(localRenderer,
        new MockSink(/* expectedFrames= */ 0, /* rendererName= */ null),
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */));

    // Wait for local SDP and ice candidates set events.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT));

    // Check that local video frames were rendered.
    assertTrue(
        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    pcClient.close();
    assertTrue(
        "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
  }

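  // Shared body for the loopback tests below: creates an offer, relabels the local SDP as the
  // answer and applies it as the remote description, waits for ICE to connect, and then verifies
  // that frames (or, for audio-only calls, a fixed run time) are observed before closing.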
  private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
      boolean decodeToTexture) throws InterruptedException {
    loopback = true;
    MockSink localRenderer = null;
    MockSink remoteRenderer = null;
    if (parameters.videoCallEnabled) {
      Log.d(TAG, "testLoopback for video " + parameters.videoCodec);
      localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
      remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
    } else {
      Log.d(TAG, "testLoopback for audio.");
    }
    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer);

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    if (parameters.videoCallEnabled) {
      // Check that local and remote video frames were rendered.
      assertTrue("Local video frames were not rendered.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue("Remote video frames were not rendered.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    } else {
      // For audio just sleep for 1 sec.
      // TODO(glaznev): check how we can detect that remote audio was rendered.
      Thread.sleep(AUDIO_RUN_TIMEOUT);
    }

    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testLoopback done.");
  }

  @Test
  @SmallTest
  public void testLoopbackAudio() throws InterruptedException {
    doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp8() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp9() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackH264() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
  }

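  // The texture-based loopback tests below require API level 19 (KITKAT) and return early on
  // older devices.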
  @Test
  @SmallTest
  public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp9DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackH264DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackH264CaptureToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
  }

  // Checks if the default front camera can be switched to the back camera and then
  // back to the front camera.
  @Test
  @SmallTest
  public void testCameraSwitch() throws InterruptedException {
    Log.d(TAG, "testCameraSwitch");
    loopback = true;

    MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */));

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue("Local video frames were not rendered before camera switch.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered before camera switch.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    for (int i = 0; i < CAMERA_SWITCH_ATTEMPTS; i++) {
      // Try to switch camera.
      pcClient.switchCamera();

      // Reset video renderers and check that local and remote video frames
      // were rendered after camera switch.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue("Local video frames were not rendered after camera switch.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue("Remote video frames were not rendered after camera switch.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }
    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testCameraSwitch done.");
  }

  // Checks if the video source can be restarted, simulating the app going to the
  // background and back to the foreground.
  @Test
  @SmallTest
  public void testVideoSourceRestart() throws InterruptedException {
    Log.d(TAG, "testVideoSourceRestart");
    loopback = true;

    MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */));

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue("Local video frames were not rendered before video restart.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered before video restart.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    // Stop and then start the video source a few times.
    for (int i = 0; i < VIDEO_RESTART_ATTEMPTS; i++) {
      pcClient.stopVideoSource();
      Thread.sleep(VIDEO_RESTART_TIMEOUT);
      pcClient.startVideoSource();

      // Reset video renderers and check that local and remote video frames
      // were rendered after video restart.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue("Local video frames were not rendered after video restart.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue("Remote video frames were not rendered after video restart.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }
    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testVideoSourceRestart done.");
  }

  // Checks if the capture format can be changed on the fly and the decoder can be reset properly.
  @Test
  @SmallTest
  public void testCaptureFormatChange() throws InterruptedException {
    Log.d(TAG, "testCaptureFormatChange");
    loopback = true;

    MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */));

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue("Local video frames were not rendered before camera resolution change.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered before camera resolution change.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    // Change capture output format a few times.
    for (int i = 0; i < 2 * CAPTURE_FORMAT_CHANGE_ATTEMPTS; i++) {
      if (i % 2 == 0) {
        pcClient.changeCaptureFormat(WIDTH_VGA, HEIGHT_VGA, MAX_VIDEO_FPS);
      } else {
        pcClient.changeCaptureFormat(WIDTH_QVGA, HEIGHT_QVGA, MAX_VIDEO_FPS);
      }

      // Reset video renderers and check that local and remote video frames
      // were rendered after capture format change.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue("Local video frames were not rendered after capture format change.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue("Remote video frames were not rendered after capture format change.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }

    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testCaptureFormatChange done.");
  }
}