/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.content.Context;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoFrame;

class CameraVideoCapturerTestFixtures {
  static final String TAG = "CameraVideoCapturerTestFixtures";
  // Default values used for starting capturing
  static final int DEFAULT_WIDTH = 640;
  static final int DEFAULT_HEIGHT = 480;
  static final int DEFAULT_FPS = 15;

  static private class RendererCallbacks implements VideoSink {
    private final Object frameLock = new Object();
    private int framesRendered;
    private int width;
    private int height;

    @Override
    public void onFrame(VideoFrame frame) {
      synchronized (frameLock) {
        ++framesRendered;
        width = frame.getRotatedWidth();
        height = frame.getRotatedHeight();
        frameLock.notify();
      }
    }

    public int frameWidth() {
      synchronized (frameLock) {
        return width;
      }
    }

    public int frameHeight() {
      synchronized (frameLock) {
        return height;
      }
    }

    public int waitForNextFrameToRender() throws InterruptedException {
      Logging.d(TAG, "Waiting for the next frame to render");
      synchronized (frameLock) {
        final int framesRenderedStart = framesRendered;
        while (framesRendered == framesRenderedStart) {
          frameLock.wait();
        }
        return framesRendered;
      }
    }
  }

  static private class FakeAsyncRenderer implements VideoSink {
    private final List<VideoFrame> pendingFrames = new ArrayList<VideoFrame>();

    @Override
    public void onFrame(VideoFrame frame) {
      synchronized (pendingFrames) {
        frame.retain();
        pendingFrames.add(frame);
        pendingFrames.notifyAll();
      }
    }

    // Wait until at least one frame has been received before returning the pending frames.
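    // The returned frames have been retained by onFrame, so the caller is responsible for
    // releasing them.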
    public List<VideoFrame> waitForPendingFrames() throws InterruptedException {
      Logging.d(TAG, "Waiting for pending frames");
      synchronized (pendingFrames) {
        while (pendingFrames.isEmpty()) {
          pendingFrames.wait();
        }
        return new ArrayList<VideoFrame>(pendingFrames);
      }
    }
  }

  static private class FakeCapturerObserver implements CapturerObserver {
    private int framesCaptured;
    private @Nullable VideoFrame videoFrame;
    final private Object frameLock = new Object();
    final private Object capturerStartLock = new Object();
    private Boolean capturerStartResult;
    final private List<Long> timestamps = new ArrayList<Long>();

    @Override
    public void onCapturerStarted(boolean success) {
      Logging.d(TAG, "onCapturerStarted: " + success);

      synchronized (capturerStartLock) {
        capturerStartResult = success;
        capturerStartLock.notifyAll();
      }
    }

    @Override
    public void onCapturerStopped() {
      Logging.d(TAG, "onCapturerStopped");
    }

    @Override
    public void onFrameCaptured(VideoFrame frame) {
      synchronized (frameLock) {
        ++framesCaptured;
        if (videoFrame != null) {
          videoFrame.release();
        }
        videoFrame = frame;
        videoFrame.retain();
        timestamps.add(videoFrame.getTimestampNs());
        frameLock.notify();
      }
    }

    public boolean waitForCapturerToStart() throws InterruptedException {
      Logging.d(TAG, "Waiting for the capturer to start");
      synchronized (capturerStartLock) {
        while (capturerStartResult == null) {
          capturerStartLock.wait();
        }
        return capturerStartResult;
      }
    }

    public int waitForNextCapturedFrame() throws InterruptedException {
      Logging.d(TAG, "Waiting for the next captured frame");
      synchronized (frameLock) {
        final int framesCapturedStart = framesCaptured;
        while (framesCaptured == framesCapturedStart) {
          frameLock.wait();
        }
        return framesCaptured;
      }
    }

    int frameWidth() {
      synchronized (frameLock) {
        return videoFrame.getBuffer().getWidth();
      }
    }

    int frameHeight() {
      synchronized (frameLock) {
        return videoFrame.getBuffer().getHeight();
      }
    }

    void releaseFrame() {
      synchronized (frameLock) {
        if (videoFrame != null) {
          videoFrame.release();
          videoFrame = null;
        }
      }
    }

    List<Long> getCopyAndResetListOftimeStamps() {
      synchronized (frameLock) {
        ArrayList<Long> list = new ArrayList<Long>(timestamps);
        timestamps.clear();
        return list;
      }
    }
  }

  static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
    public boolean onCameraOpeningCalled;
    public boolean onFirstFrameAvailableCalled;
    private final Object onCameraFreezedLock = new Object();
    private String onCameraFreezedDescription;
    private final Object cameraClosedLock = new Object();
    private boolean cameraClosed = true;

    @Override
    public void onCameraError(String errorDescription) {
      Logging.w(TAG, "Camera error: " + errorDescription);
      cameraClosed = true;
    }

    @Override
    public void onCameraDisconnected() {}

    @Override
    public void onCameraFreezed(String errorDescription) {
      synchronized (onCameraFreezedLock) {
        onCameraFreezedDescription = errorDescription;
        onCameraFreezedLock.notifyAll();
      }
    }

    @Override
    public void onCameraOpening(String cameraName) {
      onCameraOpeningCalled = true;
      synchronized (cameraClosedLock) {
        cameraClosed = false;
      }
    }

    @Override
    public void onFirstFrameAvailable() {
      onFirstFrameAvailableCalled = true;
    }

    @Override
    public void onCameraClosed() {
      synchronized (cameraClosedLock) {
        cameraClosed = true;
        cameraClosedLock.notifyAll();
      }
    }

    public String waitForCameraFreezed() throws InterruptedException {
      Logging.d(TAG, "Waiting for the camera to freeze");
      synchronized (onCameraFreezedLock) {
        while (onCameraFreezedDescription == null) {
          onCameraFreezedLock.wait();
        }
        return onCameraFreezedDescription;
      }
    }

    public void waitForCameraClosed() throws InterruptedException {
      synchronized (cameraClosedLock) {
        while (!cameraClosed) {
          Logging.d(TAG, "Waiting for the camera to close.");
          cameraClosedLock.wait();
        }
      }
    }
  }

  /**
   * Class to collect all classes related to a single capturer instance.
   */
  static private class CapturerInstance {
    public CameraVideoCapturer capturer;
    public CameraEvents cameraEvents;
    public SurfaceTextureHelper surfaceTextureHelper;
    public FakeCapturerObserver observer;
    public List<CaptureFormat> supportedFormats;
    public CaptureFormat format;
  }

  /**
   * Class used for collecting a VideoSource, a VideoTrack and a renderer. The class
   * is used for testing local rendering from a capturer.
   */
  static private class VideoTrackWithRenderer {
    public SurfaceTextureHelper surfaceTextureHelper;
    public VideoSource source;
    public VideoTrack track;
    public RendererCallbacks rendererCallbacks;
    public FakeAsyncRenderer fakeAsyncRenderer;
  }

  public abstract static class TestObjectFactory {
    final CameraEnumerator cameraEnumerator;

    TestObjectFactory() {
      cameraEnumerator = getCameraEnumerator();
    }

    public CameraVideoCapturer createCapturer(
        String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
      return cameraEnumerator.createCapturer(name, eventsHandler);
    }

    public @Nullable String getNameOfFrontFacingDevice() {
      for (String deviceName : cameraEnumerator.getDeviceNames()) {
        if (cameraEnumerator.isFrontFacing(deviceName)) {
          return deviceName;
        }
      }

      return null;
    }

    public @Nullable String getNameOfBackFacingDevice() {
      for (String deviceName : cameraEnumerator.getDeviceNames()) {
        if (cameraEnumerator.isBackFacing(deviceName)) {
          return deviceName;
        }
      }

      return null;
    }

    public boolean haveTwoCameras() {
      return cameraEnumerator.getDeviceNames().length >= 2;
    }

    public boolean isCapturingToTexture() {
      // In the future, we plan to only support capturing to texture, so default to true.
      return true;
    }

    abstract public CameraEnumerator getCameraEnumerator();
    abstract public Context getAppContext();

    // CameraVideoCapturer API is too slow for some of our tests where we need to open a competing
    // camera. These methods are used instead.
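    // The Object returned by rawOpenCamera is an implementation-specific camera handle and must
    // be passed back unchanged to rawCloseCamera.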
    abstract public Object rawOpenCamera(String cameraName);
    abstract public void rawCloseCamera(Object camera);
  }

  private PeerConnectionFactory peerConnectionFactory;
  private TestObjectFactory testObjectFactory;

  CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
    PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(testObjectFactory.getAppContext())
            .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
            .createInitializationOptions());

    this.peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();
    this.testObjectFactory = testObjectFactory;
  }

  public void dispose() {
    this.peerConnectionFactory.dispose();
  }

  // Internal helper methods
  private CapturerInstance createCapturer(String name, boolean initialize) {
    CapturerInstance instance = new CapturerInstance();
    instance.cameraEvents = new CameraEvents();
    instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
    instance.surfaceTextureHelper = SurfaceTextureHelper.create(
        "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
    instance.observer = new FakeCapturerObserver();
    if (initialize) {
      instance.capturer.initialize(
          instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
    }
    instance.supportedFormats = testObjectFactory.cameraEnumerator.getSupportedFormats(name);
    return instance;
  }

  private CapturerInstance createCapturer(boolean initialize) {
    String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
    return createCapturer(name, initialize);
  }

  private void startCapture(CapturerInstance instance) {
    startCapture(instance, 0);
  }

  private void startCapture(CapturerInstance instance, int formatIndex) {
    final CameraEnumerationAndroid.CaptureFormat format =
        instance.supportedFormats.get(formatIndex);

    instance.capturer.startCapture(format.width, format.height, format.framerate.max);
    instance.format = format;
  }

  private void disposeCapturer(CapturerInstance instance) throws InterruptedException {
    instance.capturer.stopCapture();
    instance.cameraEvents.waitForCameraClosed();
    instance.capturer.dispose();
    instance.observer.releaseFrame();
    instance.surfaceTextureHelper.dispose();
  }

  private VideoTrackWithRenderer createVideoTrackWithRenderer(
      CameraVideoCapturer capturer, VideoSink rendererCallbacks) {
    VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
    videoTrackWithRenderer.surfaceTextureHelper = SurfaceTextureHelper.create(
        "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
    videoTrackWithRenderer.source =
        peerConnectionFactory.createVideoSource(/* isScreencast= */ false);
    capturer.initialize(videoTrackWithRenderer.surfaceTextureHelper,
        testObjectFactory.getAppContext(), videoTrackWithRenderer.source.getCapturerObserver());
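    // Note: startCapture is asynchronous; success or failure is reported back through
    // CapturerObserver.onCapturerStarted on the capturer's thread.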
    capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
    videoTrackWithRenderer.track =
        peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
    videoTrackWithRenderer.track.addSink(rendererCallbacks);
    return videoTrackWithRenderer;
  }

  private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer) {
    RendererCallbacks rendererCallbacks = new RendererCallbacks();
    VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturer, rendererCallbacks);
    videoTrackWithRenderer.rendererCallbacks = rendererCallbacks;
    return videoTrackWithRenderer;
  }

  private VideoTrackWithRenderer createVideoTrackWithFakeAsyncRenderer(
      CameraVideoCapturer capturer) {
    FakeAsyncRenderer fakeAsyncRenderer = new FakeAsyncRenderer();
    VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturer, fakeAsyncRenderer);
    videoTrackWithRenderer.fakeAsyncRenderer = fakeAsyncRenderer;
    return videoTrackWithRenderer;
  }

  private void disposeVideoTrackWithRenderer(VideoTrackWithRenderer videoTrackWithRenderer) {
    videoTrackWithRenderer.track.dispose();
    videoTrackWithRenderer.source.dispose();
  }

  private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
    final CountDownLatch barrier = new CountDownLatch(1);
    capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
      @Override
      public void run() {
        barrier.countDown();
      }
    });
    barrier.await();
  }

  private void createCapturerAndRender(String name) throws InterruptedException {
    if (name == null) {
      Logging.w(TAG, "Skipping video capturer test because device name is null.");
      return;
    }

    final CapturerInstance capturerInstance = createCapturer(name, false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);
  }

  // Test methods
  public void createCapturerAndDispose() throws InterruptedException {
    disposeCapturer(createCapturer(true /* initialize */));
  }

  public void createNonExistingCamera() throws InterruptedException {
    try {
      disposeCapturer(createCapturer("non-existing camera", false /* initialize */));
    } catch (IllegalArgumentException e) {
      return;
    }

    fail("Expected illegal argument exception when creating non-existing camera.");
  }

  public void createCapturerAndRender() throws InterruptedException {
    String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
    createCapturerAndRender(name);
  }

  public void createFrontFacingCapturerAndRender() throws InterruptedException {
    createCapturerAndRender(testObjectFactory.getNameOfFrontFacingDevice());
  }

  public void createBackFacingCapturerAndRender() throws InterruptedException {
    createCapturerAndRender(testObjectFactory.getNameOfBackFacingDevice());
  }

  public void switchCamera() throws InterruptedException {
    switchCamera(false /* specifyCameraName */);
  }

  public void switchCamera(boolean specifyCameraName) throws InterruptedException {
    if (!testObjectFactory.haveTwoCameras()) {
      Logging.w(
          TAG, "Skipping test switch video capturer because the device doesn't have two cameras.");
      return;
    }

    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    // Wait for the camera to start so we can switch it.
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);

    // Array with one element to work around the final-variable restriction in nested classes.
    final boolean[] cameraSwitchSuccessful = new boolean[1];
    final CountDownLatch barrier = new CountDownLatch(1);
    final CameraVideoCapturer.CameraSwitchHandler cameraSwitchHandler =
        new CameraVideoCapturer.CameraSwitchHandler() {
          @Override
          public void onCameraSwitchDone(boolean isFrontCamera) {
            cameraSwitchSuccessful[0] = true;
            barrier.countDown();
          }
          @Override
          public void onCameraSwitchError(String errorDescription) {
            cameraSwitchSuccessful[0] = false;
            barrier.countDown();
          }
        };
    if (specifyCameraName) {
      String expectedCameraName = testObjectFactory.cameraEnumerator.getDeviceNames()[1];
      capturerInstance.capturer.switchCamera(cameraSwitchHandler, expectedCameraName);
    } else {
      capturerInstance.capturer.switchCamera(cameraSwitchHandler);
    }
    // Wait until the camera has been switched.
    barrier.await();

    // Check result.
    assertTrue(cameraSwitchSuccessful[0]);
    // Ensure that frames are received.
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);
  }

  public void cameraEventsInvoked() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
    startCapture(capturerInstance);
    // Make sure camera is started and first frame is received and then stop it.
    assertTrue(capturerInstance.observer.waitForCapturerToStart());
    capturerInstance.observer.waitForNextCapturedFrame();
    disposeCapturer(capturerInstance);

    assertTrue(capturerInstance.cameraEvents.onCameraOpeningCalled);
    assertTrue(capturerInstance.cameraEvents.onFirstFrameAvailableCalled);
  }

  public void cameraCallsAfterStop() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
    startCapture(capturerInstance);
    // Make sure camera is started and then stop it.
    assertTrue(capturerInstance.observer.waitForCapturerToStart());
    capturerInstance.capturer.stopCapture();
    capturerInstance.observer.releaseFrame();

    // We can't change `capturer` at this point, but we should not crash.
    capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
    capturerInstance.capturer.changeCaptureFormat(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);

    disposeCapturer(capturerInstance);
  }

  public void stopRestartVideoSource() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);

    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
    assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());

    capturerInstance.capturer.stopCapture();
    assertEquals(MediaSource.State.ENDED, videoTrackWithRenderer.source.state());

    startCapture(capturerInstance);
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
    assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());

    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);
  }

  public void startStopWithDifferentResolutions() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);

    for (int i = 0; i < 3; ++i) {
      startCapture(capturerInstance, i);
      assertTrue(capturerInstance.observer.waitForCapturerToStart());
      capturerInstance.observer.waitForNextCapturedFrame();

      // Check the frame size. The actual width and height depend on how the capturer is mounted.
      final boolean identicalResolution =
          (capturerInstance.observer.frameWidth() == capturerInstance.format.width
              && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
      final boolean flippedResolution =
          (capturerInstance.observer.frameWidth() == capturerInstance.format.height
              && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
      if (!identicalResolution && !flippedResolution) {
        fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
            + capturerInstance.observer.frameHeight() + " expected: "
            + capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
            + capturerInstance.format.height + "x" + capturerInstance.format.width);
      }

      capturerInstance.capturer.stopCapture();
      capturerInstance.observer.releaseFrame();
    }
    disposeCapturer(capturerInstance);
  }

  public void returnBufferLate() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
    startCapture(capturerInstance);
    assertTrue(capturerInstance.observer.waitForCapturerToStart());

    capturerInstance.observer.waitForNextCapturedFrame();
    capturerInstance.capturer.stopCapture();
    List<Long> listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
    assertTrue(listOftimestamps.size() >= 1);

    startCapture(capturerInstance, 1);
    capturerInstance.observer.waitForCapturerToStart();
    capturerInstance.observer.releaseFrame();

    capturerInstance.observer.waitForNextCapturedFrame();
    capturerInstance.capturer.stopCapture();

    listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
    assertTrue(listOftimestamps.size() >= 1);

    disposeCapturer(capturerInstance);
  }
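  // Verifies that frames still held by a sink can be released safely after the capturer, track
  // and source have been disposed.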
  public void returnBufferLateEndToEnd() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
    // Wait for at least one frame that has not been returned.
    assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());

    capturerInstance.capturer.stopCapture();

    // Dispose everything.
    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);

    // Return the frame(s), on a different thread out of spite.
    final List<VideoFrame> pendingFrames =
        videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
    final Thread returnThread = new Thread(new Runnable() {
      @Override
      public void run() {
        for (VideoFrame frame : pendingFrames) {
          frame.release();
        }
      }
    });
    returnThread.start();
    returnThread.join();
  }

  public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
    startCapture(capturerInstance);
    // Make sure camera is started.
    assertTrue(capturerInstance.observer.waitForCapturerToStart());
    // Since we don't return the buffer, we should get a starvation message if we are
    // capturing to a texture.
    assertEquals("Camera failure. Client must return video buffers.",
        capturerInstance.cameraEvents.waitForCameraFreezed());

    capturerInstance.capturer.stopCapture();
    disposeCapturer(capturerInstance);
  }

  public void scaleCameraOutput() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);

    final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
    final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
    final int frameRate = 30;
    final int scaledWidth = startWidth / 2;
    final int scaledHeight = startHeight / 2;

    // Request the captured frames to be scaled.
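    // Frames already in flight may still arrive at the original resolution, so the loop below
    // inspects up to 30 frames before giving up.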
    videoTrackWithRenderer.source.adaptOutputFormat(scaledWidth, scaledHeight, frameRate);

    boolean gotExpectedResolution = false;
    int numberOfInspectedFrames = 0;

    do {
      videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
      ++numberOfInspectedFrames;

      gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
          && videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
    } while (!gotExpectedResolution && numberOfInspectedFrames < 30);

    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);

    assertTrue(gotExpectedResolution);
  }

  public void cropCameraOutput() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);

    final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
    final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
    final int frameRate = 30;
    final int cropWidth;
    final int cropHeight;
    if (startWidth > startHeight) {
      // Landscape input, request portrait output.
      cropWidth = 360;
      cropHeight = 640;
    } else {
      // Portrait input, request landscape output.
      cropWidth = 640;
      cropHeight = 360;
    }

    // Request different output orientation than input.
    videoTrackWithRenderer.source.adaptOutputFormat(
        cropWidth, cropHeight, cropWidth, cropHeight, frameRate);

    boolean gotExpectedOrientation = false;
    int numberOfInspectedFrames = 0;

    do {
      videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
      ++numberOfInspectedFrames;

      gotExpectedOrientation = (cropWidth > cropHeight)
          == (videoTrackWithRenderer.rendererCallbacks.frameWidth()
              > videoTrackWithRenderer.rendererCallbacks.frameHeight());
    } while (!gotExpectedOrientation && numberOfInspectedFrames < 30);

    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);

    assertTrue(gotExpectedOrientation);
  }

  public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
    final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
    // At this point camera is not actually opened.
    final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);

    final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);

    startCapture(capturerInstance);

    if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
      // The first opened camera client will be evicted.
      assertTrue(capturerInstance.observer.waitForCapturerToStart());
    } else {
      assertFalse(capturerInstance.observer.waitForCapturerToStart());
    }

    testObjectFactory.rawCloseCamera(competingCamera);
    disposeCapturer(capturerInstance);
  }

  public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
    final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
    // At this point camera is not actually opened.
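    // Creating the capturer below does not open the camera device; the camera is opened when
    // capture starts inside createVideoTrackWithRenderer.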
    final CapturerInstance capturerInstance = createCapturer(cameraName, false /* initialize */);

    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
    final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);

    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening camera.");
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    waitUntilIdle(capturerInstance);

    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Closing competing camera.");
    testObjectFactory.rawCloseCamera(competingCamera);

    // Make sure camera is started and first frame is received and then stop it.
    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Waiting for capture to start.");
    videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Stopping capture.");
    disposeCapturer(capturerInstance);
  }

  public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
    final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
    // At this point camera is not actually opened.
    final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);

    final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);

    startCapture(capturerInstance);
    disposeCapturer(capturerInstance);

    testObjectFactory.rawCloseCamera(competingCamera);
  }
}