/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.webrtc;

import android.content.Context;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;

import org.json.JSONException;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Logging;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Android-specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
// VideoCapturerAndroid.create();
// This class extends VideoCapturer with a method to easily switch between the
// front and back camera. It also provides methods for enumerating valid device
// names.
//
// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
// camera thread. The internal *OnCameraThread() methods must check |camera| for null to determine
// whether the camera has been stopped.
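//
// Example creation (a minimal, hypothetical sketch; the device name can come from
// CameraEnumerationAndroid, and the events handler may be null if no callbacks are needed):
//
//   String deviceName = CameraEnumerationAndroid.getDeviceName(0);
//   VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null);
//   if (capturer == null) {
//     // No camera matching |deviceName| was found.
//   }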
@SuppressWarnings("deprecation")
public class VideoCapturerAndroid extends VideoCapturer implements
    android.hardware.Camera.PreviewCallback,
    SurfaceTextureHelper.OnTextureFrameAvailableListener {
  private final static String TAG = "VideoCapturerAndroid";
  private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
  private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 6000;

  private android.hardware.Camera camera;  // Only non-null while capturing.
  private HandlerThread cameraThread;
  private final Handler cameraThreadHandler;
  private Context applicationContext;
  // Synchronization lock for |id|.
  private final Object cameraIdLock = new Object();
  private int id;
  private android.hardware.Camera.CameraInfo info;
  private final CameraStatistics cameraStatistics;
  // Remember the requested format in case we want to switch cameras.
  private int requestedWidth;
  private int requestedHeight;
  private int requestedFramerate;
  // The capture format will be the closest supported format to the requested format.
  private CaptureFormat captureFormat;
  private final Object pendingCameraSwitchLock = new Object();
  private volatile boolean pendingCameraSwitch;
  private CapturerObserver frameObserver = null;
  private final CameraEventsHandler eventsHandler;
  private boolean firstFrameReported;
  // Arbitrary queue depth.  Higher number means more memory allocated & held,
  // lower number means more sensitivity to processing time in the client (and
  // potentially stalling the capturer if it runs out of buffers to write to).
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
  private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
  private final boolean isCapturingToTexture;
  final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
  // The camera API can output one old frame after the camera has been switched or the resolution
  // has been changed. This flag is used for dropping the first frame after camera restart.
  private boolean dropNextFrame = false;
  // |openCameraOnCodecThreadRunner| is used for retrying to open the camera if it is in use by
  // another application when startCaptureOnCameraThread is called.
  private Runnable openCameraOnCodecThreadRunner;
  private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
  private final static int OPEN_CAMERA_DELAY_MS = 500;
  private int openCameraAttempts;

  // Camera error callback.
  private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
      new android.hardware.Camera.ErrorCallback() {
    @Override
    public void onError(int error, android.hardware.Camera camera) {
      String errorMessage;
      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
        errorMessage = "Camera server died!";
      } else {
        errorMessage = "Camera error: " + error;
      }
      Logging.e(TAG, errorMessage);
      if (eventsHandler != null) {
        eventsHandler.onCameraError(errorMessage);
      }
    }
  };

  // Camera observer - monitors camera framerate. Observer is executed on camera thread.
  private final Runnable cameraObserver = new Runnable() {
    private int freezePeriodCount;
    @Override
    public void run() {
      int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
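      // Convert the frame count over the observation period to frames per second, rounding to the
      // nearest integer (e.g. 31 frames in 2000 ms gives (31 * 1000 + 1000) / 2000 = 16 fps).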
      int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
          / CAMERA_OBSERVER_PERIOD_MS;

      Logging.d(TAG, "Camera fps: " + cameraFps + ".");
      if (cameraFramesCount == 0) {
        ++freezePeriodCount;
        if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMEOUT_MS
            && eventsHandler != null) {
          Logging.e(TAG, "Camera freeze detected.");
          if (surfaceHelper.isTextureInUse()) {
            // This can only happen if we are capturing to textures.
            eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
          } else {
            eventsHandler.onCameraFreezed("Camera failure.");
          }
          return;
        }
      } else {
        freezePeriodCount = 0;
      }
      cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
    }
  };

  private static class CameraStatistics {
    private int frameCount = 0;
    private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();

    CameraStatistics() {
      threadChecker.detachThread();
    }

    public void addFrame() {
      threadChecker.checkIsOnValidThread();
      ++frameCount;
    }

    public int getAndResetFrameCount() {
      threadChecker.checkIsOnValidThread();
      int count = frameCount;
      frameCount = 0;
      return count;
    }
  }

  public static interface CameraEventsHandler {
    // Camera error handler - invoked when the camera cannot be opened
    // or any camera exception happens on the camera thread.
    void onCameraError(String errorDescription);

    // Invoked when the camera stops receiving frames.
    void onCameraFreezed(String errorDescription);

    // Callback invoked when the camera is opening.
    void onCameraOpening(int cameraId);

    // Callback invoked when the first camera frame is available after the camera is opened.
    void onFirstFrameAvailable();

    // Callback invoked when the camera is closed.
    void onCameraClosed();
  }

  // Camera switch handler - one of these functions is invoked with the result of switchCamera().
  // The callback may be called on an arbitrary thread.
  public interface CameraSwitchHandler {
    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
    void onCameraSwitchDone(boolean isFrontCamera);
    // Invoked on failure, e.g. the camera is stopped or only one camera is available.
    void onCameraSwitchError(String errorDescription);
  }
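
  // Example (a minimal, hypothetical sketch; |capturer| is a previously created
  // VideoCapturerAndroid, and the handler may be invoked on an arbitrary thread):
  //
  //   capturer.switchCamera(new CameraSwitchHandler() {
  //     @Override
  //     public void onCameraSwitchDone(boolean isFrontCamera) {
  //       // Update any UI state for the new camera, reposting to the main thread if needed.
  //     }
  //     @Override
  //     public void onCameraSwitchError(String errorDescription) {
  //       // Switching failed, e.g. the capturer was stopped or only one camera exists.
  //     }
  //   });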

  public static VideoCapturerAndroid create(String name,
      CameraEventsHandler eventsHandler) {
    return VideoCapturerAndroid.create(name, eventsHandler, null);
  }

  public static VideoCapturerAndroid create(String name,
      CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
    final int cameraId = lookupDeviceName(name);
    if (cameraId == -1) {
      return null;
    }

    final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
        sharedEglContext);
    capturer.setNativeCapturer(
        nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
    return capturer;
  }

  public void printStackTrace() {
    if (cameraThread != null) {
      StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
      if (cameraStackTraces.length > 0) {
        Logging.d(TAG, "VideoCapturerAndroid stack traces:");
        for (StackTraceElement stackTrace : cameraStackTraces) {
          Logging.d(TAG, stackTrace.toString());
        }
      }
    }
  }

  // Switch camera to the next valid camera id. This can only be called while
  // the camera is running.
  public void switchCamera(final CameraSwitchHandler handler) {
    if (android.hardware.Camera.getNumberOfCameras() < 2) {
      if (handler != null) {
        handler.onCameraSwitchError("No camera to switch to.");
      }
      return;
    }
    synchronized (pendingCameraSwitchLock) {
      if (pendingCameraSwitch) {
        // Do not handle multiple camera switch requests, to avoid blocking the
        // camera thread with a queue of too many switch requests.
        Logging.w(TAG, "Ignoring camera switch request.");
        if (handler != null) {
          handler.onCameraSwitchError("Pending camera switch already in progress.");
        }
        return;
      }
      pendingCameraSwitch = true;
    }
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        if (camera == null) {
          if (handler != null) {
            handler.onCameraSwitchError("Camera is stopped.");
          }
          return;
        }
        switchCameraOnCameraThread();
        synchronized (pendingCameraSwitchLock) {
          pendingCameraSwitch = false;
        }
        if (handler != null) {
          handler.onCameraSwitchDone(
              info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
        }
      }
    });
  }

  // Requests a new output format from the video capturer. Frames captured by the camera will be
  // scaled and/or dropped by the video capturer.
  // It does not matter if width and height are flipped, i.e. |width| = 640, |height| = 480
  // produces the same result as |width| = 480, |height| = 640.
  // TODO(magjed/perkj): Document what this function does. Change name?
  public void onOutputFormatRequest(final int width, final int height, final int framerate) {
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        onOutputFormatRequestOnCameraThread(width, height, framerate);
      }
    });
  }

  // Reconfigure the camera to capture in a new format. This should only be called while the camera
  // is running.
  public void changeCaptureFormat(final int width, final int height, final int framerate) {
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        startPreviewOnCameraThread(width, height, framerate);
      }
    });
  }

  // Helper function to retrieve the current camera id synchronously. Note that the camera id might
  // be changed at any point by switchCamera() calls.
  int getCurrentCameraId() {
    synchronized (cameraIdLock) {
      return id;
    }
  }

  public List<CaptureFormat> getSupportedFormats() {
    return CameraEnumerationAndroid.getSupportedFormats(getCurrentCameraId());
  }

  // Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
  public boolean isCapturingToTexture() {
    return isCapturingToTexture;
  }

  // Called from native code.
  private String getSupportedFormatsAsJson() throws JSONException {
    return CameraEnumerationAndroid.getSupportedFormatsAsJson(getCurrentCameraId());
  }

  // Called from native VideoCapturer_nativeCreateVideoCapturer.
  private VideoCapturerAndroid(int cameraId) {
    this(cameraId, null, null);
  }

  private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
      EglBase.Context sharedContext) {
    this.id = cameraId;
    this.eventsHandler = eventsHandler;
    cameraThread = new HandlerThread(TAG);
    cameraThread.start();
    cameraThreadHandler = new Handler(cameraThread.getLooper());
    isCapturingToTexture = (sharedContext != null);
    cameraStatistics = new CameraStatistics();
    surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
    if (isCapturingToTexture) {
      surfaceHelper.setListener(this);
    }
    Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThread) {
      throw new IllegalStateException("Wrong thread");
    }
  }

  // Returns the camera index for camera with name |deviceName|, or -1 if no such camera can be
  // found. If |deviceName| is empty, the first available device is used.
  private static int lookupDeviceName(String deviceName) {
    Logging.d(TAG, "lookupDeviceName: " + deviceName);
    if (deviceName == null || android.hardware.Camera.getNumberOfCameras() == 0) {
      return -1;
    }
    if (deviceName.isEmpty()) {
      return 0;
    }
    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
      if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
        return i;
      }
    }
    return -1;
  }

  // Called by native code to quit the camera thread. This needs to be done manually, otherwise the
  // thread and handler will not be garbage collected.
  private void release() {
    Logging.d(TAG, "release");
    if (isReleased()) {
      throw new IllegalStateException("Already released");
    }
    ThreadUtils.invokeUninterruptibly(cameraThreadHandler, new Runnable() {
      @Override
      public void run() {
        if (camera != null) {
          throw new IllegalStateException("Release called while camera is running");
        }
      }
    });
    surfaceHelper.disconnect(cameraThreadHandler);
    cameraThread = null;
  }

  // Used for testing purposes to check if release() has been called.
  public boolean isReleased() {
    return (cameraThread == null);
  }

  // Called by native code.
  //
  // Note that this actually opens the camera, and Camera callbacks run on the
  // thread that calls open(), so this is done on the CameraThread.
  void startCapture(
      final int width, final int height, final int framerate,
      final Context applicationContext, final CapturerObserver frameObserver) {
    Logging.d(TAG, "startCapture requested: " + width + "x" + height
        + "@" + framerate);
    if (applicationContext == null) {
      throw new RuntimeException("applicationContext not set.");
    }
    if (frameObserver == null) {
      throw new RuntimeException("frameObserver not set.");
    }

    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        startCaptureOnCameraThread(width, height, framerate, frameObserver,
            applicationContext);
      }
    });
  }

  private void startCaptureOnCameraThread(
      final int width, final int height, final int framerate, final CapturerObserver frameObserver,
      final Context applicationContext) {
    Throwable error = null;
    checkIsOnCameraThread();
    if (camera != null) {
      throw new RuntimeException("Camera has already been started.");
    }
    this.applicationContext = applicationContext;
    this.frameObserver = frameObserver;
    this.firstFrameReported = false;

    try {
      try {
        synchronized (cameraIdLock) {
          Logging.d(TAG, "Opening camera " + id);
          if (eventsHandler != null) {
            eventsHandler.onCameraOpening(id);
          }
          camera = android.hardware.Camera.open(id);
          info = new android.hardware.Camera.CameraInfo();
          android.hardware.Camera.getCameraInfo(id, info);
        }
      } catch (RuntimeException e) {
        openCameraAttempts++;
        if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
          Logging.e(TAG, "Camera.open failed, retrying", e);
          openCameraOnCodecThreadRunner = new Runnable() {
            @Override public void run() {
              startCaptureOnCameraThread(width, height, framerate, frameObserver,
                  applicationContext);
            }
          };
          cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
          return;
        }
        openCameraAttempts = 0;
        throw e;
      }

      try {
        camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
      } catch (IOException e) {
        Logging.e(TAG, "setPreviewTexture failed", e);
        throw new RuntimeException(e);
      }

      Logging.d(TAG, "Camera orientation: " + info.orientation +
          ". Device orientation: " + getDeviceOrientation());
      camera.setErrorCallback(cameraErrorCallback);
      startPreviewOnCameraThread(width, height, framerate);
      frameObserver.onCapturerStarted(true);

      // Start camera observer.
      cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
      return;
    } catch (RuntimeException e) {
      error = e;
    }
    Logging.e(TAG, "startCapture failed", error);
    stopCaptureOnCameraThread();
    frameObserver.onCapturerStarted(false);
    if (eventsHandler != null) {
      eventsHandler.onCameraError("Camera can not be started.");
    }
    return;
  }

  // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
  private void startPreviewOnCameraThread(int width, int height, int framerate) {
    checkIsOnCameraThread();
    Logging.d(
        TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
    if (camera == null) {
      Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
      return;
    }

    requestedWidth = width;
    requestedHeight = height;
    requestedFramerate = framerate;

    // Find closest supported format for |width| x |height| @ |framerate|.
    final android.hardware.Camera.Parameters parameters = camera.getParameters();
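    // Note: Camera.Parameters expresses preview fps values as integers scaled by 1000
    // (e.g. 30 fps is represented as 30000), hence |framerate| * 1000 below.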
    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
    final android.hardware.Camera.Size previewSize =
        CameraEnumerationAndroid.getClosestSupportedSize(
            parameters.getSupportedPreviewSizes(), width, height);
    final CaptureFormat captureFormat = new CaptureFormat(
        previewSize.width, previewSize.height,
        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);

    // If we are already using this capture format, there is no need to do anything.
    if (captureFormat.isSameFormat(this.captureFormat)) {
      return;
    }

    // Update camera parameters.
    Logging.d(TAG, "isVideoStabilizationSupported: " +
        parameters.isVideoStabilizationSupported());
    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    // Note: setRecordingHint(true) actually decreases the frame rate on N5.
    // parameters.setRecordingHint(true);
    if (captureFormat.maxFramerate > 0) {
      parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
    }
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);

    if (!isCapturingToTexture) {
      parameters.setPreviewFormat(captureFormat.imageFormat);
    }
    // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
    // as a workaround for an aspect ratio problem on Nexus 7.
    final android.hardware.Camera.Size pictureSize =
        CameraEnumerationAndroid.getClosestSupportedSize(
            parameters.getSupportedPictureSizes(), width, height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);

    // Temporarily stop preview if it's already running.
    if (this.captureFormat != null) {
      camera.stopPreview();
      dropNextFrame = true;
      // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
      // queue, but sometimes we receive a frame with the old resolution after this call anyway.
      camera.setPreviewCallbackWithBuffer(null);
    }

    // (Re)start preview.
    Logging.d(TAG, "Start capturing: " + captureFormat);
    this.captureFormat = captureFormat;

    List<String> focusModes = parameters.getSupportedFocusModes();
    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }

    camera.setParameters(parameters);
    if (!isCapturingToTexture) {
      queuedBuffers.clear();
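      // Pre-allocate the preview callback buffers. For the default NV21 preview format this
      // should work out to width * height * 3 / 2 bytes per frame (12 bits per pixel).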
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        queuedBuffers.add(buffer.array());
        camera.addCallbackBuffer(buffer.array());
      }
      camera.setPreviewCallbackWithBuffer(this);
    }
    camera.startPreview();
  }

  // Called by native code. Blocks until the camera is known to be stopped.
  void stopCapture() throws InterruptedException {
    Logging.d(TAG, "stopCapture");
    final CountDownLatch barrier = new CountDownLatch(1);
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
          stopCaptureOnCameraThread();
          barrier.countDown();
        }
    });
    barrier.await();
    Logging.d(TAG, "stopCapture done");
  }

  private void stopCaptureOnCameraThread() {
    checkIsOnCameraThread();
    Logging.d(TAG, "stopCaptureOnCameraThread");
    if (openCameraOnCodecThreadRunner != null) {
      cameraThreadHandler.removeCallbacks(openCameraOnCodecThreadRunner);
    }
    openCameraAttempts = 0;
    if (camera == null) {
      Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
      return;
    }

    cameraThreadHandler.removeCallbacks(cameraObserver);
    cameraStatistics.getAndResetFrameCount();
    Logging.d(TAG, "Stop preview.");
    camera.stopPreview();
    camera.setPreviewCallbackWithBuffer(null);
    queuedBuffers.clear();
    captureFormat = null;

    Logging.d(TAG, "Release camera.");
    camera.release();
    camera = null;
    if (eventsHandler != null) {
      eventsHandler.onCameraClosed();
    }
  }

  private void switchCameraOnCameraThread() {
    checkIsOnCameraThread();
    Logging.d(TAG, "switchCameraOnCameraThread");
    stopCaptureOnCameraThread();
    synchronized (cameraIdLock) {
      id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
    }
    dropNextFrame = true;
    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
        applicationContext);
    Logging.d(TAG, "switchCameraOnCameraThread done");
  }

  private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
    checkIsOnCameraThread();
    if (camera == null) {
      Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
      return;
    }
    Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
        "@" + framerate);
    frameObserver.onOutputFormatRequest(width, height, framerate);
  }

  // Exposed for testing purposes only.
  Handler getCameraThreadHandler() {
    return cameraThreadHandler;
  }

  private int getDeviceOrientation() {
    int orientation = 0;

    WindowManager wm = (WindowManager) applicationContext.getSystemService(
        Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        orientation = 90;
        break;
      case Surface.ROTATION_180:
        orientation = 180;
        break;
      case Surface.ROTATION_270:
        orientation = 270;
        break;
      case Surface.ROTATION_0:
      default:
        orientation = 0;
        break;
    }
    return orientation;
  }

  private int getFrameOrientation() {
    int rotation = getDeviceOrientation();
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
      rotation = 360 - rotation;
    }
    return (info.orientation + rotation) % 360;
  }
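
  // Worked example for the rotation math in getFrameOrientation() above (hypothetical values):
  // a back-facing sensor with info.orientation = 90 on a device rotated to 270 degrees gives
  // a frame orientation of (90 + (360 - 270)) % 360 = 180 degrees.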

  // Called on the camera thread, so it must not be "synchronized".
  @Override
  public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
    checkIsOnCameraThread();
    if (camera == null || !queuedBuffers.contains(data)) {
      // The camera has been stopped or |data| is an old invalid buffer.
      return;
    }
    if (camera != callbackCamera) {
      throw new RuntimeException("Unexpected camera in callback!");
    }

    final long captureTimeNs =
        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

    if (eventsHandler != null && !firstFrameReported) {
      eventsHandler.onFirstFrameAvailable();
      firstFrameReported = true;
    }

    cameraStatistics.addFrame();
    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
        getFrameOrientation(), captureTimeNs);
    camera.addCallbackBuffer(data);
  }

  @Override
  public void onTextureFrameAvailable(
      int oesTextureId, float[] transformMatrix, long timestampNs) {
    checkIsOnCameraThread();
    if (camera == null) {
      // Camera is stopped, we need to return the buffer immediately.
      surfaceHelper.returnTextureFrame();
      return;
    }
    if (dropNextFrame) {
      surfaceHelper.returnTextureFrame();
      dropNextFrame = false;
      return;
    }

    int rotation = getFrameOrientation();
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
      // Undo the mirror that the OS "helps" us with.
      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
      transformMatrix =
          RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
    }
    cameraStatistics.addFrame();
    frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
        transformMatrix, rotation, timestampNs);
  }

  // Interface used for providing callbacks to an observer.
  interface CapturerObserver {
    // Notifies whether the camera has been started successfully or not.
    // Called on a Java thread owned by VideoCapturerAndroid.
    abstract void onCapturerStarted(boolean success);

    // Delivers a captured frame. Called on a Java thread owned by
    // VideoCapturerAndroid.
    abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
        long timeStamp);

    // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
    // owned by VideoCapturerAndroid.
    abstract void onTextureFrameCaptured(
        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
        long timestamp);

    // Requests an output format from the video capturer. Frames captured by the camera will be
    // scaled and/or dropped by the video capturer.
    // Called on a Java thread owned by VideoCapturerAndroid.
    abstract void onOutputFormatRequest(int width, int height, int framerate);
  }

  // An implementation of CapturerObserver that forwards all calls from
  // Java to the C layer.
  static class NativeObserver implements CapturerObserver {
    private final long nativeCapturer;

    public NativeObserver(long nativeCapturer) {
      this.nativeCapturer = nativeCapturer;
    }

    @Override
    public void onCapturerStarted(boolean success) {
      nativeCapturerStarted(nativeCapturer, success);
    }

    @Override
    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
        int rotation, long timeStamp) {
      nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
          timeStamp);
    }

    @Override
    public void onTextureFrameCaptured(
        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
        long timestamp) {
      nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
          rotation, timestamp);
    }

    @Override
    public void onOutputFormatRequest(int width, int height, int framerate) {
      nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
    }

    private native void nativeCapturerStarted(long nativeCapturer,
        boolean success);
    private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
        byte[] data, int length, int width, int height, int rotation, long timeStamp);
    private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
        int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
    private native void nativeOnOutputFormatRequest(long nativeCapturer,
        int width, int height, int framerate);
  }

  private static native long nativeCreateVideoCapturer(
      VideoCapturerAndroid videoCapturer,
      SurfaceTextureHelper surfaceHelper);
}