/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

class Camera2Session implements CameraSession {
  private static final String TAG = "Camera2Session";

  private static final Histogram camera2StartTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
  private static final Histogram camera2StopTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
  private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
      "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

  private static enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final CreateSessionCallback callback;
  private final Events events;
  private final Context applicationContext;
  private final CameraManager cameraManager;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final String cameraId;
  private final int width;
  private final int height;
  private final int framerate;

  // Initialized at start
  private CameraCharacteristics cameraCharacteristics;
  private int cameraOrientation;
  private boolean isCameraFrontFacing;
  private int fpsUnitFactor;
  private CaptureFormat captureFormat;

  // Initialized when camera opens
  @Nullable private CameraDevice cameraDevice;
  @Nullable private Surface surface;

  // Initialized when capture session is created
  @Nullable private CameraCaptureSession captureSession;

  // State
  private SessionState state = SessionState.RUNNING;
  private boolean firstFrameReported;

  // Used only for stats. Only used on the camera thread.
  private final long constructionTimeNs; // Construction time of this class.

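  // Receives state changes for the CameraDevice opened in openCamera(). Because the camera thread
  // handler is passed to CameraManager.openCamera(), all of these callbacks run on the camera
  // thread and may touch the session state directly.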
  private class CameraStateCallback extends CameraDevice.StateCallback {
    private String getErrorDescription(int errorCode) {
      switch (errorCode) {
        case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
          return "Camera device has encountered a fatal error.";
        case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
          return "Camera device could not be opened due to a device policy.";
        case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
          return "Camera device is in use already.";
        case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
          return "Camera service has encountered a fatal error.";
        case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
          return "Camera device could not be opened because"
              + " there are too many other open camera devices.";
        default:
          return "Unknown camera error: " + errorCode;
      }
    }

    @Override
    public void onDisconnected(CameraDevice camera) {
      checkIsOnCameraThread();
      final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
      state = SessionState.STOPPED;
      stopInternal();
      if (startFailure) {
        callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
      } else {
        events.onCameraDisconnected(Camera2Session.this);
      }
    }

    @Override
    public void onError(CameraDevice camera, int errorCode) {
      checkIsOnCameraThread();
      reportError(getErrorDescription(errorCode));
    }

    @Override
    public void onOpened(CameraDevice camera) {
      checkIsOnCameraThread();

      Logging.d(TAG, "Camera opened.");
      cameraDevice = camera;

      surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
      surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
      try {
        camera.createCaptureSession(
            Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
      } catch (CameraAccessException e) {
        reportError("Failed to create capture session. " + e);
        return;
      }
    }

    @Override
    public void onClosed(CameraDevice camera) {
      checkIsOnCameraThread();

      Logging.d(TAG, "Camera device closed.");
      events.onCameraClosed(Camera2Session.this);
    }
  }

  private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
      checkIsOnCameraThread();
      session.close();
      reportError("Failed to configure capture session.");
    }

    @Override
    public void onConfigured(CameraCaptureSession session) {
      checkIsOnCameraThread();
      Logging.d(TAG, "Camera capture session configured.");
      captureSession = session;
      try {
        /*
         * The viable options for video capture requests are:
         * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
         *   post-processing.
         * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
         *   quality.
         */
        final CaptureRequest.Builder captureRequestBuilder =
            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        // Set auto exposure fps range.
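        // captureFormat.framerate holds values scaled by fpsUnitFactor (the factor computed by
        // Camera2Enumerator.getFpsUnitFactor in findCaptureFormat), so divide by it here to get
        // back to the unit this camera reported in CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.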
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
            new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
                captureFormat.framerate.max / fpsUnitFactor));
        captureRequestBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        chooseStabilizationMode(captureRequestBuilder);
        chooseFocusMode(captureRequestBuilder);

        captureRequestBuilder.addTarget(surface);
        session.setRepeatingRequest(
            captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
      } catch (CameraAccessException e) {
        reportError("Failed to start capture request. " + e);
        return;
      }

      surfaceTextureHelper.startListening((VideoFrame frame) -> {
        checkIsOnCameraThread();

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
          return;
        }

        if (!firstFrameReported) {
          firstFrameReported = true;
          final int startTimeMs =
              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
          camera2StartTimeMsHistogram.addSample(startTimeMs);
        }

        // Undo the mirror that the OS "helps" us with.
        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
        // Also undo the camera orientation, since we report it as rotation instead.
        final VideoFrame modifiedFrame =
            new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
                               (TextureBufferImpl) frame.getBuffer(),
                               /* mirror= */ isCameraFrontFacing,
                               /* rotation= */ -cameraOrientation),
                /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
        modifiedFrame.release();
      });
      Logging.d(TAG, "Camera device successfully started.");
      callback.onDone(Camera2Session.this);
    }

    // Prefers optical stabilization over software stabilization if available. Only enables one of
    // the stabilization modes at a time because having both enabled can cause strange results.
    private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableOpticalStabilization = cameraCharacteristics.get(
          CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
      if (availableOpticalStabilization != null) {
        for (int mode : availableOpticalStabilization) {
          if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
            Logging.d(TAG, "Using optical stabilization.");
            return;
          }
        }
      }
      // If no optical mode is available, try software.
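      // CONTROL_VIDEO_STABILIZATION_MODE enables electronic (software) stabilization performed
      // by the camera pipeline.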
      final int[] availableVideoStabilization = cameraCharacteristics.get(
          CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
      if (availableVideoStabilization != null) {
        for (int mode : availableVideoStabilization) {
          if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
            Logging.d(TAG, "Using video stabilization.");
            return;
          }
        }
      }
      Logging.d(TAG, "Stabilization not available.");
    }

    private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableFocusModes =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
      for (int mode : availableFocusModes) {
        if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
          captureRequestBuilder.set(
              CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
          Logging.d(TAG, "Using continuous video auto-focus.");
          return;
        }
      }
      Logging.d(TAG, "Auto-focus is not available.");
    }
  }

  private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
    @Override
    public void onCaptureFailed(
        CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
      Logging.d(TAG, "Capture failed: " + failure);
    }
  }

  public static void create(CreateSessionCallback callback, Events events,
      Context applicationContext, CameraManager cameraManager,
      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
      int framerate) {
    new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
        cameraId, width, height, framerate);
  }

  private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
      int width, int height, int framerate) {
    Logging.d(TAG, "Create new camera2 session on camera " + cameraId);

    constructionTimeNs = System.nanoTime();

    this.cameraThreadHandler = new Handler();
    this.callback = callback;
    this.events = events;
    this.applicationContext = applicationContext;
    this.cameraManager = cameraManager;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.width = width;
    this.height = height;
    this.framerate = framerate;

    start();
  }

  private void start() {
    checkIsOnCameraThread();
    Logging.d(TAG, "start");

    try {
      cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
    } catch (CameraAccessException | IllegalArgumentException e) {
      reportError("getCameraCharacteristics(): " + e.getMessage());
      return;
    }
    cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_FRONT;

    findCaptureFormat();

    if (captureFormat == null) {
      // findCaptureFormat reports an error already.
      return;
    }

    openCamera();
  }

  private void findCaptureFormat() {
    checkIsOnCameraThread();

    Range<Integer>[] fpsRanges =
        cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
        Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);

    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
      reportError("No supported capture formats.");
      return;
    }

    final CaptureFormat.FramerateRange bestFpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
  }

  private void openCamera() {
    checkIsOnCameraThread();

    Logging.d(TAG, "Opening camera " + cameraId);
    events.onCameraOpening();

    try {
      cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
    } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
      reportError("Failed to open camera: " + e);
      return;
    }
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
    checkIsOnCameraThread();
    if (state != SessionState.STOPPED) {
      final long stopStartTime = System.nanoTime();
      state = SessionState.STOPPED;
      stopInternal();
      final int stopTimeMs =
          (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
      camera2StopTimeMsHistogram.addSample(stopTimeMs);
    }
  }

  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();

    surfaceTextureHelper.stopListening();

    if (captureSession != null) {
      captureSession.close();
      captureSession = null;
    }
    if (surface != null) {
      surface.release();
      surface = null;
    }
    if (cameraDevice != null) {
      cameraDevice.close();
      cameraDevice = null;
    }

    Logging.d(TAG, "Stop done");
  }

  private void reportError(String error) {
    checkIsOnCameraThread();
    Logging.e(TAG, "Error: " + error);

    final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
    state = SessionState.STOPPED;
    stopInternal();
    if (startFailure) {
      callback.onFailure(FailureType.ERROR, error);
    } else {
      events.onCameraError(this, error);
    }
  }

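  // Combines the camera sensor orientation with the current device orientation to compute the
  // rotation reported with each frame. For back-facing cameras the device rotation is applied in
  // the opposite direction. Illustrative example (hypothetical values): a back-facing sensor
  // mounted at 90 degrees on a device rotated 270 degrees yields (90 + (360 - 270)) % 360 = 180.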
  private int getFrameOrientation() {
    int rotation = CameraSession.getDeviceOrientation(applicationContext);
    if (!isCameraFrontFacing) {
      rotation = 360 - rotation;
    }
    return (cameraOrientation + rotation) % 360;
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}