/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.hardware.Camera;
import android.os.Handler;
import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {
  private static final String TAG = "Camera1Session";
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;

  private static final Histogram camera1StartTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
  private static final Histogram camera1StopTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
  private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
      "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

  private static enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final Events events;
  private final boolean captureToTexture;
  private final Context applicationContext;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final int cameraId;
  private final Camera camera;
  private final Camera.CameraInfo info;
  private final CaptureFormat captureFormat;
  // Used only for stats. Only used on the camera thread.
  private final long constructionTimeNs; // Construction time of this class.
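
  // The mutable session state below is only touched on the camera thread;
  // every entry point asserts this via checkIsOnCameraThread().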
  private SessionState state;
  private boolean firstFrameReported;

  // TODO(titovartem) make correct fix during webrtc:9175
  @SuppressWarnings("ByteBufferBackingArray")
  public static void create(final CreateSessionCallback callback, final Events events,
      final boolean captureToTexture, final Context applicationContext,
      final SurfaceTextureHelper surfaceTextureHelper, final String cameraName,
      final int width, final int height, final int framerate) {
    final long constructionTimeNs = System.nanoTime();
    Logging.d(TAG, "Open camera " + cameraName);
    events.onCameraOpening();

    final int cameraId;
    try {
      cameraId = Camera1Enumerator.getCameraIndex(cameraName);
    } catch (IllegalArgumentException e) {
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    final Camera camera;
    try {
      camera = Camera.open(cameraId);
    } catch (RuntimeException e) {
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    if (camera == null) {
      callback.onFailure(
          FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId);
      return;
    }

    try {
      camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
    } catch (IOException | RuntimeException e) {
      camera.release();
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    final Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);

    final CaptureFormat captureFormat;
    try {
      final Camera.Parameters parameters = camera.getParameters();
      captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
      final Size pictureSize = findClosestPictureSize(parameters, width, height);
      updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
    } catch (RuntimeException e) {
      camera.release();
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    if (!captureToTexture) {
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        camera.addCallbackBuffer(buffer.array());
      }
    }

    // Calculate orientation manually and send it as CVO instead.
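    // (CVO, Coordination of Video Orientation, signals the frame rotation as
    // per-frame metadata instead of baking it into the pixels, so the display
    // orientation can stay fixed at 0 degrees here while getFrameOrientation()
    // supplies the rotation for each captured frame.)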
    try {
      camera.setDisplayOrientation(0 /* degrees */);
    } catch (RuntimeException e) {
      camera.release();
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
        surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
  }

  private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
      CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
    final List<String> focusModes = parameters.getSupportedFocusModes();

    parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    if (!captureToTexture) {
      parameters.setPreviewFormat(captureFormat.imageFormat);
    }

    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    camera.setParameters(parameters);
  }

  private static CaptureFormat findClosestCaptureFormat(
      Camera.Parameters parameters, int width, int height, int framerate) {
    // Find closest supported format for `width` x `height` @ `framerate`.
    final List<CaptureFormat.FramerateRange> supportedFramerates =
        Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);

    final CaptureFormat.FramerateRange fpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);

    final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);

    return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
  }

  private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
    return CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
  }
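
  // Camera1 only exposes a discrete set of preview sizes and fps ranges, so the
  // helpers above pick the nearest supported values rather than failing when the
  // requested width x height @ framerate has no exact match.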

  private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera,
      Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) {
    Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

    this.cameraThreadHandler = new Handler();
    this.events = events;
    this.captureToTexture = captureToTexture;
    this.applicationContext = applicationContext;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.camera = camera;
    this.info = info;
    this.captureFormat = captureFormat;
    this.constructionTimeNs = constructionTimeNs;

    surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);

    startCapturing();
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
    checkIsOnCameraThread();
    if (state != SessionState.STOPPED) {
      final long stopStartTime = System.nanoTime();
      stopInternal();
      final int stopTimeMs =
          (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
      camera1StopTimeMsHistogram.addSample(stopTimeMs);
    }
  }

  private void startCapturing() {
    Logging.d(TAG, "Start capturing");
    checkIsOnCameraThread();

    state = SessionState.RUNNING;

    camera.setErrorCallback(new Camera.ErrorCallback() {
      @Override
      public void onError(int error, Camera camera) {
        String errorMessage;
        if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
          errorMessage = "Camera server died!";
        } else {
          errorMessage = "Camera error: " + error;
        }
        Logging.e(TAG, errorMessage);
        stopInternal();
        if (error == Camera.CAMERA_ERROR_EVICTED) {
          events.onCameraDisconnected(Camera1Session.this);
        } else {
          events.onCameraError(Camera1Session.this, errorMessage);
        }
      }
    });

    if (captureToTexture) {
      listenForTextureFrames();
    } else {
      listenForBytebufferFrames();
    }
    try {
      camera.startPreview();
    } catch (RuntimeException e) {
      stopInternal();
      events.onCameraError(this, e.getMessage());
    }
  }

  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();
    if (state == SessionState.STOPPED) {
      Logging.d(TAG, "Camera is already stopped");
      return;
    }

    state = SessionState.STOPPED;
    surfaceTextureHelper.stopListening();
    // Note: stopPreview or other driver code might deadlock. Deadlock in
    // Camera._stopPreview(Native Method) has been observed on
    // Nexus 5 (hammerhead), OS version LMY48I.
    camera.stopPreview();
    camera.release();
    events.onCameraClosed(this);
    Logging.d(TAG, "Stop done");
  }

  private void listenForTextureFrames() {
    surfaceTextureHelper.startListening((VideoFrame frame) -> {
      checkIsOnCameraThread();

      if (state != SessionState.RUNNING) {
        Logging.d(TAG, "Texture frame captured but camera is no longer running.");
        return;
      }

      if (!firstFrameReported) {
        final int startTimeMs =
            (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
        camera1StartTimeMsHistogram.addSample(startTimeMs);
        firstFrameReported = true;
      }

      // Undo the mirror that the OS "helps" us with.
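      // (The front-facing camera preview texture arrives mirrored by the OS;
      // flipping it back here keeps the outgoing stream consistent with the
      // back-facing camera convention.)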
      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
      final VideoFrame modifiedFrame =
          new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
                             (TextureBufferImpl) frame.getBuffer(),
                             /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT,
                             /* rotation= */ 0),
              /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
      events.onFrameCaptured(Camera1Session.this, modifiedFrame);
      modifiedFrame.release();
    });
  }

  private void listenForBytebufferFrames() {
    camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(final byte[] data, Camera callbackCamera) {
        checkIsOnCameraThread();

        if (callbackCamera != camera) {
          Logging.e(TAG, "Callback from a different camera. This should never happen.");
          return;
        }

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
          return;
        }

        final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

        if (!firstFrameReported) {
          final int startTimeMs =
              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
          camera1StartTimeMsHistogram.addSample(startTimeMs);
          firstFrameReported = true;
        }

        VideoFrame.Buffer frameBuffer = new NV21Buffer(
            data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
              if (state == SessionState.RUNNING) {
                camera.addCallbackBuffer(data);
              }
            }));
        final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
        events.onFrameCaptured(Camera1Session.this, frame);
        frame.release();
      }
    });
  }

  private int getFrameOrientation() {
    int rotation = CameraSession.getDeviceOrientation(applicationContext);
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
      rotation = 360 - rotation;
    }
    return (info.orientation + rotation) % 360;
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}
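
// A minimal usage sketch (hypothetical caller; in this package the real entry
// point is Camera1Capturer/CameraCapturer, which drives the session lifecycle).
// Assuming execution is already on the SurfaceTextureHelper's handler thread:
//
//   String name = new Camera1Enumerator(/* captureToTexture= */ true)
//       .getDeviceNames()[0];
//   Camera1Session.create(createCallback, events, /* captureToTexture= */ true,
//       applicationContext, surfaceTextureHelper, name,
//       /* width= */ 1280, /* height= */ 720, /* framerate= */ 30);
//
// The session arrives asynchronously via CreateSessionCallback.onDone() and must
// later be shut down with stop() on the same thread.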