/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera.one.v2;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.view.Surface;

import com.android.camera.CaptureModuleUtil;
import com.android.camera.Exif;
import com.android.camera.Storage;
import com.android.camera.debug.DebugPropertyHelper;
import com.android.camera.debug.Log;
import com.android.camera.debug.Log.Tag;
import com.android.camera.exif.ExifInterface;
import com.android.camera.exif.ExifTag;
import com.android.camera.exif.Rational;
import com.android.camera.one.AbstractOneCamera;
import com.android.camera.one.CameraDirectionProvider;
import com.android.camera.one.OneCamera;
import com.android.camera.one.Settings3A;
import com.android.camera.one.v2.camera2proxy.AndroidCaptureResultProxy;
import com.android.camera.one.v2.camera2proxy.AndroidImageProxy;
import com.android.camera.one.v2.camera2proxy.CaptureResultProxy;
import com.android.camera.processing.imagebackend.TaskImageContainer;
import com.android.camera.session.CaptureSession;
import com.android.camera.ui.focus.LensRangeCalculator;
import com.android.camera.ui.motion.LinearScale;
import com.android.camera.util.CameraUtil;
import com.android.camera.util.CaptureDataSerializer;
import com.android.camera.util.ExifUtil;
import com.android.camera.util.JpegUtilNative;
import com.android.camera.util.Size;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

/**
 * {@link OneCamera} implementation directly on top of the Camera2 API for
 * cameras without API 2 FULL support (limited or legacy).
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public class OneCameraImpl extends AbstractOneCamera {
    /** Captures that are requested but haven't completed yet. */
    private static class InFlightCapture {
        final PhotoCaptureParameters parameters;
        final CaptureSession session;
        Image image;
        TotalCaptureResult totalCaptureResult;

        public InFlightCapture(PhotoCaptureParameters parameters,
                CaptureSession session) {
            this.parameters = parameters;
            this.session = session;
        }

        /** Set the image once it's been received. */
        public InFlightCapture setImage(Image capturedImage) {
            image = capturedImage;
            return this;
        }

        /** Set the total capture result once it's been received. */
        public InFlightCapture setCaptureResult(TotalCaptureResult result) {
            totalCaptureResult = result;
            return this;
        }

        /**
         * Returns whether the capture is complete (which is the case once
         * both the image and the capture result are present).
         */
        boolean isCaptureComplete() {
            return image != null && totalCaptureResult != null;
        }
    }

    private static final Tag TAG = new Tag("OneCameraImpl2");

    /** If true, will write data about each capture request to disk. */
    private static final boolean DEBUG_WRITE_CAPTURE_DATA = DebugPropertyHelper.writeCaptureData();
    /** If true, will log per-frame AF info. */
    private static final boolean DEBUG_FOCUS_LOG = DebugPropertyHelper.showFrameDebugLog();

    /** Default JPEG encoding quality. */
    private static final Byte JPEG_QUALITY = 90;

    /**
     * Set to ImageFormat.JPEG to use the hardware encoder, or
     * ImageFormat.YUV_420_888 to use the software encoder. RAW_SENSOR can
     * also be tried experimentally.
     */
    private static final int sCaptureImageFormat = DebugPropertyHelper.isCaptureDngEnabled() ?
            ImageFormat.RAW_SENSOR : ImageFormat.JPEG;

    /** Duration to hold focus after a manual focus tap. */
    private static final int FOCUS_HOLD_MILLIS = Settings3A.getFocusHoldMillis();
    /** Zero-weight 3A region, used to reset the regions as per the API. */
    private static final MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper
            .getZeroWeightRegion();

    /**
     * CaptureRequest tags.
     * <ul>
     * <li>{@link #PRESHOT_TRIGGERED_AF}</li>
     * <li>{@link #CAPTURE}</li>
     * </ul>
     */
    public static enum RequestTag {
        /** Request that is part of a pre-shot trigger. */
        PRESHOT_TRIGGERED_AF,
        /** Capture request (purely for logging). */
        CAPTURE,
        /** Tap to focus (purely for logging). */
        TAP_TO_FOCUS
    }

    /** Directory to store raw DNG files in. */
    private static final File RAW_DIRECTORY = new File(Storage.DIRECTORY, "DNG");

    /** Current CONTROL_AF_MODE requested from the Camera2 API. */
    private int mControlAFMode = CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    /** Last OneCamera.AutoFocusState reported. */
    private AutoFocusState mLastResultAFState = AutoFocusState.INACTIVE;
    /** Flag to take a picture once the lens has stopped. */
    private boolean mTakePictureWhenLensIsStopped = false;
    /** Takes a (delayed) picture with appropriate parameters. */
    private Runnable mTakePictureRunnable;
    /** Keep the PictureCallback for the last requested capture. */
    private PictureCallback mLastPictureCallback = null;
    /** Last time takePicture() was called, in uptimeMillis. */
    private long mTakePictureStartMillis;
    /** Runnable that returns to CONTROL_AF_MODE = AF_CONTINUOUS_PICTURE. */
    private final Runnable mReturnToContinuousAFRunnable = new Runnable() {
        @Override
        public void run() {
            mAFRegions = ZERO_WEIGHT_3A_REGION;
            mAERegions = ZERO_WEIGHT_3A_REGION;
            mControlAFMode = CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
            repeatingPreview(null);
        }
    };

    /** Current zoom value. 1.0 is no zoom. */
    private float mZoomValue = 1f;
    /** Current crop region, derived from mZoomValue. */
    private Rect mCropRegion;
    /** Current AF and AE regions. */
    private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
    private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;
    /** Last frame for which CONTROL_AF_STATE was received. */
    private long mLastControlAfStateFrameNumber = 0;

    /**
     * Common listener for preview frame metadata.
     */
    private final CameraCaptureSession.CaptureCallback mCaptureCallback =
            new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureStarted(CameraCaptureSession session,
                        CaptureRequest request, long timestamp,
                        long frameNumber) {
                    if (request.getTag() == RequestTag.CAPTURE
                            && mLastPictureCallback != null) {
                        mLastPictureCallback.onQuickExpose();
                    }
                }

                // AF state information is sometimes available 1 frame before
                // onCaptureCompleted(), so we take advantage of that.
                @Override
                public void onCaptureProgressed(CameraCaptureSession session,
                        CaptureRequest request, CaptureResult partialResult) {
                    autofocusStateChangeDispatcher(partialResult);
                    super.onCaptureProgressed(session, request, partialResult);
                }

                @Override
                public void onCaptureCompleted(CameraCaptureSession session,
                        CaptureRequest request, TotalCaptureResult result) {
                    autofocusStateChangeDispatcher(result);
                    // This checks for a HAL implementation error where the
                    // TotalCaptureResult is missing CONTROL_AF_STATE. This
                    // should not happen.
                    if (result.get(CaptureResult.CONTROL_AF_STATE) == null) {
                        AutoFocusHelper.checkControlAfState(result);
                    }
                    if (DEBUG_FOCUS_LOG) {
                        AutoFocusHelper.logExtraFocusInfo(result);
                    }

                    Float diopter = result.get(CaptureResult.LENS_FOCUS_DISTANCE);
                    if (diopter != null && mFocusDistanceListener != null) {
                        mFocusDistanceListener.onFocusDistance(diopter, mLensRange);
                    }

                    if (request.getTag() == RequestTag.CAPTURE) {
                        // Add the capture result to the latest in-flight
                        // capture. If all the data for that capture is
                        // complete, store the image on disk.
                        InFlightCapture capture = null;
                        synchronized (mCaptureQueue) {
                            if (mCaptureQueue.getFirst().setCaptureResult(result)
                                    .isCaptureComplete()) {
                                capture = mCaptureQueue.removeFirst();
                            }
                        }
                        if (capture != null) {
                            OneCameraImpl.this.onCaptureCompleted(capture);
                        }
                    }
                    super.onCaptureCompleted(session, request, result);
                }
            };

    /** Thread on which the camera operations are running. */
    private final HandlerThread mCameraThread;
    /** Handler of the {@link #mCameraThread}. */
    private final Handler mCameraHandler;
    /** The characteristics of this camera. */
    private final CameraCharacteristics mCharacteristics;
    private final LinearScale mLensRange;
    /** The underlying Camera2 API camera device. */
    private final CameraDevice mDevice;
    private final CameraDirectionProvider mDirectionProvider;

    /**
     * The aspect ratio (width/height) of the full resolution for this camera.
     * Usually the native aspect ratio of this camera.
     */
    private final float mFullSizeAspectRatio;
    /** The Camera2 API capture session currently active. */
    private CameraCaptureSession mCaptureSession;
    /** The surface onto which to render the preview. */
    private Surface mPreviewSurface;
    /**
     * A queue of capture requests that have been requested but are not done
     * yet.
     */
    private final LinkedList<InFlightCapture> mCaptureQueue =
            new LinkedList<InFlightCapture>();
    /** Whether closing of this device has been requested. */
    private volatile boolean mIsClosed = false;

    /** Receives the normal captured images. */
    private final ImageReader mCaptureImageReader;

    ImageReader.OnImageAvailableListener mCaptureImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    // Add the image data to the latest in-flight capture.
                    // If all the data for that capture is complete, store the
                    // image data.
                    InFlightCapture capture = null;
                    synchronized (mCaptureQueue) {
                        if (mCaptureQueue.getFirst().setImage(reader.acquireLatestImage())
                                .isCaptureComplete()) {
                            capture = mCaptureQueue.removeFirst();
                        }
                    }
                    if (capture != null) {
                        onCaptureCompleted(capture);
                    }
                }
            };

    /**
     * Instantiates a new camera based on the Camera 2 API.
     *
     * @param device The underlying Camera 2 device.
     * @param characteristics The device's characteristics.
     * @param pictureSize The size of the final image to be taken.
     */
    OneCameraImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
        mDevice = device;
        mCharacteristics = characteristics;
        mLensRange = LensRangeCalculator.getDiopterToRatioCalculator(characteristics);
        mDirectionProvider = new CameraDirectionProvider(characteristics);
        mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

        // Override pictureSize for RAW: our picture size settings don't
        // include RAW, which typically supports only one size (the sensor
        // size). That size also typically differs from the largest JPEG or
        // YUV size.
        // TODO: If we ever want to support RAW properly, it should be one
        // entry in the picture quality list, which should then lead to the
        // right pictureSize being passed in here.
        if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
            pictureSize = getDefaultPictureSize();
        }

        mCameraThread = new HandlerThread("OneCamera2");
        mCameraThread.start();
        mCameraHandler = new Handler(mCameraThread.getLooper());

        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
                pictureSize.getHeight(),
                sCaptureImageFormat, 2);
        mCaptureImageReader.setOnImageAvailableListener(mCaptureImageListener, mCameraHandler);
        Log.d(TAG, "New Camera2 based OneCameraImpl created.");
    }
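
    /*
     * Still-capture flow, summarized from the code below: takePicture()
     * either fires immediately or, if the lens is still scanning, defers via
     * mTakePictureRunnable until the AF state settles. takePictureNow()
     * queues an InFlightCapture and issues a TEMPLATE_STILL_CAPTURE request;
     * the ImageReader callback supplies the Image and mCaptureCallback
     * supplies the TotalCaptureResult. Whichever arrives last completes the
     * InFlightCapture and triggers onCaptureCompleted(capture), which writes
     * out a JPEG or a DNG.
     */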

    /**
     * Take picture, initiating an auto focus scan if needed.
     */
    @Override
    public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
        // Do not do anything when a picture is already requested.
        if (mTakePictureWhenLensIsStopped) {
            return;
        }

        // Not ready until the picture comes back.
        broadcastReadyState(false);

        mTakePictureRunnable = new Runnable() {
            @Override
            public void run() {
                takePictureNow(params, session);
            }
        };
        mLastPictureCallback = params.callback;
        mTakePictureStartMillis = SystemClock.uptimeMillis();

        // This class implements a very simple version of AF, which
        // only delays capture if the lens is scanning.
        if (mLastResultAFState == AutoFocusState.ACTIVE_SCAN) {
            Log.v(TAG, "Waiting until scan is done before taking shot.");
            mTakePictureWhenLensIsStopped = true;
        } else {
            // We could do CONTROL_AF_TRIGGER_START and wait until lens locks,
            // but this would slow down the capture.
            takePictureNow(params, session);
        }
    }

    /**
     * Take picture immediately. Parameters passed through from takePicture().
     */
    public void takePictureNow(PhotoCaptureParameters params, CaptureSession session) {
        long dt = SystemClock.uptimeMillis() - mTakePictureStartMillis;
        Log.v(TAG, "Taking shot with extra AF delay of " + dt + " ms.");
        try {
            // JPEG capture.
            CaptureRequest.Builder builder = mDevice
                    .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            builder.setTag(RequestTag.CAPTURE);
            addBaselineCaptureKeysToRequest(builder);

            // Enable lens-shading correction for even better DNGs.
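            // (Assumption: with STATISTICS_LENS_SHADING_MAP_MODE_ON the HAL
            // includes the lens shading map in the capture result, which
            // DngCreator can then carry over into the DNG.)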
            if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
                builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                        CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
            } else if (sCaptureImageFormat == ImageFormat.JPEG) {
                builder.set(CaptureRequest.JPEG_QUALITY, JPEG_QUALITY);
                builder.set(CaptureRequest.JPEG_ORIENTATION,
                        CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
            }

            builder.addTarget(mPreviewSurface);
            builder.addTarget(mCaptureImageReader.getSurface());
            CaptureRequest request = builder.build();

            if (DEBUG_WRITE_CAPTURE_DATA) {
                final String debugDataDir = makeDebugDir(params.debugDataFolder,
                        "normal_capture_debug");
                Log.i(TAG, "Writing capture data to: " + debugDataDir);
                CaptureDataSerializer.toFile("Normal Capture", request, new File(debugDataDir,
                        "capture.txt"));
            }

            mCaptureSession.capture(request, mCaptureCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not access camera for still image capture.");
            broadcastReadyState(true);
            params.callback.onPictureTakingFailed();
            return;
        }
        synchronized (mCaptureQueue) {
            mCaptureQueue.add(new InFlightCapture(params, session));
        }
    }

    @Override
    public void startPreview(Surface previewSurface, CaptureReadyCallback listener) {
        mPreviewSurface = previewSurface;
        setupAsync(mPreviewSurface, listener);
    }

    @Override
    public void close() {
        if (mIsClosed) {
            Log.w(TAG, "Camera is already closed.");
            return;
        }
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not abort captures in progress.");
        }
        mIsClosed = true;
        mCameraThread.quitSafely();
        mDevice.close();
    }

    public Size[] getSupportedPreviewSizes() {
        StreamConfigurationMap config = mCharacteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return Size.convert(config.getOutputSizes(SurfaceTexture.class));
    }

    public float getFullSizeAspectRatio() {
        return mFullSizeAspectRatio;
    }

    @Override
    public Facing getDirection() {
        return mDirectionProvider.getDirection();
    }

    private void saveJpegPicture(byte[] jpegData, final PhotoCaptureParameters captureParams,
            CaptureSession session, CaptureResult result) {
        int heading = captureParams.heading;
        int width = 0;
        int height = 0;
        int rotation = 0;
        ExifInterface exif = null;
        try {
            exif = new ExifInterface();
            exif.readExif(jpegData);

            Integer w = exif.getTagIntValue(ExifInterface.TAG_PIXEL_X_DIMENSION);
            width = (w == null) ? width : w;
            Integer h = exif.getTagIntValue(ExifInterface.TAG_PIXEL_Y_DIMENSION);
            height = (h == null) ? height : h;

            // Get image rotation from EXIF.
            rotation = Exif.getOrientation(exif);

            // Set GPS heading direction based on sensor, if location is on.
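            // (A negative heading serves as the "not available" sentinel,
            // hence the >= 0 check below; the value is written as a
            // whole-degree Rational for the EXIF GPSImgDirection tag.)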
            if (heading >= 0) {
                ExifTag directionRefTag = exif.buildTag(
                        ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                        ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
                ExifTag directionTag = exif.buildTag(
                        ExifInterface.TAG_GPS_IMG_DIRECTION,
                        new Rational(heading, 1));
                exif.setTag(directionRefTag);
                exif.setTag(directionTag);
            }
            new ExifUtil(exif).populateExif(Optional.<TaskImageContainer.TaskImage> absent(),
                    Optional.of((CaptureResultProxy) new AndroidCaptureResultProxy(result)),
                    Optional.<Location> absent());
        } catch (IOException e) {
            Log.w(TAG, "Could not read exif from gcam jpeg", e);
            exif = null;
        }
        ListenableFuture<Optional<Uri>> futureUri = session.saveAndFinish(jpegData, width, height,
                rotation, exif);
        Futures.addCallback(futureUri, new FutureCallback<Optional<Uri>>() {
            @Override
            public void onSuccess(Optional<Uri> uriOptional) {
                captureParams.callback.onPictureSaved(uriOptional.orNull());
            }

            @Override
            public void onFailure(Throwable throwable) {
                captureParams.callback.onPictureSaved(null);
            }
        }, MoreExecutors.directExecutor());
    }

    /**
     * Asynchronously sets up the capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when setup is completed.
     */
    private void setupAsync(final Surface previewSurface, final CaptureReadyCallback listener) {
        mCameraHandler.post(new Runnable() {
            @Override
            public void run() {
                setup(previewSurface, listener);
            }
        });
    }

    /**
     * Configures and attempts to create a capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when the setup is completed.
     */
    private void setup(Surface previewSurface, final CaptureReadyCallback listener) {
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
                mCaptureSession = null;
            }
            List<Surface> outputSurfaces = new ArrayList<Surface>(2);
            outputSurfaces.add(previewSurface);
            outputSurfaces.add(mCaptureImageReader.getSurface());

            mDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    listener.onSetupFailed();
                }

                @Override
                public void onConfigured(CameraCaptureSession session) {
                    mCaptureSession = session;
                    mAFRegions = ZERO_WEIGHT_3A_REGION;
                    mAERegions = ZERO_WEIGHT_3A_REGION;
                    mZoomValue = 1f;
                    mCropRegion = cropRegionForZoom(mZoomValue);
                    boolean success = repeatingPreview(null);
                    if (success) {
                        listener.onReadyForCapture();
                    } else {
                        listener.onSetupFailed();
                    }
                }

                @Override
                public void onClosed(CameraCaptureSession session) {
                    super.onClosed(session);
                }
            }, mCameraHandler);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not set up capture session", ex);
            listener.onSetupFailed();
        }
    }

    /**
     * Adds the current 3A regions and the base AF mode + AF_TRIGGER_IDLE to
     * the given request builder.
     *
     * @param builder Builder for the CaptureRequest.
     */
    private void addBaselineCaptureKeysToRequest(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
        builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
        builder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
        // Enable face detection.
        builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
                CaptureRequest.STATISTICS_FACE_DETECT_MODE_FULL);
        builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                CaptureRequest.CONTROL_SCENE_MODE_FACE_PRIORITY);
    }

    /**
     * Request preview capture stream with AF_MODE_CONTINUOUS_PICTURE.
     *
     * @param tag the request tag (currently unused).
     * @return true if the request was built and sent successfully.
     */
    private boolean repeatingPreview(Object tag) {
        try {
            CaptureRequest.Builder builder = mDevice.
                    createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mPreviewSurface);
            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            addBaselineCaptureKeysToRequest(builder);
            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureCallback,
                    mCameraHandler);
            Log.v(TAG, String.format("Sent repeating Preview request, zoom = %.2f", mZoomValue));
            return true;
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not access camera setting up preview.", ex);
            return false;
        }
    }

    /**
     * Request preview capture stream with an auto focus trigger cycle.
     */
    private void sendAutoFocusTriggerCaptureRequest(Object tag) {
        try {
            // Step 1: Request a single frame with CONTROL_AF_TRIGGER_START.
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mPreviewSurface);
            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            mControlAFMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
            addBaselineCaptureKeysToRequest(builder);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
            builder.setTag(tag);
            mCaptureSession.capture(builder.build(), mCaptureCallback, mCameraHandler);

            // Step 2: Call repeatingPreview to update mControlAFMode.
            repeatingPreview(tag);
            resumeContinuousAFAfterDelay(FOCUS_HOLD_MILLIS);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not execute preview request.", ex);
        }
    }

    /**
     * Resume AF_MODE_CONTINUOUS_PICTURE after FOCUS_HOLD_MILLIS.
     */
    private void resumeContinuousAFAfterDelay(int millis) {
        mCameraHandler.removeCallbacks(mReturnToContinuousAFRunnable);
        mCameraHandler.postDelayed(mReturnToContinuousAFRunnable, millis);
    }
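
    /*
     * Tap-to-focus sequence, for reference: triggerFocusAndMeterAtPoint()
     * computes AF/AE regions for the tap, sendAutoFocusTriggerCaptureRequest()
     * switches to CONTROL_AF_MODE_AUTO and fires a single
     * CONTROL_AF_TRIGGER_START frame followed by a matching repeating
     * preview, and resumeContinuousAFAfterDelay() schedules
     * mReturnToContinuousAFRunnable to reset the regions and return to
     * CONTROL_AF_MODE_CONTINUOUS_PICTURE after FOCUS_HOLD_MILLIS.
     */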

    /**
     * This method takes appropriate action if the camera2 AF state changes.
     * <ol>
     * <li>Reports changes in camera2 AF state to OneCamera.FocusStateListener.</li>
     * <li>Takes a picture after an AF scan if mTakePictureWhenLensIsStopped is true.</li>
     * </ol>
     */
    private void autofocusStateChangeDispatcher(CaptureResult result) {
        if (result.getFrameNumber() < mLastControlAfStateFrameNumber ||
                result.get(CaptureResult.CONTROL_AF_STATE) == null) {
            return;
        }
        mLastControlAfStateFrameNumber = result.getFrameNumber();

        // Convert to OneCamera mode and state.
        AutoFocusState resultAFState = AutoFocusHelper.
                stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE));

        // TODO: Consider using LENS_STATE.
        boolean lensIsStopped = resultAFState == AutoFocusState.ACTIVE_FOCUSED ||
                resultAFState == AutoFocusState.ACTIVE_UNFOCUSED ||
                resultAFState == AutoFocusState.PASSIVE_FOCUSED ||
                resultAFState == AutoFocusState.PASSIVE_UNFOCUSED;

        if (mTakePictureWhenLensIsStopped && lensIsStopped) {
            // Take the shot.
            mCameraHandler.post(mTakePictureRunnable);
            mTakePictureWhenLensIsStopped = false;
        }

        // Report state change when AF state has changed.
        if (resultAFState != mLastResultAFState && mFocusStateListener != null) {
            mFocusStateListener.onFocusStatusUpdate(resultAFState, result.getFrameNumber());
        }
        mLastResultAFState = resultAFState;
    }

    @Override
    public void triggerFocusAndMeterAtPoint(float nx, float ny) {
        int sensorOrientation = mCharacteristics.get(
                CameraCharacteristics.SENSOR_ORIENTATION);
        mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion,
                sensorOrientation);
        mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion,
                sensorOrientation);

        sendAutoFocusTriggerCaptureRequest(RequestTag.TAP_TO_FOCUS);
    }

    @Override
    public float getMaxZoom() {
        return mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    }

    @Override
    public void setZoom(float zoom) {
        mZoomValue = zoom;
        mCropRegion = cropRegionForZoom(zoom);
        repeatingPreview(null);
    }

    @Override
    public Size pickPreviewSize(Size pictureSize, Context context) {
        if (pictureSize == null) {
            // TODO: The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
        Size[] supportedSizes = getSupportedPreviewSizes();

        // Since devices only have one RAW resolution, we need to be more
        // flexible when selecting a matching preview resolution.
        Double aspectRatioTolerance = sCaptureImageFormat == ImageFormat.RAW_SENSOR ? 10d : null;
        Size size = CaptureModuleUtil.getOptimalPreviewSize(supportedSizes,
                pictureAspectRatio, aspectRatioTolerance);
        Log.d(TAG, "Selected preview size: " + size);
        return size;
    }

    private Rect cropRegionForZoom(float zoom) {
        return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);
    }
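
    /*
     * For reference, a typical zoom-to-crop mapping (the actual math lives in
     * AutoFocusHelper.cropRegionForZoom, so treat this as an illustrative
     * sketch): a 2x zoom on a 4000x3000 active array keeps the centered
     * 2000x1500 rectangle, i.e.
     *   cropWidth  = activeArray.width()  / zoom  -> 2000
     *   cropHeight = activeArray.height() / zoom  -> 1500
     *   left = (activeArray.width()  - cropWidth)  / 2 -> 1000
     *   top  = (activeArray.height() - cropHeight) / 2 ->  750
     * giving SCALER_CROP_REGION = Rect(1000, 750, 3000, 2250).
     */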

    /**
     * Calculate the aspect ratio of the full size capture on this device.
     *
     * @param characteristics the characteristics of the camera device.
     * @return The aspect ratio, in terms of width/height, of the full capture
     *         size.
     */
    private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
        Rect activeArraySize =
                characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        return ((float) (activeArraySize.width())) / activeArraySize.height();
    }

    /**
     * Called when a capture that is in flight is completed.
     *
     * @param capture the in-flight capture which needs to contain the
     *            received image and capture data
     */
    private void onCaptureCompleted(InFlightCapture capture) {
        // Experimental support for writing RAW. We do not have a usable JPEG
        // here, so we don't use the usual capture session mechanism and
        // instead just store the RAW file in its own directory.
        // TODO: If we make this a real feature we should probably put the
        // DNGs into the Camera directory.
        if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
            if (!RAW_DIRECTORY.exists()) {
                if (!RAW_DIRECTORY.mkdirs()) {
                    throw new RuntimeException("Could not create RAW directory.");
                }
            }
            File dngFile = new File(RAW_DIRECTORY, capture.session.getTitle() + ".dng");
            writeDngBytesAndClose(capture.image, capture.totalCaptureResult,
                    mCharacteristics, dngFile);
        } else {
            // Since this is not an HDR+ session, we will just save the
            // result.
            byte[] imageBytes = acquireJpegBytesAndClose(capture.image);
            saveJpegPicture(imageBytes, capture.parameters, capture.session,
                    capture.totalCaptureResult);
        }
        broadcastReadyState(true);
        capture.parameters.callback.onPictureTaken(capture.session);
    }

    /**
     * Takes the given RAW image and capture result, converts them to a DNG
     * and writes it to disk.
     *
     * @param image the image containing the 16-bit RAW data (RAW_SENSOR)
     * @param captureResult the capture result for the image
     * @param characteristics the camera characteristics of the camera that
     *            took the RAW image
     * @param dngFile the destination file to which the resulting DNG data is
     *            written
     */
    private static void writeDngBytesAndClose(Image image, TotalCaptureResult captureResult,
            CameraCharacteristics characteristics, File dngFile) {
        try (DngCreator dngCreator = new DngCreator(characteristics, captureResult);
                FileOutputStream outputStream = new FileOutputStream(dngFile)) {
            // TODO: Add DngCreator#setThumbnail and add the DNG to the normal
            // filmstrip.
            dngCreator.writeImage(outputStream, image);
            outputStream.close();
            image.close();
        } catch (IOException e) {
            Log.e(TAG, "Could not store DNG file", e);
            return;
        }
        Log.i(TAG, "Successfully stored DNG file: " + dngFile.getAbsolutePath());
    }
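
    /*
     * Note on the JPEG path below: for ImageFormat.JPEG the encoded bytes are
     * taken directly from plane 0 of the Image, while for YUV_420_888 the
     * frame is compressed in software via JpegUtilNative into a direct
     * buffer of width * height * 3 bytes, which comfortably bounds the
     * compressed output in practice.
     */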

    /**
     * Given an image acquired from the capture image reader, this extracts
     * the final JPEG. If the image is already JPEG, we extract and return it
     * as is. If the image is YUV, we convert it to JPEG and return the
     * result.
     *
     * @param image the image we got from the image reader.
     * @return A valid JPEG image.
     */
    private static byte[] acquireJpegBytesAndClose(Image image) {
        ByteBuffer buffer;
        if (image.getFormat() == ImageFormat.JPEG) {
            Image.Plane plane0 = image.getPlanes()[0];
            buffer = plane0.getBuffer();
        } else if (image.getFormat() == ImageFormat.YUV_420_888) {
            buffer = ByteBuffer.allocateDirect(image.getWidth() * image.getHeight() * 3);

            Log.v(TAG, "Compressing JPEG with software encoder.");
            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(
                    new AndroidImageProxy(image), buffer, JPEG_QUALITY);

            if (numBytes < 0) {
                throw new RuntimeException("Error compressing jpeg.");
            }
            buffer.limit(numBytes);
        } else {
            throw new RuntimeException("Unsupported image format.");
        }

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        image.close();
        return imageBytes;
    }

    /**
     * @return The largest supported picture size.
     */
    public Size getDefaultPictureSize() {
        StreamConfigurationMap configs =
                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);

        // Find the largest supported size.
        android.util.Size largestSupportedSize = supportedSizes[0];
        long largestSupportedSizePixels =
                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
        for (int i = 1; i < supportedSizes.length; i++) {
            long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
            if (numPixels > largestSupportedSizePixels) {
                largestSupportedSize = supportedSizes[i];
                largestSupportedSizePixels = numPixels;
            }
        }
        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
    }
}