/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.legacy;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.impl.CameraDeviceImpl;
import android.hardware.camera2.utils.LongParcelable;
import android.hardware.camera2.utils.SizeAreaComparator;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.MutableLong;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.android.internal.util.Preconditions.*;

/**
 * This class executes requests to the {@link Camera}.
 *
 * <p>
 * The main components of this class are:
 * - A message queue of requests to the {@link Camera}.
 * - A thread that consumes requests to the {@link Camera} and executes them.
 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
 * - A {@link CameraDeviceState} state machine that manages the callbacks for various operations.
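 *
 * A typical lifecycle, from the owner's point of view (sketch only; in practice the wiring is
 * done by {@link LegacyCameraDevice}, and the local variable names below are illustrative):
 * <pre>{@code
 *     RequestThreadManager requestThread = new RequestThreadManager(
 *             cameraId, camera, characteristics, deviceState);
 *     requestThread.start();
 *     requestThread.configure(outputs);          // blocks until the outputs are configured
 *     requestThread.submitCaptureRequests(requests, repeating, frameNumber);
 *     // ...
 *     requestThread.quit();                      // cleans up and releases the Camera
 * }</pre>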
 * </p>
 */
@SuppressWarnings("deprecation")
public class RequestThreadManager {
    private final String TAG;
    private final int mCameraId;
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = false;
    // For slightly more spammy messages that will get repeated every frame
    private static final boolean VERBOSE = false;
    private Camera mCamera;
    private final CameraCharacteristics mCharacteristics;

    private final CameraDeviceState mDeviceState;
    private final CaptureCollector mCaptureCollector;
    private final LegacyFocusStateMapper mFocusStateMapper;
    private final LegacyFaceDetectMapper mFaceDetectMapper;

    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

    private static final int MAX_IN_FLIGHT_REQUESTS = 2;

    private static final int PREVIEW_FRAME_TIMEOUT = 1000; // ms
    private static final int JPEG_FRAME_TIMEOUT = 4000; // ms (same as CTS for API2)
    private static final int REQUEST_COMPLETE_TIMEOUT = JPEG_FRAME_TIMEOUT;

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    private boolean mPreviewRunning = false;

    private final List<Surface> mPreviewOutputs = new ArrayList<>();
    private final List<Surface> mCallbackOutputs = new ArrayList<>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    private Camera.Parameters mParams;

    private final List<Long> mJpegSurfaceIds = new ArrayList<>();

    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue(mJpegSurfaceIds);
    private LegacyRequest mLastRequest = null;
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    private final Object mIdleLock = new Object();
    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");

    private final AtomicBoolean mQuit = new AtomicBoolean(false);

    // Stuff JPEGs into HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers to get around SW write
    // limitations (b/17379185).
    private static final boolean USE_BLOB_FORMAT_OVERRIDE = true;

    /**
     * Container object for Configure messages.
     */
    private static class ConfigureHolder {
        public final ConditionVariable condition;
        public final Collection<Pair<Surface, Size>> surfaces;

        public ConfigureHolder(ConditionVariable condition,
                Collection<Pair<Surface, Size>> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }

    /**
     * Counter class used to calculate and log the current FPS of frame production.
     */
    public static class FpsCounter {
        //TODO: Hook this up to Systrace?
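        // Intended per-frame usage (see mPrevCounter / mRequestCounter above):
        //     counter.countAndLog();   // counts one frame and logs at most every ~5 seconds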
        private static final String TAG = "FpsCounter";
        private int mFrameCount = 0;
        private long mLastTime = 0;
        private long mLastPrintTime = 0;
        private double mLastFps = 0;
        private final String mStreamType;
        private static final long NANO_PER_SECOND = 1000000000; //ns

        public FpsCounter(String streamType) {
            mStreamType = streamType;
        }

        public synchronized void countFrame() {
            mFrameCount++;
            long nextTime = SystemClock.elapsedRealtimeNanos();
            if (mLastTime == 0) {
                mLastTime = nextTime;
            }
            if (nextTime > mLastTime + NANO_PER_SECOND) {
                long elapsed = nextTime - mLastTime;
                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
                mFrameCount = 0;
                mLastTime = nextTime;
            }
        }

        public synchronized double checkFps() {
            return mLastFps;
        }

        public synchronized void staggeredLog() {
            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
                mLastPrintTime = mLastTime;
                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps);
            }
        }

        public synchronized void countAndLog() {
            countFrame();
            staggeredLog();
        }
    }

    /**
     * Fake preview for jpeg captures when there is no active preview.
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }

    private final Camera.ErrorCallback mErrorCallback = new Camera.ErrorCallback() {
        @Override
        public void onError(int i, Camera camera) {
            switch (i) {
                case Camera.CAMERA_ERROR_EVICTED: {
                    flush();
                    mDeviceState.setError(
                            CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISCONNECTED);
                } break;
                default: {
                    Log.e(TAG, "Received error " + i + " from the Camera1 ErrorCallback");
                    mDeviceState.setError(
                            CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                } break;
            }
        }
    };

    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);

    private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.i(TAG, "Received jpeg.");
            Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced();
            if (captureInfo == null || captureInfo.first == null) {
                Log.e(TAG, "Dropping jpeg frame.");
                return;
            }
            RequestHolder holder = captureInfo.first;
            long timestamp = captureInfo.second;
            for (Surface s : holder.getHolderTargets()) {
                try {
                    if (LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds)) {
                        Log.i(TAG, "Producing jpeg buffer...");

                        int totalSize = data.length + LegacyCameraDevice.nativeGetJpegFooterSize();
                        totalSize = (totalSize + 3) & ~0x3; // round up to the next multiple of 4
                        LegacyCameraDevice.setNextTimestamp(s, timestamp);

                        if (USE_BLOB_FORMAT_OVERRIDE) {
                            // Override to RGBA_8888 format.
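                            // Pack the JPEG byte stream (plus footer) into a square RGBA_8888
                            // buffer; the side length below is padded up so the buffer can hold
                            // at least totalSize bytes.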
                            LegacyCameraDevice.setSurfaceFormat(s,
                                    LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);

                            int dimen = (int) Math.ceil(Math.sqrt(totalSize));
                            dimen = (dimen + 0xf) & ~0xf; // round up to nearest multiple of 16
                            LegacyCameraDevice.setSurfaceDimens(s, dimen, dimen);
                            LegacyCameraDevice.produceFrame(s, data, dimen, dimen,
                                    CameraMetadataNative.NATIVE_JPEG_FORMAT);
                        } else {
                            LegacyCameraDevice.setSurfaceDimens(s, totalSize, /*height*/1);
                            LegacyCameraDevice.produceFrame(s, data, totalSize, /*height*/1,
                                    CameraMetadataNative.NATIVE_JPEG_FORMAT);
                        }
                    }
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, dropping frame.", e);
                }
            }

            mReceivedJpeg.open();
        }
    };

    private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos());
        }
    };

    private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
            new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    if (DEBUG) {
                        mPrevCounter.countAndLog();
                    }
                    mGLThreadManager.queueNewFrame();
                }
            };

    private void stopPreview() {
        if (VERBOSE) {
            Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
        }
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
    }

    private void startPreview() {
        if (VERBOSE) {
            Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
        }
        if (!mPreviewRunning) {
            // XX: CameraClient::startPreview is not getting called after a stop
            mCamera.startPreview();
            mPreviewRunning = true;
        }
    }

    private void doJpegCapturePrepare(RequestHolder request) throws IOException {
        if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);

        if (!mPreviewRunning) {
            if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - create fake surface");

            createDummySurface();
            mCamera.setPreviewTexture(mDummyTexture);
            startPreview();
        }
    }

    private void doJpegCapture(RequestHolder request) {
        if (DEBUG) Log.d(TAG, "doJpegCapture");

        mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
        mPreviewRunning = false;
    }

    private void doPreviewCapture(RequestHolder request) throws IOException {
        if (VERBOSE) {
            Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
        }

        if (mPreviewRunning) {
            return; // Already running
        }

        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);

        startPreview();
    }

    private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
        if (DEBUG) {
            String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
            Log.d(TAG, "configureOutputs with " + outputsStr);
        }

        try {
            stopPreview();
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception in configure call: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
            return;
        }

        /*
         * Try to release the previous preview's surface texture earlier if we end up
         * using a different one; this also reduces the likelihood of getting into a deadlock
         * when disconnecting from the old preview texture at a later time.
         */
        try {
            mCamera.setPreviewTexture(/*surfaceTexture*/null);
        } catch (IOException e) {
            Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception in configure call: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
            return;
        }

        if (mGLThreadManager != null) {
            mGLThreadManager.waitUntilStarted();
            mGLThreadManager.ignoreNewFrames();
            mGLThreadManager.waitUntilIdle();
        }
        resetJpegSurfaceFormats(mCallbackOutputs);
        mPreviewOutputs.clear();
        mCallbackOutputs.clear();
        mJpegSurfaceIds.clear();
        mPreviewTexture = null;

        List<Size> previewOutputSizes = new ArrayList<>();
        List<Size> callbackOutputSizes = new ArrayList<>();

        int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
        int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        if (outputs != null) {
            for (Pair<Surface, Size> outPair : outputs) {
                Surface s = outPair.first;
                Size outSize = outPair.second;
                try {
                    int format = LegacyCameraDevice.detectSurfaceType(s);
                    LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
                    switch (format) {
                        case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                            if (USE_BLOB_FORMAT_OVERRIDE) {
                                // Override to RGBA_8888 format.
                                LegacyCameraDevice.setSurfaceFormat(s,
                                        LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
                            }
                            mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
                            mCallbackOutputs.add(s);
                            callbackOutputSizes.add(outSize);
                            break;
                        default:
                            LegacyCameraDevice.setScalingMode(s,
                                    LegacyCameraDevice.NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
                            mPreviewOutputs.add(s);
                            previewOutputSizes.add(outSize);
                            break;
                    }
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, skipping...", e);
                }
            }
        }
        try {
            mParams = mCamera.getParameters();
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
            return;
        }

        List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
        int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
        if (DEBUG) {
            Log.d(TAG, "configureOutputs - Selected range [" +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
        }
        mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);

        Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs,
                callbackOutputSizes, mParams);

        if (previewOutputSizes.size() > 0) {

            Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);

            // Find the largest JPEG dimension - assumed to have the same aspect ratio as the
            // sensor.
            Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);

            Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize
                    : largestJpegDimen;

            List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(
                    mParams.getSupportedPreviewSizes());

            // Use the smallest preview dimension with the same aspect ratio as the sensor that is
            // at least as large as all of the configured output dimensions. If none exists, fall
            // back to using the largest supported preview size.
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                long currArea = s.getWidth() * s.getHeight();
                long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            mParams.setPreviewSize(mIntermediateBufferSize.getWidth(),
                    mIntermediateBufferSize.getHeight());

            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No intermediate buffer selected, no preview outputs were configured");
            }
        }

        if (smallestSupportedJpegSize != null) {
            /*
             * Set takePicture size to the smallest supported JPEG size large enough
             * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
             */

            Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
            mParams.setPictureSize(
                    smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
        }

        // TODO: Detect and optimize single-output paths here to skip stream teeing.
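        // Hand the preview outputs to the GL thread below; it tees each frame arriving on
        // mPreviewTexture into every configured preview Surface.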
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
            mGLThreadManager.start();
        }
        mGLThreadManager.waitUntilStarted();
        List<Pair<Surface, Size>> previews = new ArrayList<>();
        Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
        for (Surface p : mPreviewOutputs) {
            previews.add(new Pair<>(p, previewSizeIter.next()));
        }
        mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }

        try {
            mCamera.setParameters(mParams);
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception while configuring: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        }
    }

    private void resetJpegSurfaceFormats(Collection<Surface> surfaces) {
        if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) {
            return;
        }
        for (Surface s : surfaces) {
            if (s == null || !s.isValid()) {
                Log.w(TAG, "Jpeg surface is invalid, skipping...");
                continue;
            }
            try {
                LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB);
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
    }

    /**
     * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
     * than all of the configured {@code JPEG} outputs (by both width and height).
     *
     * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
     * still satisfies the above constraint.</p>
     *
     * <p>As a result, the returned size is guaranteed to be usable without needing
     * to upscale any of the outputs. If only one {@code JPEG} surface is used,
     * then no scaling/cropping is necessary between the taken picture and
     * the {@code JPEG} output surface.</p>
     *
     * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
     * @param params api1 parameters (used for reading only)
     *
     * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
     *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
     *         surfaces.
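     *
     *         <p>For example (sizes here are purely illustrative): with configured {@code JPEG}
     *         outputs of 1600x1200 and 1920x1080, the bounding box is 1920x1200, so the
     *         smallest supported picture size that is at least 1920x1200 in both dimensions
     *         is returned.</p>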
     */
    private Size calculatePictureSize(List<Surface> callbackOutputs,
            List<Size> callbackSizes, Camera.Parameters params) {
        /*
         * Find the largest JPEG size (if any), from the configured outputs:
         * - the api1 picture size should be set to the smallest legal size that's at least as
         *   large as the largest configured JPEG size
         */
        if (callbackOutputs.size() != callbackSizes.size()) {
            throw new IllegalStateException("Input collections must be same length");
        }
        List<Size> configuredJpegSizes = new ArrayList<>();
        Iterator<Size> sizeIterator = callbackSizes.iterator();
        for (Surface callbackSurface : callbackOutputs) {
            Size jpegSize = sizeIterator.next();
            if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
                continue; // Ignore non-JPEG callback formats
            }

            configuredJpegSizes.add(jpegSize);
        }
        if (!configuredJpegSizes.isEmpty()) {
            /*
             * Find the largest configured JPEG width, and height, independently
             * of the rest.
             *
             * The rest of the JPEG streams can be cropped out of this smallest bounding
             * rectangle.
             */
            int maxConfiguredJpegWidth = -1;
            int maxConfiguredJpegHeight = -1;
            for (Size jpegSize : configuredJpegSizes) {
                maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
                        jpegSize.getWidth() : maxConfiguredJpegWidth;
                maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
                        jpegSize.getHeight() : maxConfiguredJpegHeight;
            }
            Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);

            List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
                    params.getSupportedPictureSizes());

            /*
             * Find the smallest supported JPEG size that can fit the smallest bounding
             * rectangle for the configured JPEG sizes.
             */
            List<Size> candidateSupportedJpegSizes = new ArrayList<>();
            for (Size supportedJpegSize : supportedJpegSizes) {
                if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
                        supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
                    candidateSupportedJpegSizes.add(supportedJpegSize);
                }
            }

            if (candidateSupportedJpegSizes.isEmpty()) {
                throw new AssertionError(
                        "Could not find any supported JPEG sizes large enough to fit " +
                        smallestBoundJpegSize);
            }

            Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
                    new SizeAreaComparator());

            if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
                Log.w(TAG,
                        String.format(
                                "configureOutputs - Will need to crop picture %s into "
                                + "smallest bound size %s",
                                smallestSupportedJpegSize, smallestBoundJpegSize));
            }

            return smallestSupportedJpegSize;
        }

        return null;
    }

    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }

    // Calculate the highest FPS range supported
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
        if (frameRates.size() == 0) {
            Log.e(TAG, "No supported frame rates returned!");
            return null;
        }

        int bestMin = 0;
        int bestMax = 0;
        int bestIndex = 0;
        int index = 0;
        for (int[] rate : frameRates) {
            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
                bestMin = minFps;
                bestMax = maxFps;
                bestIndex = index;
            }
            index++;
        }

        return frameRates.get(bestIndex);
    }

    private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
        private boolean mCleanup = false;
        private final LegacyResultMapper mMapper = new LegacyResultMapper();

        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message: " + msg.what);
            }
            long startTime = 0;
            if (DEBUG) {
                startTime = SystemClock.elapsedRealtimeNanos();
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes + " surfaces configured.");

                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing configure request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete.");
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                        break;
                    }

                    configureOutputs(config.surfaces);
                    config.condition.open();
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Configure took " + totalTime + " ns");
                    }
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();

                    if (nextBurst == null) {
                        // If there are no further requests queued, wait for any currently
                        // executing requests to complete, then switch to idle state.
                        try {
                            boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                    TimeUnit.MILLISECONDS);
                            if (!success) {
                                Log.e(TAG,
                                        "Timed out while waiting for prior requests to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        synchronized (mIdleLock) {
                            // Retry the request queue.
                            nextBurst = mRequestQueue.getNext();

                            // If we still have no queued requests, go idle.
                            if (nextBurst == null) {
                                mDeviceState.setIdle();
                                break;
                            }
                        }
                    }

                    if (nextBurst != null) {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();

                        boolean paramsChanged = false;

                        // Only update parameters if the request has changed
                        if (mLastRequest == null || mLastRequest.captureRequest != request) {

                            // The intermediate buffer is sometimes null, but we always need
                            // the Camera1 API configured preview size
                            Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());

                            LegacyRequest legacyRequest = new LegacyRequest(mCharacteristics,
                                    request, previewSize, mParams); // params are copied

                            // Parameters are mutated as a side-effect
                            LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);

                            // If the parameters have changed, set them in the Camera1 API.
                            if (!mParams.same(legacyRequest.parameters)) {
                                try {
                                    mCamera.setParameters(legacyRequest.parameters);
                                } catch (RuntimeException e) {
                                    // If setting the parameters failed, report a request error to
                                    // the camera client, and skip any further work for this request
                                    Log.e(TAG, "Exception while setting camera parameters: ", e);
                                    holder.failRequest();
                                    mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                            CameraDeviceImpl.CameraDeviceCallbacks.
                                                    ERROR_CAMERA_REQUEST);
                                    continue;
                                }
                                paramsChanged = true;
                                mParams = legacyRequest.parameters;
                            }

                            mLastRequest = legacyRequest;
                        }

                        try {
                            boolean success = mCaptureCollector.queueRequest(holder,
                                    mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);

                            if (!success) {
                                // Report a request error if we timed out while queueing this.
                                Log.e(TAG, "Timed out while queueing capture request.");
                                holder.failRequest();
                                mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                        CameraDeviceImpl.CameraDeviceCallbacks.
                                                ERROR_CAMERA_REQUEST);
                                continue;
                            }

                            // Starting the preview needs to happen before enabling
                            // face detection or auto focus
                            if (holder.hasPreviewTargets()) {
                                doPreviewCapture(holder);
                            }
                            if (holder.hasJpegTargets()) {
                                while (!mCaptureCollector.waitForPreviewsEmpty(
                                        PREVIEW_FRAME_TIMEOUT, TimeUnit.MILLISECONDS)) {
                                    // Fail preview requests until the queue is empty.
                                    Log.e(TAG, "Timed out while waiting for preview requests to " +
                                            "complete.");
                                    mCaptureCollector.failNextPreview();
                                }
                                mReceivedJpeg.close();
                                doJpegCapturePrepare(holder);
                            }

                            /*
                             * Do all the actions that require a preview to have been started
                             */

                            // Toggle face detection on/off
                            // - do this before AF to give AF a chance to use faces
                            mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);

                            // Unconditionally process AF triggers, since they're non-idempotent
                            // - must be done after setting the most-up-to-date AF mode
                            mFocusStateMapper.processRequestTriggers(request, mParams);

                            if (holder.hasJpegTargets()) {
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                    mCaptureCollector.failNextJpeg();
                                }
                            }

                        } catch (IOException e) {
                            Log.e(TAG, "Received device exception during capture call: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted during capture: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (RuntimeException e) {
                            Log.e(TAG, "Received device exception during capture call: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        if (paramsChanged) {
                            if (DEBUG) {
                                Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
                            }
                            try {
                                mParams = mCamera.getParameters();
                            } catch (RuntimeException e) {
                                Log.e(TAG, "Received device exception: ", e);
                                mDeviceState.setError(
                                        CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                                break;
                            }

                            // Update parameters to the latest that we think the camera is using
                            mLastRequest.setParameters(mParams);
                        }

                        MutableLong timestampMutable = new MutableLong(/*value*/0L);
                        try {
                            boolean success = mCaptureCollector.waitForRequestCompleted(holder,
                                    REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
                                    /*out*/timestampMutable);

                            if (!success) {
                                Log.e(TAG, "Timed out while waiting for request to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted waiting for request completion: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }
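
                        // timestampMutable was filled in by the capture collector above; its value
                        // is used as the timestamp when building the capture result below.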
                        CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
                                mLastRequest, timestampMutable.value);
                        /*
                         * Order matters: The default result mapper is state-less; the
                         * other mappers carry state and may override keys set by the default
                         * mapper with their own values.
                         */

                        // Update AF state
                        mFocusStateMapper.mapResultTriggers(result);
                        // Update face-related results
                        mFaceDetectMapper.mapResultFaces(result, mLastRequest);

                        if (!holder.requestFailed()) {
                            mDeviceState.setCaptureResult(holder, result,
                                    CameraDeviceState.NO_CAPTURE_ERROR);
                        }
                    }
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Capture request took " + totalTime + " ns");
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    mCleanup = true;
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing cleanup request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                    }
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                        mGLThreadManager = null;
                    }
                    if (mCamera != null) {
                        mCamera.release();
                        mCamera = null;
                    }
                    resetJpegSurfaceFormats(mCallbackOutputs);
                    break;
                case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
                    // OK: Ignore message.
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    /**
     * Create a new RequestThreadManager.
     *
     * @param cameraId the id of the camera to use.
     * @param camera an open camera object. The RequestThreadManager takes ownership of this camera
     *               object, and is responsible for closing it.
     * @param characteristics the static camera characteristics corresponding to this camera device
     * @param deviceState a {@link CameraDeviceState} state machine.
     */
    public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics,
            CameraDeviceState deviceState) {
        mCamera = checkNotNull(camera, "camera must not be null");
        mCameraId = cameraId;
        mCharacteristics = checkNotNull(characteristics, "characteristics must not be null");
        String name = String.format("RequestThread-%d", cameraId);
        TAG = name;
        mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
        mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
        mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics);
        mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
        mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
        mCamera.setErrorCallback(mErrorCallback);
    }

    /**
     * Start the request thread.
     */
    public void start() {
        mRequestThread.start();
    }

    /**
     * Flush any pending requests.
     *
     * @return the last frame number.
     */
    public long flush() {
        Log.i(TAG, "Flushing all pending requests.");
        long lastFrame = mRequestQueue.stopRepeating();
        mCaptureCollector.failAll();
        return lastFrame;
    }

    /**
     * Quit the request thread, and clean up everything.
     */
    public void quit() {
        if (!mQuit.getAndSet(true)) {  // Avoid sending messages on dead thread's handler.
            Handler handler = mRequestThread.waitAndGetHandler();
            handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
            mRequestThread.quitSafely();
            try {
                mRequestThread.join();
            } catch (InterruptedException e) {
                Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                        mRequestThread.getName(), mRequestThread.getId()));
            }
        }
    }

    /**
     * Submit the given burst of requests to be captured.
     *
     * <p>If the burst is repeating, replace the current repeating burst.</p>
     *
     * @param requests the burst of requests to add to the queue.
     * @param repeating true if the burst is repeating.
     * @param frameNumber an output argument that contains either the frame number of the last
     *                    frame that will be returned for this request, or the frame number of the
     *                    last frame that will be returned for the current repeating request if
     *                    this burst is set to be repeating.
     * @return the request id.
     */
    public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating,
            /*out*/LongParcelable frameNumber) {
        Handler handler = mRequestThread.waitAndGetHandler();
        int ret;
        synchronized (mIdleLock) {
            ret = mRequestQueue.submit(requests, repeating, frameNumber);
            handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
        }
        return ret;
    }

    /**
     * Cancel a repeating request.
     *
     * @param requestId the id of the repeating request to cancel.
     * @return the last frame to be returned from the HAL for the given repeating request, or
     *         {@code INVALID_FRAME} if none exists.
     */
    public long cancelRepeating(int requestId) {
        return mRequestQueue.stopRepeating(requestId);
    }

    /**
     * Configure with the current list of output Surfaces.
     *
     * <p>
     * This operation blocks until the configuration is complete.
     * </p>
     *
     * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p>
     *
     * @param outputs a {@link java.util.Collection} of outputs to configure.
     */
    public void configure(Collection<Pair<Surface, Size>> outputs) {
        Handler handler = mRequestThread.waitAndGetHandler();
        final ConditionVariable condition = new ConditionVariable(/*closed*/false);
        ConfigureHolder holder = new ConfigureHolder(condition, outputs);
        handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
        condition.block();
    }
}