1 /* 2 * Copyright 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.cts; 18 19 import static android.hardware.camera2.CameraCharacteristics.*; 20 import static android.hardware.camera2.cts.CameraTestUtils.*; 21 22 import android.graphics.Point; 23 import android.graphics.PointF; 24 import android.graphics.Rect; 25 import android.graphics.SurfaceTexture; 26 import android.hardware.camera2.CameraCharacteristics; 27 import android.hardware.camera2.CameraDevice; 28 import android.hardware.camera2.CameraMetadata; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.TotalCaptureResult; 32 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback; 33 import android.hardware.camera2.cts.helpers.StaticMetadata; 34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase; 35 import android.hardware.camera2.params.BlackLevelPattern; 36 import android.hardware.camera2.params.Capability; 37 import android.hardware.camera2.params.ColorSpaceTransform; 38 import android.hardware.camera2.params.Face; 39 import android.hardware.camera2.params.LensShadingMap; 40 import android.hardware.camera2.params.MeteringRectangle; 41 import android.hardware.camera2.params.RggbChannelVector; 42 import android.hardware.camera2.params.TonemapCurve; 43 import android.hardware.cts.helpers.CameraUtils; 44 import android.media.Image; 45 import android.os.Build; 46 import android.os.Parcel; 47 import android.platform.test.annotations.AppModeFull; 48 import android.util.ArraySet; 49 import android.util.Log; 50 import android.util.Pair; 51 import android.util.Range; 52 import android.util.Rational; 53 import android.util.Size; 54 import android.view.Surface; 55 56 import com.android.compatibility.common.util.PropertyUtil; 57 58 import org.junit.Test; 59 import org.junit.runner.RunWith; 60 import org.junit.runners.Parameterized; 61 62 import java.nio.ByteBuffer; 63 import java.util.ArrayList; 64 import java.util.Arrays; 65 import java.util.List; 66 67 /** 68 * <p> 69 * Basic test for camera CaptureRequest key controls. 70 * </p> 71 * <p> 72 * Several test categories are covered: manual sensor control, 3A control, 73 * manual ISP control and other per-frame control and synchronization. 74 * </p> 75 */ 76 77 @RunWith(Parameterized.class) 78 public class CaptureRequestTest extends Camera2SurfaceViewTestCase { 79 private static final String TAG = "CaptureRequestTest"; 80 private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE); 81 private static final int NUM_FRAMES_VERIFIED = 15; 82 private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60; 83 /** 30ms exposure time must be supported by full capability devices. 
*/ 84 private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms 85 private static final int DEFAULT_SENSITIVITY = 100; 86 private static final int RGGB_COLOR_CHANNEL_COUNT = 4; 87 private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT; 88 private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT; 89 private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L; 90 private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms 91 private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation. 92 private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation. 93 private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation. 94 private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.06f; // 6%, Approximation. 95 private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3; 96 private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8; 97 private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100; 98 private static final int NUM_RESULTS_WAIT_TIMEOUT = 100; 99 private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8; 100 private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100; 101 private static final int NUM_PARTIAL_FRAMES_PFC = 2; 102 private static final int NUM_PARTIAL_FRAMES_NPFC = 6; 103 104 private static final int NUM_TEST_FOCUS_DISTANCES = 10; 105 private static final int NUM_FOCUS_DISTANCES_REPEAT = 3; 106 // 5 percent error margin for calibrated device 107 private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f; 108 // 25 percent error margin for uncalibrated device 109 private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f; 110 // 10 percent error margin for approximate device 111 private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f; 112 // 1 percent boundary margin for focus range verify 113 private static final float FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT = 0.01f; 114 private static final int ANTI_FLICKERING_50HZ = 1; 115 private static final int ANTI_FLICKERING_60HZ = 2; 116 // 5 percent error margin for resulting crop regions 117 private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f; 118 private static final float ZOOM_RATIO_ERROR_PERCENT_DELTA = 0.05f; 119 120 // 1 percent error margin for centering the crop region 121 private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f; 122 private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f; 123 private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f; 124 125 // Linear tone mapping curve example. 126 private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f}; 127 // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points. 
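    // Each curve is a flattened list of interleaved (Pin, Pout) control points, the form used by
    // android.hardware.camera2.params.TonemapCurve.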
128 private static final float[] TONEMAP_CURVE_SRGB = { 129 0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f, 130 0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f, 131 0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f, 132 0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f 133 }; 134 private final Rational ZERO_R = new Rational(0, 1); 135 private final Rational ONE_R = new Rational(1, 1); 136 137 private static final int ZOOM_STEPS = 15; 138 139 private enum TorchSeqState { 140 RAMPING_UP, 141 FIRED, 142 RAMPING_DOWN 143 } 144 145 @Override setUp()146 public void setUp() throws Exception { 147 super.setUp(); 148 } 149 150 @Override tearDown()151 public void tearDown() throws Exception { 152 super.tearDown(); 153 } 154 155 /** 156 * Test CaptureRequest settings parcelling. 157 */ 158 @Test testSettingsBinderParcel()159 public void testSettingsBinderParcel() throws Exception { 160 SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5); 161 Surface surface = new Surface(outputTexture); 162 163 for (int i = 0; i < mCameraIdsUnderTest.length; i++) { 164 try { 165 openDevice(mCameraIdsUnderTest[i]); 166 CaptureRequest.Builder requestBuilder = 167 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 168 requestBuilder.addTarget(surface); 169 170 // Check regular/default case 171 CaptureRequest captureRequestOriginal = requestBuilder.build(); 172 Parcel p; 173 p = Parcel.obtain(); 174 captureRequestOriginal.writeToParcel(p, 0); 175 p.setDataPosition(0); 176 CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 177 assertEquals("Parcelled camera settings should match", 178 captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT), 179 new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW)); 180 p.recycle(); 181 182 // Check capture request with additional physical camera settings 183 String physicalId = new String(Integer.toString(i + 1)); 184 ArraySet<String> physicalIds = new ArraySet<String> (); 185 physicalIds.add(physicalId); 186 187 requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW, 188 physicalIds); 189 requestBuilder.addTarget(surface); 190 captureRequestOriginal = requestBuilder.build(); 191 p = Parcel.obtain(); 192 captureRequestOriginal.writeToParcel(p, 0); 193 p.setDataPosition(0); 194 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 195 assertEquals("Parcelled camera settings should match", 196 captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT), 197 new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW)); 198 p.recycle(); 199 200 // Check consistency between parcel write and read by stacking 2 201 // CaptureRequest objects when writing and reading. 
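                // Each createFromParcel() call should consume exactly one serialized request,
                // so both reads below are expected to succeed.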
                p = Parcel.obtain();
                captureRequestOriginal.writeToParcel(p, 0);
                captureRequestOriginal.writeToParcel(p, 0);
                p.setDataPosition(0);
                captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                p.recycle();

                // Check various invalid cases
                p = Parcel.obtain();
                p.writeInt(-1);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to invalid number of settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();

                p = Parcel.obtain();
                p.writeInt(0);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to invalid number of settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();

                p = Parcel.obtain();
                p.writeInt(1);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to absent settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test black level lock when exposure values change.
     * <p>
     * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
     * camera device should lock the black level. Changes to certain capture
     * parameters (such as exposure time) may require the camera device to reset
     * its black level compensation; even so, the black level must remain locked
     * across exposure changes as long as the requests keep the lock ON.
     * </p>
     */
    @Test
    public void testBlackLevelLock() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(mCameraIdsUnderTest[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                // Start with default manual exposure time, with black level being locked.
                requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
                changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));

                startPreview(requestBuilder, previewSz, listener);
                waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                // No lock OFF state is allowed as the exposure is not changed.
                verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);

                // Double the exposure time and gain, with black level still being locked.
                changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
                listener = new SimpleCaptureCallback();
                startPreview(requestBuilder, previewSz, listener);
                waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                // Allow at most one lock OFF state as the exposure is changed once.
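                // (A single lock OFF result is tolerated here because the device may momentarily
                // reset black level compensation while applying the new exposure settings.)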
                verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test dynamic black/white levels if they are supported.
     *
     * <p>
     * If the dynamic black and white levels are reported, verify that:
     * 1. The dynamic black and white levels don't deviate too much from the fixed
     *    (global) values across different sensitivities.
     * 2. If RAW_SENSOR and optical black regions are supported, capture RAW images and
     *    calculate the optical black level values. The reported dynamic black level should be
     *    close enough to the optical black level values.
     * </p>
     */
    @Test
    public void testDynamicBlackWhiteLevel() throws Exception {
        for (String id : mCameraIdsUnderTest) {
            try {
                if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) {
                    continue;
                }
                openDevice(id);
                dynamicBlackWhiteLevelTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic lens shading map request test.
     * <p>
     * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
     * be applied by the camera device, and identity lens shading map data
     * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
     * </p>
     * <p>
     * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
     * will be applied by the camera device. The lens shading map data can be
     * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
     * </p>
     */
    @Test
    public void testLensShadingMap() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(mCameraIdsUnderTest[i]);
                if (!staticInfo.isManualLensShadingMapSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " doesn't support lens shading controls, skipping test");
                    continue;
                }

                List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
                        staticInfo.getAvailableLensShadingMapModesChecked()));

                if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
                    continue;
                }

                openDevice(mCameraIdsUnderTest[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                        STATISTICS_LENS_SHADING_MAP_MODE_ON);

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
                List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
                        mStaticInfo.getAvailableLensShadingModesChecked()));

                // Shading map mode OFF, lensShadingMapMode ON, camera device
                // should output unity maps.
                if (lensShadingModes.contains(SHADING_MODE_OFF)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
                }

                // Shading map mode FAST, lensShadingMapMode ON, camera device
                // should output valid maps.
                if (lensShadingModes.contains(SHADING_MODE_FAST)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);

                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
                }

                // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
                // should output valid maps.
                if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);

                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
                }

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
     * <p>
     * Test all available anti-banding modes and check that the exposure time adjustment
     * is correct.
     * </p>
     */
    @Test
    public void testAntiBandingModes() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                // Without manual sensor control, exposure time cannot be verified
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(mCameraIdsUnderTest[i]);
                int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));

                for (int mode : modes) {
                    antiBandingTestByMode(previewSz, mode);
                }
            } finally {
                closeDevice();
            }
        }

    }

    /**
     * Test AE mode and lock.
     *
     * <p>
     * For AE lock: when it is locked, exposure parameters shouldn't change.
     * For AE modes: each mode should satisfy the per-frame controls defined in the
     * API specifications.
     * </p>
     */
    @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests
    public void testAeModeAndLock() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(mCameraIdsUnderTest[i]);
                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                // Update preview surface with given size for all sub-tests.
                updatePreviewSurface(maxPreviewSz);

                // Test aeMode and lock
                int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
                for (int mode : aeModes) {
                    aeModeAndLockTestByMode(mode);
                }
            } finally {
                closeDevice();
            }
        }
    }

    /** Test {@link CaptureRequest#FLASH_MODE} control.
     * <p>
     * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control
     * and {@link CaptureResult#FLASH_STATE} result.
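     * Flash control is only exercised with AE mode ON and, when supported, OFF, since
     * FLASH_MODE is only effective in those AE modes.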
     * </p>
     */
    @Test
    public void testFlashControl() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(mCameraIdsUnderTest[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                startPreview(requestBuilder, maxPreviewSz, listener);

                // Flash control can only be used when the AE mode is ON or OFF.
                flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);

                // LEGACY won't support AE mode OFF
                boolean aeOffModeSupported = false;
                for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
                    if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
                        aeOffModeSupported = true;
                    }
                }
                if (aeOffModeSupported) {
                    flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
                }

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test that the flash can be successfully turned off given various initial and final
     * AE_CONTROL modes for repeating CaptureRequests.
     */
    @Test
    public void testFlashTurnOff() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).hasFlash()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support flash, skipping");
                    continue;
                }
                openDevice(mCameraIdsUnderTest[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                startPreview(requestBuilder, maxPreviewSz, listener);
                boolean isLegacy = CameraUtils.isLegacyHAL(mCameraManager, mCameraIdsUnderTest[i]);
                flashTurnOffTest(listener, isLegacy,
                        /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);

                flashTurnOffTest(listener, isLegacy,
                        /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                flashTurnOffTest(listener, isLegacy,
                        /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);

                stopPreview();
            } finally {
                closeDevice();
            }
        }

    }

    /**
     * Test face detection modes and results.
578 */ 579 @Test testFaceDetection()580 public void testFaceDetection() throws Exception { 581 for (int i = 0; i < mCameraIdsUnderTest.length; i++) { 582 try { 583 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) { 584 Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] + 585 " does not support color outputs, skipping"); 586 continue; 587 } 588 openDevice(mCameraIdsUnderTest[i]); 589 faceDetectionTestByCamera(); 590 } finally { 591 closeDevice(); 592 } 593 } 594 } 595 596 /** 597 * Test tone map modes and controls. 598 */ 599 @Test testToneMapControl()600 public void testToneMapControl() throws Exception { 601 for (String id : mCameraIdsUnderTest) { 602 try { 603 if (!mAllStaticInfo.get(id).isManualToneMapSupported()) { 604 Log.i(TAG, "Camera " + id + 605 " doesn't support tone mapping controls, skipping test"); 606 continue; 607 } 608 openDevice(id); 609 toneMapTestByCamera(); 610 } finally { 611 closeDevice(); 612 } 613 } 614 } 615 616 /** 617 * Test color correction modes and controls. 618 */ 619 @Test testColorCorrectionControl()620 public void testColorCorrectionControl() throws Exception { 621 for (String id : mCameraIdsUnderTest) { 622 try { 623 if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) { 624 Log.i(TAG, "Camera " + id + 625 " doesn't support color correction controls, skipping test"); 626 continue; 627 } 628 openDevice(id); 629 colorCorrectionTestByCamera(); 630 } finally { 631 closeDevice(); 632 } 633 } 634 } 635 636 /** 637 * Test edge mode control for Fps not exceeding 30. 638 */ 639 @Test testEdgeModeControl()640 public void testEdgeModeControl() throws Exception { 641 for (String id : mCameraIdsUnderTest) { 642 try { 643 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 644 Log.i(TAG, "Camera " + id + 645 " doesn't support EDGE_MODE controls, skipping test"); 646 continue; 647 } 648 649 openDevice(id); 650 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 651 edgeModesTestByCamera(fpsRanges); 652 } finally { 653 closeDevice(); 654 } 655 } 656 } 657 658 /** 659 * Test edge mode control for Fps greater than 30. 660 */ 661 @Test testEdgeModeControlFastFps()662 public void testEdgeModeControlFastFps() throws Exception { 663 for (String id : mCameraIdsUnderTest) { 664 try { 665 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 666 Log.i(TAG, "Camera " + id + 667 " doesn't support EDGE_MODE controls, skipping test"); 668 continue; 669 } 670 671 openDevice(id); 672 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 673 edgeModesTestByCamera(fpsRanges); 674 } finally { 675 closeDevice(); 676 } 677 } 678 679 } 680 681 /** 682 * Test focus distance control. 
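     * <p>Requires a focuser and the MANUAL_SENSOR capability so that
     * {@link CaptureRequest#LENS_FOCUS_DISTANCE} can be set directly with AF disabled.</p>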
683 */ 684 @Test 685 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testFocusDistanceControl()686 public void testFocusDistanceControl() throws Exception { 687 for (String id : mCameraIdsUnderTest) { 688 try { 689 StaticMetadata staticInfo = mAllStaticInfo.get(id); 690 if (!staticInfo.hasFocuser()) { 691 Log.i(TAG, "Camera " + id + " has no focuser, skipping test"); 692 continue; 693 } 694 695 if (!staticInfo.isCapabilitySupported( 696 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 697 Log.i(TAG, "Camera " + id + 698 " does not support MANUAL_SENSOR, skipping test"); 699 continue; 700 } 701 702 openDevice(id); 703 focusDistanceTestByCamera(); 704 } finally { 705 closeDevice(); 706 } 707 } 708 } 709 710 /** 711 * Test noise reduction mode for fps ranges not exceeding 30 712 */ 713 @Test testNoiseReductionModeControl()714 public void testNoiseReductionModeControl() throws Exception { 715 for (String id : mCameraIdsUnderTest) { 716 try { 717 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 718 Log.i(TAG, "Camera " + id + 719 " doesn't support noise reduction mode, skipping test"); 720 continue; 721 } 722 723 openDevice(id); 724 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 725 noiseReductionModeTestByCamera(fpsRanges); 726 } finally { 727 closeDevice(); 728 } 729 } 730 } 731 732 /** 733 * Test noise reduction mode for fps ranges greater than 30 734 */ 735 @Test testNoiseReductionModeControlFastFps()736 public void testNoiseReductionModeControlFastFps() throws Exception { 737 for (String id : mCameraIdsUnderTest) { 738 try { 739 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 740 Log.i(TAG, "Camera " + id + 741 " doesn't support noise reduction mode, skipping test"); 742 continue; 743 } 744 745 openDevice(id); 746 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 747 noiseReductionModeTestByCamera(fpsRanges); 748 } finally { 749 closeDevice(); 750 } 751 } 752 } 753 754 /** 755 * Test AWB lock control. 756 * 757 * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p> 758 */ 759 @Test testAwbModeAndLock()760 public void testAwbModeAndLock() throws Exception { 761 for (String id : mCameraIdsUnderTest) { 762 try { 763 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 764 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 765 continue; 766 } 767 openDevice(id); 768 awbModeAndLockTestByCamera(); 769 } finally { 770 closeDevice(); 771 } 772 } 773 } 774 775 /** 776 * Test different AF modes. 777 */ 778 @Test testAfModes()779 public void testAfModes() throws Exception { 780 for (String id : mCameraIdsUnderTest) { 781 try { 782 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 783 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 784 continue; 785 } 786 openDevice(id); 787 afModeTestByCamera(); 788 } finally { 789 closeDevice(); 790 } 791 } 792 } 793 794 /** 795 * Test video and optical stabilizations. 
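     * <p>Cameras that advertise neither video stabilization modes nor optical stabilization,
     * or that lack color outputs, are skipped.</p>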
796 */ 797 @Test testCameraStabilizations()798 public void testCameraStabilizations() throws Exception { 799 for (String id : mCameraIdsUnderTest) { 800 try { 801 StaticMetadata staticInfo = mAllStaticInfo.get(id); 802 List<Key<?>> keys = staticInfo.getCharacteristics().getKeys(); 803 if (!(keys.contains( 804 CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) || 805 keys.contains( 806 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) { 807 Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes"); 808 continue; 809 } 810 if (!staticInfo.isColorOutputSupported()) { 811 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 812 continue; 813 } 814 openDevice(id); 815 stabilizationTestByCamera(); 816 } finally { 817 closeDevice(); 818 } 819 } 820 } 821 822 /** 823 * Test digitalZoom (center wise and non-center wise), validate the returned crop regions. 824 * The max preview size is used for each camera. 825 */ 826 @Test testDigitalZoom()827 public void testDigitalZoom() throws Exception { 828 for (String id : mCameraIdsUnderTest) { 829 try { 830 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 831 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 832 continue; 833 } 834 openDevice(id); 835 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 836 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/false); 837 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/true); 838 } finally { 839 closeDevice(); 840 } 841 } 842 } 843 844 /** 845 * Test zoom using CONTROL_ZOOM_RATIO, validate the returned crop regions and zoom ratio. 846 * The max preview size is used for each camera. 847 */ 848 @Test testZoomRatio()849 public void testZoomRatio() throws Exception { 850 for (String id : mCameraIdsUnderTest) { 851 try { 852 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 853 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 854 continue; 855 } 856 openDevice(id); 857 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 858 zoomRatioTestByCamera(maxPreviewSize); 859 } finally { 860 closeDevice(); 861 } 862 } 863 } 864 865 /** 866 * Test that zoom doesn't incur non-monotonic timestamp sequence 867 * 868 * Camera API requires that camera timestamps monotonically increase. 869 */ 870 @Test 871 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testZoomTimestampIncrease()872 public void testZoomTimestampIncrease() throws Exception { 873 if (PropertyUtil.getVendorApiLevel() <= Build.VERSION_CODES.UPSIDE_DOWN_CAKE) { 874 // Only run test for Vendor API level V or higher 875 return; 876 } 877 878 for (String id : mCameraIdsUnderTest) { 879 try { 880 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 881 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 882 continue; 883 } 884 openDevice(id); 885 zoomTimestampIncreaseTestByCamera(); 886 } finally { 887 closeDevice(); 888 } 889 } 890 } 891 892 /** 893 * Test digital zoom and all preview size combinations. 894 * TODO: this and above test should all be moved to preview test class. 
895 */ 896 @Test testDigitalZoomPreviewCombinations()897 public void testDigitalZoomPreviewCombinations() throws Exception { 898 for (String id : mCameraIdsUnderTest) { 899 try { 900 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 901 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 902 continue; 903 } 904 openDevice(id); 905 digitalZoomPreviewCombinationTestByCamera(); 906 } finally { 907 closeDevice(); 908 } 909 } 910 } 911 912 /** 913 * Test scene mode controls. 914 */ 915 @Test testSceneModes()916 public void testSceneModes() throws Exception { 917 for (String id : mCameraIdsUnderTest) { 918 try { 919 if (mAllStaticInfo.get(id).isSceneModeSupported()) { 920 openDevice(id); 921 sceneModeTestByCamera(); 922 } 923 } finally { 924 closeDevice(); 925 } 926 } 927 } 928 929 /** 930 * Test effect mode controls. 931 */ 932 @Test testEffectModes()933 public void testEffectModes() throws Exception { 934 for (String id : mCameraIdsUnderTest) { 935 try { 936 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 937 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 938 continue; 939 } 940 openDevice(id); 941 effectModeTestByCamera(); 942 } finally { 943 closeDevice(); 944 } 945 } 946 } 947 948 /** 949 * Test extended scene mode controls. 950 */ 951 @Test testExtendedSceneModes()952 public void testExtendedSceneModes() throws Exception { 953 for (String id : mCameraIdsUnderTest) { 954 try { 955 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 956 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 957 continue; 958 } 959 openDevice(id); 960 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 961 extendedSceneModeTestByCamera(fpsRanges); 962 } finally { 963 closeDevice(); 964 } 965 } 966 } 967 968 /** 969 * Test basic auto-framing. 970 */ 971 @Test testAutoframing()972 public void testAutoframing() throws Exception { 973 for (String id : mCameraIdsUnderTest) { 974 try { 975 if (!mAllStaticInfo.get(id).isAutoframingSupported()) { 976 Log.i(TAG, "Camera " + id + " does not support auto-framing, skipping"); 977 continue; 978 } 979 openDevice(id); 980 autoframingTestByCamera(); 981 } finally { 982 closeDevice(); 983 } 984 } 985 } 986 987 /** 988 * Test settings override controls. 989 */ 990 @Test testSettingsOverrides()991 public void testSettingsOverrides() throws Exception { 992 for (String id : mCameraIdsUnderTest) { 993 try { 994 StaticMetadata staticInfo = mAllStaticInfo.get(id); 995 if (!staticInfo.isColorOutputSupported()) { 996 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 997 continue; 998 } 999 if (!staticInfo.isZoomSettingsOverrideSupported()) { 1000 Log.i(TAG, "Camera " + id + " does not support zoom overrides, skipping"); 1001 continue; 1002 } 1003 openDevice(id); 1004 settingsOverrideTestByCamera(); 1005 } finally { 1006 closeDevice(); 1007 } 1008 } 1009 } 1010 1011 // TODO: add 3A state machine test. 1012 1013 /** 1014 * Per camera dynamic black and white level test. 
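     * <p>Captures frames at several sensitivities and compares the reported dynamic black/white
     * levels against the fixed levels and, when RAW with optical black regions is supported,
     * against optical black averages measured from RAW captures.</p>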
1015 */ dynamicBlackWhiteLevelTestByCamera()1016 private void dynamicBlackWhiteLevelTestByCamera() throws Exception { 1017 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1018 SimpleImageReaderListener imageListener = null; 1019 CaptureRequest.Builder previewBuilder = 1020 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1021 CaptureRequest.Builder rawBuilder = null; 1022 Size previewSize = 1023 getMaxPreviewSize(mCamera.getId(), mCameraManager, 1024 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 1025 Size rawSize = null; 1026 boolean canCaptureBlackRaw = 1027 mStaticInfo.isCapabilitySupported( 1028 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) && 1029 mStaticInfo.isOpticalBlackRegionSupported(); 1030 if (canCaptureBlackRaw) { 1031 // Capture Raw16, then calculate the optical black, and use it to check with the dynamic 1032 // black level. 1033 rawBuilder = 1034 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 1035 rawSize = mStaticInfo.getRawDimensChecked(); 1036 imageListener = new SimpleImageReaderListener(); 1037 prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize, 1038 resultListener, imageListener); 1039 } else { 1040 startPreview(previewBuilder, previewSize, resultListener); 1041 } 1042 1043 // Capture a sequence of frames with different sensitivities and validate the black/white 1044 // level values 1045 int[] sensitivities = getSensitivityTestValues(); 1046 float[][] dynamicBlackLevels = new float[sensitivities.length][]; 1047 int[] dynamicWhiteLevels = new int[sensitivities.length]; 1048 float[][] opticalBlackLevels = new float[sensitivities.length][]; 1049 for (int i = 0; i < sensitivities.length; i++) { 1050 CaptureResult result = null; 1051 if (canCaptureBlackRaw) { 1052 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1053 CaptureRequest rawRequest = rawBuilder.build(); 1054 mSession.capture(rawRequest, resultListener, mHandler); 1055 result = resultListener.getCaptureResultForRequest(rawRequest, 1056 NUM_RESULTS_WAIT_TIMEOUT); 1057 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS); 1058 1059 // Get max (area-wise) optical black region 1060 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get( 1061 CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS); 1062 Rect maxRegion = opticalBlackRegions[0]; 1063 for (Rect region : opticalBlackRegions) { 1064 if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) { 1065 maxRegion = region; 1066 } 1067 } 1068 1069 // Get average black pixel values in the region (region is multiple of 2x2) 1070 Image.Plane rawPlane = rawImage.getPlanes()[0]; 1071 ByteBuffer rawBuffer = rawPlane.getBuffer(); 1072 float[] avgBlackLevels = {0, 0, 0, 0}; 1073 final int rowSize = rawPlane.getRowStride(); 1074 final int bytePerPixel = rawPlane.getPixelStride(); 1075 if (VERBOSE) { 1076 Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " + 1077 rawPlane.getRowStride()); 1078 } 1079 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) { 1080 for (int col = maxRegion.left; col < maxRegion.right; col += 2) { 1081 int startOffset = row * rowSize + col * bytePerPixel; 1082 avgBlackLevels[0] += rawBuffer.getShort(startOffset); 1083 avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel); 1084 startOffset += rowSize; 1085 avgBlackLevels[2] += rawBuffer.getShort(startOffset); 1086 avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel); 1087 } 1088 } 1089 int 
numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2); 1090 for (int m = 0; m < avgBlackLevels.length; m++) { 1091 avgBlackLevels[m] /= numBlackBlocks; 1092 } 1093 opticalBlackLevels[i] = avgBlackLevels; 1094 1095 if (VERBOSE) { 1096 Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s", 1097 sensitivities[i], Arrays.toString(avgBlackLevels))); 1098 } 1099 1100 rawImage.close(); 1101 } else { 1102 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1103 CaptureRequest previewRequest = previewBuilder.build(); 1104 mSession.capture(previewRequest, resultListener, mHandler); 1105 result = resultListener.getCaptureResultForRequest(previewRequest, 1106 NUM_RESULTS_WAIT_TIMEOUT); 1107 } 1108 1109 dynamicBlackLevels[i] = getValueNotNull(result, 1110 CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL); 1111 dynamicWhiteLevels[i] = getValueNotNull(result, 1112 CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL); 1113 } 1114 1115 if (VERBOSE) { 1116 Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities)); 1117 Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)); 1118 Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels)); 1119 if (canCaptureBlackRaw) { 1120 Log.v(TAG, "Optical black level results " + 1121 Arrays.deepToString(opticalBlackLevels)); 1122 } 1123 } 1124 1125 // check the dynamic black level against global black level. 1126 // Implicit guarantee: if the dynamic black level is supported, fixed black level must be 1127 // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions). 1128 BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get( 1129 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN); 1130 int[] fixedBlackLevels = new int[4]; 1131 int fixedWhiteLevel = mStaticInfo.getCharacteristics().get( 1132 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL); 1133 blackPattern.copyTo(fixedBlackLevels, 0); 1134 float maxBlackDeviation = 0; 1135 int maxWhiteDeviation = 0; 1136 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1137 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1138 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) { 1139 maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]); 1140 } 1141 } 1142 if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) { 1143 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel); 1144 } 1145 } 1146 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level" 1147 + " exceed threshold." 1148 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels), 1149 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation); 1150 mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceed threshold." 
1151 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels), 1152 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, 1153 (float)maxWhiteDeviation); 1154 1155 // Validate against optical black levels if it is available 1156 if (canCaptureBlackRaw) { 1157 maxBlackDeviation = 0; 1158 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1159 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1160 if (maxBlackDeviation < 1161 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) { 1162 maxBlackDeviation = 1163 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]); 1164 } 1165 } 1166 } 1167 1168 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black" 1169 + " exceed threshold." 1170 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels) 1171 + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels), 1172 fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN, 1173 maxBlackDeviation); 1174 } 1175 } 1176 noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges)1177 private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 1178 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1179 CaptureRequest.Builder requestBuilder = 1180 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1181 int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked(); 1182 1183 for (int mode : availableModes) { 1184 requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode); 1185 1186 // Test that OFF and FAST mode should not slow down the frame rate. 1187 if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF || 1188 mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) { 1189 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 1190 } 1191 1192 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1193 startPreview(requestBuilder, maxPrevSize, resultListener); 1194 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler); 1195 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1196 1197 verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode, 1198 resultListener, NUM_FRAMES_VERIFIED); 1199 } 1200 1201 stopPreview(); 1202 } 1203 focusDistanceTestByCamera()1204 private void focusDistanceTestByCamera() throws Exception { 1205 CaptureRequest.Builder requestBuilder = 1206 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1207 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF); 1208 int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked(); 1209 float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED; 1210 if (calibrationStatus == 1211 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1212 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED; 1213 } else if (calibrationStatus == 1214 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) { 1215 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE; 1216 } 1217 1218 // Test changing focus distance with repeating request 1219 focusDistanceTestRepeating(requestBuilder, errorMargin); 1220 1221 if (calibrationStatus == 1222 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1223 // Test changing focus distance with burst request 1224 focusDistanceTestBurst(requestBuilder, errorMargin); 1225 } 1226 } 1227 verifyFocusRange(CaptureResult result, float focusDistance)1228 private void 
verifyFocusRange(CaptureResult result, float focusDistance) { 1229 if (PropertyUtil.getVendorApiLevel() < 33) { 1230 // Skip, as this only applies to UDC and above 1231 if (VERBOSE) { 1232 Log.v(TAG, "Skipping FOCUS_RANGE verification due to API level"); 1233 } 1234 return; 1235 } 1236 1237 Pair<Float, Float> focusRange = result.get(CaptureResult.LENS_FOCUS_RANGE); 1238 if (focusRange != null) { 1239 // Prevent differences in floating point precision between manual request and HAL 1240 // result, some margin need to be considered for focusRange.near and far check 1241 float focusRangeNear = focusRange.first * (1.0f + FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1242 float focusRangeFar = focusRange.second * (1.0f - FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1243 1244 mCollector.expectLessOrEqual("Focus distance should be less than or equal to " 1245 + "FOCUS_RANGE.near (with margin)", focusRangeNear, focusDistance); 1246 mCollector.expectGreaterOrEqual("Focus distance should be greater than or equal to " 1247 + "FOCUS_RANGE.far (with margin)", focusRangeFar, focusDistance); 1248 } else if (VERBOSE) { 1249 Log.v(TAG, "FOCUS_RANGE undefined, skipping verification"); 1250 } 1251 } 1252 focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, float errorMargin)1253 private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, 1254 float errorMargin) throws Exception { 1255 CaptureRequest request; 1256 float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0); 1257 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1258 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1259 startPreview(requestBuilder, maxPrevSize, resultListener); 1260 1261 float[] resultDistances = new float[testDistances.length]; 1262 int[] resultLensStates = new int[testDistances.length]; 1263 1264 // Collect results 1265 for (int i = 0; i < testDistances.length; i++) { 1266 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1267 request = requestBuilder.build(); 1268 resultListener = new SimpleCaptureCallback(); 1269 mSession.setRepeatingRequest(request, resultListener, mHandler); 1270 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1271 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1272 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1273 CaptureResult result = resultListener.getCaptureResultForRequest(request, 1274 NUM_RESULTS_WAIT_TIMEOUT); 1275 1276 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1277 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1278 1279 verifyFocusRange(result, resultDistances[i]); 1280 1281 if (VERBOSE) { 1282 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i] 1283 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1284 } 1285 } 1286 1287 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1288 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0, 1289 errorMargin); 1290 1291 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1292 1293 // Test hyperfocal distance optionally 1294 float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1295 if (hyperFocalDistance > 0) { 1296 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance); 1297 request = requestBuilder.build(); 1298 resultListener = new SimpleCaptureCallback(); 1299 mSession.setRepeatingRequest(request, 
resultListener, mHandler); 1300 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1301 1302 // Then wait for the lens.state to be stationary. 1303 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1304 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1305 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1306 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1307 mCollector.expectInRange("Focus distance for hyper focal should be close enough to" + 1308 " requested value", focusDistance, 1309 hyperFocalDistance * (1.0f - errorMargin), 1310 hyperFocalDistance * (1.0f + errorMargin)); 1311 } 1312 } 1313 } 1314 focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, float errorMargin)1315 private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, 1316 float errorMargin) throws Exception { 1317 1318 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1319 float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT, 1320 NUM_FOCUS_DISTANCES_REPEAT); 1321 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1322 startPreview(requestBuilder, maxPrevSize, resultListener); 1323 1324 float[] resultDistances = new float[testDistances.length]; 1325 int[] resultLensStates = new int[testDistances.length]; 1326 1327 final int maxPipelineDepth = mStaticInfo.getCharacteristics().get( 1328 CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH); 1329 1330 // Move lens to starting position, and wait for the lens.state to be stationary. 1331 CaptureRequest request; 1332 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]); 1333 request = requestBuilder.build(); 1334 mSession.setRepeatingRequest(request, resultListener, mHandler); 1335 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1336 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1337 1338 // Submit burst of requests with different focus distances 1339 List<CaptureRequest> burst = new ArrayList<>(); 1340 for (int i = 0; i < testDistances.length; i ++) { 1341 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1342 burst.add(requestBuilder.build()); 1343 } 1344 mSession.captureBurst(burst, resultListener, mHandler); 1345 1346 for (int i = 0; i < testDistances.length; i++) { 1347 CaptureResult result = resultListener.getCaptureResultForRequest( 1348 burst.get(i), maxPipelineDepth+1); 1349 1350 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1351 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1352 1353 verifyFocusRange(result, resultDistances[i]); 1354 1355 if (VERBOSE) { 1356 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i] 1357 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1358 } 1359 } 1360 1361 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1362 /*ascendingOrder*/true, /*noOvershoot*/true, 1363 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT, 1364 errorMargin); 1365 1366 } 1367 1368 /** 1369 * Verify focus distance control. 1370 * 1371 * Assumption: 1372 * - First repeatStart+1 elements of requestedDistances share the same value 1373 * - Last repeatEnd+1 elements of requestedDistances share the same value 1374 * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder. 
1375 * - Focuser is at requestedDistances[0] at the beginning of the test. 1376 * 1377 * @param requestedDistances The requested focus distances 1378 * @param resultDistances The result focus distances 1379 * @param lensStates The result lens states 1380 * @param ascendingOrder The order of the expected focus distance request/output 1381 * @param noOvershoot Assert that focus control doesn't overshoot the requested value 1382 * @param repeatStart The number of times the starting focus distance is repeated 1383 * @param repeatEnd The number of times the ending focus distance is repeated 1384 * @param errorMargin The error margin between request and result 1385 */ verifyFocusDistance(float[] requestedDistances, float[] resultDistances, int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, int repeatEnd, float errorMargin)1386 private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances, 1387 int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, 1388 int repeatEnd, float errorMargin) { 1389 1390 float minValue = 0; 1391 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 1392 float hyperfocalDistance = 0; 1393 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1394 hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1395 } 1396 1397 // Verify lens and focus distance do not change for first repeatStart 1398 // results. 1399 for (int i = 0; i < repeatStart; i ++) { 1400 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1401 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1402 float marginMax = 1403 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1404 1405 mCollector.expectEquals("Lens moves even though focus_distance didn't change", 1406 lensStates[i], CaptureResult.LENS_STATE_STATIONARY); 1407 if (noOvershoot) { 1408 mCollector.expectInRange("Focus distance in result should be close enough to " + 1409 "requested value", resultDistances[i], marginMin, marginMax); 1410 } 1411 mCollector.expectInRange("Result focus distance is out of range", 1412 resultDistances[i], minValue, maxValue); 1413 } 1414 1415 for (int i = repeatStart; i < resultDistances.length-1; i ++) { 1416 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1417 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1418 float marginMax = 1419 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1420 if (noOvershoot) { 1421 // Result focus distance shouldn't overshoot the request 1422 boolean condition; 1423 if (ascendingOrder) { 1424 condition = resultDistances[i] <= marginMax; 1425 } else { 1426 condition = resultDistances[i] >= marginMin; 1427 } 1428 mCollector.expectTrue(String.format( 1429 "Lens shouldn't move past request focus distance. result " + 1430 resultDistances[i] + " vs target of " + 1431 (ascendingOrder ? marginMax : marginMin)), condition); 1432 } 1433 1434 // Verify monotonically increased focus distance setting 1435 boolean condition; 1436 float compareDistance = resultDistances[i+1] - resultDistances[i]; 1437 if (i < resultDistances.length-1-repeatEnd) { 1438 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0); 1439 } else { 1440 condition = (ascendingOrder ? 
                    compareDistance >= 0 : compareDistance <= 0);
            }
            mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
                    + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
                    + lensStates[i+1] + "] monotonicity is broken"), condition);
        }

        mCollector.expectTrue(String.format("All values of this array are equal: " +
                resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
                resultDistances[0] != resultDistances[resultDistances.length-1]);

        // Verify lens moved to destination location.
        mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
                " for minFocusDistance should be close enough to requested value " +
                requestedDistances[requestedDistances.length-1],
                resultDistances[resultDistances.length-1],
                requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
                requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
    }

    /**
     * Verify edge mode control results for the given fpsRanges.
     */
    private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
        Size maxPrevSize = mOrderedPreviewSizes.get(0);
        int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        for (int mode : edgeModes) {
            requestBuilder.set(CaptureRequest.EDGE_MODE, mode);

            // OFF and FAST modes should not slow down the frame rate.
            if (mode == CaptureRequest.EDGE_MODE_OFF ||
                    mode == CaptureRequest.EDGE_MODE_FAST) {
                verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
            }

            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, maxPrevSize, resultListener);
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
                    NUM_FRAMES_VERIFIED);
        }

        stopPreview();
    }

    /**
     * Test color correction controls.
     *
     * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
     * the unit gain and identity transform.</p>
     */
    private void colorCorrectionTestByCamera() throws Exception {
        CaptureRequest request;
        CaptureResult result;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
        updatePreviewSurface(maxPreviewSz);
        CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
        CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();

        startPreview(previewRequestBuilder, maxPreviewSz, listener);

        // Default preview result should give valid color correction metadata.
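        // Here "valid" means non-null gains and transform that are not all-zero, with the mode
        // echoing the request (see validateColorCorrectionResult).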
1508 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1509 validateColorCorrectionResult(result, 1510 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE)); 1511 int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; 1512 // TRANSFORM_MATRIX mode 1513 // Only test unit gain and identity transform 1514 List<Integer> availableControlModes = Arrays.asList( 1515 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked())); 1516 List<Integer> availableAwbModes = Arrays.asList( 1517 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked())); 1518 boolean isManualCCSupported = 1519 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) || 1520 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF); 1521 if (isManualCCSupported) { 1522 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1523 // Only manual AWB mode is supported 1524 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1525 CaptureRequest.CONTROL_MODE_AUTO); 1526 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, 1527 CaptureRequest.CONTROL_AWB_MODE_OFF); 1528 } else { 1529 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode. 1530 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1531 CaptureRequest.CONTROL_MODE_OFF); 1532 } 1533 1534 RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f); 1535 1536 ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform( 1537 new Rational[] { 1538 ONE_R, ZERO_R, ZERO_R, 1539 ZERO_R, ONE_R, ZERO_R, 1540 ZERO_R, ZERO_R, ONE_R 1541 }); 1542 1543 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode); 1544 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN); 1545 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM); 1546 request = manualRequestBuilder.build(); 1547 mSession.capture(request, listener, mHandler); 1548 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1549 RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS); 1550 ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM); 1551 validateColorCorrectionResult(result, colorCorrectionMode); 1552 mCollector.expectEquals("control mode result/request mismatch", 1553 CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE)); 1554 mCollector.expectEquals("Color correction gain result/request mismatch", 1555 UNIT_GAIN, gains); 1556 mCollector.expectEquals("Color correction gain result/request mismatch", 1557 IDENTITY_TRANSFORM, transform); 1558 1559 } 1560 1561 // FAST mode 1562 colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST; 1563 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1564 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode); 1565 request = manualRequestBuilder.build(); 1566 mSession.capture(request, listener, mHandler); 1567 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1568 validateColorCorrectionResult(result, colorCorrectionMode); 1569 mCollector.expectEquals("control mode result/request mismatch", 1570 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE)); 1571 1572 // HIGH_QUALITY mode 1573 colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY; 1574 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 
CaptureRequest.CONTROL_MODE_AUTO); 1575 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode); 1576 request = manualRequestBuilder.build(); 1577 mSession.capture(request, listener, mHandler); 1578 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1579 validateColorCorrectionResult(result, colorCorrectionMode); 1580 mCollector.expectEquals("control mode result/request mismatch", 1581 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE)); 1582 } 1583 validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode)1584 private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) { 1585 final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0); 1586 final int TRANSFORM_SIZE = 9; 1587 Rational[] zeroTransform = new Rational[TRANSFORM_SIZE]; 1588 Arrays.fill(zeroTransform, ZERO_R); 1589 final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform); 1590 1591 RggbChannelVector resultGain; 1592 if ((resultGain = mCollector.expectKeyValueNotNull(result, 1593 CaptureResult.COLOR_CORRECTION_GAINS)) != null) { 1594 mCollector.expectKeyValueNotEquals(result, 1595 CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS); 1596 } 1597 1598 ColorSpaceTransform resultTransform; 1599 if ((resultTransform = mCollector.expectKeyValueNotNull(result, 1600 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) { 1601 mCollector.expectKeyValueNotEquals(result, 1602 CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM); 1603 } 1604 1605 mCollector.expectEquals("color correction mode result/request mismatch", 1606 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 1607 } 1608 1609 /** 1610 * Test that flash can be turned off successfully with a given initial and final AE_CONTROL 1611 * states. 1612 * 1613 * This function expects that initialAeControl and flashOffAeControl will not be either 1614 * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF 1615 * 1616 * @param listener The Capture listener that is used to wait for capture result 1617 * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with. 1618 * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for 1619 * TEMPLATE_PREVIEW repeating requests. 
1620 */ flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, int initialAeControl, int flashOffAeControl)1621 private void flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, 1622 int initialAeControl, int flashOffAeControl) throws Exception { 1623 CaptureResult result; 1624 final int NUM_FLASH_REQUESTS_TESTED = 10; 1625 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1626 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1627 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl); 1628 1629 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1630 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1631 1632 // Turn on torch using FLASH_MODE_TORCH 1633 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 1634 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1635 CaptureRequest torchOnRequest = requestBuilder.build(); 1636 mSession.setRepeatingRequest(torchOnRequest, listener, mHandler); 1637 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH); 1638 result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT); 1639 // Test that the flash actually turned on continuously. 1640 mCollector.expectEquals("Flash state result must be FIRED", CaptureResult.FLASH_STATE_FIRED, 1641 result.get(CaptureResult.FLASH_STATE)); 1642 mSession.stopRepeating(); 1643 // Turn off the torch 1644 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl); 1645 // TODO: jchowdhary@, b/130323585, this line can be removed. 1646 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1647 int numAllowedTransitionStates = NUM_PARTIAL_FRAMES_NPFC; 1648 if (mStaticInfo.isPerFrameControlSupported()) { 1649 numAllowedTransitionStates = NUM_PARTIAL_FRAMES_PFC; 1650 1651 } 1652 // We submit 2 * numAllowedTransitionStates + 1 requests since we have two torch mode 1653 // transitions. The additional request is to check for at least 1 expected (FIRED / READY) 1654 // state. 1655 int numTorchTestSamples = 2 * numAllowedTransitionStates + 1; 1656 CaptureRequest flashOffRequest = requestBuilder.build(); 1657 int flashModeOffRequests = captureRequestsSynchronizedBurst(flashOffRequest, 1658 numTorchTestSamples, listener, mHandler); 1659 // Turn it on again. 1660 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1661 // We need to have CONTROL_AE_MODE be either CONTROL_AE_MODE_ON or CONTROL_AE_MODE_OFF to 1662 // turn the torch on again. 
1663 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1664 CaptureRequest flashModeTorchRequest = requestBuilder.build();
1665 int flashModeTorchRequests = captureRequestsSynchronizedBurst(flashModeTorchRequest,
1666 numTorchTestSamples, listener, mHandler);
1667 
1668 CaptureResult[] torchStateResults =
1669 new CaptureResult[flashModeTorchRequests + flashModeOffRequests];
1670 Arrays.fill(torchStateResults, null);
1671 int i = 0;
1672 for (; i < flashModeOffRequests; i++) {
1673 torchStateResults[i] =
1674 listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
1675 mCollector.expectNotEquals("Result for flashModeOff request is null",
1676 torchStateResults[i], null);
1677 }
1678 for (int j = i; j < torchStateResults.length; j++) {
1679 torchStateResults[j] =
1680 listener.getCaptureResultForRequest(flashModeTorchRequest,
1681 NUM_RESULTS_WAIT_TIMEOUT);
1682 mCollector.expectNotEquals("Result for flashModeTorch request is null",
1683 torchStateResults[j], null);
1684 }
1685 if (isLegacy) {
1686 // For LEGACY devices, flash state is null for all situations except:
1687 // android.control.aeMode == ON_ALWAYS_FLASH, where flash.state will be FIRED
1688 // android.flash.mode == TORCH, where flash.state will be FIRED
1689 testLegacyTorchStates(torchStateResults, 0, flashModeOffRequests - 1, flashOffRequest);
1690 testLegacyTorchStates(torchStateResults, flashModeOffRequests,
1691 torchStateResults.length - 1,
1692 flashModeTorchRequest);
1693 } else {
1694 checkTorchStates(torchStateResults, numAllowedTransitionStates, flashModeOffRequests,
1695 flashModeTorchRequests);
1696 }
1697 }
1698 
1699 private void testLegacyTorchStates(CaptureResult[] torchStateResults, int beg, int end,
1700 CaptureRequest request) {
1701 for (int i = beg; i <= end; i++) {
1702 Integer requestControlAeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
1703 Integer requestFlashMode = request.get(CaptureRequest.FLASH_MODE);
1704 Integer resultFlashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1705 if (requestControlAeMode == CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
1706 requestFlashMode == CaptureRequest.FLASH_MODE_TORCH) {
1707 mCollector.expectEquals("For LEGACY devices, flash state must be FIRED when " +
1708 "CONTROL_AE_MODE == CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE == " +
1709 "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1710 requestFlashMode, CaptureResult.FLASH_STATE_FIRED, resultFlashState);
1711 continue;
1712 }
1713 mCollector.expectTrue("For LEGACY devices, flash state must be null when " +
1714 "CONTROL_AE_MODE != CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE != " +
1715 "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1716 requestFlashMode, resultFlashState == null);
1717 }
1718 }
1719 // We check that torch states appear in the expected order. We don't necessarily know how
1720 // many times each state appears, but we make sure that the states do not appear out of
1721 // order.
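// For example, with stateOrder = [PARTIAL, READY] the sequences [PARTIAL, READY, READY] and
// [READY, READY] are accepted, while [PARTIAL, READY, PARTIAL] is rejected because the state
// index moves backwards; any state not present in stateOrder is flagged as invalid.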
checkTorchTransitionStates(CaptureResult []torchStateResults, int beg, int end, List<Integer> stateOrder, boolean isTurningOff)1722 private void checkTorchTransitionStates(CaptureResult []torchStateResults, int beg, int end, 1723 List<Integer> stateOrder, boolean isTurningOff) { 1724 Integer flashState; 1725 Integer curIndex = 0; 1726 for (int i = beg; i <= end; i++) { 1727 flashState = torchStateResults[i].get(CaptureResult.FLASH_STATE); 1728 int index = stateOrder.indexOf(flashState); 1729 mCollector.expectNotEquals("Invalid state " + flashState + " not in expected list" + 1730 stateOrder, index, -1); 1731 mCollector.expectGreaterOrEqual("state " + flashState + " index " + index + 1732 " is expected to be >= " + curIndex, 1733 curIndex, index); 1734 curIndex = index; 1735 } 1736 } 1737 checkTorchStates(CaptureResult []torchResults, int numAllowedTransitionStates, int numTorchOffSamples, int numTorchOnSamples)1738 private void checkTorchStates(CaptureResult []torchResults, int numAllowedTransitionStates, 1739 int numTorchOffSamples, int numTorchOnSamples) { 1740 // We test for flash states from request: 1741 // Request: O(0) O(1) O(2) O(n)....O(nOFF) T(0) T(1) T(2) ....T(n) .... T(nON) 1742 // Valid Result : P/R P/R P/R R R R...P/R P/R P/F P/F P/F F F 1743 // For the FLASH_STATE_OFF requests, once FLASH_STATE READY has been seen, for the 1744 // transition states while switching the torch off, it must not transition to 1745 // FLASH_STATE_PARTIAL again till the next transition period which turns the torch on. 1746 // P - FLASH_STATE_PARTIAL 1747 // R - FLASH_STATE_READY 1748 // F - FLASH_STATE_FIRED 1749 // O(k) - kth FLASH_MODE_OFF request 1750 // T(k) - kth FLASH_MODE_TORCH request 1751 // nOFF - number of torch off samples 1752 // nON - number of torch on samples 1753 Integer flashState; 1754 // Check on -> off transition states 1755 List<Integer> onToOffStateOrderList = new ArrayList<Integer>(); 1756 onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL); 1757 onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_READY); 1758 checkTorchTransitionStates(torchResults, 0, numAllowedTransitionStates, 1759 onToOffStateOrderList, true); 1760 // The next frames (before transition) must have its flash state as FLASH_STATE_READY 1761 for (int i = numAllowedTransitionStates + 1; 1762 i < numTorchOffSamples - numAllowedTransitionStates; i++) { 1763 flashState = torchResults[numAllowedTransitionStates].get(CaptureResult.FLASH_STATE); 1764 mCollector.expectEquals("flash state result must be READY", 1765 CaptureResult.FLASH_STATE_READY, flashState); 1766 } 1767 // check off -> on transition states, before the FLASH_MODE_TORCH request was sent 1768 List<Integer> offToOnPreStateOrderList = new ArrayList<Integer>(); 1769 offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_READY); 1770 offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL); 1771 checkTorchTransitionStates(torchResults, 1772 numTorchOffSamples - numAllowedTransitionStates, numTorchOffSamples - 1, 1773 offToOnPreStateOrderList, false); 1774 // check off -> on transition states 1775 List<Integer> offToOnPostStateOrderList = new ArrayList<Integer>(); 1776 offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL); 1777 offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_FIRED); 1778 checkTorchTransitionStates(torchResults, 1779 numTorchOffSamples, numTorchOffSamples + numAllowedTransitionStates, 1780 offToOnPostStateOrderList, false); 1781 // check on states after off -> on transition 1782 // The next frames 
must have its flash state as FLASH_STATE_FIRED 1783 for (int i = numTorchOffSamples + numAllowedTransitionStates + 1; 1784 i < torchResults.length - 1; i++) { 1785 flashState = torchResults[i].get(CaptureResult.FLASH_STATE); 1786 mCollector.expectEquals("flash state result must be FIRED for frame " + i, 1787 CaptureRequest.FLASH_STATE_FIRED, flashState); 1788 } 1789 } 1790 1791 /** 1792 * Test flash mode control by AE mode. 1793 * <p> 1794 * Only allow AE mode ON or OFF, because other AE mode could run into conflict with 1795 * flash manual control. This function expects the camera to already have an active 1796 * repeating request and be sending results to the listener. 1797 * </p> 1798 * 1799 * @param listener The Capture listener that is used to wait for capture result 1800 * @param aeMode The AE mode for flash to test with 1801 */ flashTestByAeMode(SimpleCaptureCallback listener, int aeMode)1802 private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception { 1803 CaptureResult result; 1804 final int NUM_FLASH_REQUESTS_TESTED = 10; 1805 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1806 1807 if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) { 1808 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode); 1809 } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 1810 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY); 1811 } else { 1812 throw new IllegalArgumentException("This test only works when AE mode is ON or OFF"); 1813 } 1814 1815 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1816 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1817 1818 // For camera that doesn't have flash unit, flash state should always be UNAVAILABLE. 1819 if (mStaticInfo.getFlashInfoChecked() == false) { 1820 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 1821 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS); 1822 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE" 1823 + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE, 1824 result.get(CaptureResult.FLASH_STATE)); 1825 } 1826 1827 return; 1828 } 1829 1830 // Test flash SINGLE mode control. Wait for flash state to be READY first. 1831 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 1832 waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY, 1833 NUM_RESULTS_WAIT_TIMEOUT); 1834 } // else the settings were already waited on earlier 1835 1836 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 1837 CaptureRequest flashSinglerequest = requestBuilder.build(); 1838 1839 int flashModeSingleRequests = captureRequestsSynchronized( 1840 flashSinglerequest, listener, mHandler); 1841 waitForNumResults(listener, flashModeSingleRequests - 1); 1842 result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT); 1843 // Result mode must be SINGLE, state must be FIRED. 1844 mCollector.expectEquals("Flash mode result must be SINGLE", 1845 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE)); 1846 mCollector.expectEquals("Flash state result must be FIRED", 1847 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 1848 1849 // Test flash TORCH mode control. 
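// The loop below tracks a small TorchSeqState machine: results may start as PARTIAL while the
// torch ramps up, must then report FIRED, and may fall back to PARTIAL while ramping down at
// the end of the burst. A device with per-frame control must already report FIRED on the first
// TORCH result.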
1850 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1851 CaptureRequest torchRequest = requestBuilder.build(); 1852 1853 int flashModeTorchRequests = captureRequestsSynchronized(torchRequest, 1854 NUM_FLASH_REQUESTS_TESTED, listener, mHandler); 1855 waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED); 1856 1857 // Verify the results 1858 TorchSeqState state = TorchSeqState.RAMPING_UP; 1859 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 1860 result = listener.getCaptureResultForRequest(torchRequest, 1861 NUM_RESULTS_WAIT_TIMEOUT); 1862 int flashMode = result.get(CaptureResult.FLASH_MODE); 1863 int flashState = result.get(CaptureResult.FLASH_STATE); 1864 // Result mode must be TORCH 1865 mCollector.expectEquals("Flash mode result " + i + " must be TORCH", 1866 CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE)); 1867 if (state == TorchSeqState.RAMPING_UP && 1868 flashState == CaptureResult.FLASH_STATE_FIRED) { 1869 state = TorchSeqState.FIRED; 1870 } else if (state == TorchSeqState.FIRED && 1871 flashState == CaptureResult.FLASH_STATE_PARTIAL) { 1872 state = TorchSeqState.RAMPING_DOWN; 1873 } 1874 1875 if (i == 0 && mStaticInfo.isPerFrameControlSupported()) { 1876 mCollector.expectTrue( 1877 "Per frame control device must enter FIRED state on first torch request", 1878 state == TorchSeqState.FIRED); 1879 } 1880 1881 if (state == TorchSeqState.FIRED) { 1882 mCollector.expectEquals("Flash state result " + i + " must be FIRED", 1883 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 1884 } else { 1885 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL", 1886 CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE)); 1887 } 1888 } 1889 mCollector.expectTrue("Torch state FIRED never seen", 1890 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN); 1891 1892 // Test flash OFF mode control 1893 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1894 CaptureRequest flashOffrequest = requestBuilder.build(); 1895 1896 int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler); 1897 waitForNumResults(listener, flashModeOffRequests - 1); 1898 result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT); 1899 mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF, 1900 result.get(CaptureResult.FLASH_MODE)); 1901 } 1902 verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, int mode, boolean isAeManual, long requestExpTime)1903 private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, 1904 int mode, boolean isAeManual, long requestExpTime) throws Exception { 1905 // Skip the first a couple of frames as antibanding may not be fully up yet. 1906 final int NUM_FRAMES_SKIPPED = 5; 1907 for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) { 1908 listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1909 } 1910 1911 for (int i = 0; i < numFramesVerified; i++) { 1912 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1913 Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); 1914 assertNotNull("Exposure time shouldn't be null", resultExpTime); 1915 Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER); 1916 // Scene flicker result should be always available. 
1917 assertNotNull("Scene flicker must not be null", flicker); 1918 assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE && 1919 flicker <= STATISTICS_SCENE_FLICKER_60HZ); 1920 1921 Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE); 1922 assertNotNull("antiBanding mode shouldn't be null", antiBandMode); 1923 assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode, 1924 antiBandMode == mode); 1925 if (isAeManual) { 1926 // First, round down not up, second, need close enough. 1927 validateExposureTime(requestExpTime, resultExpTime); 1928 return; 1929 } 1930 1931 long expectedExpTime = resultExpTime; // Default, no exposure adjustment. 1932 if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) { 1933 // result exposure time must be adjusted by 50Hz illuminant source. 1934 expectedExpTime = 1935 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 1936 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) { 1937 // result exposure time must be adjusted by 60Hz illuminant source. 1938 expectedExpTime = 1939 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 1940 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){ 1941 /** 1942 * Use STATISTICS_SCENE_FLICKER to tell the illuminant source 1943 * and do the exposure adjustment. 1944 */ 1945 expectedExpTime = resultExpTime; 1946 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) { 1947 expectedExpTime = 1948 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 1949 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) { 1950 expectedExpTime = 1951 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 1952 } 1953 } 1954 1955 if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) { 1956 mCollector.addMessage(String.format("Result exposure time %dns diverges too much" 1957 + " from expected exposure time %dns for mode %d when AE is auto", 1958 resultExpTime, expectedExpTime, mode)); 1959 } 1960 } 1961 } 1962 antiBandingTestByMode(Size size, int mode)1963 private void antiBandingTestByMode(Size size, int mode) 1964 throws Exception { 1965 if(VERBOSE) { 1966 Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId()); 1967 } 1968 CaptureRequest.Builder requestBuilder = 1969 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1970 1971 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode); 1972 1973 // Test auto AE mode anti-banding behavior 1974 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1975 startPreview(requestBuilder, size, resultListener); 1976 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1977 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false, 1978 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK); 1979 1980 // Test manual AE mode anti-banding behavior 1981 // 65ms, must be supported by full capability devices. 
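// With AE forced to manual exposure, anti-banding is not expected to override the request, so
// verifyAntiBandingMode() validates that the result exposure matches the (clamped) 65ms request
// instead of applying the flicker-period adjustment.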
1982 final long TEST_MANUAL_EXP_TIME_NS = 65000000L; 1983 long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS); 1984 changeExposure(requestBuilder, manualExpTime); 1985 resultListener = new SimpleCaptureCallback(); 1986 startPreview(requestBuilder, size, resultListener); 1987 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1988 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true, 1989 manualExpTime); 1990 1991 stopPreview(); 1992 } 1993 1994 /** 1995 * Test the all available AE modes and AE lock. 1996 * <p> 1997 * For manual AE mode, test iterates through different sensitivities and 1998 * exposure times, validate the result exposure time correctness. For 1999 * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested. 2000 * For the rest of the AUTO mode, AE lock is tested. 2001 * </p> 2002 * 2003 * @param mode 2004 */ aeModeAndLockTestByMode(int mode)2005 private void aeModeAndLockTestByMode(int mode) 2006 throws Exception { 2007 switch (mode) { 2008 case CONTROL_AE_MODE_OFF: 2009 if (mStaticInfo.isCapabilitySupported( 2010 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 2011 // Test manual exposure control. 2012 aeManualControlTest(); 2013 } else { 2014 Log.w(TAG, 2015 "aeModeAndLockTestByMode - can't test AE mode OFF without " + 2016 "manual sensor control"); 2017 } 2018 break; 2019 case CONTROL_AE_MODE_ON: 2020 case CONTROL_AE_MODE_ON_AUTO_FLASH: 2021 case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: 2022 case CONTROL_AE_MODE_ON_ALWAYS_FLASH: 2023 case CONTROL_AE_MODE_ON_EXTERNAL_FLASH: 2024 // Test AE lock for above AUTO modes. 2025 aeAutoModeTestLock(mode); 2026 break; 2027 default: 2028 throw new UnsupportedOperationException("Unhandled AE mode " + mode); 2029 } 2030 } 2031 2032 /** 2033 * Test AE auto modes. 2034 * <p> 2035 * Use single request rather than repeating request to test AE lock per frame control. 2036 * </p> 2037 */ aeAutoModeTestLock(int mode)2038 private void aeAutoModeTestLock(int mode) throws Exception { 2039 CaptureRequest.Builder requestBuilder = 2040 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2041 if (mStaticInfo.isAeLockSupported()) { 2042 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 2043 } 2044 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode); 2045 configurePreviewOutput(requestBuilder); 2046 2047 final int MAX_NUM_CAPTURES_DURING_LOCK = 5; 2048 for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) { 2049 autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i); 2050 } 2051 } 2052 2053 /** 2054 * Issue multiple auto AE captures, then lock AE, validate the AE lock vs. 2055 * the first capture result after the AE lock. The right AE lock behavior is: 2056 * When it is locked, it locks to the current exposure value, and all subsequent 2057 * request with lock ON will have the same exposure value locked. 
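* <p>Concretely, this issues NUM_CAPTURES_BEFORE_LOCK captures with AE lock off, then
* numCapturesDuringLock captures with CONTROL_AE_LOCK set to true, and (when the sensor
* settings are readable) verifies that SENSOR_EXPOSURE_TIME and SENSOR_SENSITIVITY stay
* constant across the locked results.</p>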
2058 */ autoAeMultipleCapturesThenTestLock( CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)2059 private void autoAeMultipleCapturesThenTestLock( 2060 CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock) 2061 throws Exception { 2062 if (numCapturesDuringLock < 1) { 2063 throw new IllegalArgumentException("numCapturesBeforeLock must be no less than 1"); 2064 } 2065 if (VERBOSE) { 2066 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode " 2067 + aeMode + " with " + numCapturesDuringLock + " captures before lock"); 2068 } 2069 2070 final int NUM_CAPTURES_BEFORE_LOCK = 2; 2071 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2072 2073 CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock]; 2074 boolean canSetAeLock = mStaticInfo.isAeLockSupported(); 2075 2076 // Reset the AE lock to OFF, since we are reusing this builder many times 2077 if (canSetAeLock) { 2078 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 2079 } 2080 2081 // Just send several captures with auto AE, lock off. 2082 CaptureRequest request = requestBuilder.build(); 2083 for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) { 2084 mSession.capture(request, listener, mHandler); 2085 } 2086 waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK); 2087 2088 if (!canSetAeLock) { 2089 // Without AE lock, the remaining tests items won't work 2090 return; 2091 } 2092 2093 // Then fire several capture to lock the AE. 2094 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true); 2095 2096 int requestCount = captureRequestsSynchronized( 2097 requestBuilder.build(), numCapturesDuringLock, listener, mHandler); 2098 2099 int[] sensitivities = new int[numCapturesDuringLock]; 2100 long[] expTimes = new long[numCapturesDuringLock]; 2101 Arrays.fill(sensitivities, -1); 2102 Arrays.fill(expTimes, -1L); 2103 2104 // Get the AE lock on result and validate the exposure values. 2105 waitForNumResults(listener, requestCount - numCapturesDuringLock); 2106 for (int i = 0; i < resultsDuringLock.length; i++) { 2107 resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2108 } 2109 2110 for (int i = 0; i < numCapturesDuringLock; i++) { 2111 mCollector.expectKeyValueEquals( 2112 resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true); 2113 } 2114 2115 // Can't read manual sensor/exposure settings without manual sensor 2116 if (mStaticInfo.isCapabilitySupported( 2117 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) { 2118 int sensitivityLocked = 2119 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY); 2120 long expTimeLocked = 2121 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME); 2122 for (int i = 1; i < resultsDuringLock.length; i++) { 2123 mCollector.expectKeyValueEquals( 2124 resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked); 2125 mCollector.expectKeyValueEquals( 2126 resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked); 2127 } 2128 } 2129 } 2130 2131 /** 2132 * Iterate through exposure times and sensitivities for manual AE control. 2133 * <p> 2134 * Use single request rather than repeating request to test manual exposure 2135 * value change per frame control. 
2136 * </p> 2137 */ aeManualControlTest()2138 private void aeManualControlTest() 2139 throws Exception { 2140 CaptureRequest.Builder requestBuilder = 2141 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2142 configurePreviewOutput(requestBuilder); 2143 2144 // Warm up pipeline for more accurate timing 2145 SimpleCaptureCallback warmupListener = new SimpleCaptureCallback(); 2146 mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler); 2147 warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2148 2149 // Do manual captures 2150 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 2151 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2152 2153 long[] expTimesNs = getExposureTimeTestValues(); 2154 int[] sensitivities = getSensitivityTestValues(); 2155 // Submit single request at a time, then verify the result. 2156 for (int i = 0; i < expTimesNs.length; i++) { 2157 for (int j = 0; j < sensitivities.length; j++) { 2158 if (VERBOSE) { 2159 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " 2160 + sensitivities[j] + ", exposure time " + expTimesNs[i] + "ns"); 2161 } 2162 2163 changeExposure(requestBuilder, expTimesNs[i], sensitivities[j]); 2164 mSession.capture(requestBuilder.build(), listener, mHandler); 2165 2166 // make sure timeout is long enough for long exposure time - add a 2x safety margin 2167 // to exposure time 2168 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * expTimesNs[i] / 1000000; 2169 CaptureResult result = listener.getCaptureResult(timeoutMs); 2170 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 2171 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 2172 validateExposureTime(expTimesNs[i], resultExpTimeNs); 2173 validateSensitivity(sensitivities[j], resultSensitivity); 2174 validateFrameDurationForCapture(result); 2175 } 2176 } 2177 mSession.stopRepeating(); 2178 2179 // TODO: Add another case to test where we can submit all requests, then wait for 2180 // results, which will hide the pipeline latency. this is not only faster, but also 2181 // test high speed per frame control and synchronization. 2182 } 2183 2184 2185 /** 2186 * Verify black level lock control. 2187 */ verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, int maxLockOffCnt)2188 private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, 2189 int maxLockOffCnt) throws Exception { 2190 int noLockCnt = 0; 2191 for (int i = 0; i < numFramesVerified; i++) { 2192 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2193 Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK); 2194 assertNotNull("Black level lock result shouldn't be null", blackLevelLock); 2195 2196 // Count the lock == false result, which could possibly occur at most once. 2197 if (blackLevelLock == false) { 2198 noLockCnt++; 2199 } 2200 2201 if(VERBOSE) { 2202 Log.v(TAG, "Black level lock result: " + blackLevelLock); 2203 } 2204 } 2205 assertTrue("Black level lock OFF occurs " + noLockCnt + " times, expect at most " 2206 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt); 2207 } 2208 2209 /** 2210 * Verify shading map for different shading modes. 
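* <p>For SHADING_MODE_FAST and SHADING_MODE_HIGH_QUALITY every reported gain factor must be a
* valid value of at least 1.0; for SHADING_MODE_OFF the result must be a unity map.</p>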
2211 */ verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified, int shadingMode)2212 private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified, 2213 int shadingMode) throws Exception { 2214 2215 for (int i = 0; i < numFramesVerified; i++) { 2216 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2217 mCollector.expectEquals("Shading mode result doesn't match request", 2218 shadingMode, result.get(CaptureResult.SHADING_MODE)); 2219 LensShadingMap mapObj = result.get( 2220 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP); 2221 assertNotNull("Map object must not be null", mapObj); 2222 int numElementsInMap = mapObj.getGainFactorCount(); 2223 float[] map = new float[numElementsInMap]; 2224 mapObj.copyGainFactors(map, /*offset*/0); 2225 assertNotNull("Map must not be null", map); 2226 assertFalse(String.format( 2227 "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE), 2228 numElementsInMap >= MAX_SHADING_MAP_SIZE); 2229 assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap, 2230 MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE); 2231 2232 if (shadingMode == CaptureRequest.SHADING_MODE_FAST || 2233 shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) { 2234 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all 2235 // elements >= 1.0f 2236 2237 int badValueCnt = 0; 2238 // Detect the bad values of the map data. 2239 for (int j = 0; j < numElementsInMap; j++) { 2240 if (Float.isNaN(map[j]) || map[j] < 1.0f) { 2241 badValueCnt++; 2242 } 2243 } 2244 assertEquals("Number of value in the map is " + badValueCnt + " out of " 2245 + numElementsInMap, /*expected*/0, /*actual*/badValueCnt); 2246 } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) { 2247 float[] unityMap = new float[numElementsInMap]; 2248 Arrays.fill(unityMap, 1.0f); 2249 // shading mode is OFF, expect to receive a unity map. 2250 assertTrue("Result map " + Arrays.toString(map) + " must be an unity map", 2251 Arrays.equals(unityMap, map)); 2252 } 2253 } 2254 } 2255 2256 /** 2257 * Test face detection for a camera. 2258 */ 2259 private void faceDetectionTestByCamera() throws Exception { 2260 int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked(); 2261 2262 SimpleCaptureCallback listener; 2263 CaptureRequest.Builder requestBuilder = 2264 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2265 2266 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 2267 for (int mode : faceDetectModes) { 2268 requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode); 2269 if (VERBOSE) { 2270 Log.v(TAG, "Start testing face detection mode " + mode); 2271 } 2272 2273 // Create a new listener for each run to avoid the results from one run spill 2274 // into another run. 2275 listener = new SimpleCaptureCallback(); 2276 startPreview(requestBuilder, maxPreviewSz, listener); 2277 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2278 verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode); 2279 } 2280 2281 stopPreview(); 2282 } 2283 2284 /** 2285 * Verify face detection results for different face detection modes. 
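* <p>OFF must report zero faces. SIMPLE must report face bounds and scores with the face id
* left as Face.ID_UNSUPPORTED. FULL must additionally report unique ids and eye/mouth positions
* that fall inside the reported face rectangle.</p>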
2286 *
2287 * @param listener The listener to get capture result
2288 * @param numFramesVerified Number of results to be verified
2289 * @param faceDetectionMode Face detection mode to be verified against
2290 */
2291 private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
2292 int faceDetectionMode) {
2293 for (int i = 0; i < numFramesVerified; i++) {
2294 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2295 mCollector.expectEquals("Result face detection mode should match the request",
2296 faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
2297 
2298 Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
2299 List<Integer> faceIds = new ArrayList<Integer>(faces.length);
2300 List<Integer> faceScores = new ArrayList<Integer>(faces.length);
2301 if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
2302 mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
2303 0, faces.length);
2304 } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
2305 for (Face face : faces) {
2306 mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
2307 faceScores.add(face.getScore());
2308 mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
2309 face.getId() == Face.ID_UNSUPPORTED);
2310 }
2311 } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
2312 if (VERBOSE) {
2313 Log.v(TAG, "Number of faces detected: " + faces.length);
2314 }
2315 
2316 for (Face face : faces) {
2317 Rect faceBound;
2318 boolean faceRectAvailable = mCollector.expectTrue("Face rectangle "
2319 + "shouldn't be null", face.getBounds() != null);
2320 if (!faceRectAvailable) {
2321 continue;
2322 }
2323 faceBound = face.getBounds();
2324 
2325 faceScores.add(face.getScore());
2326 faceIds.add(face.getId());
2327 
2328 mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
2329 face.getId() != Face.ID_UNSUPPORTED);
2330 boolean leftEyeAvailable =
2331 mCollector.expectTrue("Left eye position shouldn't be null",
2332 face.getLeftEyePosition() != null);
2333 boolean rightEyeAvailable =
2334 mCollector.expectTrue("Right eye position shouldn't be null",
2335 face.getRightEyePosition() != null);
2336 boolean mouthAvailable =
2337 mCollector.expectTrue("Mouth position shouldn't be null",
2338 face.getMouthPosition() != null);
2339 // Eye/mouth positions should be inside of the face rect.
2340 if (leftEyeAvailable) { 2341 Point leftEye = face.getLeftEyePosition(); 2342 mCollector.expectTrue("Left eye " + leftEye + "should be" 2343 + "inside of face rect " + faceBound, 2344 faceBound.contains(leftEye.x, leftEye.y)); 2345 } 2346 if (rightEyeAvailable) { 2347 Point rightEye = face.getRightEyePosition(); 2348 mCollector.expectTrue("Right eye " + rightEye + "should be" 2349 + "inside of face rect " + faceBound, 2350 faceBound.contains(rightEye.x, rightEye.y)); 2351 } 2352 if (mouthAvailable) { 2353 Point mouth = face.getMouthPosition(); 2354 mCollector.expectTrue("Mouth " + mouth + " should be inside of" 2355 + " face rect " + faceBound, 2356 faceBound.contains(mouth.x, mouth.y)); 2357 } 2358 } 2359 } 2360 mCollector.expectValuesInRange("Face scores are invalid", faceScores, 2361 Face.SCORE_MIN, Face.SCORE_MAX); 2362 mCollector.expectValuesUnique("Face ids are invalid", faceIds); 2363 } 2364 } 2365 2366 /** 2367 * Test tone map mode and result by camera 2368 */ 2369 private void toneMapTestByCamera() throws Exception { 2370 if (!mStaticInfo.isManualToneMapSupported()) { 2371 return; 2372 } 2373 2374 CaptureRequest.Builder requestBuilder = 2375 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2376 int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked(); 2377 // Test AUTO modes first. Note that FAST/HQ must both present or not present 2378 for (int i = 0; i < toneMapModes.length; i++) { 2379 if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) { 2380 int tmpMode = toneMapModes[0]; 2381 toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST; 2382 toneMapModes[i] = tmpMode; 2383 } 2384 if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) { 2385 int tmpMode = toneMapModes[1]; 2386 toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY; 2387 toneMapModes[i] = tmpMode; 2388 } 2389 } 2390 for (int mode : toneMapModes) { 2391 if (VERBOSE) { 2392 Log.v(TAG, "Testing tonemap mode " + mode); 2393 } 2394 2395 requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode); 2396 switch (mode) { 2397 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE: 2398 TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR, 2399 TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR); 2400 requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve); 2401 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2402 2403 toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB, 2404 TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB); 2405 requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve); 2406 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2407 break; 2408 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE: 2409 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f); 2410 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2411 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f); 2412 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2413 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f); 2414 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2415 break; 2416 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE: 2417 requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE, 2418 CaptureRequest.TONEMAP_PRESET_CURVE_REC709); 2419 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2420 requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE, 2421 CaptureRequest.TONEMAP_PRESET_CURVE_SRGB); 2422 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2423 break; 2424 default: 2425 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2426 break; 2427 } 2428 } 2429 2430 2431 } 
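// Illustrative sketch only (not called by the tests in this file): TONEMAP_CURVE control points
// are interleaved (Pin, Pout) pairs in [0, 1], and TONEMAP_MODE_GAMMA_VALUE maps input to output
// as out = in ^ (1 / gamma). A gamma curve could be sampled into that layout as follows; the
// helper name and point count below are arbitrary.
private static float[] sampleGammaCurve(float gamma, int numPoints) {
    float[] curve = new float[2 * numPoints];
    for (int p = 0; p < numPoints; p++) {
        float in = (float) p / (numPoints - 1);
        curve[2 * p] = in;                                     // Pin
        curve[2 * p + 1] = (float) Math.pow(in, 1.0 / gamma);  // Pout
    }
    return curve;
}
// Example: new TonemapCurve(sampleGammaCurve(2.2f, 16), sampleGammaCurve(2.2f, 16),
// sampleGammaCurve(2.2f, 16)) builds a 16-point curve with the same shape as requesting
// TONEMAP_GAMMA = 2.2f.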
2432 
2433 /**
2434 * Test tonemap mode with specified request settings
2435 *
2436 * @param numFramesVerified Number of results to be verified
2437 * @param requestBuilder The request builder with the settings to be tested
2438 */
2439 private void testToneMapMode(int numFramesVerified,
2440 CaptureRequest.Builder requestBuilder) throws Exception {
2441 final int MIN_TONEMAP_CURVE_POINTS = 2;
2442 final Float ZERO = new Float(0);
2443 final Float ONE = new Float(1.0f);
2444 
2445 SimpleCaptureCallback listener = new SimpleCaptureCallback();
2446 int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
2447 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2448 startPreview(requestBuilder, maxPreviewSz, listener);
2449 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2450 
2451 int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
2452 for (int i = 0; i < numFramesVerified; i++) {
2453 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2454 mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
2455 result.get(CaptureResult.TONEMAP_MODE));
2456 TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
2457 int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
2458 float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
2459 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
2460 float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
2461 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
2462 float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
2463 tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
2464 tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
2465 tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
2466 if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
2467 /**
2468 * TODO: need to figure out a good way to measure the difference
2469 * between request and result, as they may have different array
2470 * sizes.
2471 */
2472 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
2473 mCollector.expectEquals("Capture result gamma value should match request",
2474 requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
2475 result.get(CaptureResult.TONEMAP_GAMMA));
2476 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
2477 mCollector.expectEquals("Capture result preset curve should match request",
2478 requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
2479 result.get(CaptureResult.TONEMAP_PRESET_CURVE));
2480 }
2481 
2482 // Tonemap curve result availability and basic validity check for all modes.
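// Each channel is returned as interleaved (Pin, Pout) control points, so the copied array length
// is twice the point count; every value must fall in [0, 1] and the length must contain at least
// one (Pin, Pout) pair and no more than twice the advertised maximum point count.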
2483 mCollector.expectValuesInRange("Tonemap curve red values are out of range", 2484 CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE); 2485 mCollector.expectInRange("Tonemap curve red length is out of range", 2486 mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2487 mCollector.expectValuesInRange("Tonemap curve green values are out of range", 2488 CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE); 2489 mCollector.expectInRange("Tonemap curve green length is out of range", 2490 mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2491 mCollector.expectValuesInRange("Tonemap curve blue values are out of range", 2492 CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE); 2493 mCollector.expectInRange("Tonemap curve blue length is out of range", 2494 mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2495 2496 // Make sure capture result tonemap has identical channels. 2497 if (mStaticInfo.isMonochromeCamera()) { 2498 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2499 "have same dimension for all channels", mapRed.length, mapGreen.length); 2500 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2501 "have same dimension for all channels", mapRed.length, mapBlue.length); 2502 2503 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) { 2504 boolean isIdentical = true; 2505 for (int j = 0; j < mapRed.length; j++) { 2506 isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]); 2507 if (!isIdentical) 2508 break; 2509 } 2510 mCollector.expectTrue("Capture result tonemap of monochrome camera should " + 2511 "be identical between all channels", isIdentical); 2512 } 2513 } 2514 } 2515 stopPreview(); 2516 } 2517 2518 /** 2519 * Test awb mode control. 2520 * <p> 2521 * Test each supported AWB mode, verify the AWB mode in capture result 2522 * matches request. When AWB is locked, the color correction gains and 2523 * transform should remain unchanged. 2524 * </p> 2525 */ 2526 private void awbModeAndLockTestByCamera() throws Exception { 2527 int[] awbModes = mStaticInfo.getAwbAvailableModesChecked(); 2528 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2529 boolean canSetAwbLock = mStaticInfo.isAwbLockSupported(); 2530 CaptureRequest.Builder requestBuilder = 2531 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2532 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2533 2534 for (int mode : awbModes) { 2535 SimpleCaptureCallback listener; 2536 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode); 2537 listener = new SimpleCaptureCallback(); 2538 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2539 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2540 2541 // Verify AWB mode in capture result. 2542 verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener, 2543 NUM_FRAMES_VERIFIED); 2544 2545 if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) { 2546 // Verify color correction transform and gains stay unchanged after a lock. 
2547 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true); 2548 listener = new SimpleCaptureCallback(); 2549 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2550 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2551 2552 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) { 2553 waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE, 2554 CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT); 2555 } 2556 2557 } 2558 // Don't verify auto mode result if AWB lock is not supported 2559 if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) { 2560 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED); 2561 } 2562 } 2563 } 2564 2565 private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener, 2566 int numFramesVerified) { 2567 // Skip check if cc gains/transform/mode are not available 2568 if (!mStaticInfo.areKeysAvailable( 2569 CaptureResult.COLOR_CORRECTION_GAINS, 2570 CaptureResult.COLOR_CORRECTION_TRANSFORM, 2571 CaptureResult.COLOR_CORRECTION_MODE)) { 2572 return; 2573 } 2574 2575 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2576 RggbChannelVector lockedGains = 2577 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2578 ColorSpaceTransform lockedTransform = 2579 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2580 2581 for (int i = 0; i < numFramesVerified; i++) { 2582 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2583 // Color correction mode check is skipped here, as it is checked in colorCorrectionTest. 2584 validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 2585 2586 RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2587 ColorSpaceTransform transform = 2588 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2589 mCollector.expectEquals("Color correction gains should remain unchanged after awb lock", 2590 lockedGains, gains); 2591 mCollector.expectEquals("Color correction transform should remain unchanged after" 2592 + " awb lock", lockedTransform, transform); 2593 } 2594 } 2595 2596 /** 2597 * Test AF mode control. 2598 * <p> 2599 * Test all supported AF modes, verify the AF mode in capture result matches 2600 * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode, 2601 * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED 2602 * state within certain amount of frames. 2603 * </p> 2604 */ 2605 private void afModeTestByCamera() throws Exception { 2606 int[] afModes = mStaticInfo.getAfAvailableModesChecked(); 2607 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2608 CaptureRequest.Builder requestBuilder = 2609 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2610 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2611 2612 for (int mode : afModes) { 2613 SimpleCaptureCallback listener; 2614 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode); 2615 listener = new SimpleCaptureCallback(); 2616 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2617 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2618 2619 // Verify AF mode in capture result. 2620 verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener, 2621 NUM_FRAMES_VERIFIED); 2622 2623 // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes. 
2624 // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily 2625 // result in a passive AF call if the camera has already been focused, and the scene has 2626 // not changed enough to trigger an AF pass. Skip this constraint for LEGACY. 2627 if (mStaticInfo.isHardwareLevelAtLeastLimited() && 2628 (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE || 2629 mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { 2630 List<Integer> afStateList = new ArrayList<Integer>(); 2631 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED); 2632 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED); 2633 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList, 2634 NUM_RESULTS_WAIT_TIMEOUT); 2635 } 2636 } 2637 } 2638 2639 /** 2640 * Test video and optical stabilizations if they are supported by a given camera. 2641 */ 2642 private void stabilizationTestByCamera() throws Exception { 2643 // video stabilization test. 2644 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys(); 2645 2646 Integer[] videoStabModes = (keys.contains(CameraCharacteristics. 2647 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ? 2648 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) : 2649 new Integer[0]; 2650 int[] opticalStabModes = (keys.contains( 2651 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ? 2652 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0]; 2653 2654 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2655 CaptureRequest.Builder requestBuilder = 2656 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2657 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2658 startPreview(requestBuilder, maxPreviewSize, listener); 2659 2660 for (Integer mode : videoStabModes) { 2661 listener = new SimpleCaptureCallback(); 2662 requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode); 2663 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2664 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2665 // Video stabilization could return any modes. 2666 verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE, 2667 videoStabModes, listener, NUM_FRAMES_VERIFIED); 2668 } 2669 2670 for (int mode : opticalStabModes) { 2671 listener = new SimpleCaptureCallback(); 2672 requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode); 2673 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2674 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2675 verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode, 2676 listener, NUM_FRAMES_VERIFIED); 2677 } 2678 2679 stopPreview(); 2680 } 2681 2682 private void digitalZoomTestByCamera(Size previewSize, boolean repeating) throws Exception { 2683 final PointF[] TEST_ZOOM_CENTERS; 2684 final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked(); 2685 final float ZOOM_ERROR_MARGIN = 0.01f; 2686 if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) { 2687 // It doesn't make much sense to test the zoom if the device effectively supports 2688 // no zoom. 2689 return; 2690 } 2691 2692 final int croppingType = mStaticInfo.getScalerCroppingTypeChecked(); 2693 if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) { 2694 // Set the four corners in a way that the minimally allowed zoom factor is 2x. 
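// A normalized zoom center of 0.25 (or 0.75) leaves just enough room for a half-width,
// half-height crop window inside the active array, which corresponds to a 2x zoom factor.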
2695 float normalizedLeft = 0.25f; 2696 float normalizedTop = 0.25f; 2697 float normalizedRight = 0.75f; 2698 float normalizedBottom = 0.75f; 2699 // If the max supported zoom is too small, make sure we at least test the max 2700 // Zoom is tested for the four corners. 2701 if (maxZoom < 2.0f) { 2702 normalizedLeft = 0.5f / maxZoom; 2703 normalizedTop = 0.5f / maxZoom; 2704 normalizedRight = 1.0f - normalizedLeft; 2705 normalizedBottom = 1.0f - normalizedTop; 2706 } 2707 TEST_ZOOM_CENTERS = new PointF[] { 2708 new PointF(0.5f, 0.5f), // Center point 2709 new PointF(normalizedLeft, normalizedTop), // top left corner zoom 2710 new PointF(normalizedRight, normalizedTop), // top right corner zoom 2711 new PointF(normalizedLeft, normalizedBottom), // bottom left corner zoom 2712 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom 2713 }; 2714 2715 if (VERBOSE) { 2716 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM"); 2717 } 2718 } else { 2719 // CENTER_ONLY 2720 TEST_ZOOM_CENTERS = new PointF[] { 2721 new PointF(0.5f, 0.5f), // Center point 2722 }; 2723 2724 if (VERBOSE) { 2725 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY"); 2726 } 2727 } 2728 2729 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 2730 final Rect defaultCropRegion = new Rect(0, 0, 2731 activeArraySize.width(), activeArraySize.height()); 2732 Rect[] cropRegions = new Rect[ZOOM_STEPS]; 2733 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 2734 CaptureRequest.Builder requestBuilder = 2735 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2736 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2737 2738 updatePreviewSurface(previewSize); 2739 configurePreviewOutput(requestBuilder); 2740 2741 CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS]; 2742 2743 // Set algorithm regions 2744 final int METERING_RECT_RATIO = 10; 2745 final MeteringRectangle[][] defaultMeteringRects = new MeteringRectangle[][] { 2746 { 2747 new MeteringRectangle ( 2748 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 2749 /*meteringWeight*/1), /* full active region */ 2750 }, 2751 { 2752 new MeteringRectangle ( 2753 /*x*/0, /*y*/0, activeArraySize.width()/METERING_RECT_RATIO, 2754 activeArraySize.height()/METERING_RECT_RATIO, 2755 /*meteringWeight*/1), 2756 }, 2757 { 2758 new MeteringRectangle ( 2759 /*x*/(int)(activeArraySize.width() * (0.5f - 0.5f/METERING_RECT_RATIO)), 2760 /*y*/(int)(activeArraySize.height() * (0.5f - 0.5f/METERING_RECT_RATIO)), 2761 activeArraySize.width()/METERING_RECT_RATIO, 2762 activeArraySize.height()/METERING_RECT_RATIO, 2763 /*meteringWeight*/1), 2764 }, 2765 }; 2766 2767 final int CAPTURE_SUBMIT_REPEAT; 2768 final int NUM_RESULTS_TO_SKIP; 2769 { 2770 int maxLatency = mStaticInfo.getSyncMaxLatency(); 2771 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 2772 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 2773 } else { 2774 CAPTURE_SUBMIT_REPEAT = maxLatency + 1; 2775 } 2776 if (repeating) { 2777 NUM_RESULTS_TO_SKIP = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 2778 } else { 2779 NUM_RESULTS_TO_SKIP = CAPTURE_SUBMIT_REPEAT - 1; 2780 } 2781 } 2782 2783 if (VERBOSE) { 2784 Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT); 2785 } 2786 2787 for (MeteringRectangle[] meteringRect : defaultMeteringRects) { 2788 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2789 update3aRegion(requestBuilder, algo, meteringRect, mStaticInfo); 2790 } 2791 
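// For this metering configuration, sweep each test zoom center through ZOOM_STEPS crop regions,
// submit the requests, and validate the returned crop region and 3A regions below.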
2792 for (PointF center : TEST_ZOOM_CENTERS) { 2793 Rect previousCrop = null; 2794 2795 for (int i = 0; i < ZOOM_STEPS; i++) { 2796 /* 2797 * Submit capture request 2798 */ 2799 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS); 2800 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, 2801 maxZoom, defaultCropRegion); 2802 if (VERBOSE) { 2803 Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " + 2804 center + " The cropRegion is " + cropRegions[i] + 2805 " Preview size is " + previewSize + ", repeating is " + repeating); 2806 } 2807 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]); 2808 requests[i] = requestBuilder.build(); 2809 if (VERBOSE) { 2810 Log.v(TAG, "submit crop region " + cropRegions[i]); 2811 } 2812 if (repeating) { 2813 mSession.setRepeatingRequest(requests[i], listener, mHandler); 2814 // Drop first few frames 2815 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); 2816 // Interleave a regular capture 2817 mSession.capture(requests[0], listener, mHandler); 2818 } else { 2819 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) { 2820 mSession.capture(requests[i], listener, mHandler); 2821 } 2822 } 2823 2824 /* 2825 * Validate capture result 2826 */ 2827 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); // Drop first few frames 2828 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 2829 requests[i], NUM_RESULTS_WAIT_TIMEOUT); 2830 List<CaptureResult> partialResults = result.getPartialResults(); 2831 2832 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 2833 for (CaptureResult partialResult : partialResults) { 2834 Rect cropRegionInPartial = 2835 partialResult.get(CaptureResult.SCALER_CROP_REGION); 2836 if (cropRegionInPartial != null) { 2837 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 2838 + "match in final result", cropRegionInPartial, cropRegion); 2839 } 2840 } 2841 2842 /* 2843 * Validate resulting crop regions 2844 */ 2845 if (previousCrop != null) { 2846 Rect currentCrop = cropRegion; 2847 mCollector.expectTrue(String.format( 2848 "Crop region should shrink or stay the same " + 2849 "(previous = %s, current = %s)", 2850 previousCrop, currentCrop), 2851 previousCrop.equals(currentCrop) || 2852 (previousCrop.width() > currentCrop.width() && 2853 previousCrop.height() > currentCrop.height())); 2854 } 2855 2856 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 2857 mCollector.expectRectsAreSimilar( 2858 "Request and result crop region should be similar", 2859 cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA); 2860 } 2861 2862 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) { 2863 mCollector.expectRectCentered( 2864 "Result crop region should be centered inside the active array", 2865 new Size(activeArraySize.width(), activeArraySize.height()), 2866 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED); 2867 } 2868 2869 /* 2870 * Validate resulting metering regions 2871 */ 2872 2873 // Use the actual reported crop region to calculate the resulting metering region 2874 expectRegions[i] = getExpectedOutputRegion( 2875 /*requestRegion*/meteringRect, 2876 /*cropRect*/ cropRegion); 2877 2878 // Verify Output 3A region is intersection of input 3A region and crop region 2879 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2880 validate3aRegion(result, partialResults, algo, expectRegions[i], 2881 false/*scaleByZoomRatio*/, mStaticInfo); 2882 } 2883 2884 previousCrop = cropRegion; 2885 } 2886 2887 if (maxZoom > 1.0f) { 2888 
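// At this point previousCrop holds the crop for the largest tested zoom factor,
// 1 + (maxZoom - 1) * (ZOOM_STEPS - 1) / ZOOM_STEPS, which is strictly greater than 1
// whenever maxZoom > 1, so the final crop must be strictly smaller than the active array
// in both dimensions.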
mCollector.expectTrue( 2889 String.format("Most zoomed-in crop region should be smaller " + 2890 "than active array w/h" + 2891 "(last crop = %s, active array = %s)", 2892 previousCrop, activeArraySize), 2893 (previousCrop.width() < activeArraySize.width() && 2894 previousCrop.height() < activeArraySize.height())); 2895 } 2896 } 2897 } 2898 } 2899 2900 private void zoomRatioTestByCamera(Size previewSize) throws Exception { 2901 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 2902 // The error margin is derive from a VGA size camera zoomed all the way to 10x, in which 2903 // case the cropping error can be as large as 480/46 - 480/48 = 0.435. 2904 final float ZOOM_ERROR_MARGIN = 0.05f; 2905 2906 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 2907 final Rect defaultCropRegion = 2908 new Rect(0, 0, activeArraySize.width(), activeArraySize.height()); 2909 final Rect zoom2xCropRegion = 2910 new Rect(activeArraySize.width()/4, activeArraySize.height()/4, 2911 activeArraySize.width()*3/4, activeArraySize.height()*3/4); 2912 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 2913 CaptureRequest.Builder requestBuilder = 2914 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2915 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 2916 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2917 2918 updatePreviewSurface(previewSize); 2919 configurePreviewOutput(requestBuilder); 2920 2921 // Set algorithm regions to full active region 2922 final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] { 2923 new MeteringRectangle ( 2924 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 2925 /*meteringWeight*/1) 2926 }; 2927 2928 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2929 update3aRegion(requestBuilder, algo, defaultMeteringRect, mStaticInfo); 2930 } 2931 2932 final int captureSubmitRepeat; 2933 { 2934 int maxLatency = mStaticInfo.getSyncMaxLatency(); 2935 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 2936 captureSubmitRepeat = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 2937 } else { 2938 captureSubmitRepeat = maxLatency + 1; 2939 } 2940 } 2941 2942 float previousRatio = zoomRatioRange.getLower(); 2943 for (int i = 0; i < ZOOM_STEPS; i++) { 2944 /* 2945 * Submit capture request 2946 */ 2947 float zoomFactor = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() - 2948 zoomRatioRange.getLower()) * i / ZOOM_STEPS; 2949 if (VERBOSE) { 2950 Log.v(TAG, "Testing Zoom ratio " + zoomFactor + " Preview size is " + previewSize); 2951 } 2952 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 2953 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 2954 CaptureRequest request = requestBuilder.build(); 2955 for (int j = 0; j < captureSubmitRepeat; ++j) { 2956 mSession.capture(request, listener, mHandler); 2957 } 2958 2959 /* 2960 * Validate capture result 2961 */ 2962 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 2963 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 2964 request, NUM_RESULTS_WAIT_TIMEOUT); 2965 List<CaptureResult> partialResults = result.getPartialResults(); 2966 float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 2967 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 2968 2969 for (CaptureResult partialResult : partialResults) { 2970 Rect cropRegionInPartial = 2971 
partialResult.get(CaptureResult.SCALER_CROP_REGION); 2972 if (cropRegionInPartial != null) { 2973 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 2974 + "match in final result", cropRegionInPartial, cropRegion); 2975 } 2976 2977 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO); 2978 if (zoomRatioInPartial != null) { 2979 mCollector.expectEquals("CONTROL_ZOOM_RATIO in partial result must match" 2980 + " that in final result", resultZoomRatio, zoomRatioInPartial); 2981 } 2982 } 2983 2984 /* 2985 * Validate resulting crop regions and zoom ratio 2986 */ 2987 mCollector.expectTrue(String.format( 2988 "Zoom ratio should increase or stay the same " + 2989 "(previous = %f, current = %f)", 2990 previousRatio, resultZoomRatio), 2991 Math.abs(previousRatio - resultZoomRatio) < ZOOM_ERROR_MARGIN || 2992 (previousRatio < resultZoomRatio)); 2993 2994 mCollector.expectTrue(String.format( 2995 "Request and result zoom ratio should be similar " + 2996 "(requested = %f, result = %f", zoomFactor, resultZoomRatio), 2997 Math.abs(zoomFactor - resultZoomRatio)/zoomFactor <= ZOOM_ERROR_MARGIN); 2998 2999 //In case zoom ratio is converted to crop region at HAL, due to error magnification 3000 //when converting to post-zoom crop region, scale the error threshold for crop region 3001 //check. 3002 float errorMultiplier = Math.max(1.0f, zoomFactor); 3003 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3004 mCollector.expectRectsAreSimilar( 3005 "Request and result crop region should be similar", 3006 defaultCropRegion, cropRegion, 3007 CROP_REGION_ERROR_PERCENT_DELTA * errorMultiplier); 3008 } 3009 3010 mCollector.expectRectCentered( 3011 "Result crop region should be centered inside the active array", 3012 new Size(activeArraySize.width(), activeArraySize.height()), 3013 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED * errorMultiplier); 3014 3015 /* 3016 * Validate resulting metering regions 3017 */ 3018 // Use the actual reported crop region to calculate the resulting metering region 3019 expectRegions[i] = getExpectedOutputRegion( 3020 /*requestRegion*/defaultMeteringRect, 3021 /*cropRect*/ cropRegion); 3022 3023 // Verify Output 3A region is intersection of input 3A region and crop region 3024 boolean scaleByZoomRatio = zoomFactor > 1.0f; 3025 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3026 validate3aRegion(result, partialResults, algo, expectRegions[i], scaleByZoomRatio, 3027 mStaticInfo); 3028 } 3029 3030 previousRatio = resultZoomRatio; 3031 3032 /* 3033 * Set windowboxing cropRegion while zoomRatio is not 1.0x, and make sure the crop 3034 * region was overwritten. 
3035 */ 3036 if (zoomFactor != 1.0f) { 3037 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom2xCropRegion); 3038 CaptureRequest requestWithCrop = requestBuilder.build(); 3039 for (int j = 0; j < captureSubmitRepeat; ++j) { 3040 mSession.capture(requestWithCrop, listener, mHandler); 3041 } 3042 3043 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3044 CaptureResult resultWithCrop = listener.getCaptureResultForRequest( 3045 requestWithCrop, NUM_RESULTS_WAIT_TIMEOUT); 3046 float resultZoomRatioWithCrop = getValueNotNull(resultWithCrop, 3047 CaptureResult.CONTROL_ZOOM_RATIO); 3048 Rect cropRegionWithCrop = getValueNotNull(resultWithCrop, 3049 CaptureResult.SCALER_CROP_REGION); 3050 3051 mCollector.expectTrue(String.format( 3052 "Result zoom ratio should remain the same (activeArrayCrop: %f, " + 3053 "zoomedCrop: %f)", resultZoomRatio, resultZoomRatioWithCrop), 3054 Math.abs(resultZoomRatio - resultZoomRatioWithCrop) < ZOOM_ERROR_MARGIN); 3055 3056 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3057 mCollector.expectRectsAreSimilar( 3058 "Result crop region should remain the same with or without crop", 3059 cropRegion, cropRegionWithCrop, CROP_REGION_ERROR_PERCENT_DELTA); 3060 } 3061 } 3062 } 3063 } 3064 3065 private void zoomTimestampIncreaseTestByCamera() throws Exception { 3066 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3067 3068 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3069 updatePreviewSurface(maxPreviewSize); 3070 CaptureRequest.Builder requestBuilder = 3071 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3072 configurePreviewOutput(requestBuilder); 3073 3074 // Submit a sequence of requests first zooming in then zooming out. 3075 List<CaptureRequest> requests = new ArrayList<CaptureRequest>(); 3076 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3077 float zoomRange = zoomRatioRange.getUpper() - zoomRatioRange.getLower(); 3078 for (int i = 0; i <= ZOOM_STEPS; i++) { 3079 float zoomFactor = zoomRatioRange.getUpper() - (zoomRange * i / ZOOM_STEPS); 3080 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3081 // Add each ratio to both the beginning and end of the list. 3082 requests.add(requestBuilder.build()); 3083 requests.add(0, requestBuilder.build()); 3084 } 3085 int seqId = mSession.captureBurst(requests, listener, mHandler); 3086 3087 // onCaptureSequenceCompleted() trails all capture results. Upon its return, 3088 // we make sure we've received all results/errors. 
3089 listener.getCaptureSequenceLastFrameNumber( 3090 seqId, WAIT_FOR_RESULT_TIMEOUT_MS * ZOOM_STEPS); 3091 // Check timestamp monotonically increase for the whole sequence 3092 long prevTimestamp = 0; 3093 while (listener.hasMoreResults()) { 3094 TotalCaptureResult result = listener.getTotalCaptureResult( 3095 WAIT_FOR_RESULT_TIMEOUT_MS); 3096 long timestamp = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP); 3097 mCollector.expectGreater("Sensor timestamp must monotonically increase, " 3098 + "but changed from " + prevTimestamp + " to " + timestamp, 3099 prevTimestamp, timestamp); 3100 prevTimestamp = timestamp; 3101 } 3102 } 3103 3104 private void digitalZoomPreviewCombinationTestByCamera() throws Exception { 3105 final double ASPECT_RATIO_THRESHOLD = 0.001; 3106 List<Double> aspectRatiosTested = new ArrayList<Double>(); 3107 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3108 aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight()); 3109 3110 for (Size size : mOrderedPreviewSizes) { 3111 // Max preview size was already tested in testDigitalZoom test. skip it. 3112 if (size.equals(maxPreviewSize)) { 3113 continue; 3114 } 3115 3116 // Only test the largest size for each aspect ratio. 3117 double aspectRatio = (double)(size.getWidth()) / size.getHeight(); 3118 if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) { 3119 continue; 3120 } 3121 3122 if (VERBOSE) { 3123 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom"); 3124 } 3125 3126 aspectRatiosTested.add(aspectRatio); 3127 digitalZoomTestByCamera(size, /*repeating*/false); 3128 } 3129 } 3130 3131 private static boolean isAspectRatioContained(List<Double> aspectRatioList, 3132 double aspectRatio, double delta) { 3133 for (Double ratio : aspectRatioList) { 3134 if (Math.abs(ratio - aspectRatio) < delta) { 3135 return true; 3136 } 3137 } 3138 3139 return false; 3140 } 3141 3142 private void sceneModeTestByCamera() throws Exception { 3143 int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked(); 3144 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3145 CaptureRequest.Builder requestBuilder = 3146 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3147 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3148 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE); 3149 startPreview(requestBuilder, maxPreviewSize, listener); 3150 3151 for(int mode : sceneModes) { 3152 requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode); 3153 listener = new SimpleCaptureCallback(); 3154 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3155 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3156 3157 verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE, 3158 mode, listener, NUM_FRAMES_VERIFIED); 3159 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3160 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3161 CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED); 3162 } 3163 } 3164 3165 private void effectModeTestByCamera() throws Exception { 3166 int[] effectModes = mStaticInfo.getAvailableEffectModesChecked(); 3167 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3168 CaptureRequest.Builder requestBuilder = 3169 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3170 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 3171 SimpleCaptureCallback listener = 
new SimpleCaptureCallback(); 3172 startPreview(requestBuilder, maxPreviewSize, listener); 3173 3174 for(int mode : effectModes) { 3175 requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode); 3176 listener = new SimpleCaptureCallback(); 3177 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3178 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3179 3180 verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE, 3181 mode, listener, NUM_FRAMES_VERIFIED); 3182 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3183 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3184 CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED); 3185 } 3186 } 3187 3188 private void extendedSceneModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 3189 Capability[] extendedSceneModeCaps = mStaticInfo.getAvailableExtendedSceneModeCapsChecked(); 3190 if (extendedSceneModeCaps.length == 0) { 3191 return; 3192 } 3193 3194 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3195 CaptureRequest.Builder requestBuilder = 3196 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3197 3198 for (Capability cap : extendedSceneModeCaps) { 3199 int mode = cap.getMode(); 3200 requestBuilder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE, mode); 3201 3202 // Test that DISABLED and BOKEH_CONTINUOUS mode doesn't slow down the frame rate 3203 if (mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_DISABLED || 3204 mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) { 3205 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 3206 } 3207 3208 Range<Float> zoomRange = cap.getZoomRatioRange(); 3209 float[] zoomRatios = new float[]{zoomRange.getLower(), zoomRange.getUpper()}; 3210 for (float ratio : zoomRatios) { 3211 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3212 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, ratio); 3213 startPreview(requestBuilder, maxPreviewSize, listener); 3214 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3215 3216 verifyCaptureResultForKey(CaptureResult.CONTROL_EXTENDED_SCENE_MODE, 3217 mode, listener, NUM_FRAMES_VERIFIED); 3218 float zoomRatioDelta = ZOOM_RATIO_ERROR_PERCENT_DELTA * ratio; 3219 verifyCaptureResultForKey(CaptureResult.CONTROL_ZOOM_RATIO, 3220 ratio, listener, NUM_FRAMES_VERIFIED, zoomRatioDelta); 3221 } 3222 } 3223 } 3224 3225 private void autoframingTestByCamera() throws Exception { 3226 // Verify autoframing state, zoom ratio and video stabilizations controls for autoframing 3227 // modes ON and OFF 3228 int[] autoframingModes = {CameraMetadata.CONTROL_AUTOFRAMING_OFF, 3229 CameraMetadata.CONTROL_AUTOFRAMING_ON}; 3230 final int zoomSteps = 5; 3231 final float zoomErrorMargin = 0.05f; 3232 Size maxPreviewSize = mOrderedPreviewSizes.get(0); // Max preview size. 3233 CaptureRequest.Builder requestBuilder = 3234 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3235 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3236 startPreview(requestBuilder, maxPreviewSize, listener); 3237 3238 for (int mode : autoframingModes) { 3239 float expectedZoomRatio = 0.0f; 3240 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3241 for (int i = 0; i < zoomSteps; i++) { 3242 float testZoomRatio = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() 3243 - zoomRatioRange.getLower()) * i / zoomSteps; 3244 // Zoom ratio 1.0f is a special case. 
The ZoomRatioMapper in the framework maintains the
3245 // 1.0f ratio in the CaptureResult.
3246 if (testZoomRatio == 1.0f) {
3247 continue;
3248 }
3249 requestBuilder.set(CaptureRequest.CONTROL_AUTOFRAMING, mode);
3250 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, testZoomRatio);
3251 listener = new SimpleCaptureCallback();
3252 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
3253 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3254 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3255 Float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO);
3256 int autoframingState = getValueNotNull(result,
3257 CaptureResult.CONTROL_AUTOFRAMING_STATE);
3258 int videoStabilizationMode = getValueNotNull(result,
3259 CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
3260
3261 if (mode == CameraMetadata.CONTROL_AUTOFRAMING_ON) {
3262 if (expectedZoomRatio == 0.0f) {
3263 expectedZoomRatio = resultZoomRatio;
3264 }
3265 assertTrue("Autoframing state should be FRAMING or CONVERGED when AUTOFRAMING"
3266 + " is ON",
3267 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING
3268 || autoframingState
3269 == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED);
3270 assertTrue("Video Stabilization should be OFF when AUTOFRAMING is ON",
3271 videoStabilizationMode
3272 == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
3273 } else {
3274 expectedZoomRatio = testZoomRatio;
3275 assertTrue("Autoframing state should be INACTIVE when AUTOFRAMING is OFF",
3276 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_INACTIVE);
3277 }
3278
3279 verifyCaptureResultForKey(CaptureResult.CONTROL_AUTOFRAMING, mode, listener,
3280 NUM_FRAMES_VERIFIED);
3281
3282 mCollector.expectTrue(String.format(
3283 "Zoom Ratio in Capture Request does not match the expected zoom"
3284 + " ratio in Capture Result (expected = %f, actual = %f)",
3285 expectedZoomRatio, resultZoomRatio),
3286 Math.abs(expectedZoomRatio - resultZoomRatio) / expectedZoomRatio
3287 <= zoomErrorMargin);
3288 }
3289 }
3290 }
3291
3292 private void settingsOverrideTestByCamera() throws Exception {
3293 // Verify that settings override is OFF by default
3294 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
3295 CaptureRequest.Builder requestBuilder =
3296 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
3297 SimpleCaptureCallback listener = new SimpleCaptureCallback();
3298 startPreview(requestBuilder, maxPreviewSize, listener);
3299 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3300 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE,
3301 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_OFF, listener, NUM_FRAMES_VERIFIED);
3302
3303 // Turn settings override to ZOOM, and make sure it's reflected in the result
3304 requestBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE,
3305 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM);
3306 SimpleCaptureCallback listenerZoom = new SimpleCaptureCallback();
3307 mSession.setRepeatingRequest(requestBuilder.build(), listenerZoom, mHandler);
3308 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3309 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE,
3310 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED);
3311
3312 // Verify that the settings override result is ON if turned on from the beginning
3313 listenerZoom = new SimpleCaptureCallback();
3314 stopPreviewAndDrain();
3315
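// The preview is fully stopped and drained above, then restarted with
// CONTROL_SETTINGS_OVERRIDE_ZOOM already present in the repeating request, so the
// override should be reported from the start of the new stream (allowing the
// ZOOM_SOME_FRAMES non-overridden startup frames below).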
startPreview(requestBuilder, maxPreviewSize, listenerZoom); 3316 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3317 // Wait additional 2 frames to allow non-overridden 3318 // results during startup. 3319 final int ZOOM_SOME_FRAMES = 2; 3320 waitForNumResults(listenerZoom, ZOOM_SOME_FRAMES); 3321 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3322 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED); 3323 } 3324 3325 //---------------------------------------------------------------- 3326 //---------Below are common functions for all tests.-------------- 3327 //---------------------------------------------------------------- 3328 3329 /** 3330 * Enable exposure manual control and change exposure and sensitivity and 3331 * clamp the value into the supported range. 3332 */ 3333 private void changeExposure(CaptureRequest.Builder requestBuilder, 3334 long expTime, int sensitivity) { 3335 // Check if the max analog sensitivity is available and no larger than max sensitivity. The 3336 // max analog sensitivity is not actually used here. This is only an extra correctness 3337 // check. 3338 mStaticInfo.getMaxAnalogSensitivityChecked(); 3339 3340 expTime = mStaticInfo.getExposureClampToRange(expTime); 3341 sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity); 3342 3343 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 3344 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime); 3345 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity); 3346 } 3347 /** 3348 * Enable exposure manual control and change exposure time and 3349 * clamp the value into the supported range. 3350 * 3351 * <p>The sensitivity is set to default value.</p> 3352 */ 3353 private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) { 3354 changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY); 3355 } 3356 3357 /** 3358 * Get the exposure time array that contains multiple exposure time steps in 3359 * the exposure time range, in nanoseconds. 3360 */ 3361 private long[] getExposureTimeTestValues() { 3362 long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1]; 3363 long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS); 3364 long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS); 3365 3366 long range = maxExpTime - minExpTime; 3367 double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS; 3368 for (int i = 0; i < testValues.length; i++) { 3369 testValues[i] = maxExpTime - (long)(stepSize * i); 3370 testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]); 3371 } 3372 3373 return testValues; 3374 } 3375 3376 /** 3377 * Generate test focus distances in range of [0, minFocusDistance] in increasing order. 3378 * 3379 * @param repeatMin number of times minValue will be repeated. 3380 * @param repeatMax number of times maxValue will be repeated. 
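 * <p>For example (illustrative values): with NUM_TEST_FOCUS_DISTANCES = 10, a minimum
 * focus distance of 10 diopters, repeatMin = 1, and repeatMax = 1, the returned array is
 * {0, 0, 1, 2, ..., 9, 10, 10}.</p>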
3381 */
3382 private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) {
3383 int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax;
3384 float[] testValues = new float[totalCount];
3385 float minValue = 0;
3386 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
3387
3388 float range = maxValue - minValue;
3389 float stepSize = range / NUM_TEST_FOCUS_DISTANCES;
3390
3391 for (int i = 0; i < repeatMin; i++) {
3392 testValues[i] = minValue;
3393 }
3394 for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) {
3395 testValues[repeatMin+i] = minValue + stepSize * i;
3396 }
3397 for (int i = 0; i < repeatMax; i++) {
3398 testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] =
3399 maxValue;
3400 }
3401
3402 return testValues;
3403 }
3404
3405 /**
3406 * Get the sensitivity array that contains multiple sensitivity steps in the
3407 * sensitivity range.
3408 * <p>
3409 * The number of sensitivity test values is determined by
3410 * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and the sensitivity range, and is
3411 * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
3412 * </p>
3413 */
3414 private int[] getSensitivityTestValues() {
3415 int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
3416 DEFAULT_SENSITIVITY);
3417 int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
3418 DEFAULT_SENSITIVITY);
3419
3420 int range = maxSensitivity - minSensitivity;
3421 int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
3422 int numSteps = range / stepSize;
3423 // Bound the number of test steps to avoid a very long test.
3424 if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
3425 numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
3426 stepSize = range / numSteps;
3427 }
3428 int[] testValues = new int[numSteps + 1];
3429 for (int i = 0; i < testValues.length; i++) {
3430 testValues[i] = maxSensitivity - stepSize * i;
3431 testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
3432 }
3433
3434 return testValues;
3435 }
3436
3437 /**
3438 * Validate the AE manual control exposure time.
3439 *
3440 * <p>The result exposure time should be close to the requested value, and may only be rounded down, never up.</p>
3441 *
3442 * @param request Request exposure time
3443 * @param result Result exposure time
3444 */
3445 private void validateExposureTime(long request, long result) {
3446 long expTimeDelta = request - result;
3447 long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
3448 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
3449 // First, the result must not round up; second, it must be close enough to the request.
3450 mCollector.expectTrue("Exposure time is invalid for AE manual control test, request: "
3451 + request + " result: " + result,
3452 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
3453 }
3454
3455 /**
3456 * Validate AE manual control sensitivity.
3457 *
3458 * @param request Request sensitivity
3459 * @param result Result sensitivity
3460 */
3461 private void validateSensitivity(int request, int result) {
3462 float sensitivityDelta = request - result;
3463 float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
3464 // First, the result must not round up; second, it must be close enough to the request.
3465 mCollector.expectTrue("Sensitivity is invalid for AE manual control test, request: "
3466 + request + " result: " + result,
3467 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
3468 }
3469
3470 /**
3471 * Validate frame duration for a given capture.
3472 * 3473 * <p>Frame duration should be longer than exposure time.</p> 3474 * 3475 * @param result The capture result for a given capture 3476 */ 3477 private void validateFrameDurationForCapture(CaptureResult result) { 3478 long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 3479 long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION); 3480 if (VERBOSE) { 3481 Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime); 3482 } 3483 3484 mCollector.expectTrue(String.format("Frame duration (%d) should be longer than exposure" 3485 + " time (%d) for a given capture", frameDuration, expTime), 3486 frameDuration >= expTime); 3487 3488 validatePipelineDepth(result); 3489 } 3490 3491 /** 3492 * Basic verification for the control mode capture result. 3493 * 3494 * @param key The capture result key to be verified against 3495 * @param requestMode The request mode for this result 3496 * @param listener The capture listener to get capture results 3497 * @param numFramesVerified The number of capture results to be verified 3498 * @param threshold The threshold by which the request and result keys can differ 3499 */ 3500 private void verifyCaptureResultForKey(CaptureResult.Key<Float> key, float requestMode, 3501 SimpleCaptureCallback listener, int numFramesVerified, float threshold) { 3502 for (int i = 0; i < numFramesVerified; i++) { 3503 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3504 validatePipelineDepth(result); 3505 float resultMode = getValueNotNull(result, key); 3506 if (VERBOSE) { 3507 Log.v(TAG, "Expect value: " + requestMode + " result value: " 3508 + resultMode + " threshold " + threshold); 3509 } 3510 // Check that the request and result are within the given threshold of each other. 3511 // (expectEquals isn't the most intuitive function name.) 3512 mCollector.expectEquals("Key " + key.getName() + " request: " + requestMode + 3513 " result: " + resultMode + " not within threshold " + threshold + 3514 " of each other", requestMode, resultMode, threshold); 3515 } 3516 } 3517 3518 /** 3519 * Basic verification for the control mode capture result. 3520 * 3521 * @param key The capture result key to be verified against 3522 * @param requestMode The request mode for this result 3523 * @param listener The capture listener to get capture results 3524 * @param numFramesVerified The number of capture results to be verified 3525 */ 3526 private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode, 3527 SimpleCaptureCallback listener, int numFramesVerified) { 3528 for (int i = 0; i < numFramesVerified; i++) { 3529 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3530 validatePipelineDepth(result); 3531 T resultMode = getValueNotNull(result, key); 3532 if (VERBOSE) { 3533 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: " 3534 + resultMode.toString()); 3535 } 3536 mCollector.expectEquals("Key " + key.getName() + " result should match request", 3537 requestMode, resultMode); 3538 } 3539 } 3540 3541 /** 3542 * Basic verification that the value of a capture result key should be one of the expected 3543 * values. 
3544 * 3545 * @param key The capture result key to be verified against 3546 * @param expectedModes The list of any possible expected modes for this result 3547 * @param listener The capture listener to get capture results 3548 * @param numFramesVerified The number of capture results to be verified 3549 */ 3550 private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes, 3551 SimpleCaptureCallback listener, int numFramesVerified) { 3552 for (int i = 0; i < numFramesVerified; i++) { 3553 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3554 validatePipelineDepth(result); 3555 T resultMode = getValueNotNull(result, key); 3556 if (VERBOSE) { 3557 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: " 3558 + resultMode.toString()); 3559 } 3560 // Capture result should be one of the expected values. 3561 mCollector.expectContains(expectedModes, resultMode); 3562 } 3563 } 3564 3565 /** 3566 * Verify if the fps is slow down for given input request with certain 3567 * controls inside. 3568 * <p> 3569 * This method selects a max preview size for each fps range, and then 3570 * configure the preview stream. Preview is started with the max preview 3571 * size, and then verify if the result frame duration is in the frame 3572 * duration range. 3573 * </p> 3574 * 3575 * @param requestBuilder The request builder that contains post-processing 3576 * controls that could impact the output frame rate, such as 3577 * {@link CaptureRequest.NOISE_REDUCTION_MODE}. The value of 3578 * these controls must be set to some values such that the frame 3579 * rate is not slow down. 3580 * @param numFramesVerified The number of frames to be verified 3581 * @param fpsRanges The fps ranges to be verified 3582 */ 3583 private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder, 3584 int numFramesVerified, List<Range<Integer>> fpsRanges ) throws Exception { 3585 boolean frameDurationAvailable = true; 3586 // Allow a few frames for AE to settle on target FPS range 3587 final int NUM_FRAME_TO_SKIP = 6; 3588 float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN; 3589 if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) { 3590 frameDurationAvailable = false; 3591 // Allow a larger error margin (1.5%) for timestamps 3592 frameDurationErrorMargin = 0.015f; 3593 } 3594 if (mStaticInfo.isExternalCamera()) { 3595 // Allow a even larger error margin (15%) for external camera timestamps 3596 frameDurationErrorMargin = 0.15f; 3597 } 3598 3599 boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported(); 3600 Range<Integer> fpsRange; 3601 SimpleCaptureCallback resultListener; 3602 3603 for (int i = 0; i < fpsRanges.size(); i += 1) { 3604 fpsRange = fpsRanges.get(i); 3605 Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange); 3606 // If unable to find a preview size, then log the failure, and skip this run. 
3607 if (previewSz == null) {
3608 if (mStaticInfo.isCapabilitySupported(
3609 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
3610 mCollector.addMessage(String.format(
3611 "Unable to find a preview size supporting given fps range %s",
3612 fpsRange));
3613 }
3614 continue;
3615 }
3616
3617 if (VERBOSE) {
3618 Log.v(TAG, String.format("Test fps range %s for preview size %s",
3619 fpsRange, previewSz.toString()));
3620 }
3621 requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
3622 // Turn off auto antibanding to avoid exposure time and frame duration interference
3623 // from the antibanding algorithm.
3624 if (antiBandingOffIsSupported) {
3625 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
3626 CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
3627 } else {
3628 // The device doesn't implement the OFF mode; the test continues and still needs to
3629 // make sure that the antibanding algorithm doesn't slow down the fps.
3630 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
3631 " not slow down the frame rate regardless of its current antibanding" +
3632 " mode");
3633 }
3634
3635 resultListener = new SimpleCaptureCallback();
3636 startPreview(requestBuilder, previewSz, resultListener);
3637 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3638 // Wait several more frames for AE to settle on the target FPS range
3639 waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);
3640
3641 long[] frameDurationRange = new long[]{
3642 (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
3643 long captureTime = 0, prevCaptureTime = 0;
3644 long frameDurationSum = 0;
3645 for (int j = 0; j < numFramesVerified; j++) {
3646 long frameDuration = frameDurationRange[0];
3647 CaptureResult result =
3648 resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3649 validatePipelineDepth(result);
3650 if (frameDurationAvailable) {
3651 frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
3652 } else {
3653 // If frame duration is not available, check the timestamp delta instead.
3654 captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
3655 if (j > 0) {
3656 frameDuration = captureTime - prevCaptureTime;
3657 }
3658 prevCaptureTime = captureTime;
3659 }
3660 frameDurationSum += frameDuration;
3661 }
3662 long frameDurationAvg = frameDurationSum / numFramesVerified;
3663 mCollector.expectInRange(
3664 "Frame duration must be in the range of " +
3665 Arrays.toString(frameDurationRange),
3666 frameDurationAvg,
3667 (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
3668 (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
3669
3670 }
3671
3672 stopPreview();
3673 }
3674
3675 /**
3676 * Validate the pipeline depth result.
3677 *
3678 * @param result The capture result to get pipeline depth data
3679 */
3680 private void validatePipelineDepth(CaptureResult result) {
3681 final byte MIN_PIPELINE_DEPTH = 1;
3682 byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
3683 Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
3684 mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
3685 MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
3686 maxPipelineDepth);
3687 }
3688
3689 /**
3690 * Calculate the anti-flickering corrected exposure time.
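 * <p>For example (illustrative), with 50Hz anti-flickering the boundary is 10ms: a 24ms
 * exposure is corrected to 20ms, a 25ms exposure to 30ms, and a 9ms exposure is returned
 * unchanged.</p>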
3691 * <p>
3692 * If the input exposure time is very short (shorter than the flickering
3693 * boundary), which indicates the scene is bright and very likely outdoors,
3694 * skip the correction, as it doesn't make much sense in that case.
3695 * </p>
3696 * <p>
3697 * For long exposure times (larger than the flickering boundary), find the
3698 * exposure time that is the nearest multiple of the flickering boundary.
3699 * </p>
3700 *
3701 * @param flickeringMode The flickering mode
3702 * @param exposureTime The input exposureTime to be corrected
3703 * @return The anti-flickering corrected exposure time
3704 */
3705 private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
3706 if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
3707 throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
3708 }
3709 long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
3710 if (flickeringMode == ANTI_FLICKERING_60HZ) {
3711 flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
3712 }
3713
3714 if (exposureTime <= flickeringBoundary) {
3715 return exposureTime;
3716 }
3717
3718 // Round to the nearest multiple of the flickering boundary.
3719 long correctedExpTime = exposureTime + (flickeringBoundary / 2);
3720 correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
3721 return correctedExpTime;
3722 }
3723 }
3724