1 /* 2 * Copyright 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.cts; 18 19 import static android.hardware.camera2.CameraCharacteristics.*; 20 import static android.hardware.camera2.cts.CameraTestUtils.*; 21 22 import android.graphics.Point; 23 import android.graphics.PointF; 24 import android.graphics.Rect; 25 import android.graphics.SurfaceTexture; 26 import android.hardware.camera2.CameraCharacteristics; 27 import android.hardware.camera2.CameraDevice; 28 import android.hardware.camera2.CameraMetadata; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.TotalCaptureResult; 32 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback; 33 import android.hardware.camera2.cts.helpers.StaticMetadata; 34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase; 35 import android.hardware.camera2.params.BlackLevelPattern; 36 import android.hardware.camera2.params.Capability; 37 import android.hardware.camera2.params.ColorSpaceTransform; 38 import android.hardware.camera2.params.Face; 39 import android.hardware.camera2.params.LensShadingMap; 40 import android.hardware.camera2.params.MeteringRectangle; 41 import android.hardware.camera2.params.RggbChannelVector; 42 import android.hardware.camera2.params.TonemapCurve; 43 import android.hardware.cts.helpers.CameraUtils; 44 import android.media.Image; 45 import android.os.Build; 46 import android.os.Parcel; 47 import android.platform.test.annotations.AppModeFull; 48 import android.platform.test.annotations.RequiresFlagsEnabled; 49 import android.platform.test.flag.junit.CheckFlagsRule; 50 import android.platform.test.flag.junit.DeviceFlagsValueProvider; 51 import android.text.TextUtils; 52 import android.util.ArraySet; 53 import android.util.Log; 54 import android.util.Pair; 55 import android.util.Range; 56 import android.util.Rational; 57 import android.util.Size; 58 import android.view.Surface; 59 60 import com.android.compatibility.common.util.PropertyUtil; 61 import com.android.internal.camera.flags.Flags; 62 63 import org.junit.Rule; 64 import org.junit.Test; 65 import org.junit.runner.RunWith; 66 import org.junit.runners.Parameterized; 67 68 import java.nio.ByteBuffer; 69 import java.util.ArrayList; 70 import java.util.Arrays; 71 import java.util.List; 72 73 /** 74 * <p> 75 * Basic test for camera CaptureRequest key controls. 76 * </p> 77 * <p> 78 * Several test categories are covered: manual sensor control, 3A control, 79 * manual ISP control and other per-frame control and synchronization. 
80 * </p> 81 */ 82 83 @RunWith(Parameterized.class) 84 public class CaptureRequestTest extends Camera2SurfaceViewTestCase { 85 private static final String TAG = "CaptureRequestTest"; 86 private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE); 87 private static final int NUM_FRAMES_VERIFIED = 15; 88 private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60; 89 /** 30ms exposure time must be supported by full capability devices. */ 90 private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms 91 private static final int DEFAULT_SENSITIVITY = 100; 92 private static final int RGGB_COLOR_CHANNEL_COUNT = 4; 93 private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT; 94 private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT; 95 private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L; 96 private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms 97 private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation. 98 private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation. 99 private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation. 100 private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.06f; // 6%, Approximation. 101 private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3; 102 private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8; 103 private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100; 104 private static final int NUM_RESULTS_WAIT_TIMEOUT = 100; 105 private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8; 106 private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100; 107 private static final int NUM_PARTIAL_FRAMES_PFC = 2; 108 private static final int NUM_PARTIAL_FRAMES_NPFC = 6; 109 110 private static final int NUM_TEST_FOCUS_DISTANCES = 10; 111 private static final int NUM_FOCUS_DISTANCES_REPEAT = 3; 112 // 5 percent error margin for calibrated device 113 private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f; 114 // 25 percent error margin for uncalibrated device 115 private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f; 116 // 10 percent error margin for approximate device 117 private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f; 118 // 1 percent boundary margin for focus range verify 119 private static final float FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT = 0.01f; 120 private static final int ANTI_FLICKERING_50HZ = 1; 121 private static final int ANTI_FLICKERING_60HZ = 2; 122 // 5 percent error margin for resulting crop regions 123 private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f; 124 private static final float ZOOM_RATIO_ERROR_PERCENT_DELTA = 0.05f; 125 126 // 1 percent error margin for centering the crop region 127 private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f; 128 private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f; 129 private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f; 130 131 // Linear tone mapping curve example. 132 private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f}; 133 // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points. 
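// Each consecutive pair of values below is one (Pin, Pout) control point of the curve, with both coordinates normalized to [0, 1] as required by TonemapCurve.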
134 private static final float[] TONEMAP_CURVE_SRGB = { 135 0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f, 136 0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f, 137 0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f, 138 0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f 139 }; 140 private final Rational ZERO_R = new Rational(0, 1); 141 private final Rational ONE_R = new Rational(1, 1); 142 143 private static final int ZOOM_STEPS = 15; 144 145 private enum TorchSeqState { 146 RAMPING_UP, 147 FIRED, 148 RAMPING_DOWN 149 } 150 151 @Rule 152 public final CheckFlagsRule mCheckFlagsRule = 153 DeviceFlagsValueProvider.createCheckFlagsRule(); 154 155 @Override setUp()156 public void setUp() throws Exception { 157 super.setUp(); 158 } 159 160 @Override tearDown()161 public void tearDown() throws Exception { 162 super.tearDown(); 163 } 164 165 /** 166 * Test CaptureRequest settings parcelling. 167 */ 168 @Test testSettingsBinderParcel()169 public void testSettingsBinderParcel() throws Exception { 170 SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5); 171 Surface surface = new Surface(outputTexture); 172 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 173 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 174 try { 175 openDevice(cameraIdsUnderTest[i]); 176 CaptureRequest.Builder requestBuilder = 177 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 178 requestBuilder.addTarget(surface); 179 180 // Check regular/default case 181 CaptureRequest captureRequestOriginal = requestBuilder.build(); 182 Parcel p; 183 p = Parcel.obtain(); 184 captureRequestOriginal.writeToParcel(p, 0); 185 p.setDataPosition(0); 186 CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 187 assertEquals("Parcelled camera settings should match", 188 captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT), 189 new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW)); 190 p.recycle(); 191 192 // Check capture request with additional physical camera settings 193 String physicalId; 194 if (TextUtils.isDigitsOnly(cameraIdsUnderTest[i])) { 195 physicalId = new String( 196 Integer.toString(Integer.valueOf(cameraIdsUnderTest[i]) + 1)); 197 } else { 198 physicalId = new String(Integer.toString(i + 1)); 199 } 200 201 ArraySet<String> physicalIds = new ArraySet<String> (); 202 physicalIds.add(physicalId); 203 204 requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW, 205 physicalIds); 206 requestBuilder.addTarget(surface); 207 captureRequestOriginal = requestBuilder.build(); 208 p = Parcel.obtain(); 209 captureRequestOriginal.writeToParcel(p, 0); 210 p.setDataPosition(0); 211 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 212 assertEquals("Parcelled camera settings should match", 213 captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT), 214 new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW)); 215 p.recycle(); 216 217 // Check consistency between parcel write and read by stacking 2 218 // CaptureRequest objects when writing and reading. 
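// Writing the same request twice and reading it back twice checks that createFromParcel() consumes exactly one request's worth of parcel data, so the second read still yields a valid request.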
219 p = Parcel.obtain(); 220 captureRequestOriginal.writeToParcel(p, 0); 221 captureRequestOriginal.writeToParcel(p, 0); 222 p.setDataPosition(0); 223 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 224 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 225 p.recycle(); 226 227 // Check various invalid cases 228 p = Parcel.obtain(); 229 p.writeInt(-1); 230 p.setDataPosition(0); 231 try { 232 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 233 fail("should get RuntimeException due to invalid number of settings"); 234 } catch (RuntimeException e) { 235 // Expected 236 } 237 p.recycle(); 238 239 p = Parcel.obtain(); 240 p.writeInt(0); 241 p.setDataPosition(0); 242 try { 243 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 244 fail("should get RuntimeException due to invalid number of settings"); 245 } catch (RuntimeException e) { 246 // Expected 247 } 248 p.recycle(); 249 250 p = Parcel.obtain(); 251 p.writeInt(1); 252 p.setDataPosition(0); 253 try { 254 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 255 fail("should get RuntimeException due to absent settings"); 256 } catch (RuntimeException e) { 257 // Expected 258 } 259 p.recycle(); 260 } finally { 261 closeDevice(); 262 } 263 } 264 } 265 266 /** 267 * Test black level lock when exposure values change. 268 * <p> 269 * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the 270 * camera device should lock the black level. When the exposure values are changed, 271 * the camera device may need to reset its black level compensation, since changes 272 * to certain capture parameters (such as exposure time) can require the black level 273 * compensation to be recalculated. However, the black level must remain locked after exposure 274 * value changes (when requests have lock ON). 275 * </p> 276 */ 277 @Test testBlackLevelLock()278 public void testBlackLevelLock() throws Exception { 279 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 280 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 281 try { 282 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isCapabilitySupported( 283 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 284 continue; 285 } 286 287 openDevice(cameraIdsUnderTest[i]); 288 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 289 CaptureRequest.Builder requestBuilder = 290 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 291 292 // Start with default manual exposure time, with black level being locked. 293 requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true); 294 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY); 295 296 Size previewSz = 297 getMaxPreviewSize(mCamera.getId(), mCameraManager, 298 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 299 300 startPreview(requestBuilder, previewSz, listener); 301 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 302 // No lock OFF state is allowed as the exposure is not changed. 303 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0); 304 305 // Double the exposure time and gain, with black level still being locked. 306 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2); 307 listener = new SimpleCaptureCallback(); 308 startPreview(requestBuilder, previewSz, listener); 309 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 310 // Allow at most one lock OFF state as the exposure is changed once.
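// (maxLockOffCnt is the number of results tolerated to report BLACK_LEVEL_LOCK as OFF while the new exposure settings take effect.)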
311 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1); 312 313 stopPreview(); 314 } finally { 315 closeDevice(); 316 } 317 } 318 } 319 320 /** 321 * Test dynamic black/white levels if they are supported. 322 * 323 * <p> 324 * If the dynamic black and white levels are reported, test below: 325 * 1. the dynamic black and white levels shouldn't deviate from the global value too much 326 * for different sensitivities. 327 * 2. If the RAW_SENSOR and optical black regions are supported, capture RAW images and 328 * calculate the optical black level values. The reported dynamic black level should be 329 * close enough to the optical black level values. 330 * </p> 331 */ 332 @Test testDynamicBlackWhiteLevel()333 public void testDynamicBlackWhiteLevel() throws Exception { 334 for (String id : getCameraIdsUnderTest()) { 335 try { 336 if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) { 337 continue; 338 } 339 openDevice(id); 340 dynamicBlackWhiteLevelTestByCamera(); 341 } finally { 342 closeDevice(); 343 } 344 } 345 } 346 347 /** 348 * Basic lens shading map request test. 349 * <p> 350 * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will 351 * be applied by the camera device, and an identity lens shading map data 352 * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON. 353 * </p> 354 * <p> 355 * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction 356 * will be applied by the camera device. The lens shading map data can be 357 * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON. 358 * </p> 359 */ 360 @Test testLensShadingMap()361 public void testLensShadingMap() throws Exception { 362 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 363 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 364 try { 365 StaticMetadata staticInfo = mAllStaticInfo.get(cameraIdsUnderTest[i]); 366 if (!staticInfo.isManualLensShadingMapSupported()) { 367 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 368 " doesn't support lens shading controls, skipping test"); 369 continue; 370 } 371 372 List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject( 373 staticInfo.getAvailableLensShadingMapModesChecked())); 374 375 if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) { 376 continue; 377 } 378 379 openDevice(cameraIdsUnderTest[i]); 380 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 381 CaptureRequest.Builder requestBuilder = 382 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 383 requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 384 STATISTICS_LENS_SHADING_MAP_MODE_ON); 385 386 Size previewSz = 387 getMaxPreviewSize(mCamera.getId(), mCameraManager, 388 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 389 List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject( 390 mStaticInfo.getAvailableLensShadingModesChecked())); 391 392 // Shading map mode OFF, lensShadingMapMode ON, camera device 393 // should output unity maps. 
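// A unity map reports a gain of 1.0 for every sample in all four color channels, i.e. no lens shading correction is applied.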
394 if (lensShadingModes.contains(SHADING_MODE_OFF)) { 395 requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF); 396 listener = new SimpleCaptureCallback(); 397 startPreview(requestBuilder, previewSz, listener); 398 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 399 verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF); 400 } 401 402 // Shading map mode FAST, lensShadingMapMode ON, camera device 403 // should output valid maps. 404 if (lensShadingModes.contains(SHADING_MODE_FAST)) { 405 requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST); 406 407 listener = new SimpleCaptureCallback(); 408 startPreview(requestBuilder, previewSz, listener); 409 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 410 // Allow at most one lock OFF state as the exposure is changed once. 411 verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST); 412 } 413 414 // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device 415 // should output valid maps. 416 if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) { 417 requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY); 418 419 listener = new SimpleCaptureCallback(); 420 startPreview(requestBuilder, previewSz, listener); 421 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 422 verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY); 423 } 424 425 stopPreview(); 426 } finally { 427 closeDevice(); 428 } 429 } 430 } 431 432 /** 433 * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control. 434 * <p> 435 * Test all available anti-banding modes, check if the exposure time adjustment is 436 * correct. 437 * </p> 438 */ 439 @Test testAntiBandingModes()440 public void testAntiBandingModes() throws Exception { 441 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 442 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 443 try { 444 // Without manual sensor control, exposure time cannot be verified 445 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isCapabilitySupported( 446 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 447 continue; 448 } 449 450 openDevice(cameraIdsUnderTest[i]); 451 int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked(); 452 453 Size previewSz = 454 getMaxPreviewSize(mCamera.getId(), mCameraManager, 455 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 456 457 for (int mode : modes) { 458 antiBandingTestByMode(previewSz, mode); 459 } 460 } finally { 461 closeDevice(); 462 } 463 } 464 465 } 466 467 /** 468 * Test AE and AE priority modes with AE lock. 469 * 470 * <p> 471 * For AE lock, when it is locked, exposure parameters shouldn't be changed. 472 * For AE modes, each mode should satisfy the per frame controls defined in 473 * API specifications. 474 * </p> 475 */ 476 @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests testAeModeAndLock()477 public void testAeModeAndLock() throws Exception { 478 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 479 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 480 try { 481 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 482 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 483 " does not support color outputs, skipping"); 484 continue; 485 } 486 487 openDevice(cameraIdsUnderTest[i]); 488 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 489 490 // Update preview surface with given size for all sub-tests. 
491 updatePreviewSurface(maxPreviewSz); 492 493 // Test aeMode and lock 494 int[] aeModes = mStaticInfo.getAeAvailableModesChecked(); 495 for (int aeMode : aeModes) { 496 // Test ae mode with lock without priority mode enabled 497 aeModeAndLockTestByMode(aeMode, CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF); 498 499 if (Flags.aePriority()) { 500 int[] aePriorityModes = mStaticInfo.getAeAvailablePriorityModesChecked(); 501 502 // LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY not supported with AE priority mode 503 if (aeMode == 504 CameraMetadata 505 .CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY || 506 aeMode == CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF) { 507 continue; 508 } 509 for (int aePriorityMode : aePriorityModes) { 510 // Test ae mode with lock and priority mode enabled 511 aeModeAndLockTestByMode(aeMode, aePriorityMode); 512 } 513 } 514 } 515 516 } finally { 517 closeDevice(); 518 } 519 } 520 } 521 522 /** Test {@link CaptureRequest#FLASH_MODE} control. 523 * <p> 524 * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control 525 * and {@link CaptureResult#FLASH_STATE} result. 526 * </p> 527 */ 528 @Test testFlashControl()529 public void testFlashControl() throws Exception { 530 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 531 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 532 try { 533 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 534 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 535 " does not support color outputs, skipping"); 536 continue; 537 } 538 539 openDevice(cameraIdsUnderTest[i]); 540 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 541 CaptureRequest.Builder requestBuilder = 542 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 543 544 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 545 546 startPreview(requestBuilder, maxPreviewSz, listener); 547 548 // Flash control can only be used when the AE mode is ON or OFF. 549 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON); 550 551 // LEGACY won't support AE mode OFF 552 boolean aeOffModeSupported = false; 553 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) { 554 if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 555 aeOffModeSupported = true; 556 } 557 } 558 if (aeOffModeSupported) { 559 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF); 560 } 561 562 stopPreview(); 563 } finally { 564 closeDevice(); 565 } 566 } 567 } 568 569 /** 570 * Test that the flash can be successfully turned off given various initial and final 571 * AE_CONTROL modes for repeating CaptureRequests. 572 */ 573 @Test testFlashTurnOff()574 public void testFlashTurnOff() throws Exception { 575 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 576 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 577 try { 578 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 579 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 580 " does not support color outputs, skipping"); 581 continue; 582 } 583 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).hasFlash()) { 584 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 585 " does not support flash, skipping"); 586 continue; 587 } 588 openDevice(cameraIdsUnderTest[i]); 589 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 590 CaptureRequest.Builder requestBuilder = 591 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 592 593 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 
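// flashTurnOffTest() is expected to first drive the flash with the initial AE control (ON_ALWAYS_FLASH here) and then switch the repeating request to the given 'off' AE control, verifying that the flash actually turns off; three different final AE modes are exercised below.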
594 595 startPreview(requestBuilder, maxPreviewSz, listener); 596 boolean isLegacy = CameraUtils.isLegacyHAL(mCameraManager, cameraIdsUnderTest[i]); 597 flashTurnOffTest(listener, isLegacy, 598 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 599 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH); 600 601 flashTurnOffTest(listener, isLegacy, 602 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 603 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); 604 605 flashTurnOffTest(listener, isLegacy, 606 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 607 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE); 608 609 stopPreview(); 610 } finally { 611 closeDevice(); 612 } 613 } 614 615 } 616 617 /** 618 * Test face detection modes and results. 619 */ 620 @Test testFaceDetection()621 public void testFaceDetection() throws Exception { 622 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 623 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 624 try { 625 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 626 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 627 " does not support color outputs, skipping"); 628 continue; 629 } 630 openDevice(cameraIdsUnderTest[i]); 631 faceDetectionTestByCamera(); 632 } finally { 633 closeDevice(); 634 } 635 } 636 } 637 638 /** 639 * Test tone map modes and controls. 640 */ 641 @Test testToneMapControl()642 public void testToneMapControl() throws Exception { 643 for (String id : getCameraIdsUnderTest()) { 644 try { 645 if (!mAllStaticInfo.get(id).isManualToneMapSupported()) { 646 Log.i(TAG, "Camera " + id + 647 " doesn't support tone mapping controls, skipping test"); 648 continue; 649 } 650 openDevice(id); 651 toneMapTestByCamera(); 652 } finally { 653 closeDevice(); 654 } 655 } 656 } 657 658 /** 659 * Test CCT color correction mode and color temperature, color tint controls 660 */ 661 @Test 662 @RequiresFlagsEnabled(Flags.FLAG_COLOR_TEMPERATURE) testCctColorCorrectionControl()663 public void testCctColorCorrectionControl() throws Exception { 664 for (String id : getCameraIdsUnderTest()) { 665 try { 666 if (!mAllStaticInfo.get(id).isCctModeSupported()) { 667 Log.i(TAG, "Camera " + id + 668 " doesn't support CCT color correction mode, skipping test"); 669 continue; 670 } 671 openDevice(id); 672 cctColorCorrectionTestByCamera(); 673 } finally { 674 closeDevice(); 675 } 676 } 677 } 678 679 /** 680 * Test color correction modes and controls. 681 */ 682 @Test testColorCorrectionControl()683 public void testColorCorrectionControl() throws Exception { 684 for (String id : getCameraIdsUnderTest()) { 685 try { 686 if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) { 687 Log.i(TAG, "Camera " + id + 688 " doesn't support color correction controls, skipping test"); 689 continue; 690 } 691 openDevice(id); 692 colorCorrectionTestByCamera(); 693 } finally { 694 closeDevice(); 695 } 696 } 697 } 698 699 /** 700 * Test edge mode control for Fps not exceeding 30. 
701 */ 702 @Test testEdgeModeControl()703 public void testEdgeModeControl() throws Exception { 704 for (String id : getCameraIdsUnderTest()) { 705 try { 706 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 707 Log.i(TAG, "Camera " + id + 708 " doesn't support EDGE_MODE controls, skipping test"); 709 continue; 710 } 711 712 openDevice(id); 713 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 714 edgeModesTestByCamera(fpsRanges); 715 } finally { 716 closeDevice(); 717 } 718 } 719 } 720 721 /** 722 * Test edge mode control for Fps greater than 30. 723 */ 724 @Test testEdgeModeControlFastFps()725 public void testEdgeModeControlFastFps() throws Exception { 726 for (String id : getCameraIdsUnderTest()) { 727 try { 728 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 729 Log.i(TAG, "Camera " + id + 730 " doesn't support EDGE_MODE controls, skipping test"); 731 continue; 732 } 733 734 openDevice(id); 735 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 736 edgeModesTestByCamera(fpsRanges); 737 } finally { 738 closeDevice(); 739 } 740 } 741 742 } 743 744 /** 745 * Test focus distance control. 746 */ 747 @Test 748 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testFocusDistanceControl()749 public void testFocusDistanceControl() throws Exception { 750 for (String id : getCameraIdsUnderTest()) { 751 try { 752 StaticMetadata staticInfo = mAllStaticInfo.get(id); 753 if (!staticInfo.hasFocuser()) { 754 Log.i(TAG, "Camera " + id + " has no focuser, skipping test"); 755 continue; 756 } 757 758 if (!staticInfo.isCapabilitySupported( 759 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 760 Log.i(TAG, "Camera " + id + 761 " does not support MANUAL_SENSOR, skipping test"); 762 continue; 763 } 764 765 openDevice(id); 766 focusDistanceTestByCamera(); 767 } finally { 768 closeDevice(); 769 } 770 } 771 } 772 773 /** 774 * Test noise reduction mode for fps ranges not exceeding 30 775 */ 776 @Test testNoiseReductionModeControl()777 public void testNoiseReductionModeControl() throws Exception { 778 for (String id : getCameraIdsUnderTest()) { 779 try { 780 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 781 Log.i(TAG, "Camera " + id + 782 " doesn't support noise reduction mode, skipping test"); 783 continue; 784 } 785 786 openDevice(id); 787 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 788 noiseReductionModeTestByCamera(fpsRanges); 789 } finally { 790 closeDevice(); 791 } 792 } 793 } 794 795 /** 796 * Test noise reduction mode for fps ranges greater than 30 797 */ 798 @Test testNoiseReductionModeControlFastFps()799 public void testNoiseReductionModeControlFastFps() throws Exception { 800 for (String id : getCameraIdsUnderTest()) { 801 try { 802 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 803 Log.i(TAG, "Camera " + id + 804 " doesn't support noise reduction mode, skipping test"); 805 continue; 806 } 807 808 openDevice(id); 809 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 810 noiseReductionModeTestByCamera(fpsRanges); 811 } finally { 812 closeDevice(); 813 } 814 } 815 } 816 817 /** 818 * Test AWB lock control. 
819 * 820 * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p> 821 */ 822 @Test testAwbModeAndLock()823 public void testAwbModeAndLock() throws Exception { 824 for (String id : getCameraIdsUnderTest()) { 825 try { 826 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 827 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 828 continue; 829 } 830 openDevice(id); 831 awbModeAndLockTestByCamera(); 832 } finally { 833 closeDevice(); 834 } 835 } 836 } 837 838 /** 839 * Test different AF modes. 840 */ 841 @Test testAfModes()842 public void testAfModes() throws Exception { 843 for (String id : getCameraIdsUnderTest()) { 844 try { 845 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 846 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 847 continue; 848 } 849 openDevice(id); 850 afModeTestByCamera(); 851 } finally { 852 closeDevice(); 853 } 854 } 855 } 856 857 /** 858 * Test video and optical stabilizations. 859 */ 860 @Test testCameraStabilizations()861 public void testCameraStabilizations() throws Exception { 862 for (String id : getCameraIdsUnderTest()) { 863 try { 864 StaticMetadata staticInfo = mAllStaticInfo.get(id); 865 List<Key<?>> keys = staticInfo.getCharacteristics().getKeys(); 866 if (!(keys.contains( 867 CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) || 868 keys.contains( 869 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) { 870 Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes"); 871 continue; 872 } 873 if (!staticInfo.isColorOutputSupported()) { 874 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 875 continue; 876 } 877 openDevice(id); 878 stabilizationTestByCamera(); 879 } finally { 880 closeDevice(); 881 } 882 } 883 } 884 885 /** 886 * Test digitalZoom (center wise and non-center wise), validate the returned crop regions. 887 * The max preview size is used for each camera. 888 */ 889 @Test testDigitalZoom()890 public void testDigitalZoom() throws Exception { 891 for (String id : getCameraIdsUnderTest()) { 892 try { 893 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 894 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 895 continue; 896 } 897 openDevice(id); 898 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 899 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/false); 900 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/true); 901 } finally { 902 closeDevice(); 903 } 904 } 905 } 906 907 /** 908 * Test zoom using CONTROL_ZOOM_RATIO, validate the returned crop regions and zoom ratio. 909 * The max preview size is used for each camera. 910 */ 911 @Test testZoomRatio()912 public void testZoomRatio() throws Exception { 913 for (String id : getCameraIdsUnderTest()) { 914 try { 915 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 916 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 917 continue; 918 } 919 openDevice(id); 920 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 921 zoomRatioTestByCamera(maxPreviewSize, /*useZoomRatioMethod*/false); 922 } finally { 923 closeDevice(); 924 } 925 } 926 } 927 928 /** 929 * Test zoom using CONTROL_ZOOM_RATIO with CONTROL_ZOOM_METHOD set explicitly to ZOOM_RATIO, 930 * validate the returned crop regions and zoom ratio. 931 * 932 * The max preview size is used for each camera. 
933 */ 934 @Test 935 @RequiresFlagsEnabled(Flags.FLAG_ZOOM_METHOD) testZoomRatioWithMethod()936 public void testZoomRatioWithMethod() throws Exception { 937 for (String id : getCameraIdsUnderTest()) { 938 try { 939 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 940 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 941 continue; 942 } 943 openDevice(id); 944 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 945 zoomRatioTestByCamera(maxPreviewSize, /*useZoomRatioMethod*/true); 946 } finally { 947 closeDevice(); 948 } 949 } 950 } 951 952 /** 953 * Test that zoom doesn't incur non-monotonic timestamp sequence 954 * 955 * Camera API requires that camera timestamps monotonically increase. 956 */ 957 @Test 958 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testZoomTimestampIncrease()959 public void testZoomTimestampIncrease() throws Exception { 960 if (PropertyUtil.getFirstApiLevel() <= Build.VERSION_CODES.UPSIDE_DOWN_CAKE) { 961 // Only run test for first API level V or higher 962 return; 963 } 964 965 for (String id : getCameraIdsUnderTest()) { 966 try { 967 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 968 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 969 continue; 970 } 971 openDevice(id); 972 zoomTimestampIncreaseTestByCamera(); 973 } finally { 974 closeDevice(); 975 } 976 } 977 } 978 979 /** 980 * Test digital zoom and all preview size combinations. 981 * TODO: this and above test should all be moved to preview test class. 982 */ 983 @Test testDigitalZoomPreviewCombinations()984 public void testDigitalZoomPreviewCombinations() throws Exception { 985 for (String id : getCameraIdsUnderTest()) { 986 try { 987 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 988 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 989 continue; 990 } 991 openDevice(id); 992 digitalZoomPreviewCombinationTestByCamera(); 993 } finally { 994 closeDevice(); 995 } 996 } 997 } 998 999 /** 1000 * Test scene mode controls. 1001 */ 1002 @Test testSceneModes()1003 public void testSceneModes() throws Exception { 1004 for (String id : getCameraIdsUnderTest()) { 1005 try { 1006 if (mAllStaticInfo.get(id).isSceneModeSupported()) { 1007 openDevice(id); 1008 sceneModeTestByCamera(); 1009 } 1010 } finally { 1011 closeDevice(); 1012 } 1013 } 1014 } 1015 1016 /** 1017 * Test effect mode controls. 1018 */ 1019 @Test testEffectModes()1020 public void testEffectModes() throws Exception { 1021 for (String id : getCameraIdsUnderTest()) { 1022 try { 1023 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 1024 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1025 continue; 1026 } 1027 openDevice(id); 1028 effectModeTestByCamera(); 1029 } finally { 1030 closeDevice(); 1031 } 1032 } 1033 } 1034 1035 /** 1036 * Test extended scene mode controls. 1037 */ 1038 @Test testExtendedSceneModes()1039 public void testExtendedSceneModes() throws Exception { 1040 for (String id : getCameraIdsUnderTest()) { 1041 try { 1042 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 1043 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1044 continue; 1045 } 1046 openDevice(id); 1047 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 1048 extendedSceneModeTestByCamera(fpsRanges); 1049 } finally { 1050 closeDevice(); 1051 } 1052 } 1053 } 1054 1055 /** 1056 * Test basic auto-framing. 
1057 */ 1058 @Test testAutoframing()1059 public void testAutoframing() throws Exception { 1060 for (String id : getCameraIdsUnderTest()) { 1061 try { 1062 if (!mAllStaticInfo.get(id).isAutoframingSupported()) { 1063 Log.i(TAG, "Camera " + id + " does not support auto-framing, skipping"); 1064 continue; 1065 } 1066 openDevice(id); 1067 autoframingTestByCamera(); 1068 } finally { 1069 closeDevice(); 1070 } 1071 } 1072 } 1073 1074 /** 1075 * Test manual flash strength level control. 1076 */ 1077 @Test testManualFlashStrengthLevelControl()1078 public void testManualFlashStrengthLevelControl() throws Exception { 1079 for (String id : getCameraIdsUnderTest()) { 1080 try { 1081 if (!mAllStaticInfo.get(id).isManualFlashStrengthControlSupported()) { 1082 Log.i(TAG, "Camera " + id + " does not support manual flash " 1083 + "strength control, skipping"); 1084 continue; 1085 } 1086 openDevice(id); 1087 manualFlashStrengthControlTestByCamera(); 1088 } finally { 1089 closeDevice(); 1090 } 1091 } 1092 } 1093 1094 /** 1095 * Test AE mode ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY. 1096 */ 1097 @Test 1098 @RequiresFlagsEnabled(Flags.FLAG_CAMERA_AE_MODE_LOW_LIGHT_BOOST) testAeModeOnLowLightBoostBrightnessPriority()1099 public void testAeModeOnLowLightBoostBrightnessPriority() throws Exception { 1100 for (String id : getCameraIdsUnderTest()) { 1101 try { 1102 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1103 if (!staticInfo.isAeModeLowLightBoostSupported()) { 1104 Log.i(TAG, "Camera " + id + " does not have AE mode " 1105 + "ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY, skipping"); 1106 continue; 1107 } 1108 openDevice(id); 1109 testAeModeOnLowLightBoostBrightnessPriorityTestByCamera(); 1110 } finally { 1111 closeDevice(); 1112 } 1113 } 1114 } 1115 1116 /** 1117 * Test AE priority modes 1118 */ 1119 @Test 1120 @RequiresFlagsEnabled(Flags.FLAG_AE_PRIORITY) testAePriorityModes()1121 public void testAePriorityModes() throws Exception { 1122 for (String id : getCameraIdsUnderTest()) { 1123 try { 1124 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1125 int[] aePriorityModes = staticInfo.getAeAvailablePriorityModesChecked(); 1126 1127 openDevice(id); 1128 for (int aePriorityMode : aePriorityModes) { 1129 if (aePriorityMode == CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF) { 1130 continue; 1131 } 1132 testAePriorityModesByCamera(aePriorityMode); 1133 } 1134 } finally { 1135 closeDevice(); 1136 } 1137 } 1138 } 1139 1140 /** 1141 * Test settings override controls. 1142 */ 1143 @Test testSettingsOverrides()1144 public void testSettingsOverrides() throws Exception { 1145 for (String id : getCameraIdsUnderTest()) { 1146 try { 1147 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1148 if (!staticInfo.isColorOutputSupported()) { 1149 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1150 continue; 1151 } 1152 if (!staticInfo.isZoomSettingsOverrideSupported()) { 1153 Log.i(TAG, "Camera " + id + " does not support zoom overrides, skipping"); 1154 continue; 1155 } 1156 openDevice(id); 1157 settingsOverrideTestByCamera(); 1158 } finally { 1159 closeDevice(); 1160 } 1161 } 1162 } 1163 1164 // TODO: add 3A state machine test. 1165 1166 /** 1167 * Per camera dynamic black and white level test. 
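* <p>Captures frames at several sensitivities and compares the reported dynamic black/white levels against the fixed black/white levels, and, when RAW output with optical black regions is supported, against optical black averages measured from RAW captures.</p>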
1168 */ dynamicBlackWhiteLevelTestByCamera()1169 private void dynamicBlackWhiteLevelTestByCamera() throws Exception { 1170 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1171 SimpleImageReaderListener imageListener = null; 1172 CaptureRequest.Builder previewBuilder = 1173 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1174 CaptureRequest.Builder rawBuilder = null; 1175 Size previewSize = 1176 getMaxPreviewSize(mCamera.getId(), mCameraManager, 1177 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 1178 Size rawSize = null; 1179 boolean canCaptureBlackRaw = 1180 mStaticInfo.isCapabilitySupported( 1181 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) && 1182 mStaticInfo.isOpticalBlackRegionSupported(); 1183 if (canCaptureBlackRaw) { 1184 // Capture Raw16, then calculate the optical black, and use it to check with the dynamic 1185 // black level. 1186 rawBuilder = 1187 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 1188 rawSize = mStaticInfo.getRawDimensChecked(); 1189 imageListener = new SimpleImageReaderListener(); 1190 prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize, 1191 resultListener, imageListener); 1192 } else { 1193 startPreview(previewBuilder, previewSize, resultListener); 1194 } 1195 1196 // Capture a sequence of frames with different sensitivities and validate the black/white 1197 // level values 1198 int[] sensitivities = getSensitivityTestValuesSorted(); 1199 float[][] dynamicBlackLevels = new float[sensitivities.length][]; 1200 int[] dynamicWhiteLevels = new int[sensitivities.length]; 1201 float[][] opticalBlackLevels = new float[sensitivities.length][]; 1202 for (int i = 0; i < sensitivities.length; i++) { 1203 CaptureResult result = null; 1204 if (canCaptureBlackRaw) { 1205 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1206 CaptureRequest rawRequest = rawBuilder.build(); 1207 mSession.capture(rawRequest, resultListener, mHandler); 1208 result = resultListener.getCaptureResultForRequest(rawRequest, 1209 NUM_RESULTS_WAIT_TIMEOUT); 1210 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS); 1211 1212 // Get max (area-wise) optical black region 1213 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get( 1214 CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS); 1215 Rect maxRegion = opticalBlackRegions[0]; 1216 for (Rect region : opticalBlackRegions) { 1217 if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) { 1218 maxRegion = region; 1219 } 1220 } 1221 1222 // Get average black pixel values in the region (region is multiple of 2x2) 1223 Image.Plane rawPlane = rawImage.getPlanes()[0]; 1224 ByteBuffer rawBuffer = rawPlane.getBuffer(); 1225 float[] avgBlackLevels = {0, 0, 0, 0}; 1226 final int rowSize = rawPlane.getRowStride(); 1227 final int bytePerPixel = rawPlane.getPixelStride(); 1228 if (VERBOSE) { 1229 Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " + 1230 rawPlane.getRowStride()); 1231 } 1232 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) { 1233 for (int col = maxRegion.left; col < maxRegion.right; col += 2) { 1234 int startOffset = row * rowSize + col * bytePerPixel; 1235 avgBlackLevels[0] += rawBuffer.getShort(startOffset); 1236 avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel); 1237 startOffset += rowSize; 1238 avgBlackLevels[2] += rawBuffer.getShort(startOffset); 1239 avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel); 1240 } 1241 } 1242 
int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2); 1243 for (int m = 0; m < avgBlackLevels.length; m++) { 1244 avgBlackLevels[m] /= numBlackBlocks; 1245 } 1246 opticalBlackLevels[i] = avgBlackLevels; 1247 1248 if (VERBOSE) { 1249 Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s", 1250 sensitivities[i], Arrays.toString(avgBlackLevels))); 1251 } 1252 1253 rawImage.close(); 1254 } else { 1255 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1256 CaptureRequest previewRequest = previewBuilder.build(); 1257 mSession.capture(previewRequest, resultListener, mHandler); 1258 result = resultListener.getCaptureResultForRequest(previewRequest, 1259 NUM_RESULTS_WAIT_TIMEOUT); 1260 } 1261 1262 dynamicBlackLevels[i] = getValueNotNull(result, 1263 CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL); 1264 dynamicWhiteLevels[i] = getValueNotNull(result, 1265 CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL); 1266 } 1267 1268 if (VERBOSE) { 1269 Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities)); 1270 Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)); 1271 Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels)); 1272 if (canCaptureBlackRaw) { 1273 Log.v(TAG, "Optical black level results " + 1274 Arrays.deepToString(opticalBlackLevels)); 1275 } 1276 } 1277 1278 // check the dynamic black level against global black level. 1279 // Implicit guarantee: if the dynamic black level is supported, fixed black level must be 1280 // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions). 1281 BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get( 1282 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN); 1283 int[] fixedBlackLevels = new int[4]; 1284 int fixedWhiteLevel = mStaticInfo.getCharacteristics().get( 1285 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL); 1286 blackPattern.copyTo(fixedBlackLevels, 0); 1287 float maxBlackDeviation = 0; 1288 int maxWhiteDeviation = 0; 1289 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1290 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1291 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) { 1292 maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]); 1293 } 1294 } 1295 if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) { 1296 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel); 1297 } 1298 } 1299 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level" 1300 + " exceed threshold." 1301 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels), 1302 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation); 1303 mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceed threshold." 
1304 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels), 1305 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, 1306 (float)maxWhiteDeviation); 1307 1308 // Validate against optical black levels if it is available 1309 if (canCaptureBlackRaw) { 1310 maxBlackDeviation = 0; 1311 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1312 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1313 if (maxBlackDeviation < 1314 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) { 1315 maxBlackDeviation = 1316 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]); 1317 } 1318 } 1319 } 1320 1321 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black" 1322 + " exceed threshold." 1323 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels) 1324 + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels), 1325 fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN, 1326 maxBlackDeviation); 1327 } 1328 } 1329 noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges)1330 private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 1331 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1332 CaptureRequest.Builder requestBuilder = 1333 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1334 int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked(); 1335 1336 for (int mode : availableModes) { 1337 requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode); 1338 1339 // Test that OFF and FAST mode should not slow down the frame rate. 1340 if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF || 1341 mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) { 1342 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 1343 } 1344 1345 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1346 startPreview(requestBuilder, maxPrevSize, resultListener); 1347 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler); 1348 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1349 1350 verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode, 1351 resultListener, NUM_FRAMES_VERIFIED); 1352 } 1353 1354 stopPreview(); 1355 } 1356 focusDistanceTestByCamera()1357 private void focusDistanceTestByCamera() throws Exception { 1358 CaptureRequest.Builder requestBuilder = 1359 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1360 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF); 1361 int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked(); 1362 float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED; 1363 if (calibrationStatus == 1364 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1365 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED; 1366 } else if (calibrationStatus == 1367 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) { 1368 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE; 1369 } 1370 1371 // Test changing focus distance with repeating request 1372 focusDistanceTestRepeating(requestBuilder, errorMargin); 1373 1374 if (calibrationStatus == 1375 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1376 // Test changing focus distance with burst request 1377 focusDistanceTestBurst(requestBuilder, errorMargin); 1378 } 1379 } 1380 verifyFocusRange(CaptureResult result, float focusDistance)1381 private void 
verifyFocusRange(CaptureResult result, float focusDistance) { 1382 if (PropertyUtil.getVendorApiLevel() < 33) { 1383 // Skip, as this only applies to UDC and above 1384 if (VERBOSE) { 1385 Log.v(TAG, "Skipping FOCUS_RANGE verification due to API level"); 1386 } 1387 return; 1388 } 1389 1390 Pair<Float, Float> focusRange = result.get(CaptureResult.LENS_FOCUS_RANGE); 1391 if (focusRange != null) { 1392 // Prevent differences in floating point precision between manual request and HAL 1393 // result, some margin need to be considered for focusRange.near and far check 1394 float focusRangeNear = focusRange.first * (1.0f + FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1395 float focusRangeFar = focusRange.second * (1.0f - FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1396 1397 mCollector.expectLessOrEqual("Focus distance should be less than or equal to " 1398 + "FOCUS_RANGE.near (with margin)", focusRangeNear, focusDistance); 1399 mCollector.expectGreaterOrEqual("Focus distance should be greater than or equal to " 1400 + "FOCUS_RANGE.far (with margin)", focusRangeFar, focusDistance); 1401 } else if (VERBOSE) { 1402 Log.v(TAG, "FOCUS_RANGE undefined, skipping verification"); 1403 } 1404 } 1405 focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, float errorMargin)1406 private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, 1407 float errorMargin) throws Exception { 1408 CaptureRequest request; 1409 float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0); 1410 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1411 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1412 startPreview(requestBuilder, maxPrevSize, resultListener); 1413 1414 float[] resultDistances = new float[testDistances.length]; 1415 int[] resultLensStates = new int[testDistances.length]; 1416 1417 // Collect results 1418 for (int i = 0; i < testDistances.length; i++) { 1419 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1420 request = requestBuilder.build(); 1421 resultListener = new SimpleCaptureCallback(); 1422 mSession.setRepeatingRequest(request, resultListener, mHandler); 1423 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1424 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1425 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1426 CaptureResult result = resultListener.getCaptureResultForRequest(request, 1427 NUM_RESULTS_WAIT_TIMEOUT); 1428 1429 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1430 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1431 1432 verifyFocusRange(result, resultDistances[i]); 1433 1434 if (VERBOSE) { 1435 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i] 1436 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1437 } 1438 } 1439 1440 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1441 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0, 1442 errorMargin); 1443 1444 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1445 1446 // Test hyperfocal distance optionally 1447 float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1448 if (hyperFocalDistance > 0) { 1449 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance); 1450 request = requestBuilder.build(); 1451 resultListener = new SimpleCaptureCallback(); 1452 mSession.setRepeatingRequest(request, 
resultListener, mHandler); 1453 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1454 1455 // Then wait for the lens.state to be stationary. 1456 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1457 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1458 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1459 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1460 mCollector.expectInRange("Focus distance for hyper focal should be close enough to" + 1461 " requested value", focusDistance, 1462 hyperFocalDistance * (1.0f - errorMargin), 1463 hyperFocalDistance * (1.0f + errorMargin)); 1464 } 1465 } 1466 } 1467 focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, float errorMargin)1468 private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, 1469 float errorMargin) throws Exception { 1470 1471 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1472 float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT, 1473 NUM_FOCUS_DISTANCES_REPEAT); 1474 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1475 startPreview(requestBuilder, maxPrevSize, resultListener); 1476 1477 float[] resultDistances = new float[testDistances.length]; 1478 int[] resultLensStates = new int[testDistances.length]; 1479 1480 final int maxPipelineDepth = mStaticInfo.getCharacteristics().get( 1481 CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH); 1482 1483 // Move lens to starting position, and wait for the lens.state to be stationary. 1484 CaptureRequest request; 1485 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]); 1486 request = requestBuilder.build(); 1487 mSession.setRepeatingRequest(request, resultListener, mHandler); 1488 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1489 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1490 1491 // Submit burst of requests with different focus distances 1492 List<CaptureRequest> burst = new ArrayList<>(); 1493 for (int i = 0; i < testDistances.length; i ++) { 1494 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1495 burst.add(requestBuilder.build()); 1496 } 1497 mSession.captureBurst(burst, resultListener, mHandler); 1498 1499 for (int i = 0; i < testDistances.length; i++) { 1500 CaptureResult result = resultListener.getCaptureResultForRequest( 1501 burst.get(i), maxPipelineDepth+1); 1502 1503 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1504 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1505 1506 verifyFocusRange(result, resultDistances[i]); 1507 1508 if (VERBOSE) { 1509 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i] 1510 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1511 } 1512 } 1513 1514 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1515 /*ascendingOrder*/true, /*noOvershoot*/true, 1516 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT, 1517 errorMargin); 1518 1519 } 1520 1521 /** 1522 * Verify focus distance control. 1523 * 1524 * Assumption: 1525 * - First repeatStart+1 elements of requestedDistances share the same value 1526 * - Last repeatEnd+1 elements of requestedDistances share the same value 1527 * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder. 
1528 * - Focuser is at requestedDistances[0] at the beginning of the test. 1529 * 1530 * @param requestedDistances The requested focus distances 1531 * @param resultDistances The result focus distances 1532 * @param lensStates The result lens states 1533 * @param ascendingOrder The order of the expected focus distance request/output 1534 * @param noOvershoot Assert that focus control doesn't overshoot the requested value 1535 * @param repeatStart The number of times the starting focus distance is repeated 1536 * @param repeatEnd The number of times the ending focus distance is repeated 1537 * @param errorMargin The error margin between request and result 1538 */ verifyFocusDistance(float[] requestedDistances, float[] resultDistances, int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, int repeatEnd, float errorMargin)1539 private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances, 1540 int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, 1541 int repeatEnd, float errorMargin) { 1542 1543 float minValue = 0; 1544 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 1545 float hyperfocalDistance = 0; 1546 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1547 hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1548 } 1549 1550 // Verify lens and focus distance do not change for first repeatStart 1551 // results. 1552 for (int i = 0; i < repeatStart; i ++) { 1553 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1554 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1555 float marginMax = 1556 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1557 1558 mCollector.expectEquals("Lens moves even though focus_distance didn't change", 1559 lensStates[i], CaptureResult.LENS_STATE_STATIONARY); 1560 if (noOvershoot) { 1561 mCollector.expectInRange("Focus distance in result should be close enough to " + 1562 "requested value", resultDistances[i], marginMin, marginMax); 1563 } 1564 mCollector.expectInRange("Result focus distance is out of range", 1565 resultDistances[i], minValue, maxValue); 1566 } 1567 1568 for (int i = repeatStart; i < resultDistances.length-1; i ++) { 1569 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1570 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1571 float marginMax = 1572 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1573 if (noOvershoot) { 1574 // Result focus distance shouldn't overshoot the request 1575 boolean condition; 1576 if (ascendingOrder) { 1577 condition = resultDistances[i] <= marginMax; 1578 } else { 1579 condition = resultDistances[i] >= marginMin; 1580 } 1581 mCollector.expectTrue(String.format( 1582 "Lens shouldn't move past request focus distance. result " + 1583 resultDistances[i] + " vs target of " + 1584 (ascendingOrder ? marginMax : marginMin)), condition); 1585 } 1586 1587 // Verify monotonically increased focus distance setting 1588 boolean condition; 1589 float compareDistance = resultDistances[i+1] - resultDistances[i]; 1590 if (i < resultDistances.length-1-repeatEnd) { 1591 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0); 1592 } else { 1593 condition = (ascendingOrder ? 
                    compareDistance >= 0 : compareDistance <= 0);
            }
            mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
                    + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
                    + lensStates[i+1] + "] monotonicity is broken"), condition);
        }

        mCollector.expectTrue(String.format("All values of this array are equal: " +
                resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
                resultDistances[0] != resultDistances[resultDistances.length-1]);

        // Verify lens moved to destination location.
        mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
                " for minFocusDistance should be close enough to requested value " +
                requestedDistances[requestedDistances.length-1],
                resultDistances[resultDistances.length-1],
                requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
                requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
    }

    /**
     * Verify edge mode control results for fpsRanges
     */
    private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
        Size maxPrevSize = mOrderedPreviewSizes.get(0);
        int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        for (int mode : edgeModes) {
            requestBuilder.set(CaptureRequest.EDGE_MODE, mode);

            // Test that OFF and FAST mode should not slow down the frame rate.
            if (mode == CaptureRequest.EDGE_MODE_OFF ||
                    mode == CaptureRequest.EDGE_MODE_FAST) {
                verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
            }

            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, maxPrevSize, resultListener);
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
                    NUM_FRAMES_VERIFIED);
        }

        stopPreview();
    }

    /**
     * Test CCT color correction controls.
     *
     * <p>Test CCT color correction mode and control keys for color temperature
     * and color tint.</p>
     */
    private void cctColorCorrectionTestByCamera() throws Exception {
        CaptureRequest request;
        CaptureResult result;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
        updatePreviewSurface(maxPreviewSz);
        CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
        CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();

        startPreview(previewRequestBuilder, maxPreviewSz, listener);

        // Default preview result should give valid color correction metadata.
        result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        validateColorCorrectionResult(result,
                previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
        int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_CCT;

        // Check if the color temperature range is advertised and
        // supports the minimum required range.
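        // Note: the required [2856, 6500] span presumably corresponds to the standard
        // incandescent (CIE illuminant A, ~2856K) through daylight (D65, ~6500K) white points.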
1668 Range<Integer> colorTemperatureRange = 1669 mStaticInfo.getCharacteristics().get(CameraCharacteristics. 1670 COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE); 1671 assertNotNull("CCT mode is supported but color temperature range is null", 1672 colorTemperatureRange); 1673 assertTrue("Color temperature range should advertise at least [2856, 6500]", 1674 colorTemperatureRange.getLower() <= 2856 1675 && colorTemperatureRange.getUpper() >= 6500); 1676 assertTrue("Color temperature range should advertise between [1000, 40000]", 1677 colorTemperatureRange.getLower() >= 1000 1678 && colorTemperatureRange.getUpper() <= 40000); 1679 1680 List<Integer> availableControlModes = Arrays.asList( 1681 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked())); 1682 List<Integer> availableAwbModes = Arrays.asList( 1683 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked())); 1684 boolean isManualCCSupported = 1685 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) || 1686 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF); 1687 if (isManualCCSupported) { 1688 // Turn off AWB through either CONTROL_AWB_MODE_OFF or CONTROL_MODE_OFF 1689 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1690 // Only manual AWB mode is supported 1691 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1692 CaptureRequest.CONTROL_MODE_AUTO); 1693 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, 1694 CaptureRequest.CONTROL_AWB_MODE_OFF); 1695 } else { 1696 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode 1697 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1698 CaptureRequest.CONTROL_MODE_OFF); 1699 } 1700 1701 int[] TEST_COLOR_TEMPERATURE_VALUES = {2500, 4500, 6500}; 1702 int[] TEST_COLOR_TINT_VALUES = {-25, 0, 25}; 1703 1704 for (int i = 0; i < TEST_COLOR_TEMPERATURE_VALUES.length; i++) { 1705 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, 1706 colorCorrectionMode); 1707 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_COLOR_TEMPERATURE, 1708 TEST_COLOR_TEMPERATURE_VALUES[i]); 1709 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_COLOR_TINT, 1710 TEST_COLOR_TINT_VALUES[i]); 1711 request = manualRequestBuilder.build(); 1712 mSession.capture(request, listener, mHandler); 1713 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1714 validateColorCorrectionResult(result, colorCorrectionMode); 1715 int colorTemperatureResult = 1716 result.get(CaptureResult.COLOR_CORRECTION_COLOR_TEMPERATURE); 1717 int colorTintResult = result.get(CaptureResult.COLOR_CORRECTION_COLOR_TINT); 1718 mCollector.expectEquals("Color temperature result/request mismatch", 1719 TEST_COLOR_TEMPERATURE_VALUES[i], colorTemperatureResult); 1720 // The actual color tint applied may be clamped so the result 1721 // may differ from the request, so we just check if it is null 1722 mCollector.expectNotNull("Color tint result null", colorTintResult); 1723 1724 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1725 mCollector.expectEquals("Control mode result/request mismatch", 1726 CaptureResult.CONTROL_MODE_AUTO, 1727 result.get(CaptureResult.CONTROL_MODE)); 1728 mCollector.expectEquals("AWB mode result/request mismatch", 1729 CaptureResult.CONTROL_AWB_MODE_OFF, 1730 result.get(CaptureResult.CONTROL_AWB_MODE)); 1731 } else { 1732 mCollector.expectEquals("Control mode result/request mismatch", 1733 CaptureResult.CONTROL_MODE_OFF, 
result.get(CaptureResult.CONTROL_MODE)); 1734 } 1735 } 1736 } 1737 } 1738 1739 /** 1740 * Test color correction controls. 1741 * 1742 * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test 1743 * the unit gain and identity transform.</p> 1744 */ colorCorrectionTestByCamera()1745 private void colorCorrectionTestByCamera() throws Exception { 1746 CaptureRequest request; 1747 CaptureResult result; 1748 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 1749 updatePreviewSurface(maxPreviewSz); 1750 CaptureRequest.Builder manualRequestBuilder = createRequestForPreview(); 1751 CaptureRequest.Builder previewRequestBuilder = createRequestForPreview(); 1752 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 1753 1754 startPreview(previewRequestBuilder, maxPreviewSz, listener); 1755 1756 // Default preview result should give valid color correction metadata. 1757 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1758 validateColorCorrectionResult(result, 1759 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE)); 1760 int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; 1761 // TRANSFORM_MATRIX mode 1762 // Only test unit gain and identity transform 1763 List<Integer> availableControlModes = Arrays.asList( 1764 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked())); 1765 List<Integer> availableAwbModes = Arrays.asList( 1766 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked())); 1767 boolean isManualCCSupported = 1768 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) || 1769 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF); 1770 if (isManualCCSupported) { 1771 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1772 // Only manual AWB mode is supported 1773 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1774 CaptureRequest.CONTROL_MODE_AUTO); 1775 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, 1776 CaptureRequest.CONTROL_AWB_MODE_OFF); 1777 } else { 1778 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode. 
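                // CONTROL_MODE_OFF disables the AE/AWB/AF (3A) routines entirely, so the AWB mode
                // value left in the request is ignored in this branch.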
                manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
                        CaptureRequest.CONTROL_MODE_OFF);
            }

            RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);

            ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
                new Rational[] {
                    ONE_R, ZERO_R, ZERO_R,
                    ZERO_R, ONE_R, ZERO_R,
                    ZERO_R, ZERO_R, ONE_R
                });

            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
            request = manualRequestBuilder.build();
            mSession.capture(request, listener, mHandler);
            result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
            RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
            ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
            validateColorCorrectionResult(result, colorCorrectionMode);
            mCollector.expectEquals("control mode result/request mismatch",
                    CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
            mCollector.expectEquals("Color correction gain result/request mismatch",
                    UNIT_GAIN, gains);
            mCollector.expectEquals("Color correction transform result/request mismatch",
                    IDENTITY_TRANSFORM, transform);

        }

        // FAST mode
        colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
        manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
        request = manualRequestBuilder.build();
        mSession.capture(request, listener, mHandler);
        result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
        validateColorCorrectionResult(result, colorCorrectionMode);
        mCollector.expectEquals("control mode result/request mismatch",
                CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));

        // HIGH_QUALITY mode
        colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
        manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
        request = manualRequestBuilder.build();
        mSession.capture(request, listener, mHandler);
        result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
        validateColorCorrectionResult(result, colorCorrectionMode);
        mCollector.expectEquals("control mode result/request mismatch",
                CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
    }

    private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
        final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
        final int TRANSFORM_SIZE = 9;
        Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
        Arrays.fill(zeroTransform, ZERO_R);
        final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);

        RggbChannelVector resultGain;
        if ((resultGain = mCollector.expectKeyValueNotNull(result,
                CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
            mCollector.expectKeyValueNotEquals(result,
                    CaptureResult.COLOR_CORRECTION_GAINS,
ZERO_GAINS); 1845 } 1846 1847 ColorSpaceTransform resultTransform; 1848 if ((resultTransform = mCollector.expectKeyValueNotNull(result, 1849 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) { 1850 mCollector.expectKeyValueNotEquals(result, 1851 CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM); 1852 } 1853 1854 mCollector.expectEquals("color correction mode result/request mismatch", 1855 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 1856 } 1857 1858 /** 1859 * Test that flash can be turned off successfully with a given initial and final AE_CONTROL 1860 * states. 1861 * 1862 * This function expects that initialAeControl and flashOffAeControl will not be either 1863 * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF 1864 * 1865 * @param listener The Capture listener that is used to wait for capture result 1866 * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with. 1867 * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for 1868 * TEMPLATE_PREVIEW repeating requests. 1869 */ flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, int initialAeControl, int flashOffAeControl)1870 private void flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, 1871 int initialAeControl, int flashOffAeControl) throws Exception { 1872 CaptureResult result; 1873 final int NUM_FLASH_REQUESTS_TESTED = 10; 1874 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1875 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1876 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl); 1877 1878 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1879 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1880 1881 // Turn on torch using FLASH_MODE_TORCH 1882 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 1883 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1884 CaptureRequest torchOnRequest = requestBuilder.build(); 1885 mSession.setRepeatingRequest(torchOnRequest, listener, mHandler); 1886 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH); 1887 result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT); 1888 // Test that the flash actually turned on continuously. 1889 mCollector.expectEquals("Flash state result must be FIRED", CaptureResult.FLASH_STATE_FIRED, 1890 result.get(CaptureResult.FLASH_STATE)); 1891 mSession.stopRepeating(); 1892 // Turn off the torch 1893 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl); 1894 // TODO: jchowdhary@, b/130323585, this line can be removed. 1895 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1896 int numAllowedTransitionStates = NUM_PARTIAL_FRAMES_NPFC; 1897 if (mStaticInfo.isPerFrameControlSupported()) { 1898 numAllowedTransitionStates = NUM_PARTIAL_FRAMES_PFC; 1899 1900 } 1901 // We submit 2 * numAllowedTransitionStates + 1 requests since we have two torch mode 1902 // transitions. The additional request is to check for at least 1 expected (FIRED / READY) 1903 // state. 1904 int numTorchTestSamples = 2 * numAllowedTransitionStates + 1; 1905 CaptureRequest flashOffRequest = requestBuilder.build(); 1906 int flashModeOffRequests = captureRequestsSynchronizedBurst(flashOffRequest, 1907 numTorchTestSamples, listener, mHandler); 1908 // Turn it on again. 
        requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
        // We need to have CONTROL_AE_MODE be either CONTROL_AE_MODE_ON or CONTROL_AE_MODE_OFF to
        // turn the torch on again.
        requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        CaptureRequest flashModeTorchRequest = requestBuilder.build();
        int flashModeTorchRequests = captureRequestsSynchronizedBurst(flashModeTorchRequest,
                numTorchTestSamples, listener, mHandler);

        CaptureResult[] torchStateResults =
                new CaptureResult[flashModeTorchRequests + flashModeOffRequests];
        Arrays.fill(torchStateResults, null);
        int i = 0;
        for (; i < flashModeOffRequests; i++) {
            torchStateResults[i] =
                    listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
            mCollector.expectNotEquals("Result for flashModeOff request null",
                    torchStateResults[i], null);
        }
        for (int j = i; j < torchStateResults.length; j++) {
            torchStateResults[j] =
                    listener.getCaptureResultForRequest(flashModeTorchRequest,
                            NUM_RESULTS_WAIT_TIMEOUT);
            mCollector.expectNotEquals("Result for flashModeTorch request null",
                    torchStateResults[j], null);
        }
        if (isLegacy) {
            // For LEGACY devices, flash state is null for all situations except:
            // android.control.aeMode == ON_ALWAYS_FLASH, where flash.state will be FIRED
            // android.flash.mode == TORCH, where flash.state will be FIRED
            testLegacyTorchStates(torchStateResults, 0, flashModeOffRequests - 1, flashOffRequest);
            testLegacyTorchStates(torchStateResults, flashModeOffRequests,
                    torchStateResults.length - 1,
                    flashModeTorchRequest);
        } else {
            checkTorchStates(torchStateResults, numAllowedTransitionStates, flashModeOffRequests,
                    flashModeTorchRequests);
        }
    }

    private void testLegacyTorchStates(CaptureResult[] torchStateResults, int beg, int end,
            CaptureRequest request) {
        for (int i = beg; i <= end; i++) {
            Integer requestControlAeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
            Integer requestFlashMode = request.get(CaptureRequest.FLASH_MODE);
            Integer resultFlashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
            if (requestControlAeMode == CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    requestFlashMode == CaptureRequest.FLASH_MODE_TORCH) {
                mCollector.expectEquals("For LEGACY devices, flash state must be FIRED when " +
                        "CONTROL_AE_MODE == CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE == " +
                        "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
                        requestFlashMode, CaptureResult.FLASH_STATE_FIRED, resultFlashState);
                continue;
            }
            mCollector.expectTrue("For LEGACY devices, flash state must be null when " +
                    "CONTROL_AE_MODE != CONTROL_AE_MODE_ON_ALWAYS_FLASH and FLASH_MODE != " +
                    "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
                    requestFlashMode, resultFlashState == null);
        }
    }

    // We check that torch states appear in the order expected. We don't necessarily know how many
    // times each state might appear; however, we make sure that the states do not appear out of
    // order.
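    // For example, while turning the torch off, PARTIAL, PARTIAL, READY, READY is accepted, but a
    // PARTIAL that follows a READY would be reported as an out-of-order state.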
    private void checkTorchTransitionStates(CaptureResult[] torchStateResults, int beg, int end,
            List<Integer> stateOrder, boolean isTurningOff) {
        Integer flashState;
        Integer curIndex = 0;
        for (int i = beg; i <= end; i++) {
            flashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
            int index = stateOrder.indexOf(flashState);
            mCollector.expectNotEquals("Invalid state " + flashState + " not in expected list " +
                    stateOrder, index, -1);
            mCollector.expectGreaterOrEqual("state " + flashState + " index " + index +
                    " is expected to be >= " + curIndex,
                    curIndex, index);
            curIndex = index;
        }
    }

    private void checkTorchStates(CaptureResult[] torchResults, int numAllowedTransitionStates,
            int numTorchOffSamples, int numTorchOnSamples) {
        // We test for flash states from request:
        // Request:      O(0) O(1) O(2) O(n)....O(nOFF)  T(0) T(1) T(2) ....T(n) .... T(nON)
        // Valid Result: P/R  P/R  P/R  R R R...P/R       P/R  P/F  P/F  P/F  F F
        // For the FLASH_STATE_OFF requests, once FLASH_STATE READY has been seen, for the
        // transition states while switching the torch off, it must not transition to
        // FLASH_STATE_PARTIAL again till the next transition period which turns the torch on.
        // P - FLASH_STATE_PARTIAL
        // R - FLASH_STATE_READY
        // F - FLASH_STATE_FIRED
        // O(k) - kth FLASH_MODE_OFF request
        // T(k) - kth FLASH_MODE_TORCH request
        // nOFF - number of torch off samples
        // nON - number of torch on samples
        Integer flashState;
        // Check on -> off transition states
        List<Integer> onToOffStateOrderList = new ArrayList<Integer>();
        onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
        onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
        checkTorchTransitionStates(torchResults, 0, numAllowedTransitionStates,
                onToOffStateOrderList, true);
        // Each subsequent frame (before the off -> on transition) must have its flash state as
        // FLASH_STATE_READY.
        for (int i = numAllowedTransitionStates + 1;
                i < numTorchOffSamples - numAllowedTransitionStates; i++) {
            flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
            mCollector.expectEquals("flash state result must be READY",
                    CaptureResult.FLASH_STATE_READY, flashState);
        }
        // check off -> on transition states, before the FLASH_MODE_TORCH request was sent
        List<Integer> offToOnPreStateOrderList = new ArrayList<Integer>();
        offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
        offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
        checkTorchTransitionStates(torchResults,
                numTorchOffSamples - numAllowedTransitionStates, numTorchOffSamples - 1,
                offToOnPreStateOrderList, false);
        // check off -> on transition states
        List<Integer> offToOnPostStateOrderList = new ArrayList<Integer>();
        offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
        offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_FIRED);
        checkTorchTransitionStates(torchResults,
                numTorchOffSamples, numTorchOffSamples + numAllowedTransitionStates,
                offToOnPostStateOrderList, false);
        // check on states after off -> on transition
        // Each subsequent frame
must have its flash state as FLASH_STATE_FIRED 2032 for (int i = numTorchOffSamples + numAllowedTransitionStates + 1; 2033 i < torchResults.length - 1; i++) { 2034 flashState = torchResults[i].get(CaptureResult.FLASH_STATE); 2035 mCollector.expectEquals("flash state result must be FIRED for frame " + i, 2036 CaptureRequest.FLASH_STATE_FIRED, flashState); 2037 } 2038 } 2039 2040 /** 2041 * Test flash mode control by AE mode. 2042 * <p> 2043 * Only allow AE mode ON or OFF, because other AE mode could run into conflict with 2044 * flash manual control. This function expects the camera to already have an active 2045 * repeating request and be sending results to the listener. 2046 * </p> 2047 * 2048 * @param listener The Capture listener that is used to wait for capture result 2049 * @param aeMode The AE mode for flash to test with 2050 */ flashTestByAeMode(SimpleCaptureCallback listener, int aeMode)2051 private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception { 2052 CaptureResult result; 2053 final int NUM_FLASH_REQUESTS_TESTED = 10; 2054 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 2055 2056 if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) { 2057 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode); 2058 } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 2059 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY); 2060 } else { 2061 throw new IllegalArgumentException("This test only works when AE mode is ON or OFF"); 2062 } 2063 2064 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2065 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2066 2067 // For camera that doesn't have flash unit, flash state should always be UNAVAILABLE. 2068 if (mStaticInfo.getFlashInfoChecked() == false) { 2069 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 2070 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS); 2071 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE" 2072 + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE, 2073 result.get(CaptureResult.FLASH_STATE)); 2074 } 2075 2076 return; 2077 } 2078 2079 // Test flash SINGLE mode control. Wait for flash state to be READY first. 2080 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 2081 waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY, 2082 NUM_RESULTS_WAIT_TIMEOUT); 2083 } // else the settings were already waited on earlier 2084 2085 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 2086 CaptureRequest flashSinglerequest = requestBuilder.build(); 2087 2088 int flashModeSingleRequests = captureRequestsSynchronized( 2089 flashSinglerequest, listener, mHandler); 2090 waitForNumResults(listener, flashModeSingleRequests - 1); 2091 result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT); 2092 // Result mode must be SINGLE, state must be FIRED. 2093 mCollector.expectEquals("Flash mode result must be SINGLE", 2094 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE)); 2095 mCollector.expectEquals("Flash state result must be FIRED", 2096 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 2097 2098 // Test flash TORCH mode control. 
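        // The torch ramp below is tracked with a small state machine: RAMPING_UP until the first
        // FIRED result, FIRED while the torch stays on, and RAMPING_DOWN once a PARTIAL result is
        // seen after a FIRED one.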
        requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
        CaptureRequest torchRequest = requestBuilder.build();

        int flashModeTorchRequests = captureRequestsSynchronized(torchRequest,
                NUM_FLASH_REQUESTS_TESTED, listener, mHandler);
        waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED);

        // Verify the results
        TorchSeqState state = TorchSeqState.RAMPING_UP;
        for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
            result = listener.getCaptureResultForRequest(torchRequest,
                    NUM_RESULTS_WAIT_TIMEOUT);
            int flashMode = result.get(CaptureResult.FLASH_MODE);
            int flashState = result.get(CaptureResult.FLASH_STATE);
            // Result mode must be TORCH
            mCollector.expectEquals("Flash mode result " + i + " must be TORCH",
                    CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE));
            if (state == TorchSeqState.RAMPING_UP &&
                    flashState == CaptureResult.FLASH_STATE_FIRED) {
                state = TorchSeqState.FIRED;
            } else if (state == TorchSeqState.FIRED &&
                    flashState == CaptureResult.FLASH_STATE_PARTIAL) {
                state = TorchSeqState.RAMPING_DOWN;
            }

            if (i == 0 && mStaticInfo.isPerFrameControlSupported()) {
                mCollector.expectTrue(
                        "Per frame control device must enter FIRED state on first torch request",
                        state == TorchSeqState.FIRED);
            }

            if (state == TorchSeqState.FIRED) {
                mCollector.expectEquals("Flash state result " + i + " must be FIRED",
                        CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
            } else {
                mCollector.expectEquals("Flash state result " + i + " must be PARTIAL",
                        CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE));
            }
        }
        mCollector.expectTrue("Torch state FIRED never seen",
                state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN);

        // Test flash OFF mode control
        requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
        CaptureRequest flashOffrequest = requestBuilder.build();

        int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler);
        waitForNumResults(listener, flashModeOffRequests - 1);
        result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT);
        mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF,
                result.get(CaptureResult.FLASH_MODE));
    }

    private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified,
            int mode, boolean isAeManual, long requestExpTime) throws Exception {
        // Skip the first couple of frames as antibanding may not be fully up yet.
        final int NUM_FRAMES_SKIPPED = 5;
        for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) {
            listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        }

        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
            assertNotNull("Exposure time shouldn't be null", resultExpTime);
            Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
            // Scene flicker result should always be available.
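            // STATISTICS_SCENE_FLICKER reports the illumination flicker detected in the scene
            // (NONE, 50HZ or 60HZ); the AUTO antibanding branch below uses it to pick the
            // expected exposure time quantization.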
2166 assertNotNull("Scene flicker must not be null", flicker); 2167 assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE && 2168 flicker <= STATISTICS_SCENE_FLICKER_60HZ); 2169 2170 Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE); 2171 assertNotNull("antiBanding mode shouldn't be null", antiBandMode); 2172 assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode, 2173 antiBandMode == mode); 2174 if (isAeManual) { 2175 // First, round down not up, second, need close enough. 2176 validateExposureTime(requestExpTime, resultExpTime); 2177 return; 2178 } 2179 2180 long expectedExpTime = resultExpTime; // Default, no exposure adjustment. 2181 if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) { 2182 // result exposure time must be adjusted by 50Hz illuminant source. 2183 expectedExpTime = 2184 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 2185 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) { 2186 // result exposure time must be adjusted by 60Hz illuminant source. 2187 expectedExpTime = 2188 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 2189 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){ 2190 /** 2191 * Use STATISTICS_SCENE_FLICKER to tell the illuminant source 2192 * and do the exposure adjustment. 2193 */ 2194 expectedExpTime = resultExpTime; 2195 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) { 2196 expectedExpTime = 2197 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 2198 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) { 2199 expectedExpTime = 2200 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 2201 } 2202 } 2203 2204 if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) { 2205 mCollector.addMessage(String.format("Result exposure time %dns diverges too much" 2206 + " from expected exposure time %dns for mode %d when AE is auto", 2207 resultExpTime, expectedExpTime, mode)); 2208 } 2209 } 2210 } 2211 antiBandingTestByMode(Size size, int mode)2212 private void antiBandingTestByMode(Size size, int mode) 2213 throws Exception { 2214 if(VERBOSE) { 2215 Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId()); 2216 } 2217 CaptureRequest.Builder requestBuilder = 2218 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2219 2220 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode); 2221 2222 // Test auto AE mode anti-banding behavior 2223 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 2224 startPreview(requestBuilder, size, resultListener); 2225 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2226 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false, 2227 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK); 2228 2229 // Test manual AE mode anti-banding behavior 2230 // 65ms, must be supported by full capability devices. 
2231 final long TEST_MANUAL_EXP_TIME_NS = 65000000L; 2232 long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS); 2233 changeExposure(requestBuilder, manualExpTime); 2234 resultListener = new SimpleCaptureCallback(); 2235 startPreview(requestBuilder, size, resultListener); 2236 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2237 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true, 2238 manualExpTime); 2239 2240 stopPreview(); 2241 } 2242 2243 /** 2244 * Test the all available AE modes and AE lock. 2245 * <p> 2246 * For manual AE mode, test iterates through different sensitivities and 2247 * exposure times, validate the result exposure time correctness. For 2248 * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested. 2249 * For the rest of the AUTO mode, AE lock is tested. 2250 * </p> 2251 * 2252 * @param mode corresponding to AE_MODE_* 2253 * @param priorityMode corresponding to AE_PRIORITY_MODE_* 2254 */ aeModeAndLockTestByMode(int mode, int priorityMode)2255 private void aeModeAndLockTestByMode(int mode, int priorityMode) 2256 throws Exception { 2257 switch (mode) { 2258 case CONTROL_AE_MODE_OFF: 2259 if (mStaticInfo.isCapabilitySupported( 2260 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 2261 // Test manual exposure control. 2262 aeManualControlTest(); 2263 } else { 2264 Log.w(TAG, 2265 "aeModeAndLockTestByMode - can't test AE mode OFF without " + 2266 "manual sensor control"); 2267 } 2268 break; 2269 case CONTROL_AE_MODE_ON: 2270 case CONTROL_AE_MODE_ON_AUTO_FLASH: 2271 case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: 2272 case CONTROL_AE_MODE_ON_ALWAYS_FLASH: 2273 case CONTROL_AE_MODE_ON_EXTERNAL_FLASH: 2274 // Test AE lock for above AUTO modes. 2275 aeAutoModeTestLock(mode, priorityMode); 2276 break; 2277 default: 2278 throw new UnsupportedOperationException("Unhandled AE mode " + mode); 2279 } 2280 } 2281 2282 /** 2283 * Test AE auto modes. 2284 * <p> 2285 * Use single request rather than repeating request to test AE lock per frame control. 2286 * </p> 2287 */ aeAutoModeTestLock(int mode, int priorityMode)2288 private void aeAutoModeTestLock(int mode, int priorityMode) throws Exception { 2289 CaptureRequest.Builder requestBuilder = 2290 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2291 if (mStaticInfo.isAeLockSupported()) { 2292 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 2293 } 2294 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode); 2295 2296 if (Flags.aePriority()) { 2297 requestBuilder.set(CaptureRequest.CONTROL_AE_PRIORITY_MODE, priorityMode); 2298 } 2299 2300 configurePreviewOutput(requestBuilder); 2301 2302 final int MAX_NUM_CAPTURES_DURING_LOCK = 5; 2303 for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) { 2304 autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i, priorityMode); 2305 } 2306 } 2307 2308 /** 2309 * Issue multiple auto AE captures, then lock AE, validate the AE lock vs. 2310 * the first capture result after the AE lock. The right AE lock behavior is: 2311 * When it is locked, it locks to the current exposure value, and all subsequent 2312 * request with lock ON will have the same exposure value locked. 
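     * When an AE priority mode is active, only the prioritized quantity (exposure time for
     * SENSOR_EXPOSURE_TIME_PRIORITY, sensitivity for SENSOR_SENSITIVITY_PRIORITY) is required to
     * stay constant across the locked results; see the switch statement below.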
     */
    private void autoAeMultipleCapturesThenTestLock(
            CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock,
            int priorityMode)
            throws Exception {
        if (numCapturesDuringLock < 1) {
            throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
        }
        if (VERBOSE) {
            Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
                    + aeMode + " with " + numCapturesDuringLock + " captures before lock");
        }

        final int NUM_CAPTURES_BEFORE_LOCK = 2;
        SimpleCaptureCallback listener = new SimpleCaptureCallback();

        CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
        boolean canSetAeLock = mStaticInfo.isAeLockSupported();

        // Reset the AE lock to OFF, since we are reusing this builder many times
        if (canSetAeLock) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        }

        // Just send several captures with auto AE, lock off.
        CaptureRequest request = requestBuilder.build();
        for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
            mSession.capture(request, listener, mHandler);
        }
        waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);

        if (!canSetAeLock) {
            // Without AE lock, the remaining test items won't work
            return;
        }

        // Then fire several captures to lock the AE.
        requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);

        int requestCount = captureRequestsSynchronized(
                requestBuilder.build(), numCapturesDuringLock, listener, mHandler);

        int[] sensitivities = new int[numCapturesDuringLock];
        long[] expTimes = new long[numCapturesDuringLock];
        Arrays.fill(sensitivities, -1);
        Arrays.fill(expTimes, -1L);

        // Get the AE lock on result and validate the exposure values.
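        // captureRequestsSynchronized() returns the total number of requests it actually
        // submitted, which can exceed numCapturesDuringLock for synchronization purposes; the
        // leading results are drained and only the last numCapturesDuringLock are validated.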
2361 waitForNumResults(listener, requestCount - numCapturesDuringLock); 2362 for (int i = 0; i < resultsDuringLock.length; i++) { 2363 resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2364 } 2365 2366 for (int i = 0; i < numCapturesDuringLock; i++) { 2367 mCollector.expectKeyValueEquals( 2368 resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true); 2369 } 2370 2371 // Can't read manual sensor/exposure settings without manual sensor 2372 if (mStaticInfo.isCapabilitySupported( 2373 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) { 2374 int sensitivityLocked = 2375 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY); 2376 long expTimeLocked = 2377 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME); 2378 for (int i = 1; i < resultsDuringLock.length; i++) { 2379 if (Flags.aePriority()) { 2380 switch (priorityMode) { 2381 case CONTROL_AE_PRIORITY_MODE_OFF: 2382 mCollector.expectKeyValueEquals( 2383 resultsDuringLock[i], 2384 CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked); 2385 mCollector.expectKeyValueEquals( 2386 resultsDuringLock[i], 2387 CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked); 2388 break; 2389 case CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY: 2390 mCollector.expectKeyValueEquals( 2391 resultsDuringLock[i], 2392 CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked); 2393 break; 2394 case CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY: 2395 mCollector.expectKeyValueEquals( 2396 resultsDuringLock[i], 2397 CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked); 2398 break; 2399 default: 2400 throw new UnsupportedOperationException("Unhandled AE priority mode " 2401 + priorityMode); 2402 } 2403 } else { 2404 mCollector.expectKeyValueEquals( 2405 resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, 2406 expTimeLocked); 2407 mCollector.expectKeyValueEquals( 2408 resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, 2409 sensitivityLocked); 2410 } 2411 } 2412 } 2413 } 2414 2415 /** 2416 * Iterate through exposure times and sensitivities for manual AE control. 2417 * <p> 2418 * Use single request rather than repeating request to test manual exposure 2419 * value change per frame control. 2420 * </p> 2421 */ aeManualControlTest()2422 private void aeManualControlTest() 2423 throws Exception { 2424 CaptureRequest.Builder requestBuilder = 2425 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2426 configurePreviewOutput(requestBuilder); 2427 2428 // Warm up pipeline for more accurate timing 2429 SimpleCaptureCallback warmupListener = new SimpleCaptureCallback(); 2430 mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler); 2431 warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2432 2433 // Do manual captures 2434 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 2435 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2436 2437 long[] expTimesNs = getExposureTimeTestValuesSorted(); 2438 int[] sensitivities = getSensitivityTestValuesSorted(); 2439 2440 assertTrue(expTimesNs.length > 0); 2441 assertTrue(sensitivities.length > 0); 2442 2443 // For multiple exposure times, make smart combinations of exposure and sensitivity to 2444 // reduce test time and still have exhaustive coverage. 2445 List<Pair<Long, Integer>> exposureSensitivityTestValues = 2446 new ArrayList<Pair<Long, Integer>>(); 2447 2448 // Min exposure should be tested with all sensitivity values. 
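        // For example, with exposure times {E0, E1, E2} and sensitivities {S0, S1, S2}, the pairs
        // tested are (E0,S0) (E0,S1) (E0,S2) (E1,S0) (E1,S2) (E2,S0) (E2,S2).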
2449 for (int i = 0; i < sensitivities.length; i++) { 2450 exposureSensitivityTestValues.add( 2451 new Pair<Long, Integer>(expTimesNs[0], sensitivities[i])); 2452 } 2453 2454 // All other exposure values should be tested only with min and max sensitivity. 2455 for (int i = 1; i < expTimesNs.length; i++) { 2456 exposureSensitivityTestValues.add( 2457 new Pair<Long, Integer>(expTimesNs[i], sensitivities[0])); 2458 2459 if (sensitivities.length > 1) { 2460 exposureSensitivityTestValues.add( 2461 new Pair<Long, Integer>(expTimesNs[i], 2462 sensitivities[sensitivities.length - 1])); 2463 } 2464 } 2465 2466 // Submit single request at a time, then verify the result. 2467 for (int i = 0; i < exposureSensitivityTestValues.size(); i++) { 2468 long exposure = exposureSensitivityTestValues.get(i).first; 2469 int sensitivity = exposureSensitivityTestValues.get(i).second; 2470 2471 if (VERBOSE) { 2472 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " 2473 + sensitivity + ", exposure time " + exposure + "ns"); 2474 } 2475 2476 changeExposure(requestBuilder, exposure, sensitivity); 2477 mSession.capture(requestBuilder.build(), listener, mHandler); 2478 2479 // make sure timeout is long enough for long exposure time - add a 2x safety margin 2480 // to exposure time 2481 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * exposure / 1000000; 2482 CaptureResult result = listener.getCaptureResult(timeoutMs); 2483 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 2484 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 2485 validateExposureTime(exposure, resultExpTimeNs); 2486 validateSensitivity(sensitivity, resultSensitivity); 2487 validateFrameDurationForCapture(result); 2488 } 2489 mSession.stopRepeating(); 2490 2491 // TODO: Add another case to test where we can submit all requests, then wait for 2492 // results, which will hide the pipeline latency. this is not only faster, but also 2493 // test high speed per frame control and synchronization. 2494 } 2495 2496 2497 /** 2498 * Verify black level lock control. 2499 */ verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, int maxLockOffCnt)2500 private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, 2501 int maxLockOffCnt) throws Exception { 2502 int noLockCnt = 0; 2503 for (int i = 0; i < numFramesVerified; i++) { 2504 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2505 Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK); 2506 assertNotNull("Black level lock result shouldn't be null", blackLevelLock); 2507 2508 // Count the lock == false result, which could possibly occur at most once. 2509 if (blackLevelLock == false) { 2510 noLockCnt++; 2511 } 2512 2513 if(VERBOSE) { 2514 Log.v(TAG, "Black level lock result: " + blackLevelLock); 2515 } 2516 } 2517 assertTrue("Black level lock OFF occurs " + noLockCnt + " times, expect at most " 2518 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt); 2519 } 2520 2521 /** 2522 * Verify shading map for different shading modes. 
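     *
     * <p>For FAST and HIGH_QUALITY modes every gain factor in the returned map must be at least
     * 1.0f; for OFF mode the map must be a unity map (all gain factors equal to 1.0f).</p>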
     */
    private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
            int shadingMode) throws Exception {

        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            mCollector.expectEquals("Shading mode result doesn't match request",
                    shadingMode, result.get(CaptureResult.SHADING_MODE));
            LensShadingMap mapObj = result.get(
                    CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
            assertNotNull("Map object must not be null", mapObj);
            int numElementsInMap = mapObj.getGainFactorCount();
            float[] map = new float[numElementsInMap];
            mapObj.copyGainFactors(map, /*offset*/0);
            assertNotNull("Map must not be null", map);
            assertFalse(String.format(
                    "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
                    numElementsInMap >= MAX_SHADING_MAP_SIZE);
            assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
                    MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);

            if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
                    shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
                // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
                // elements >= 1.0f

                int badValueCnt = 0;
                // Detect the bad values of the map data.
                for (int j = 0; j < numElementsInMap; j++) {
                    if (Float.isNaN(map[j]) || map[j] < 1.0f) {
                        badValueCnt++;
                    }
                }
                assertEquals("Number of bad values in the map is " + badValueCnt + " out of "
                        + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
            } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
                float[] unityMap = new float[numElementsInMap];
                Arrays.fill(unityMap, 1.0f);
                // shading mode is OFF, expect to receive a unity map.
                assertTrue("Result map " + Arrays.toString(map) + " must be a unity map",
                        Arrays.equals(unityMap, map));
            }
        }
    }

    /**
     * Test face detection for a camera.
     */
    private void faceDetectionTestByCamera() throws Exception {
        int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();

        SimpleCaptureCallback listener;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
        for (int mode : faceDetectModes) {
            requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
            if (VERBOSE) {
                Log.v(TAG, "Start testing face detection mode " + mode);
            }

            // Create a new listener for each run to avoid the results from one run spilling
            // into another run.
            listener = new SimpleCaptureCallback();
            startPreview(requestBuilder, maxPreviewSz, listener);
            waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
            verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
        }

        stopPreview();
    }

    /**
     * Verify face detection results for different face detection modes.
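     * For SIMPLE mode only face rectangles and scores are expected; FULL mode additionally
     * requires valid face ids and eye/mouth landmark positions.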
     *
     * @param listener The listener to get capture result
     * @param numFramesVerified Number of results to be verified
     * @param faceDetectionMode Face detection mode to be verified against
     */
    private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
            int faceDetectionMode) {
        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            mCollector.expectEquals("Result face detection mode should match the request",
                    faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));

            Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
            List<Integer> faceIds = new ArrayList<Integer>(faces.length);
            List<Integer> faceScores = new ArrayList<Integer>(faces.length);
            if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
                mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
                        0, faces.length);
            } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
                for (Face face : faces) {
                    mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
                    faceScores.add(face.getScore());
                    mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
                            face.getId() == Face.ID_UNSUPPORTED);
                }
            } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
                if (VERBOSE) {
                    Log.v(TAG, "Number of faces detected: " + faces.length);
                }

                for (Face face : faces) {
                    Rect faceBound;
                    boolean faceRectAvailable = mCollector.expectTrue("Face rectangle "
                            + "shouldn't be null", face.getBounds() != null);
                    if (!faceRectAvailable) {
                        continue;
                    }
                    faceBound = face.getBounds();

                    faceScores.add(face.getScore());
                    faceIds.add(face.getId());

                    mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
                            face.getId() != Face.ID_UNSUPPORTED);
                    boolean leftEyeAvailable =
                            mCollector.expectTrue("Left eye position shouldn't be null",
                                    face.getLeftEyePosition() != null);
                    boolean rightEyeAvailable =
                            mCollector.expectTrue("Right eye position shouldn't be null",
                                    face.getRightEyePosition() != null);
                    boolean mouthAvailable =
                            mCollector.expectTrue("Mouth position shouldn't be null",
                                    face.getMouthPosition() != null);
                    // Eyes/mouth position should be inside of the face rect.
                    if (leftEyeAvailable) {
                        Point leftEye = face.getLeftEyePosition();
                        mCollector.expectTrue("Left eye " + leftEye + " should be"
                                + " inside of face rect " + faceBound,
                                faceBound.contains(leftEye.x, leftEye.y));
                    }
                    if (rightEyeAvailable) {
                        Point rightEye = face.getRightEyePosition();
                        mCollector.expectTrue("Right eye " + rightEye + " should be"
                                + " inside of face rect " + faceBound,
                                faceBound.contains(rightEye.x, rightEye.y));
                    }
                    if (mouthAvailable) {
                        Point mouth = face.getMouthPosition();
                        mCollector.expectTrue("Mouth " + mouth + " should be inside of"
                                + " face rect " + faceBound,
                                faceBound.contains(mouth.x, mouth.y));
                    }
                }
            }
            mCollector.expectValuesInRange("Face scores are invalid", faceScores,
                    Face.SCORE_MIN, Face.SCORE_MAX);
            mCollector.expectValuesUnique("Face ids are invalid", faceIds);
        }
    }

    /**
     * Test tone map mode and result by camera
     */
    private void toneMapTestByCamera() throws Exception {
        if (!mStaticInfo.isManualToneMapSupported()) {
            return;
        }

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
        // Test AUTO modes first. Note that FAST/HQ must either both be present or both be absent.
        for (int i = 0; i < toneMapModes.length; i++) {
            if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) {
                int tmpMode = toneMapModes[0];
                toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST;
                toneMapModes[i] = tmpMode;
            }
            if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) {
                int tmpMode = toneMapModes[1];
                toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY;
                toneMapModes[i] = tmpMode;
            }
        }
        for (int mode : toneMapModes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing tonemap mode " + mode);
            }

            requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
            switch (mode) {
                case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
                    TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
                            TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
                    requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);

                    toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
                            TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
                    requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
                case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
                    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
                case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
                    requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
                            CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
                            CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
                default:
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
            }
        }
    }
2744 2745 /** 2746 * Test tonemap mode with specified request settings 2747 * 2748 * @param numFramesVerified Number of results to be verified 2749 * @param requestBuilder The request builder of settings to be tested 2750 */ 2751 private void testToneMapMode(int numFramesVerified, 2752 CaptureRequest.Builder requestBuilder) throws Exception { 2753 final int MIN_TONEMAP_CURVE_POINTS = 2; 2754 final Float ZERO = 0.0f; 2755 final Float ONE = 1.0f; 2756 2757 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2758 int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE); 2759 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 2760 startPreview(requestBuilder, maxPreviewSz, listener); 2761 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2762 2763 int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked(); 2764 for (int i = 0; i < numFramesVerified; i++) { 2765 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2766 mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode, 2767 result.get(CaptureResult.TONEMAP_MODE)); 2768 TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE); 2769 int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED); 2770 float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE]; 2771 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN); 2772 float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE]; 2773 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE); 2774 float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE]; 2775 tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0); 2776 tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0); 2777 tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0); 2778 if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) { 2779 /* 2780 * TODO: need to figure out a good way to measure the difference 2781 * between request and result, as they may have different array 2782 * sizes. 2783 */ 2784 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) { 2785 mCollector.expectEquals("Capture result gamma value should match request", 2786 requestBuilder.get(CaptureRequest.TONEMAP_GAMMA), 2787 result.get(CaptureResult.TONEMAP_GAMMA)); 2788 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) { 2789 mCollector.expectEquals("Capture result preset curve should match request", 2790 requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE), 2791 result.get(CaptureResult.TONEMAP_PRESET_CURVE)); 2792 } 2793 2794 // Tonemap curve result availability and basic validity check for all modes.
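            // Each tonemap curve point is a (Pin, Pout) pair, so the flattened arrays
            // copied above hold pointCount * TonemapCurve.POINT_SIZE floats. That is why
            // the length checks below use maxCurvePoints * 2 as the upper bound and why
            // every value must lie within [0, 1].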
2795 mCollector.expectValuesInRange("Tonemap curve red values are out of range", 2796 CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE); 2797 mCollector.expectInRange("Tonemap curve red length is out of range", 2798 mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2799 mCollector.expectValuesInRange("Tonemap curve green values are out of range", 2800 CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE); 2801 mCollector.expectInRange("Tonemap curve green length is out of range", 2802 mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2803 mCollector.expectValuesInRange("Tonemap curve blue values are out of range", 2804 CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE); 2805 mCollector.expectInRange("Tonemap curve blue length is out of range", 2806 mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2807 2808 // Make sure capture result tonemap has identical channels. 2809 if (mStaticInfo.isMonochromeCamera()) { 2810 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2811 "have same dimension for all channels", mapRed.length, mapGreen.length); 2812 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2813 "have same dimension for all channels", mapRed.length, mapBlue.length); 2814 2815 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) { 2816 boolean isIdentical = true; 2817 for (int j = 0; j < mapRed.length; j++) { 2818 isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]); 2819 if (!isIdentical) 2820 break; 2821 } 2822 mCollector.expectTrue("Capture result tonemap of monochrome camera should " + 2823 "be identical between all channels", isIdentical); 2824 } 2825 } 2826 } 2827 stopPreview(); 2828 } 2829 2830 /** 2831 * Test awb mode control. 2832 * <p> 2833 * Test each supported AWB mode, verify the AWB mode in capture result 2834 * matches request. When AWB is locked, the color correction gains and 2835 * transform should remain unchanged. 2836 * </p> 2837 */ 2838 private void awbModeAndLockTestByCamera() throws Exception { 2839 int[] awbModes = mStaticInfo.getAwbAvailableModesChecked(); 2840 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2841 boolean canSetAwbLock = mStaticInfo.isAwbLockSupported(); 2842 CaptureRequest.Builder requestBuilder = 2843 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2844 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2845 2846 for (int mode : awbModes) { 2847 SimpleCaptureCallback listener; 2848 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode); 2849 listener = new SimpleCaptureCallback(); 2850 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2851 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2852 2853 // Verify AWB mode in capture result. 2854 verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener, 2855 NUM_FRAMES_VERIFIED); 2856 2857 if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) { 2858 // Verify color correction transform and gains stay unchanged after a lock. 
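                // Note: once CONTROL_AWB_LOCK is set to true below, the AWB algorithm is
                // expected to freeze its current estimate, so the COLOR_CORRECTION_GAINS
                // and COLOR_CORRECTION_TRANSFORM it reports should stop changing; that is
                // what verifyAwbCaptureResultUnchanged() verifies afterwards.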
2859 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true); 2860 listener = new SimpleCaptureCallback(); 2861 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2862 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2863 2864 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) { 2865 waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE, 2866 CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT); 2867 } 2868 2869 } 2870 // Don't verify auto mode result if AWB lock is not supported 2871 if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) { 2872 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED); 2873 } 2874 } 2875 } 2876 2877 private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener, 2878 int numFramesVerified) { 2879 // Skip check if cc gains/transform/mode are not available 2880 if (!mStaticInfo.areKeysAvailable( 2881 CaptureResult.COLOR_CORRECTION_GAINS, 2882 CaptureResult.COLOR_CORRECTION_TRANSFORM, 2883 CaptureResult.COLOR_CORRECTION_MODE)) { 2884 return; 2885 } 2886 2887 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2888 RggbChannelVector lockedGains = 2889 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2890 ColorSpaceTransform lockedTransform = 2891 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2892 2893 for (int i = 0; i < numFramesVerified; i++) { 2894 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2895 // Color correction mode check is skipped here, as it is checked in colorCorrectionTest. 2896 validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 2897 2898 RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2899 ColorSpaceTransform transform = 2900 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2901 mCollector.expectEquals("Color correction gains should remain unchanged after awb lock", 2902 lockedGains, gains); 2903 mCollector.expectEquals("Color correction transform should remain unchanged after" 2904 + " awb lock", lockedTransform, transform); 2905 } 2906 } 2907 2908 /** 2909 * Test AF mode control. 2910 * <p> 2911 * Test all supported AF modes, verify the AF mode in capture result matches 2912 * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode, 2913 * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED 2914 * state within certain amount of frames. 2915 * </p> 2916 */ 2917 private void afModeTestByCamera() throws Exception { 2918 int[] afModes = mStaticInfo.getAfAvailableModesChecked(); 2919 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2920 CaptureRequest.Builder requestBuilder = 2921 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2922 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2923 2924 for (int mode : afModes) { 2925 SimpleCaptureCallback listener; 2926 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode); 2927 listener = new SimpleCaptureCallback(); 2928 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2929 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2930 2931 // Verify AF mode in capture result. 2932 verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener, 2933 NUM_FRAMES_VERIFIED); 2934 2935 // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes. 
2936 // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily 2937 // result in a passive AF call if the camera has already been focused, and the scene has 2938 // not changed enough to trigger an AF pass. Skip this constraint for LEGACY. 2939 if (mStaticInfo.isHardwareLevelAtLeastLimited() && 2940 (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE || 2941 mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { 2942 List<Integer> afStateList = new ArrayList<Integer>(); 2943 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED); 2944 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED); 2945 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList, 2946 NUM_RESULTS_WAIT_TIMEOUT); 2947 } 2948 } 2949 } 2950 2951 /** 2952 * Test video and optical stabilizations if they are supported by a given camera. 2953 */ 2954 private void stabilizationTestByCamera() throws Exception { 2955 // video stabilization test. 2956 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys(); 2957 2958 Integer[] videoStabModes = (keys.contains(CameraCharacteristics. 2959 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ? 2960 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) : 2961 new Integer[0]; 2962 int[] opticalStabModes = (keys.contains( 2963 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ? 2964 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0]; 2965 2966 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2967 CaptureRequest.Builder requestBuilder = 2968 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2969 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2970 startPreview(requestBuilder, maxPreviewSize, listener); 2971 2972 for (Integer mode : videoStabModes) { 2973 listener = new SimpleCaptureCallback(); 2974 requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode); 2975 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2976 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2977 // Video stabilization could return any modes. 2978 verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE, 2979 videoStabModes, listener, NUM_FRAMES_VERIFIED); 2980 } 2981 2982 for (int mode : opticalStabModes) { 2983 listener = new SimpleCaptureCallback(); 2984 requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode); 2985 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2986 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2987 verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode, 2988 listener, NUM_FRAMES_VERIFIED); 2989 } 2990 2991 stopPreview(); 2992 } 2993 2994 private void digitalZoomTestByCamera(Size previewSize, boolean repeating) throws Exception { 2995 final PointF[] TEST_ZOOM_CENTERS; 2996 final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked(); 2997 final float ZOOM_ERROR_MARGIN = 0.01f; 2998 if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) { 2999 // It doesn't make much sense to test the zoom if the device effectively supports 3000 // no zoom. 3001 return; 3002 } 3003 3004 final int croppingType = mStaticInfo.getScalerCroppingTypeChecked(); 3005 if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) { 3006 // Set the four corners in a way that the minimally allowed zoom factor is 2x. 
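            // For a zoom factor z centered at the normalized point (cx, cy), the crop
            // window covers 1/z of the active array in each dimension, so the center
            // must stay at least 0.5 / z away from every edge. With z = 2 this yields
            // the 0.25 / 0.75 corner centers below; the maxZoom < 2.0f branch applies
            // the same 0.5 / maxZoom bound when only a smaller zoom is available.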
3007 float normalizedLeft = 0.25f; 3008 float normalizedTop = 0.25f; 3009 float normalizedRight = 0.75f; 3010 float normalizedBottom = 0.75f; 3011 // If the max supported zoom is too small, make sure we at least test the max 3012 // Zoom is tested for the four corners. 3013 if (maxZoom < 2.0f) { 3014 normalizedLeft = 0.5f / maxZoom; 3015 normalizedTop = 0.5f / maxZoom; 3016 normalizedRight = 1.0f - normalizedLeft; 3017 normalizedBottom = 1.0f - normalizedTop; 3018 } 3019 TEST_ZOOM_CENTERS = new PointF[] { 3020 new PointF(0.5f, 0.5f), // Center point 3021 new PointF(normalizedLeft, normalizedTop), // top left corner zoom 3022 new PointF(normalizedRight, normalizedTop), // top right corner zoom 3023 new PointF(normalizedLeft, normalizedBottom), // bottom left corner zoom 3024 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom 3025 }; 3026 3027 if (VERBOSE) { 3028 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM"); 3029 } 3030 } else { 3031 // CENTER_ONLY 3032 TEST_ZOOM_CENTERS = new PointF[] { 3033 new PointF(0.5f, 0.5f), // Center point 3034 }; 3035 3036 if (VERBOSE) { 3037 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY"); 3038 } 3039 } 3040 3041 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 3042 final Rect defaultCropRegion = new Rect(0, 0, 3043 activeArraySize.width(), activeArraySize.height()); 3044 Rect[] cropRegions = new Rect[ZOOM_STEPS]; 3045 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 3046 CaptureRequest.Builder requestBuilder = 3047 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3048 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3049 3050 updatePreviewSurface(previewSize); 3051 configurePreviewOutput(requestBuilder); 3052 3053 CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS]; 3054 3055 // Set algorithm regions 3056 final int METERING_RECT_RATIO = 10; 3057 final MeteringRectangle[][] defaultMeteringRects = new MeteringRectangle[][] { 3058 { 3059 new MeteringRectangle ( 3060 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 3061 /*meteringWeight*/1), /* full active region */ 3062 }, 3063 { 3064 new MeteringRectangle ( 3065 /*x*/0, /*y*/0, activeArraySize.width()/METERING_RECT_RATIO, 3066 activeArraySize.height()/METERING_RECT_RATIO, 3067 /*meteringWeight*/1), 3068 }, 3069 { 3070 new MeteringRectangle ( 3071 /*x*/(int)(activeArraySize.width() * (0.5f - 0.5f/METERING_RECT_RATIO)), 3072 /*y*/(int)(activeArraySize.height() * (0.5f - 0.5f/METERING_RECT_RATIO)), 3073 activeArraySize.width()/METERING_RECT_RATIO, 3074 activeArraySize.height()/METERING_RECT_RATIO, 3075 /*meteringWeight*/1), 3076 }, 3077 }; 3078 3079 final int CAPTURE_SUBMIT_REPEAT; 3080 final int NUM_RESULTS_TO_SKIP; 3081 { 3082 int maxLatency = mStaticInfo.getSyncMaxLatency(); 3083 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 3084 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3085 } else { 3086 CAPTURE_SUBMIT_REPEAT = maxLatency + 1; 3087 } 3088 if (repeating) { 3089 NUM_RESULTS_TO_SKIP = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3090 } else { 3091 NUM_RESULTS_TO_SKIP = CAPTURE_SUBMIT_REPEAT - 1; 3092 } 3093 } 3094 3095 if (VERBOSE) { 3096 Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT); 3097 } 3098 3099 for (MeteringRectangle[] meteringRect : defaultMeteringRects) { 3100 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3101 update3aRegion(requestBuilder, algo, meteringRect, mStaticInfo); 3102 } 3103 
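            // The loop below sweeps the zoom factor linearly from 1x toward maxZoom
            // (zoomFactor = 1 + (maxZoom - 1) * i / ZOOM_STEPS), converts each factor
            // into a SCALER_CROP_REGION via getCropRegionForZoom(), and then checks that
            // the crop region and 3A regions reported in the capture result track the
            // request within the defined error margins.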
3104 for (PointF center : TEST_ZOOM_CENTERS) { 3105 Rect previousCrop = null; 3106 3107 for (int i = 0; i < ZOOM_STEPS; i++) { 3108 /* 3109 * Submit capture request 3110 */ 3111 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS); 3112 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, 3113 maxZoom, defaultCropRegion); 3114 if (VERBOSE) { 3115 Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " + 3116 center + " The cropRegion is " + cropRegions[i] + 3117 " Preview size is " + previewSize + ", repeating is " + repeating); 3118 } 3119 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]); 3120 requests[i] = requestBuilder.build(); 3121 if (VERBOSE) { 3122 Log.v(TAG, "submit crop region " + cropRegions[i]); 3123 } 3124 if (repeating) { 3125 mSession.setRepeatingRequest(requests[i], listener, mHandler); 3126 // Drop first few frames 3127 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); 3128 // Interleave a regular capture 3129 mSession.capture(requests[0], listener, mHandler); 3130 } else { 3131 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) { 3132 mSession.capture(requests[i], listener, mHandler); 3133 } 3134 } 3135 3136 /* 3137 * Validate capture result 3138 */ 3139 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); // Drop first few frames 3140 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 3141 requests[i], NUM_RESULTS_WAIT_TIMEOUT); 3142 List<CaptureResult> partialResults = result.getPartialResults(); 3143 3144 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 3145 for (CaptureResult partialResult : partialResults) { 3146 Rect cropRegionInPartial = 3147 partialResult.get(CaptureResult.SCALER_CROP_REGION); 3148 if (cropRegionInPartial != null) { 3149 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 3150 + "match in final result", cropRegionInPartial, cropRegion); 3151 } 3152 } 3153 3154 if (CameraTestUtils.isStabilizationOff(requests[i])) { 3155 /* 3156 * Validate resulting crop regions 3157 */ 3158 if (previousCrop != null) { 3159 Rect currentCrop = cropRegion; 3160 mCollector.expectTrue(String.format( 3161 "Crop region should shrink or stay the same " 3162 + "(previous = %s, current = %s)", 3163 previousCrop, currentCrop), 3164 previousCrop.equals(currentCrop) 3165 || (previousCrop.width() > currentCrop.width() 3166 && previousCrop.height() > currentCrop.height())); 3167 } 3168 3169 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3170 mCollector.expectRectsAreSimilar( 3171 "Request and result crop region should be similar", 3172 cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA); 3173 } 3174 3175 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) { 3176 mCollector.expectRectCentered( 3177 "Result crop region should be centered inside the active array", 3178 new Size(activeArraySize.width(), activeArraySize.height()), 3179 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED); 3180 } 3181 3182 /* 3183 * Validate resulting metering regions 3184 */ 3185 3186 // Use the actual reported crop region to calculate the resulting 3187 // metering region 3188 expectRegions[i] = getExpectedOutputRegion( 3189 /*requestRegion*/meteringRect, 3190 /*cropRect*/ cropRegion); 3191 3192 // Verify Output 3A region is intersection of input 3A region and 3193 // crop region 3194 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3195 validate3aRegion(result, partialResults, algo, expectRegions[i], 3196 false/*scaleByZoomRatio*/, mStaticInfo); 3197 } 3198 } 3199 3200 
previousCrop = cropRegion; 3201 } 3202 3203 if (maxZoom > 1.0f) { 3204 mCollector.expectTrue( 3205 String.format("Most zoomed-in crop region should be smaller " + 3206 "than active array w/h" + 3207 "(last crop = %s, active array = %s)", 3208 previousCrop, activeArraySize), 3209 (previousCrop.width() < activeArraySize.width() && 3210 previousCrop.height() < activeArraySize.height())); 3211 } 3212 } 3213 } 3214 } 3215 3216 private void zoomRatioTestByCamera(Size previewSize, boolean useZoomRatioMethod) 3217 throws Exception { 3218 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3219 // The error margin is derive from a VGA size camera zoomed all the way to 10x, in which 3220 // case the cropping error can be as large as 480/46 - 480/48 = 0.435. 3221 final float ZOOM_ERROR_MARGIN = 0.05f; 3222 3223 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 3224 final Rect defaultCropRegion = 3225 new Rect(0, 0, activeArraySize.width(), activeArraySize.height()); 3226 final Rect zoom2xCropRegion = 3227 new Rect(activeArraySize.width()/4, activeArraySize.height()/4, 3228 activeArraySize.width()*3/4, activeArraySize.height()*3/4); 3229 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 3230 CaptureRequest.Builder requestBuilder = 3231 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3232 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 3233 if (Flags.zoomMethod() && useZoomRatioMethod) { 3234 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_METHOD, 3235 CameraMetadata.CONTROL_ZOOM_METHOD_ZOOM_RATIO); 3236 } 3237 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3238 3239 updatePreviewSurface(previewSize); 3240 configurePreviewOutput(requestBuilder); 3241 3242 // Set algorithm regions to full active region 3243 final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] { 3244 new MeteringRectangle ( 3245 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 3246 /*meteringWeight*/1) 3247 }; 3248 3249 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3250 update3aRegion(requestBuilder, algo, defaultMeteringRect, mStaticInfo); 3251 } 3252 3253 final int captureSubmitRepeat; 3254 { 3255 int maxLatency = mStaticInfo.getSyncMaxLatency(); 3256 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 3257 captureSubmitRepeat = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3258 } else { 3259 captureSubmitRepeat = maxLatency + 1; 3260 } 3261 } 3262 3263 float previousRatio = zoomRatioRange.getLower(); 3264 for (int i = 0; i < ZOOM_STEPS; i++) { 3265 /* 3266 * Submit capture request 3267 */ 3268 float zoomFactor = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() - 3269 zoomRatioRange.getLower()) * i / ZOOM_STEPS; 3270 if (VERBOSE) { 3271 Log.v(TAG, "Testing Zoom ratio " + zoomFactor + " Preview size is " + previewSize); 3272 } 3273 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3274 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 3275 CaptureRequest request = requestBuilder.build(); 3276 for (int j = 0; j < captureSubmitRepeat; ++j) { 3277 mSession.capture(request, listener, mHandler); 3278 } 3279 3280 /* 3281 * Validate capture result 3282 */ 3283 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3284 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 3285 request, NUM_RESULTS_WAIT_TIMEOUT); 3286 List<CaptureResult> partialResults = result.getPartialResults(); 3287 
float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3288 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 3289 3290 for (CaptureResult partialResult : partialResults) { 3291 Rect cropRegionInPartial = 3292 partialResult.get(CaptureResult.SCALER_CROP_REGION); 3293 if (cropRegionInPartial != null) { 3294 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 3295 + "match in final result", cropRegionInPartial, cropRegion); 3296 } 3297 3298 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO); 3299 if (zoomRatioInPartial != null) { 3300 mCollector.expectEquals("CONTROL_ZOOM_RATIO in partial result must match" 3301 + " that in final result", resultZoomRatio, zoomRatioInPartial); 3302 } 3303 } 3304 3305 /* 3306 * Validate resulting crop regions and zoom ratio 3307 */ 3308 mCollector.expectTrue(String.format( 3309 "Zoom ratio should increase or stay the same " + 3310 "(previous = %f, current = %f)", 3311 previousRatio, resultZoomRatio), 3312 Math.abs(previousRatio - resultZoomRatio) < ZOOM_ERROR_MARGIN || 3313 (previousRatio < resultZoomRatio)); 3314 3315 if (CameraTestUtils.isStabilizationOff(request)) { 3316 mCollector.expectTrue(String.format( 3317 "Request and result zoom ratio should be similar " 3318 + "(requested = %f, result = %f", zoomFactor, resultZoomRatio), 3319 Math.abs(zoomFactor - resultZoomRatio) / zoomFactor <= ZOOM_ERROR_MARGIN); 3320 3321 //In case zoom ratio is converted to crop region at HAL, due to error magnification 3322 //when converting to post-zoom crop region, scale the error threshold for crop 3323 //region check. 3324 float errorMultiplier = Math.max(1.0f, zoomFactor); 3325 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3326 mCollector.expectRectsAreSimilar( 3327 "Request and result crop region should be similar", 3328 defaultCropRegion, cropRegion, 3329 CROP_REGION_ERROR_PERCENT_DELTA * errorMultiplier); 3330 } 3331 3332 mCollector.expectRectCentered( 3333 "Result crop region should be centered inside the active array", 3334 new Size(activeArraySize.width(), activeArraySize.height()), 3335 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED * errorMultiplier); 3336 3337 /* 3338 * Validate resulting metering regions 3339 */ 3340 // Use the actual reported crop region to calculate the resulting metering region 3341 expectRegions[i] = getExpectedOutputRegion( 3342 /*requestRegion*/defaultMeteringRect, 3343 /*cropRect*/ cropRegion); 3344 3345 // Verify Output 3A region is intersection of input 3A region and crop region 3346 boolean scaleByZoomRatio = zoomFactor > 1.0f; 3347 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3348 validate3aRegion(result, partialResults, algo, expectRegions[i], 3349 scaleByZoomRatio, mStaticInfo); 3350 } 3351 } 3352 3353 previousRatio = resultZoomRatio; 3354 3355 /* 3356 * Set windowboxing cropRegion while zoomRatio is not 1.0x or zoomRatio method 3357 * is used, and make sure the crop region was overwritten. 
3358 */ 3359 if (zoomFactor != 1.0f || useZoomRatioMethod) { 3360 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom2xCropRegion); 3361 CaptureRequest requestWithCrop = requestBuilder.build(); 3362 for (int j = 0; j < captureSubmitRepeat; ++j) { 3363 mSession.capture(requestWithCrop, listener, mHandler); 3364 } 3365 3366 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3367 CaptureResult resultWithCrop = listener.getCaptureResultForRequest( 3368 requestWithCrop, NUM_RESULTS_WAIT_TIMEOUT); 3369 float resultZoomRatioWithCrop = getValueNotNull(resultWithCrop, 3370 CaptureResult.CONTROL_ZOOM_RATIO); 3371 Rect cropRegionWithCrop = getValueNotNull(resultWithCrop, 3372 CaptureResult.SCALER_CROP_REGION); 3373 3374 mCollector.expectTrue(String.format( 3375 "Result zoom ratio should remain the same (activeArrayCrop: %f, " + 3376 "zoomedCrop: %f)", resultZoomRatio, resultZoomRatioWithCrop), 3377 Math.abs(resultZoomRatio - resultZoomRatioWithCrop) < ZOOM_ERROR_MARGIN); 3378 3379 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3380 mCollector.expectRectsAreSimilar( 3381 "Result crop region should remain the same with or without crop", 3382 cropRegion, cropRegionWithCrop, CROP_REGION_ERROR_PERCENT_DELTA); 3383 } 3384 } 3385 } 3386 } 3387 3388 private void zoomTimestampIncreaseTestByCamera() throws Exception { 3389 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3390 3391 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3392 updatePreviewSurface(maxPreviewSize); 3393 CaptureRequest.Builder requestBuilder = 3394 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3395 configurePreviewOutput(requestBuilder); 3396 3397 // Submit a sequence of requests first zooming in then zooming out. 3398 List<CaptureRequest> requests = new ArrayList<CaptureRequest>(); 3399 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3400 float zoomRange = zoomRatioRange.getUpper() - zoomRatioRange.getLower(); 3401 for (int i = 0; i <= ZOOM_STEPS; i++) { 3402 float zoomFactor = zoomRatioRange.getUpper() - (zoomRange * i / ZOOM_STEPS); 3403 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3404 // Add each ratio to both the beginning and end of the list. 3405 requests.add(requestBuilder.build()); 3406 requests.add(0, requestBuilder.build()); 3407 } 3408 int seqId = mSession.captureBurst(requests, listener, mHandler); 3409 3410 // onCaptureSequenceCompleted() trails all capture results. Upon its return, 3411 // we make sure we've received all results/errors. 
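        // The call below blocks until the capture-sequence-completed callback for seqId
        // has been delivered, so by the time the timestamp check starts, every result of
        // the burst is already queued in the listener.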
3412 listener.getCaptureSequenceLastFrameNumber( 3413 seqId, WAIT_FOR_RESULT_TIMEOUT_MS * ZOOM_STEPS); 3414 // Check timestamp monotonically increase for the whole sequence 3415 long prevTimestamp = 0; 3416 while (listener.hasMoreResults()) { 3417 TotalCaptureResult result = listener.getTotalCaptureResult( 3418 WAIT_FOR_RESULT_TIMEOUT_MS); 3419 long timestamp = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP); 3420 mCollector.expectGreater("Sensor timestamp must monotonically increase, " 3421 + "but changed from " + prevTimestamp + " to " + timestamp, 3422 prevTimestamp, timestamp); 3423 prevTimestamp = timestamp; 3424 } 3425 } 3426 3427 private void digitalZoomPreviewCombinationTestByCamera() throws Exception { 3428 final double ASPECT_RATIO_THRESHOLD = 0.001; 3429 List<Double> aspectRatiosTested = new ArrayList<Double>(); 3430 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3431 aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight()); 3432 3433 for (Size size : mOrderedPreviewSizes) { 3434 // Max preview size was already tested in testDigitalZoom test. skip it. 3435 if (size.equals(maxPreviewSize)) { 3436 continue; 3437 } 3438 3439 // Only test the largest size for each aspect ratio. 3440 double aspectRatio = (double)(size.getWidth()) / size.getHeight(); 3441 if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) { 3442 continue; 3443 } 3444 3445 if (VERBOSE) { 3446 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom"); 3447 } 3448 3449 aspectRatiosTested.add(aspectRatio); 3450 digitalZoomTestByCamera(size, /*repeating*/false); 3451 } 3452 } 3453 3454 private static boolean isAspectRatioContained(List<Double> aspectRatioList, 3455 double aspectRatio, double delta) { 3456 for (Double ratio : aspectRatioList) { 3457 if (Math.abs(ratio - aspectRatio) < delta) { 3458 return true; 3459 } 3460 } 3461 3462 return false; 3463 } 3464 3465 private void sceneModeTestByCamera() throws Exception { 3466 int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked(); 3467 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3468 CaptureRequest.Builder requestBuilder = 3469 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3470 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3471 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE); 3472 startPreview(requestBuilder, maxPreviewSize, listener); 3473 3474 for(int mode : sceneModes) { 3475 requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode); 3476 listener = new SimpleCaptureCallback(); 3477 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3478 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3479 3480 verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE, 3481 mode, listener, NUM_FRAMES_VERIFIED); 3482 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3483 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3484 CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED); 3485 } 3486 } 3487 3488 private void effectModeTestByCamera() throws Exception { 3489 int[] effectModes = mStaticInfo.getAvailableEffectModesChecked(); 3490 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3491 CaptureRequest.Builder requestBuilder = 3492 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3493 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 3494 SimpleCaptureCallback listener = 
new SimpleCaptureCallback(); 3495 startPreview(requestBuilder, maxPreviewSize, listener); 3496 3497 for(int mode : effectModes) { 3498 requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode); 3499 listener = new SimpleCaptureCallback(); 3500 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3501 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3502 3503 verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE, 3504 mode, listener, NUM_FRAMES_VERIFIED); 3505 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3506 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3507 CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED); 3508 } 3509 } 3510 3511 private void extendedSceneModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 3512 Capability[] extendedSceneModeCaps = mStaticInfo.getAvailableExtendedSceneModeCapsChecked(); 3513 if (extendedSceneModeCaps.length == 0) { 3514 return; 3515 } 3516 3517 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3518 CaptureRequest.Builder requestBuilder = 3519 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3520 3521 for (Capability cap : extendedSceneModeCaps) { 3522 int mode = cap.getMode(); 3523 requestBuilder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE, mode); 3524 3525 // Test that DISABLED and BOKEH_CONTINUOUS mode doesn't slow down the frame rate 3526 if (mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_DISABLED || 3527 mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) { 3528 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 3529 } 3530 3531 Range<Float> zoomRange = cap.getZoomRatioRange(); 3532 float[] zoomRatios = new float[]{zoomRange.getLower(), zoomRange.getUpper()}; 3533 for (float ratio : zoomRatios) { 3534 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3535 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, ratio); 3536 startPreview(requestBuilder, maxPreviewSize, listener); 3537 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3538 3539 verifyCaptureResultForKey(CaptureResult.CONTROL_EXTENDED_SCENE_MODE, 3540 mode, listener, NUM_FRAMES_VERIFIED); 3541 float zoomRatioDelta = ZOOM_RATIO_ERROR_PERCENT_DELTA * ratio; 3542 if (CameraTestUtils.isStabilizationOff(requestBuilder.build())) { 3543 verifyCaptureResultForKey(CaptureResult.CONTROL_ZOOM_RATIO, 3544 ratio, listener, NUM_FRAMES_VERIFIED, zoomRatioDelta); 3545 } 3546 } 3547 } 3548 } 3549 3550 private void manualFlashStrengthControlTestByCamera() throws Exception { 3551 Size maxPrevSize = mOrderedPreviewSizes.get(0); 3552 int singleMaxLevel = mStaticInfo.getCharacteristics().get( 3553 CameraCharacteristics.FLASH_SINGLE_STRENGTH_MAX_LEVEL); 3554 int torchMaxLevel = mStaticInfo.getCharacteristics().get( 3555 CameraCharacteristics.FLASH_TORCH_STRENGTH_MAX_LEVEL); 3556 int strengthLevel = singleMaxLevel - 1; 3557 3558 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 3559 CaptureRequest.Builder requestBuilder = 3560 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3561 3562 // Single mode 3563 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 3564 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 3565 requestBuilder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, strengthLevel); 3566 3567 CaptureRequest request; 3568 updatePreviewSurface(maxPrevSize); 3569 configurePreviewOutput(requestBuilder); 3570 
request = requestBuilder.build(); 3571 mSession.capture(request, resultListener, mHandler); 3572 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3573 CaptureResult result = 3574 resultListener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 3575 int resultStrengthLevel = getValueNotNull(result, CaptureResult.FLASH_STRENGTH_LEVEL); 3576 assertTrue(resultStrengthLevel == strengthLevel); 3577 assertTrue(resultStrengthLevel <= singleMaxLevel); 3578 3579 // Torch mode 3580 strengthLevel = torchMaxLevel - 1; 3581 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 3582 requestBuilder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, strengthLevel); 3583 CaptureRequest torchRequest = requestBuilder.build(); 3584 mSession.setRepeatingRequest(torchRequest, resultListener, mHandler); 3585 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3586 result = resultListener.getCaptureResultForRequest( 3587 torchRequest, NUM_RESULTS_WAIT_TIMEOUT); 3588 resultStrengthLevel = getValueNotNull(result, CaptureResult.FLASH_STRENGTH_LEVEL); 3589 assertTrue(resultStrengthLevel == strengthLevel); 3590 assertTrue(resultStrengthLevel <= torchMaxLevel); 3591 } 3592 3593 private void autoframingTestByCamera() throws Exception { 3594 // Verify autoframing state, zoom ratio and video stabilizations controls for autoframing 3595 // modes ON and OFF 3596 int[] autoframingModes = {CameraMetadata.CONTROL_AUTOFRAMING_OFF, 3597 CameraMetadata.CONTROL_AUTOFRAMING_ON}; 3598 final int zoomSteps = 5; 3599 final float zoomErrorMargin = 0.05f; 3600 final int kMaxNumFrames = 200; 3601 Size maxPreviewSize = mOrderedPreviewSizes.get(0); // Max preview size. 3602 CaptureRequest.Builder requestBuilder = 3603 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3604 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3605 startPreview(requestBuilder, maxPreviewSize, listener); 3606 3607 for (int mode : autoframingModes) { 3608 float expectedZoomRatio = 0.0f; 3609 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3610 for (int i = 0; i < zoomSteps; i++) { 3611 float testZoomRatio = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() 3612 - zoomRatioRange.getLower()) * i / zoomSteps; 3613 // Zoom ratio 1.0f is a special case. 
The ZoomRatioMapper in the framework maintains the 3614 // 1.0f ratio in the CaptureResult. 3615 if (testZoomRatio == 1.0f) { 3616 continue; 3617 } 3618 requestBuilder.set(CaptureRequest.CONTROL_AUTOFRAMING, mode); 3619 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, testZoomRatio); 3620 listener = new SimpleCaptureCallback(); 3621 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3622 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3623 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3624 Float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3625 int autoframingState = getValueNotNull(result, 3626 CaptureResult.CONTROL_AUTOFRAMING_STATE); 3627 int videoStabilizationMode = getValueNotNull(result, 3628 CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE); 3629 3630 if (mode == CameraMetadata.CONTROL_AUTOFRAMING_ON) { 3631 int numFrames = 0; 3632 while (numFrames < kMaxNumFrames) { 3633 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3634 autoframingState = getValueNotNull(result, 3635 CaptureResult.CONTROL_AUTOFRAMING_STATE); 3636 assertTrue("Autoframing state should be FRAMING or CONVERGED when " 3637 + "AUTOFRAMING is ON", 3638 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING 3639 || autoframingState 3640 == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED); 3641 3642 assertTrue("Video Stabilization should be OFF when AUTOFRAMING is ON", 3643 videoStabilizationMode 3644 == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF); 3645 3646 resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3647 if (autoframingState == 3648 CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED) { 3649 break; 3650 } 3651 numFrames++; 3652 } 3653 3654 if (autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED 3655 && expectedZoomRatio == 0.0f) { 3656 expectedZoomRatio = resultZoomRatio; 3657 } 3658 } else { 3659 expectedZoomRatio = testZoomRatio; 3660 assertTrue("Autoframing state should be INACTIVE when AUTOFRAMING is OFF", 3661 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_INACTIVE); 3662 } 3663 3664 verifyCaptureResultForKey(CaptureResult.CONTROL_AUTOFRAMING, mode, listener, 3665 NUM_FRAMES_VERIFIED); 3666 3667 // If autoframing was OFF, or the framing state CONVERGED, the zoom ratio in result 3668 // should be within the margin of error.
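                // The check below uses a relative error bound: for example, with an
                // expected zoom ratio of 2.0f and zoomErrorMargin of 0.05f, any reported
                // ratio within [1.9, 2.1] passes.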
3669 if (autoframingState != CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING) { 3670 mCollector.expectTrue(String.format( 3671 "Zoom ratio in Capture Request does not match the expected zoom " 3672 + "ratio in Capture Result (expected = %f, actual = %f)", 3673 expectedZoomRatio, resultZoomRatio), 3674 Math.abs(expectedZoomRatio - resultZoomRatio) / expectedZoomRatio 3675 <= zoomErrorMargin); 3676 } 3677 } 3678 } 3679 } 3680 3681 private void settingsOverrideTestByCamera() throws Exception { 3682 // Verify that settings override is OFF by default 3683 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3684 CaptureRequest.Builder requestBuilder = 3685 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3686 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3687 startPreview(requestBuilder, maxPreviewSize, listener); 3688 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3689 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3690 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_OFF, listener, NUM_FRAMES_VERIFIED); 3691 3692 // Turn settings override to ZOOM, and make sure it's reflected in the result 3693 requestBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE, 3694 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM); 3695 SimpleCaptureCallback listenerZoom = new SimpleCaptureCallback(); 3696 mSession.setRepeatingRequest(requestBuilder.build(), listenerZoom, mHandler); 3697 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3698 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3699 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED); 3700 3701 // Verify that the settings override result is ZOOM if it is turned on from the beginning 3702 listenerZoom = new SimpleCaptureCallback(); 3703 stopPreviewAndDrain(); 3704 startPreview(requestBuilder, maxPreviewSize, listenerZoom); 3705 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3706 // Wait an additional 2 frames to allow non-overridden 3707 // results during startup.
3708 final int ZOOM_SOME_FRAMES = 2; 3709 waitForNumResults(listenerZoom, ZOOM_SOME_FRAMES); 3710 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3711 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED); 3712 } 3713 3714 private void testAeModeOnLowLightBoostBrightnessPriorityTestByCamera() throws Exception { 3715 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3716 CaptureRequest.Builder requestBuilder = 3717 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3718 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, 3719 CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 3720 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3721 startPreview(requestBuilder, maxPreviewSize, listener); 3722 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3723 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3724 // Expect that AE_MODE is ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY 3725 int resultAeMode = getValueNotNull(result, CaptureResult.CONTROL_AE_MODE); 3726 assertTrue("AE Mode should be ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY", resultAeMode 3727 == CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 3728 3729 // Expect that CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE is present 3730 int resultLowLightBoostState = 3731 getValueNotNull(result, CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE); 3732 assertTrue("Low Light Boost State should be ACTIVE or INACTIVE", 3733 resultLowLightBoostState == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_INACTIVE 3734 || resultLowLightBoostState == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_ACTIVE); 3735 } 3736 3737 3738 private void testAePriorityModesByCamera(int aePriorityMode) throws Exception { 3739 final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204); 3740 final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000); 3741 final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000; 3742 3743 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3744 CaptureRequest.Builder requestBuilder = 3745 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3746 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); 3747 requestBuilder.set(CaptureRequest.CONTROL_AE_PRIORITY_MODE, aePriorityMode); 3748 3749 switch (aePriorityMode) { 3750 case CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY: 3751 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS); 3752 break; 3753 case CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY: 3754 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE); 3755 break; 3756 default: 3757 throw new UnsupportedOperationException("Unhandled AE priority mode " 3758 + aePriorityMode); 3759 } 3760 3761 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3762 startPreview(requestBuilder, maxPreviewSize, listener); 3763 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3764 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3765 3766 // Expect that AE Priority mode result matches request 3767 int resultAePriorityMode = getValueNotNull(result, CaptureResult.CONTROL_AE_PRIORITY_MODE); 3768 assertTrue("AE Mode should be " + aePriorityMode, resultAePriorityMode 3769 == aePriorityMode); 3770 3771 long exposureTimeDiff = TEST_EXPOSURE_TIME_NS - 3772 getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 3773 int 
sensitivityDiff = TEST_SENSITIVITY_VALUE - 3774 getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 3775 3776 switch (aePriorityMode) { 3777 case CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY: 3778 validateExposureTime(TEST_EXPOSURE_TIME_NS, 3779 getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME)); 3780 break; 3781 case CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY: 3782 validateSensitivity(TEST_SENSITIVITY_VALUE, 3783 getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY)); 3784 break; 3785 default: 3786 throw new UnsupportedOperationException("Unhandled AE priority mode " 3787 + aePriorityMode); 3788 } 3789 3790 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF); 3791 listener = new SimpleCaptureCallback(); 3792 startPreview(requestBuilder, maxPreviewSize, listener); 3793 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3794 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3795 3796 // Expect that AE priority mode is off when AE mode if off 3797 resultAePriorityMode = 3798 getValueNotNull(result, CaptureResult.CONTROL_AE_PRIORITY_MODE); 3799 assertTrue("AE Priority mode should be off when AE mode is turned off", 3800 resultAePriorityMode == CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF); 3801 } 3802 3803 //---------------------------------------------------------------- 3804 //---------Below are common functions for all tests.-------------- 3805 //---------------------------------------------------------------- 3806 3807 /** 3808 * Enable exposure manual control and change exposure and sensitivity and 3809 * clamp the value into the supported range. 3810 */ 3811 private void changeExposure(CaptureRequest.Builder requestBuilder, 3812 long expTime, int sensitivity) { 3813 // Check if the max analog sensitivity is available and no larger than max sensitivity. The 3814 // max analog sensitivity is not actually used here. This is only an extra correctness 3815 // check. 3816 mStaticInfo.getMaxAnalogSensitivityChecked(); 3817 3818 expTime = mStaticInfo.getExposureClampToRange(expTime); 3819 sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity); 3820 3821 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 3822 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime); 3823 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity); 3824 } 3825 /** 3826 * Enable exposure manual control and change exposure time and 3827 * clamp the value into the supported range. 3828 * 3829 * <p>The sensitivity is set to default value.</p> 3830 */ 3831 private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) { 3832 changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY); 3833 } 3834 3835 /** 3836 * Get the exposure time array that contains multiple exposure time steps in 3837 * the exposure time range, in nanoseconds. 3838 */ 3839 private long[] getExposureTimeTestValuesSorted() { 3840 long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1]; 3841 long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS); 3842 long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS); 3843 3844 long range = maxExpTime - minExpTime; 3845 double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS; 3846 for (int i = 0; i < testValues.length; i++) { 3847 // Multiply stepSize by largest to smallest so that the final array is sorted. 
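            // Worked example: with DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3, the loop produces
            // {max - 3 * step, max - 2 * step, max - step, max}, which runs from
            // (approximately) the minimum exposure time up to the maximum in increasing
            // order before each entry is clamped back into the valid range.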
3848 testValues[i] = maxExpTime - (long) (stepSize * (testValues.length - 1 - i)); 3849 testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]); 3850 } 3851 3852 return testValues; 3853 } 3854 3855 /** 3856 * Generate test focus distances in range of [0, minFocusDistance] in increasing order. 3857 * 3858 * @param repeatMin number of times minValue will be repeated. 3859 * @param repeatMax number of times maxValue will be repeated. 3860 */ 3861 private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) { 3862 int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax; 3863 float[] testValues = new float[totalCount]; 3864 float minValue = 0; 3865 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 3866 3867 float range = maxValue - minValue; 3868 float stepSize = range / NUM_TEST_FOCUS_DISTANCES; 3869 3870 for (int i = 0; i < repeatMin; i++) { 3871 testValues[i] = minValue; 3872 } 3873 for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) { 3874 testValues[repeatMin+i] = minValue + stepSize * i; 3875 } 3876 for (int i = 0; i < repeatMax; i++) { 3877 testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] = 3878 maxValue; 3879 } 3880 3881 return testValues; 3882 } 3883 3884 /** 3885 * Get the sensitivity array that contains multiple sensitivity steps in the 3886 * sensitivity range. 3887 * <p> 3888 * Sensitivity number of test values is determined by 3889 * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and sensitivity range, and 3890 * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}. 3891 * </p> 3892 */ 3893 private int[] getSensitivityTestValuesSorted() { 3894 int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault( 3895 DEFAULT_SENSITIVITY); 3896 int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault( 3897 DEFAULT_SENSITIVITY); 3898 3899 int range = maxSensitivity - minSensitivity; 3900 int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE; 3901 int numSteps = range / stepSize; 3902 // Bound the test steps to avoid supper long test. 3903 if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) { 3904 numSteps = DEFAULT_NUM_SENSITIVITY_STEPS; 3905 stepSize = range / numSteps; 3906 } 3907 int[] testValues = new int[numSteps + 1]; 3908 for (int i = 0; i < testValues.length; i++) { 3909 // Multiply stepSize by largest to smallest so that the final array is sorted. 3910 testValues[i] = maxSensitivity - stepSize * (testValues.length - 1 - i); 3911 testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]); 3912 } 3913 3914 return testValues; 3915 } 3916 3917 /** 3918 * Validate the AE control exposure time. 3919 * 3920 * <p>Exposure should be close enough, and only round down if they are not equal.</p> 3921 * 3922 * @param request Request exposure time 3923 * @param result Result exposure time 3924 */ 3925 private void validateExposureTime(long request, long result) { 3926 long expTimeDelta = request - result; 3927 long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request 3928 * EXPOSURE_TIME_ERROR_MARGIN_RATE)); 3929 // First, round down not up, second, need close enough. 3930 mCollector.expectTrue("Exposure time is invalid, request: " 3931 + request + " result: " + result, 3932 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0); 3933 } 3934 3935 /** 3936 * Validate AE control sensitivity. 
3937 * 3938 * @param request Request sensitivity 3939 * @param result Result sensitivity 3940 */ 3941 private void validateSensitivity(int request, int result) { 3942 float sensitivityDelta = request - result; 3943 float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE; 3944 // First, round down not up, second, need close enough. 3945 mCollector.expectTrue("Sensitivity is invalid, request: " + request + " result: " + result, 3946 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0); 3947 } 3948 3949 /** 3950 * Validate frame duration for a given capture. 3951 * 3952 * <p>Frame duration should be longer than exposure time.</p> 3953 * 3954 * @param result The capture result for a given capture 3955 */ 3956 private void validateFrameDurationForCapture(CaptureResult result) { 3957 long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 3958 long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION); 3959 if (VERBOSE) { 3960 Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime); 3961 } 3962 3963 mCollector.expectTrue(String.format("Frame duration (%d) should be longer than exposure" 3964 + " time (%d) for a given capture", frameDuration, expTime), 3965 frameDuration >= expTime); 3966 3967 validatePipelineDepth(result); 3968 } 3969 3970 /** 3971 * Basic verification for the control mode capture result. 3972 * 3973 * @param key The capture result key to be verified against 3974 * @param requestMode The request mode for this result 3975 * @param listener The capture listener to get capture results 3976 * @param numFramesVerified The number of capture results to be verified 3977 * @param threshold The threshold by which the request and result keys can differ 3978 */ 3979 private void verifyCaptureResultForKey(CaptureResult.Key<Float> key, float requestMode, 3980 SimpleCaptureCallback listener, int numFramesVerified, float threshold) { 3981 for (int i = 0; i < numFramesVerified; i++) { 3982 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3983 validatePipelineDepth(result); 3984 float resultMode = getValueNotNull(result, key); 3985 if (VERBOSE) { 3986 Log.v(TAG, "Expect value: " + requestMode + " result value: " 3987 + resultMode + " threshold " + threshold); 3988 } 3989 // Check that the request and result are within the given threshold of each other. 3990 // (expectEquals isn't the most intuitive function name.) 3991 mCollector.expectEquals("Key " + key.getName() + " request: " + requestMode + 3992 " result: " + resultMode + " not within threshold " + threshold + 3993 " of each other", requestMode, resultMode, threshold); 3994 } 3995 } 3996 3997 /** 3998 * Basic verification for the control mode capture result. 
3999 * 4000 * @param key The capture result key to be verified against 4001 * @param requestMode The request mode for this result 4002 * @param listener The capture listener to get capture results 4003 * @param numFramesVerified The number of capture results to be verified 4004 */ 4005 private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode, 4006 SimpleCaptureCallback listener, int numFramesVerified) { 4007 for (int i = 0; i < numFramesVerified; i++) { 4008 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 4009 validatePipelineDepth(result); 4010 T resultMode = getValueNotNull(result, key); 4011 if (VERBOSE) { 4012 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: " 4013 + resultMode.toString()); 4014 } 4015 mCollector.expectEquals("Key " + key.getName() + " result should match request", 4016 requestMode, resultMode); 4017 } 4018 } 4019 4020 /** 4021 * Basic verification that the value of a capture result key should be one of the expected 4022 * values. 4023 * 4024 * @param key The capture result key to be verified against 4025 * @param expectedModes The list of any possible expected modes for this result 4026 * @param listener The capture listener to get capture results 4027 * @param numFramesVerified The number of capture results to be verified 4028 */ 4029 private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes, 4030 SimpleCaptureCallback listener, int numFramesVerified) { 4031 for (int i = 0; i < numFramesVerified; i++) { 4032 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 4033 validatePipelineDepth(result); 4034 T resultMode = getValueNotNull(result, key); 4035 if (VERBOSE) { 4036 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: " 4037 + resultMode.toString()); 4038 } 4039 // Capture result should be one of the expected values. 4040 mCollector.expectContains(expectedModes, resultMode); 4041 } 4042 } 4043 4044 /** 4045 * Verify if the fps is slow down for given input request with certain 4046 * controls inside. 4047 * <p> 4048 * This method selects a max preview size for each fps range, and then 4049 * configure the preview stream. Preview is started with the max preview 4050 * size, and then verify if the result frame duration is in the frame 4051 * duration range. 4052 * </p> 4053 * 4054 * @param requestBuilder The request builder that contains post-processing 4055 * controls that could impact the output frame rate, such as 4056 * {@link CaptureRequest.NOISE_REDUCTION_MODE}. The value of 4057 * these controls must be set to some values such that the frame 4058 * rate is not slow down. 
     * @param numFramesVerified The number of frames to be verified
     * @param fpsRanges The fps ranges to be verified
     */
    private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
            int numFramesVerified, List<Range<Integer>> fpsRanges) throws Exception {
        boolean frameDurationAvailable = true;
        // Allow a few frames for AE to settle on the target FPS range
        final int NUM_FRAME_TO_SKIP = 6;
        float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
        if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
            frameDurationAvailable = false;
            // Allow a larger error margin (1.5%) for timestamps
            frameDurationErrorMargin = 0.015f;
        }
        if (mStaticInfo.isExternalCamera()) {
            // Allow an even larger error margin (15%) for external camera timestamps
            frameDurationErrorMargin = 0.15f;
        }

        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        SimpleCaptureCallback resultListener;

        for (int i = 0; i < fpsRanges.size(); i += 1) {
            fpsRange = fpsRanges.get(i);
            Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            // If unable to find a preview size, then log the failure, and skip this run.
            if (previewSz == null) {
                if (mStaticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    mCollector.addMessage(String.format(
                            "Unable to find a preview size supporting given fps range %s",
                            fpsRange));
                }
                continue;
            }

            if (VERBOSE) {
                Log.v(TAG, String.format("Test fps range %s for preview size %s",
                        fpsRange, previewSz.toString()));
            }
            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from the antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode; the test continues. It needs to make
                // sure that the antibanding algorithm doesn't slow down the fps.
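                // With antibanding left at its existing mode, the average frame duration check
                // below is what catches any slowdown introduced by the antibanding algorithm.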
                Log.i(TAG, "OFF antibanding mode is not supported; the camera device output must"
                        + " not slow down the frame rate regardless of its current antibanding"
                        + " mode");
            }

            resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, previewSz, resultListener);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
            // Wait several more frames for AE to settle on the target FPS range
            waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);

            long[] frameDurationRange = new long[]{
                    (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            long captureTime = 0, prevCaptureTime = 0;
            long frameDurationSum = 0;
            for (int j = 0; j < numFramesVerified; j++) {
                long frameDuration = frameDurationRange[0];
                CaptureResult result =
                        resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
                validatePipelineDepth(result);
                if (frameDurationAvailable) {
                    frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
                } else {
                    // If frame duration is not available, check the timestamp deltas instead.
                    captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
                    if (j > 0) {
                        frameDuration = captureTime - prevCaptureTime;
                    }
                    prevCaptureTime = captureTime;
                }
                frameDurationSum += frameDuration;
            }
            long frameDurationAvg = frameDurationSum / numFramesVerified;
            mCollector.expectInRange(
                    "Frame duration must be in the range of " +
                            Arrays.toString(frameDurationRange),
                    frameDurationAvg,
                    (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
                    (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
        }

        stopPreview();
    }

    /**
     * Validate the pipeline depth result.
     *
     * @param result The capture result to get pipeline depth data
     */
    private void validatePipelineDepth(CaptureResult result) {
        final byte MIN_PIPELINE_DEPTH = 1;
        byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
        Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
        mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
                MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
                maxPipelineDepth);
    }

    /**
     * Calculate the anti-flickering corrected exposure time.
     * <p>
     * If the input exposure time is very short (shorter than the flickering
     * boundary), which indicates the scene is bright and very likely outdoors,
     * skip the correction, as it doesn't make much sense in that case.
     * </p>
     * <p>
     * For a long exposure time (longer than the flickering boundary), round the
     * exposure time to the nearest integer multiple of the flickering boundary.
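     * For example, with 50Hz anti-flickering the boundary is 10ms, so a 23ms exposure is
     * rounded down to 20ms and a 26ms exposure is rounded up to 30ms.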
     * </p>
     *
     * @param flickeringMode The flickering mode
     * @param exposureTime The input exposure time to be corrected
     * @return The anti-flickering corrected exposure time
     */
    private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
        if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
            throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
        }
        long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
        if (flickeringMode == ANTI_FLICKERING_60HZ) {
            flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
        }

        if (exposureTime <= flickeringBoundary) {
            return exposureTime;
        }

        // Round the exposure time to the nearest multiple of the flickering boundary, which is
        // the closest anti-flickering corrected exposure time.
        long correctedExpTime = exposureTime + (flickeringBoundary / 2);
        correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
        return correctedExpTime;
    }
}