/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.*;

import android.graphics.ImageFormat;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.OutputConfiguration;
import android.util.Log;
import android.util.Pair;
import android.util.Range;

import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;

import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * CameraDevice preview test by using SurfaceView.
 */
public class SurfaceViewPreviewTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "SurfaceViewPreviewTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int FRAME_TIMEOUT_MS = 1000;
    private static final int NUM_FRAMES_VERIFIED = 30;
    private static final int NUM_TEST_PATTERN_FRAMES_VERIFIED = 60;
    private static final float FRAME_DURATION_ERROR_MARGIN = 0.005f; // 0.5 percent error margin.
    private static final int PREPARE_TIMEOUT_MS = 10000; // 10 s

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test all supported preview sizes for each camera device.
     * <p>
     * For the first {@link #NUM_FRAMES_VERIFIED} capture results, the
     * {@link CaptureCallback} callback availability and the monotonically increasing
     * ordering of the capture timestamps are verified.
     * </p>
     */
    public void testCameraPreview() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                previewTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic test pattern mode preview.
     * <p>
     * Only the test pattern preview and capture result are tested; the image buffer
     * is not validated.
     * </p>
     */
    public void testBasicTestPatternPreview() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                previewTestPatternTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} for preview; validate the preview
     * frame duration and exposure time.
     */
    public void testPreviewFpsRange() throws Exception {
        for (String id : mCameraIds) {
            try {
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                previewFpsRangeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test surface set streaming use cases.
     *
     * <p>
     * The test sets output configurations with increasing surface set IDs for the preview and
     * YUV streams. The max supported preview size is selected for the preview stream, and the
     * max supported YUV size (depending on the supported hardware level) is selected for the
     * YUV stream. This test also exercises the prepare API.
     * </p>
     */
    public void testSurfaceSet() throws Exception {
        for (String id : mCameraIds) {
            try {
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                surfaceSetTestByCamera(id);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
     * expected effects on performance.
     *
     * - Ensure that prepare() results in onSurfacePrepared() being invoked
     * - Ensure that prepare() does not cause preview glitches while operating
     * - Ensure that starting to use a newly-prepared output does not cause additional
     *   preview glitches to occur
     */
    public void testPreparePerformance() throws Throwable {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                preparePerformanceTestByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void preparePerformanceTestByCamera(String cameraId) throws Exception {
        final int MAX_IMAGES_TO_PREPARE = 10;
        final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
        final int MAX_RESULTS_TO_WAIT = 10;
        final int FRAMES_FOR_AVERAGING = 100;
        final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
        final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference

        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        // Don't need image data, just drop it right away to minimize overhead
        ImageDropperListener imageListener = new ImageDropperListener();

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        // Configure outputs and session
        updatePreviewSurface(maxPreviewSize);

        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE,
                imageListener);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(mReaderSurface);

        CameraCaptureSession.StateCallback mockSessionListener =
                mock(CameraCaptureSession.StateCallback.class);

        mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);

        previewRequest.addTarget(mPreviewSurface);
        Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
        previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Converge AE
        waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);

        if (mStaticInfo.isAeLockSupported()) {
            // Lock AE if possible to improve stability
            previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
            mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                // Legacy mode doesn't output AE state
                waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
                        CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
            }
        }

        // Measure frame rate for a bit
        Pair<Long, Long> frameDurationStats =
                measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);

        Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
                frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));

        // Drain results, do prepare
        resultListener.drain();

        mSession.prepare(mReaderSurface);

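        // prepare() pre-allocates buffers for mReaderSurface asynchronously; completion is
        // signaled through the session listener's onSurfacePrepared callback, verified below.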
        verify(mockSessionListener,
                timeout(PREPARE_TIMEOUT_MS).times(1)).
                onSurfacePrepared(eq(mSession), eq(mReaderSurface));

        // Calculate frame rate during prepare
        int resultsReceived = (int) resultListener.getTotalNumFrames();
        if (resultsReceived > 2) {
            // Only verify frame rate if there are a couple of results
            Pair<Long, Long> whilePreparingFrameDurationStats =
                    measureMeanFrameInterval(resultListener, resultsReceived, /*prevTimestamp*/ 0);

            Log.i(TAG, String.format("Frame interval during prepare avg: %f ms, peak %f ms",
                    whilePreparingFrameDurationStats.first / 1e6,
                    whilePreparingFrameDurationStats.second / 1e6));

            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                mCollector.expectTrue(
                        String.format("Camera %s: Preview peak frame interval affected by prepare " +
                                "call: preview avg frame duration: %f ms, peak during prepare: %f ms",
                                cameraId,
                                frameDurationStats.first / 1e6,
                                whilePreparingFrameDurationStats.second / 1e6),
                        (whilePreparingFrameDurationStats.second <=
                                frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
                mCollector.expectTrue(
                        String.format("Camera %s: Preview average frame interval affected by prepare " +
                                "call: preview avg frame duration: %f ms, during prepare: %f ms",
                                cameraId,
                                frameDurationStats.first / 1e6,
                                whilePreparingFrameDurationStats.first / 1e6),
                        (whilePreparingFrameDurationStats.first <=
                                frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
            }
        }

        resultListener.drain();

        // Get at least one more preview result without prepared target
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);

        // Now use the prepared stream and ensure there are no hiccups from using it
        previewRequest.addTarget(mReaderSurface);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        Pair<Long, Long> preparedFrameDurationStats =
                measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE * 2, prevTimestamp);

        Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
                preparedFrameDurationStats.first / 1e6,
                preparedFrameDurationStats.second / 1e6));

        if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
            mCollector.expectTrue(
                    String.format("Camera %s: Preview peak frame interval affected by use of new " +
                            "stream: preview avg frame duration: %f ms, peak with new stream: %f ms",
                            cameraId,
                            frameDurationStats.first / 1e6, preparedFrameDurationStats.second / 1e6),
                    (preparedFrameDurationStats.second <=
                            frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
            mCollector.expectTrue(
                    String.format("Camera %s: Preview average frame interval affected by use of new " +
                            "stream: preview avg frame duration: %f ms, with new stream: %f ms",
                            cameraId,
                            frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
                    (preparedFrameDurationStats.first <=
                            frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
        }
    }

    /**
     * Test to verify correct behavior with the same Surface object being used repeatedly with
     * different native internals, and multiple Surfaces pointing to the same actual consumer
     * object.
     */
    public void testSurfaceEquality() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                surfaceEqualityTestByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void surfaceEqualityTestByCamera(String cameraId) throws Exception {
        final int SOME_FRAMES = 10;

        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        updatePreviewSurface(maxPreviewSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface1);

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface1);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);

        // Drain
        mSession.abortCaptures();
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Hide / unhide the SurfaceView to get a new target Surface
        recreatePreviewSurface();

        // And resize it again
        updatePreviewSurface(maxPreviewSize);

        // Create a second surface that targets the shared SurfaceTexture
        Surface sharedOutputSurface2 = new Surface(sharedOutputTexture);

        // Use the new Surfaces for a new session
        outputSurfaces.clear();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface2);

        sessionListener = new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);

        previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface2);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);
    }

    /**
     * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
     * provided capture listener. If prevTimestamp is positive, it is used for the first interval
     * calculation; otherwise, the first result is used to establish the starting time.
     *
     * Returns the mean interval in the first pair entry, and the largest interval in the second
     * pair entry.
     */
    Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
            long prevTimestamp) throws Exception {
        long summedIntervals = 0;
        long maxInterval = 0;
        int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);

        for (int i = 0; i < frameCount; i++) {
            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (prevTimestamp > 0) {
                long interval = timestamp - prevTimestamp;
                if (interval > maxInterval) maxInterval = interval;
                summedIntervals += interval;
            }
            prevTimestamp = timestamp;
        }
        return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
    }

    /**
     * Test preview fps range for all supported ranges. The exposure time and frame duration
     * are validated.
     */
    private void previewFpsRangeTestByCamera() throws Exception {
        Size maxPreviewSz;
        Range<Integer>[] fpsRanges = getDescendingTargetFpsRanges(mStaticInfo);
        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        for (int i = 0; i < fpsRanges.length; i += 1) {
            fpsRange = fpsRanges[i];
            if (mStaticInfo.isHardwareLevelLegacy()) {
                // Legacy devices don't report minimum frame duration for preview sizes. The FPS
                // range should be valid for any supported preview size.
                maxPreviewSz = mOrderedPreviewSizes.get(0);
            } else {
                maxPreviewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            }

            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from the antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode; the test continues. It needs to make
                // sure that the antibanding algorithm doesn't interfere with the fps range control.
481 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" + 482 " satisfy the specified fps range regardless of its current antibanding" + 483 " mode"); 484 } 485 486 startPreview(requestBuilder, maxPreviewSz, resultListener); 487 resultListener = new SimpleCaptureCallback(); 488 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler); 489 490 verifyPreviewTargetFpsRange(resultListener, NUM_FRAMES_VERIFIED, fpsRange, 491 maxPreviewSz); 492 stopPreview(); 493 resultListener.drain(); 494 } 495 } 496 verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener, int numFramesVerified, Range<Integer> fpsRange, Size previewSz)497 private void verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener, 498 int numFramesVerified, Range<Integer> fpsRange, Size previewSz) { 499 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 500 List<Integer> capabilities = mStaticInfo.getAvailableCapabilitiesChecked(); 501 502 if (capabilities.contains(CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 503 long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION); 504 long[] frameDurationRange = 505 new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())}; 506 mCollector.expectInRange( 507 "Frame duration must be in the range of " + Arrays.toString(frameDurationRange), 508 frameDuration, (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)), 509 (long) (frameDurationRange[1] * (1 + FRAME_DURATION_ERROR_MARGIN))); 510 long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 511 mCollector.expectTrue(String.format("Exposure time %d must be no larger than frame" 512 + "duration %d", expTime, frameDuration), expTime <= frameDuration); 513 514 Long minFrameDuration = mMinPreviewFrameDurationMap.get(previewSz); 515 boolean findDuration = mCollector.expectTrue("Unable to find minFrameDuration for size " 516 + previewSz.toString(), minFrameDuration != null); 517 if (findDuration) { 518 mCollector.expectTrue("Frame duration " + frameDuration + " must be no smaller than" 519 + " minFrameDuration " + minFrameDuration, frameDuration >= minFrameDuration); 520 } 521 } else { 522 Log.i(TAG, "verifyPreviewTargetFpsRange - MANUAL_SENSOR control is not supported," + 523 " skipping duration and exposure time check."); 524 } 525 } 526 527 /** 528 * Test all supported preview sizes for a camera device 529 * 530 * @throws Exception 531 */ previewTestByCamera()532 private void previewTestByCamera() throws Exception { 533 List<Size> previewSizes = getSupportedPreviewSizes( 534 mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND); 535 536 for (final Size sz : previewSizes) { 537 if (VERBOSE) { 538 Log.v(TAG, "Testing camera preview size: " + sz.toString()); 539 } 540 541 // TODO: vary the different settings like crop region to cover more cases. 
            CaptureRequest.Builder requestBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);

            startPreview(requestBuilder, sz, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
            stopPreview();
        }
    }

    private void previewTestPatternTestByCamera() throws Exception {
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        int[] testPatternModes = mStaticInfo.getAvailableTestPatternModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureCallback mockCaptureCallback;

        final int[] TEST_PATTERN_DATA = {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}; // G:100%, RB:0.
        for (int mode : testPatternModes) {
            if (VERBOSE) {
                Log.v(TAG, "Test pattern mode: " + mode);
            }
            requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, mode);
            if (mode == CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
                // Assign color pattern to SENSOR_TEST_PATTERN_DATA
                requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, TEST_PATTERN_DATA);
            }
            mockCaptureCallback = mock(CaptureCallback.class);
            startPreview(requestBuilder, maxPreviewSize, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_TEST_PATTERN_FRAMES_VERIFIED,
                    NUM_TEST_PATTERN_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }

        stopPreview();
    }

    private void surfaceSetTestByCamera(String cameraId) throws Exception {
        final int MAX_SURFACE_GROUP_ID = 10;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
        Size yuvSizeBound = maxPreviewSz; // Default case: legacy device
        if (mStaticInfo.isHardwareLevelLimited()) {
            yuvSizeBound = mOrderedVideoSizes.get(0);
        } else if (mStaticInfo.isHardwareLevelAtLeastFull()) {
            yuvSizeBound = null;
        }
        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, yuvSizeBound).get(0);

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        ImageDropperListener imageListener = new ImageDropperListener();

        updatePreviewSurface(maxPreviewSz);
        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, imageListener);
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        OutputConfiguration previewConfig = new OutputConfiguration(mPreviewSurface);
        OutputConfiguration yuvConfig = new OutputConfiguration(mReaderSurface);
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, previewConfig.getSurfaceGroupId());
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, yuvConfig.getSurfaceGroupId());
        assertEquals(mPreviewSurface, previewConfig.getSurface());
        assertEquals(mReaderSurface, yuvConfig.getSurface());
        outputConfigs.add(previewConfig);
        outputConfigs.add(yuvConfig);
        requestBuilder.addTarget(mPreviewSurface);
        requestBuilder.addTarget(mReaderSurface);

        // Test different stream set ID.
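        // Each iteration assigns the same surface group ID to both outputs, verifies the ID is
        // reported back by OutputConfiguration, prepares both surfaces, and then streams
        // NUM_FRAMES_VERIFIED frames to confirm capture works with that group ID.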
        for (int surfaceGroupId = OutputConfiguration.SURFACE_GROUP_ID_NONE;
                surfaceGroupId < MAX_SURFACE_GROUP_ID; surfaceGroupId++) {
            if (VERBOSE) {
                Log.v(TAG, "test preview with surface group id: " + surfaceGroupId);
            }

            previewConfig = new OutputConfiguration(surfaceGroupId, mPreviewSurface);
            yuvConfig = new OutputConfiguration(surfaceGroupId, mReaderSurface);
            outputConfigs.clear();
            outputConfigs.add(previewConfig);
            outputConfigs.add(yuvConfig);

            for (OutputConfiguration config : outputConfigs) {
                assertEquals(surfaceGroupId, config.getSurfaceGroupId());
            }

            CameraCaptureSession.StateCallback mockSessionListener =
                    mock(CameraCaptureSession.StateCallback.class);

            mSession = configureCameraSessionWithConfig(mCamera, outputConfigs,
                    mockSessionListener, mHandler);

            mSession.prepare(mPreviewSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mPreviewSurface));

            mSession.prepare(mReaderSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mReaderSurface));

            CaptureRequest request = requestBuilder.build();
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);
            mSession.setRepeatingRequest(request, mockCaptureCallback, mHandler);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }
    }

    private class IsCaptureResultValid extends ArgumentMatcher<TotalCaptureResult> {
        @Override
        public boolean matches(Object obj) {
            TotalCaptureResult result = (TotalCaptureResult) obj;
            Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (timeStamp != null && timeStamp.longValue() > 0L) {
                return true;
            }
            return false;
        }
    }

    private void verifyCaptureResults(
            CameraCaptureSession session,
            CaptureCallback mockListener,
            int expectResultCount,
            int timeOutMs) {
        // Should receive expected number of onCaptureStarted callbacks.
        ArgumentCaptor<Long> timestamps = ArgumentCaptor.forClass(Long.class);
        ArgumentCaptor<Long> frameNumbers = ArgumentCaptor.forClass(Long.class);
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                .onCaptureStarted(
                        eq(session),
                        isA(CaptureRequest.class),
                        timestamps.capture(),
                        frameNumbers.capture());

        // Validate timestamps: all timestamps should be larger than 0 and monotonically increase.
        long timestamp = 0;
        for (Long nextTimestamp : timestamps.getAllValues()) {
            assertNotNull("Next timestamp is null!", nextTimestamp);
            assertTrue("Captures are out of order", timestamp < nextTimestamp);
            timestamp = nextTimestamp;
        }

        // Validate frame numbers: all frame numbers should be consecutive and positive.
        long frameNumber = -1;
        for (Long nextFrameNumber : frameNumbers.getAllValues()) {
            assertNotNull("Next frame number is null!", nextFrameNumber);
            assertTrue("Captures are out of order",
                    (frameNumber == -1) || (frameNumber + 1 == nextFrameNumber));
            frameNumber = nextFrameNumber;
        }

        // Should receive expected number of capture results.
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                .onCaptureCompleted(
                        eq(session),
                        isA(CaptureRequest.class),
                        argThat(new IsCaptureResultValid()));

        // Should not receive any capture failed callbacks.
        verify(mockListener, never())
                .onCaptureFailed(
                        eq(session),
                        isA(CaptureRequest.class),
                        isA(CaptureFailure.class));
    }

}